Mirror of https://codeberg.org/forgejo/forgejo.git (synced 2024-11-08 18:04:14 +01:00)

Commit 97343470bc: Merge branch 'forgejo' into forgejo-federated-star

687 changed files with 20874 additions and 7044 deletions
@@ -14,6 +14,7 @@ package "code.gitea.io/gitea/models"
func (ErrUpdateTaskNotExist).Error
func (ErrUpdateTaskNotExist).Unwrap
func IsErrSHANotFound
func IsErrMergeDivergingFastForwardOnly
func GetYamlFixturesAccess

package "code.gitea.io/gitea/models/actions"

@@ -289,12 +290,14 @@ package "code.gitea.io/gitea/modules/timeutil"

package "code.gitea.io/gitea/modules/translation"
func (MockLocale).Language
func (MockLocale).TrString
func (MockLocale).Tr
func (MockLocale).TrN
func (MockLocale).PrettyNumber

package "code.gitea.io/gitea/modules/util"
func UnsafeStringToBytes
func OptionalBoolFromGeneric

package "code.gitea.io/gitea/modules/util/filebuffer"
func CreateFromReader
@@ -71,7 +71,6 @@ cpu.out
/tests/e2e/test-artifacts
/tests/e2e/test-snapshots
/tests/*.ini
/node_modules
/yarn.lock
/yarn-error.log
/npm-debug.log*
@@ -12,6 +12,7 @@ plugins:
- "@eslint-community/eslint-plugin-eslint-comments"
- "@stylistic/eslint-plugin-js"
- eslint-plugin-array-func
- eslint-plugin-github
- eslint-plugin-i
- eslint-plugin-jquery
- eslint-plugin-no-jquery

@@ -209,6 +210,29 @@ rules:
func-names: [0]
func-style: [0]
getter-return: [2]
github/a11y-aria-label-is-well-formatted: [0]
github/a11y-no-title-attribute: [0]
github/a11y-no-visually-hidden-interactive-element: [0]
github/a11y-role-supports-aria-props: [0]
github/a11y-svg-has-accessible-name: [0]
github/array-foreach: [0]
github/async-currenttarget: [2]
github/async-preventdefault: [2]
github/authenticity-token: [0]
github/get-attribute: [0]
github/js-class-name: [0]
github/no-blur: [0]
github/no-d-none: [0]
github/no-dataset: [2]
github/no-dynamic-script-tag: [2]
github/no-implicit-buggy-globals: [2]
github/no-inner-html: [0]
github/no-innerText: [2]
github/no-then: [2]
github/no-useless-passive: [2]
github/prefer-observers: [2]
github/require-passive-events: [2]
github/unescaped-html-literal: [0]
grouped-accessor-pairs: [2]
guard-for-in: [0]
id-blacklist: [0]

@@ -272,7 +296,7 @@ rules:
jquery/no-delegate: [2]
jquery/no-each: [0]
jquery/no-extend: [2]
jquery/no-fade: [0]
jquery/no-fade: [2]
jquery/no-filter: [0]
jquery/no-find: [0]
jquery/no-global-eval: [2]

@@ -285,7 +309,7 @@ rules:
jquery/no-is-function: [2]
jquery/no-is: [0]
jquery/no-load: [2]
jquery/no-map: [0]
jquery/no-map: [2]
jquery/no-merge: [2]
jquery/no-param: [2]
jquery/no-parent: [0]

@@ -427,7 +451,7 @@ rules:
no-jquery/no-load: [2]
no-jquery/no-map-collection: [0]
no-jquery/no-map-util: [2]
no-jquery/no-map: [0]
no-jquery/no-map: [2]
no-jquery/no-merge: [2]
no-jquery/no-node-name: [2]
no-jquery/no-noop: [2]

@@ -558,7 +582,6 @@ rules:
prefer-rest-params: [2]
prefer-spread: [2]
prefer-template: [2]
quotes: [2, single, {avoidEscape: true, allowTemplateLiterals: true}]
radix: [2, as-needed]
regexp/confusing-quantifier: [2]
regexp/control-character-escape: [2]

@@ -811,7 +834,7 @@ rules:
wc/no-constructor-params: [2]
wc/no-constructor: [2]
wc/no-customized-built-in-elements: [2]
wc/no-exports-with-element: [2]
wc/no-exports-with-element: [0]
wc/no-invalid-element-name: [2]
wc/no-invalid-extends: [2]
wc/no-method-prefixed-with-on: [2]
.forgejo/testdata/build-release/Dockerfile (vendored, 5 changed lines)

@@ -1,3 +1,4 @@
FROM public.ecr.aws/docker/library/alpine:3.18
FROM code.forgejo.org/oci/alpine:3.19
ARG RELEASE_VERSION=unkown
RUN mkdir -p /app/gitea
RUN ( echo '#!/bin/sh' ; echo "echo forgejo v1.2.3" ) > /app/gitea/gitea ; chmod +x /app/gitea/gitea
RUN ( echo '#!/bin/sh' ; echo "echo forgejo v$RELEASE_VERSION" ) > /app/gitea/gitea ; chmod +x /app/gitea/gitea
@@ -34,10 +34,10 @@ jobs:
lxc-ip-prefix: 10.0.9

- name: publish the forgejo release
shell: bash
run: |
set -x

version=1.2.3
cat > /etc/docker/daemon.json <<EOF
{
"insecure-registries" : ["${{ steps.forgejo.outputs.host-port }}"]

@@ -53,6 +53,37 @@ jobs:
url=http://root:admin1234@${{ steps.forgejo.outputs.host-port }}
export FORGEJO_RUNNER_LOGS="${{ steps.forgejo.outputs.runner-logs }}"

function sanity_check() {
local url=$1 version=$2
#
# Minimal sanity checks. Since the binary
# is a script shell it does not test the sanity of the cross
# build, only the sanity of the naming of the binaries.
#
for arch in amd64 arm64 arm-6 ; do
local binary=forgejo-$version-linux-$arch
for suffix in '' '.xz' ; do
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$binary$suffix > $binary$suffix
if test "$suffix" = .xz ; then
unxz --keep $binary$suffix
fi
chmod +x $binary
./$binary --version | grep $version
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$binary$suffix.sha256 > $binary$suffix.sha256
shasum -a 256 --check $binary$suffix.sha256
rm $binary$suffix
done
done

local sources=forgejo-src-$version.tar.gz
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$sources > $sources
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$sources.sha256 > $sources.sha256
shasum -a 256 --check $sources.sha256

docker pull ${{ steps.forgejo.outputs.host-port }}/root/forgejo:$version
docker pull ${{ steps.forgejo.outputs.host-port }}/root/forgejo:$version-rootless
}

#
# Create a new project with a fake forgejo and the release workflow only
#

@@ -62,46 +93,41 @@ jobs:
cp $dir/Dockerfile $dir/Dockerfile.rootless

forgejo-test-helper.sh push $dir $url root forgejo
sha=$(forgejo-test-helper.sh branch_tip $url root/forgejo main)

forgejo-curl.sh api_json -X PUT --data-raw '{"data":"${{ steps.forgejo.outputs.token }}"}' $url/api/v1/repos/root/forgejo/actions/secrets/TOKEN
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"root"}' $url/api/v1/repos/root/forgejo/actions/secrets/DOER
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"true"}' $url/api/v1/repos/root/forgejo/actions/secrets/VERBOSE

#
# Push a tag to trigger the release workflow and wait for it to complete
#
version=1.2.3
sha=$(forgejo-test-helper.sh branch_tip $url root/forgejo main)
forgejo-curl.sh api_json --data-raw '{"tag_name": "v'$version'", "target": "'$sha'"}' $url/api/v1/repos/root/forgejo/tags
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"${{ steps.forgejo.outputs.token }}"}' $url/api/v1/repos/root/forgejo/actions/secrets/TOKEN
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"root"}' $url/api/v1/repos/root/forgejo/actions/secrets/DOER
LOOPS=180 forgejo-test-helper.sh wait_success "$url" root/forgejo $sha
sanity_check $url $version

#
# uncomment to see the logs even when everything is reported to be working ok
# Push a commit to a branch that triggers the build of a test release
#
#cat $FORGEJO_RUNNER_LOGS
version=1.2-test
(
git clone $url/root/forgejo /tmp/forgejo
cd /tmp/forgejo
date > DATE
git config user.email root@example.com
git config user.name username
git add .
git commit -m 'update'
git push $url/root/forgejo main:forgejo
)
sha=$(forgejo-test-helper.sh branch_tip $url root/forgejo forgejo)
LOOPS=180 forgejo-test-helper.sh wait_success "$url" root/forgejo $sha
sanity_check $url $version

#
# Minimal sanity checks. e2e test is for the setup-forgejo
# action and the infrastructure playbook. Since the binary
# is a script shell it does not test the sanity of the cross
# build, only the sanity of the naming of the binaries.
#
for arch in amd64 arm64 arm-6 ; do
binary=forgejo-$version-linux-$arch
for suffix in '' '.xz' ; do
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$binary$suffix > $binary$suffix
if test "$suffix" = .xz ; then
unxz --keep $binary$suffix
fi
chmod +x $binary
./$binary --version | grep $version
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$binary$suffix.sha256 > $binary$suffix.sha256
shasum -a 256 --check $binary$suffix.sha256
rm $binary$suffix
done
done

sources=forgejo-src-$version.tar.gz
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$sources > $sources
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$sources.sha256 > $sources.sha256
shasum -a 256 --check $sources.sha256

docker pull ${{ steps.forgejo.outputs.host-port }}/root/forgejo:$version
docker pull ${{ steps.forgejo.outputs.host-port }}/root/forgejo:$version-rootless

- name: full logs
if: always()
run: |
sed -e 's/^/[RUNNER LOGS] /' ${{ steps.forgejo.outputs.runner-logs }}
docker logs forgejo | sed -e 's/^/[FORGEJO LOGS]/'
sleep 5 # hack to avoid mixing outputs in Forgejo v1.21
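The sanity_check function above boils down to three checks per published asset: download it, verify its SHA-256 digest against the published .sha256 file, and confirm the binary reports the expected version. Below is a minimal Go sketch of the checksum step only, assuming hypothetical local file names for the downloaded asset and checksum file; it is an illustration, not the project's tooling.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"os"
	"strings"
)

// verifyChecksum mirrors `shasum -a 256 --check`: hash the downloaded asset
// and compare it with the first field of the published .sha256 file.
func verifyChecksum(assetPath, sumPath string) error {
	data, err := os.ReadFile(assetPath)
	if err != nil {
		return err
	}
	sum := sha256.Sum256(data)

	want, err := os.ReadFile(sumPath)
	if err != nil {
		return err
	}
	fields := strings.Fields(string(want))
	if len(fields) == 0 {
		return fmt.Errorf("empty checksum file %s", sumPath)
	}
	if hex.EncodeToString(sum[:]) != fields[0] {
		return fmt.Errorf("checksum mismatch for %s", assetPath)
	}
	return nil
}

func main() {
	// example paths, matching the naming scheme used by the workflow
	if err := verifyChecksum("forgejo-1.2.3-linux-amd64", "forgejo-1.2.3-linux-amd64.sha256"); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	fmt.Println("checksum OK")
}
```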
@@ -14,11 +14,12 @@
# secrets.CASCADE_DESTINATION_TOKEN: <generated from code.forgejo.org/forgejo-ci> scope read:user, write:repository, write:issue
# vars.CASCADE_DESTINATION_DOER: forgejo-ci
#
name: Build release

on:
push:
tags: 'v*'
tags: 'v[0-9]+.[0-9]+.*'
branches:
- 'forgejo'
- 'v*/forgejo'

jobs:
release:

@@ -27,6 +28,8 @@ jobs:
if: vars.ROLE == 'forgejo-integration' || github.repository_owner == 'root'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Sanitize the name of the repository
id: repository

@@ -43,17 +46,39 @@ jobs:
go-version: "1.21"
check-latest: true

- name: version from ref_name
id: tag-version
- name: version from ref
id: release-info
shell: bash
run: |
version="${{ github.ref_name }}"
version=${version##*v}
echo "value=$version" >> "$GITHUB_OUTPUT"
set -x
ref="${{ github.ref }}"
if [[ $ref =~ ^refs/heads/ ]] ; then
if test "$ref" = "refs/heads/forgejo" ; then
version=$(git tag -l --sort=version:refname --merged | grep -v -e '-test$' | tail -1 | sed -E -e 's/^(v[0-9]+\.[0-9]+).*/\1/')-test
else
version=${ref#refs/heads/}
version=${version%/forgejo}-test
fi
override=true
fi
if [[ $ref =~ ^refs/tags/ ]] ; then
version=${ref#refs/tags/}
override=false
fi
if test -z "$version" ; then
echo failed to figure out the release version from the reference=$ref
exit 1
fi
version=${version#v}
git describe --exclude '*-test' --tags --always
echo "sha=${{ github.sha }}" >> "$GITHUB_OUTPUT"
echo "version=$version" >> "$GITHUB_OUTPUT"
echo "override=$override" >> "$GITHUB_OUTPUT"

- name: release notes
id: release-notes
run: |
anchor=${{ steps.tag-version.outputs.value }}
anchor=${{ steps.release-info.outputs.version }}
anchor=${anchor//./-}
cat >> "$GITHUB_OUTPUT" <<EOF
value<<ENDVAR

@@ -61,11 +86,23 @@ jobs:
ENDVAR
EOF

- name: cache node_modules
id: node
uses: https://code.forgejo.org/actions/cache@v3
with:
path: |
node_modules
key: node-${{ steps.release-info.outputs.version }}

- name: skip if node cache hit
if: steps.node.outputs.cache-hit != 'true'
run: echo no hit

- name: Build sources
run: |
set -x
apt-get -qq install -y make
version=${{ steps.tag-version.outputs.value }}
version=${{ steps.release-info.outputs.version }}
#
# Make sure all files are owned by the current user.
# When run as root `npx webpack` will assume the identity

@@ -122,38 +159,42 @@ jobs:
- name: build container & release
if: ${{ secrets.TOKEN != '' }}
uses: https://code.forgejo.org/forgejo/forgejo-build-publish/build@v1
uses: https://code.forgejo.org/forgejo/forgejo-build-publish/build@v3
with:
forgejo: "${{ env.GITHUB_SERVER_URL }}"
owner: "${{ env.GITHUB_REPOSITORY_OWNER }}"
repository: "${{ steps.repository.outputs.value }}"
doer: "${{ secrets.DOER }}"
tag-version: "${{ steps.tag-version.outputs.value }}"
release-version: "${{ steps.release-info.outputs.version }}"
sha: "${{ steps.release-info.outputs.sha }}"
token: "${{ secrets.TOKEN }}"
platforms: linux/amd64,linux/arm64,linux/arm/v6
release-notes: "${{ steps.release-notes.outputs.value }}"
binary-name: forgejo
binary-path: /app/gitea/gitea
verbose: ${{ vars.VERBOSE || 'false' }}
override: "${{ steps.release-info.outputs.override }}"
verbose: ${{ vars.VERBOSE || secrets.VERBOSE || 'false' }}

- name: build rootless container
if: ${{ secrets.TOKEN != '' }}
uses: https://code.forgejo.org/forgejo/forgejo-build-publish/build@v1
uses: https://code.forgejo.org/forgejo/forgejo-build-publish/build@v3
with:
forgejo: "${{ env.GITHUB_SERVER_URL }}"
owner: "${{ env.GITHUB_REPOSITORY_OWNER }}"
repository: "${{ steps.repository.outputs.value }}"
doer: "${{ secrets.DOER }}"
tag-version: "${{ steps.tag-version.outputs.value }}"
release-version: "${{ steps.release-info.outputs.version }}"
sha: "${{ steps.release-info.outputs.sha }}"
token: "${{ secrets.TOKEN }}"
platforms: linux/amd64,linux/arm64,linux/arm/v6
suffix: -rootless
dockerfile: Dockerfile.rootless
verbose: ${{ vars.VERBOSE || 'false' }}
override: "${{ steps.release-info.outputs.override }}"
verbose: ${{ vars.VERBOSE || secrets.VERBOSE || 'false' }}

- name: end-to-end tests
if: ${{ secrets.TOKEN != '' && vars.ROLE == 'forgejo-integration' }}
uses: https://code.forgejo.org/actions/cascading-pr@v1
uses: https://code.forgejo.org/actions/cascading-pr@v2
with:
origin-url: ${{ env.GITHUB_SERVER_URL }}
origin-repo: ${{ github.repository }}

@@ -166,4 +207,26 @@ jobs:
destination-token: ${{ secrets.CASCADE_DESTINATION_TOKEN }}
update: .forgejo/cascading-release-end-to-end
env:
FORGEJO_BINARY: "${{ env.GITHUB_SERVER_URL }}/${{ github.repository }}/releases/download/v${{ steps.tag-version.outputs.value }}/forgejo-${{ steps.tag-version.outputs.value }}-linux-amd64"
FORGEJO_BINARY: "${{ env.GITHUB_SERVER_URL }}/${{ github.repository }}/releases/download/v${{ steps.release-info.outputs.version }}/forgejo-${{ steps.release-info.outputs.version }}-linux-amd64"

- name: copy to experimental
if: vars.ROLE == 'forgejo-integration' && secrets.TOKEN != ''
run: |
if test "${{ vars.VERBOSE }}" = true ; then
set -x
fi
tag=v${{ steps.release-info.outputs.version }}
url=https://any:${{ secrets.TOKEN }}@codeberg.org
if test "${{ steps.release-info.outputs.override }}" = "true" ; then
curl -sS -X DELETE $url/api/v1/repos/forgejo-experimental/forgejo/releases/tags/$tag > /dev/null
curl -sS -X DELETE $url/api/v1/repos/forgejo-experimental/forgejo/tags/$tag > /dev/null
fi
# actions/checkout@v3 sets http.https://codeberg.org/.extraheader with the automatic token.
# Get rid of it so it does not prevent using the token that has write permissions
git config --local --unset http.https://codeberg.org/.extraheader
if test -f .git/shallow ; then
echo "unexptected .git/shallow file is present"
echo "it suggests a checkout --depth X was used which may prevent pushing the commit"
echo "it happens when actions/checkout is called without depth: 0"
fi
git push $url/forgejo-experimental/forgejo ${{ steps.release-info.outputs.sha }}:refs/tags/$tag
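The new "version from ref" step maps the triggering git ref to a release version: tag refs yield the plain version with override=false, while branch refs yield a "-test" version with override=true. A hedged Go sketch of that mapping follows; the rule for the development branch (which uses `git tag -l` in the workflow) is replaced by a placeholder value, so this is an illustration of the logic, not the project's code.

```go
package main

import (
	"fmt"
	"strings"
)

// releaseInfo mirrors the shell logic of the "version from ref" step.
func releaseInfo(ref string) (version string, override bool, err error) {
	switch {
	case strings.HasPrefix(ref, "refs/tags/"):
		version = strings.TrimPrefix(ref, "refs/tags/")
		override = false
	case ref == "refs/heads/forgejo":
		version = "7.0-test" // stand-in for the `git tag -l ... | sed ...`-derived value
		override = true
	case strings.HasPrefix(ref, "refs/heads/"):
		version = strings.TrimPrefix(ref, "refs/heads/")
		version = strings.TrimSuffix(version, "/forgejo") + "-test"
		override = true
	default:
		return "", false, fmt.Errorf("cannot derive a release version from %q", ref)
	}
	return strings.TrimPrefix(version, "v"), override, nil
}

func main() {
	for _, ref := range []string{"refs/tags/v1.2.3", "refs/heads/v7.0/forgejo", "refs/heads/forgejo"} {
		v, o, _ := releaseInfo(ref)
		fmt.Printf("%-26s -> version=%s override=%v\n", ref, v, o)
	}
}
```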
@@ -1,10 +1,8 @@
name: mirror

on:
push:
branches:
- 'forgejo'
- 'v*/forgejo'
schedule:
- cron: '@daily'

jobs:
mirror:
@@ -42,16 +42,19 @@ jobs:
- uses: actions/checkout@v3

- name: copy & sign
uses: https://code.forgejo.org/forgejo/forgejo-build-publish/publish@v1
uses: https://code.forgejo.org/forgejo/forgejo-build-publish/publish@v4
with:
forgejo: ${{ vars.FORGEJO }}
from-forgejo: ${{ vars.FORGEJO }}
to-forgejo: ${{ vars.FORGEJO }}
from-owner: ${{ vars.FROM_OWNER }}
to-owner: ${{ vars.TO_OWNER }}
repo: ${{ vars.REPO }}
ref-name: ${{ github.ref_name }}
release-notes: "See https://codeberg.org/forgejo/forgejo/src/branch/forgejo/RELEASE-NOTES.md#{ANCHOR}"
doer: ${{ vars.DOER }}
token: ${{ secrets.TOKEN }}
ref-name: ${{ github.ref_name }}
sha: ${{ github.sha }}
from-token: ${{ secrets.TOKEN }}
to-doer: ${{ vars.DOER }}
to-token: ${{ secrets.TOKEN }}
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
gpg-passphrase: ${{ secrets.GPG_PASSPHRASE }}
verbose: ${{ vars.VERBOSE }}
@@ -23,10 +23,22 @@ jobs:
- run: make --always-make -j$(nproc) lint-backend checks-backend # ensure the "go-licenses" make target runs
env:
TAGS: bindata sqlite sqlite_unlock_notify
frontend-checks:
if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
runs-on: docker
container:
image: 'docker.io/node:20-bookworm'
steps:
- uses: https://code.forgejo.org/actions/checkout@v3
- run: make deps-frontend
- run: make lint-frontend
- run: make checks-frontend
- run: make test-frontend
- run: make frontend
test-unit:
if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
runs-on: docker
needs: [backend-checks]
needs: [backend-checks, frontend-checks]
container:
image: 'docker.io/node:20-bookworm'
services:

@@ -67,7 +79,7 @@ jobs:
test-mysql:
if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
runs-on: docker
needs: [backend-checks]
needs: [backend-checks, frontend-checks]
container:
image: 'docker.io/node:20-bookworm'
services:

@@ -113,7 +125,7 @@ jobs:
test-pgsql:
if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
runs-on: docker
needs: [backend-checks]
needs: [backend-checks, frontend-checks]
container:
image: 'docker.io/node:20-bookworm'
services:

@@ -161,7 +173,7 @@ jobs:
test-sqlite:
if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
runs-on: docker
needs: [backend-checks]
needs: [backend-checks, frontend-checks]
container:
image: 'docker.io/node:20-bookworm'
steps:
.gitpod.yml (15 changed lines)

@@ -10,10 +10,19 @@ tasks:
- name: Run backend
command: |
gp sync-await setup
if [ ! -f custom/conf/app.ini ]
then

# Get the URL and extract the domain
url=$(gp url 3000)
domain=$(echo $url | awk -F[/:] '{print $4}')

if [ -f custom/conf/app.ini ]; then
sed -i "s|^ROOT_URL =.*|ROOT_URL = ${url}/|" custom/conf/app.ini
sed -i "s|^DOMAIN =.*|DOMAIN = ${domain}|" custom/conf/app.ini
sed -i "s|^SSH_DOMAIN =.*|SSH_DOMAIN = ${domain}|" custom/conf/app.ini
sed -i "s|^NO_REPLY_ADDRESS =.*|SSH_DOMAIN = noreply.${domain}|" custom/conf/app.ini
else
mkdir -p custom/conf/
echo -e "[server]\nROOT_URL=$(gp url 3000)/" > custom/conf/app.ini
echo -e "[server]\nROOT_URL = ${url}/" > custom/conf/app.ini
echo -e "\n[database]\nDB_TYPE = sqlite3\nPATH = $GITPOD_REPO_ROOT/data/gitea.db" >> custom/conf/app.ini
fi
export TAGS="sqlite sqlite_unlock_notify"
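The Gitpod task above derives DOMAIN and SSH_DOMAIN from the workspace URL with `awk -F[/:] '{print $4}'`. For comparison, a minimal Go sketch of the same host extraction using net/url; the workspace URL below is a made-up example.

```go
package main

import (
	"fmt"
	"net/url"
)

// domainOf returns the host part of a URL, which is what the awk one-liner extracts.
func domainOf(rawURL string) (string, error) {
	u, err := url.Parse(rawURL)
	if err != nil {
		return "", err
	}
	return u.Hostname(), nil
}

func main() {
	d, err := domainOf("https://3000-example-workspace.ws-eu.gitpod.io/")
	if err != nil {
		panic(err)
	}
	fmt.Println(d) // 3000-example-workspace.ws-eu.gitpod.io
}
```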
@@ -98,7 +98,7 @@ rules:
at-rule-allowed-list: null
at-rule-disallowed-list: null
at-rule-empty-line-before: null
at-rule-no-unknown: true
at-rule-no-unknown: [true, {ignoreAtRules: [tailwind]}]
at-rule-no-vendor-prefix: true
at-rule-property-required-list: null
block-no-empty: true
@@ -5,7 +5,7 @@ FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.21-alpine3.19 as build
ARG GOPROXY
ENV GOPROXY ${GOPROXY:-direct}

ARG GITEA_VERSION
ARG RELEASE_VERSION
ARG TAGS="sqlite sqlite_unlock_notify"
ENV TAGS "bindata timetzdata $TAGS"
ARG CGO_EXTRA_CFLAGS

@@ -33,10 +33,10 @@ RUN apk --no-cache add build-base git nodejs npm
COPY . ${GOPATH}/src/code.gitea.io/gitea
WORKDIR ${GOPATH}/src/code.gitea.io/gitea

RUN make clean-all
RUN make clean
RUN make frontend
RUN go build contrib/environment-to-ini/environment-to-ini.go && xx-verify environment-to-ini
RUN make go-check generate-backend static-executable && xx-verify gitea
RUN make RELEASE_VERSION=$RELEASE_VERSION go-check generate-backend static-executable && xx-verify gitea

# Copy local files
COPY docker/root /tmp/local
@@ -5,7 +5,7 @@ FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.21-alpine3.19 as build
ARG GOPROXY
ENV GOPROXY ${GOPROXY:-direct}

ARG GITEA_VERSION
ARG RELEASE_VERSION
ARG TAGS="sqlite sqlite_unlock_notify"
ENV TAGS "bindata timetzdata $TAGS"
ARG CGO_EXTRA_CFLAGS

@@ -33,10 +33,10 @@ RUN apk --no-cache add build-base git nodejs npm
COPY . ${GOPATH}/src/code.gitea.io/gitea
WORKDIR ${GOPATH}/src/code.gitea.io/gitea

RUN make clean-all
RUN make clean
RUN make frontend
RUN go build contrib/environment-to-ini/environment-to-ini.go && xx-verify environment-to-ini
RUN make go-check generate-backend static-executable && xx-verify gitea
RUN make RELEASE_VERSION=$RELEASE_VERSION go-check generate-backend static-executable && xx-verify gitea

# Copy local files
COPY docker/rootless /tmp/local
Makefile (70 changed lines)

@@ -29,10 +29,10 @@ XGO_VERSION := go-1.21.x
AIR_PACKAGE ?= github.com/cosmtrek/air@v1.49.0
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.7.0
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.6.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.56.1
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11
MISSPELL_PACKAGE ?= github.com/client9/misspell/cmd/misspell@v0.3.4
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.5
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.4.1
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.6-0.20240201115257-bcc7c78b7786
XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1.0.3

@@ -85,19 +85,18 @@ endif
STORED_VERSION_FILE := VERSION
HUGO_VERSION ?= 0.111.3

GITEA_COMPATIBILITY ?= gitea-1.22.0

STORED_VERSION=$(shell cat $(STORED_VERSION_FILE) 2>/dev/null)
ifneq ($(STORED_VERSION),)
GITEA_VERSION ?= $(STORED_VERSION)
FORGEJO_VERSION ?= $(STORED_VERSION)
else
GITEA_VERSION ?= $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')
FORGEJO_VERSION ?= $(shell git describe --exclude '*-test' --tags --always | sed 's/^v//')+${GITEA_COMPATIBILITY}
endif
VERSION = ${GITEA_VERSION}
RELEASE_VERSION ?= ${FORGEJO_VERSION}
VERSION ?= ${RELEASE_VERSION}

# SemVer
FORGEJO_VERSION := 7.0.0+0-gitea-1.22.0

LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)" -X "main.ForgejoVersion=$(FORGEJO_VERSION)"
LDFLAGS := $(LDFLAGS) -X "main.ReleaseVersion=$(RELEASE_VERSION)" -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(FORGEJO_VERSION)" -X "main.Tags=$(TAGS)" -X "main.ForgejoVersion=$(FORGEJO_VERSION)"

LINUX_ARCHS ?= linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64

@@ -107,7 +106,7 @@ GO_TEST_PACKAGES ?= $(filter-out $(shell $(GO) list code.gitea.io/gitea/models/m
FOMANTIC_WORK_DIR := web_src/fomantic

WEBPACK_SOURCES := $(shell find web_src/js web_src/css -type f)
WEBPACK_CONFIGS := webpack.config.js
WEBPACK_CONFIGS := webpack.config.js tailwind.config.js
WEBPACK_DEST := public/assets/js/index.js public/assets/css/index.css
WEBPACK_DEST_ENTRIES := public/assets/js public/assets/css public/assets/fonts public/assets/img/webpack

@@ -134,6 +133,8 @@ TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(FOMAN
GO_DIRS := build cmd models modules routers services tests
WEB_DIRS := web_src/js web_src/css

SPELLCHECK_FILES := $(GO_DIRS) $(WEB_DIRS) docs/content templates options/locale/locale_en-US.ini .github

GO_SOURCES := $(wildcard *.go)
GO_SOURCES += $(shell find $(GO_DIRS) -type f -name "*.go" ! -path modules/options/bindata.go ! -path modules/public/bindata.go ! -path modules/templates/bindata.go)
GO_SOURCES += $(GENERATED_GO_DEST)

@@ -154,8 +155,8 @@ endif
FORGEJO_API_SPEC := public/assets/forgejo/api.v1.yml

SWAGGER_SPEC := templates/swagger/v1_json.tmpl
SWAGGER_SPEC_S_TMPL := s|"basePath": *"/api/v1"|"basePath": "{{AppSubUrl \| JSEscape \| Safe}}/api/v1"|g
SWAGGER_SPEC_S_JSON := s|"basePath": *"{{AppSubUrl \| JSEscape \| Safe}}/api/v1"|"basePath": "/api/v1"|g
SWAGGER_SPEC_S_TMPL := s|"basePath": *"/api/v1"|"basePath": "{{AppSubUrl \| JSEscape}}/api/v1"|g
SWAGGER_SPEC_S_JSON := s|"basePath": *"{{AppSubUrl \| JSEscape}}/api/v1"|"basePath": "/api/v1"|g
SWAGGER_EXCLUDE := code.gitea.io/sdk
SWAGGER_NEWLINE_COMMAND := -e '$$a\'
SWAGGER_SPEC_BRANDING := s|Gitea API|Forgejo API|g

@@ -212,6 +213,8 @@ help:
@echo " - lint-swagger lint swagger files"
@echo " - lint-templates lint template files"
@echo " - lint-yaml lint yaml files"
@echo " - lint-spell lint spelling"
@echo " - lint-spell-fix lint spelling and fix issues"
@echo " - checks run various consistency checks"
@echo " - checks-frontend check frontend files"
@echo " - checks-backend check backend files"

@@ -303,10 +306,6 @@ fmt-check: fmt
exit 1; \
fi

.PHONY: misspell-check
misspell-check:
go run $(MISSPELL_PACKAGE) -error $(GO_DIRS) $(WEB_DIRS)

.PHONY: $(TAGS_EVIDENCE)
$(TAGS_EVIDENCE):
@mkdir -p $(MAKE_EVIDENCE_DIR)

@@ -368,13 +367,13 @@ checks: checks-frontend checks-backend
checks-frontend: lockfile-check svg-check

.PHONY: checks-backend
checks-backend: tidy-check swagger-check fmt-check misspell-check forgejo-api-validate swagger-validate security-check
checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check

.PHONY: lint
lint: lint-frontend lint-backend
lint: lint-frontend lint-backend lint-spell

.PHONY: lint-fix
lint-fix: lint-frontend-fix lint-backend-fix
lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix

.PHONY: lint-frontend
lint-frontend: lint-js lint-css

@@ -412,6 +411,14 @@ lint-swagger: node_modules
lint-md: node_modules
npx markdownlint docs *.md

.PHONY: lint-spell
lint-spell:
@go run $(MISSPELL_PACKAGE) -error $(SPELLCHECK_FILES)

.PHONY: lint-spell-fix
lint-spell-fix:
@go run $(MISSPELL_PACKAGE) -w $(SPELLCHECK_FILES)

.PHONY: lint-go
lint-go:
$(GO) run $(GOLANGCI_LINT_PACKAGE) run $(GOLANGCI_LINT_ARGS)

@@ -617,8 +624,7 @@ test-mssql\#%: integrations.mssql.test generate-ini-mssql
test-mssql-migration: migrations.mssql.test migrations.individual.mssql.test

.PHONY: playwright
playwright: $(PLAYWRIGHT_DIR)
npm install --no-save @playwright/test
playwright: deps-frontend
npx playwright install $(PLAYWRIGHT_FLAGS)

.PHONY: test-e2e%

@@ -631,7 +637,7 @@ test-e2e: test-e2e-sqlite
.PHONY: test-e2e-sqlite
test-e2e-sqlite: playwright e2e.sqlite.test generate-ini-sqlite
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./e2e.sqlite.test
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./e2e.sqlite.test -test.run TestE2e

.PHONY: test-e2e-sqlite\#%
test-e2e-sqlite\#%: playwright e2e.sqlite.test generate-ini-sqlite

@@ -639,7 +645,7 @@ test-e2e-sqlite\#%: playwright e2e.sqlite.test generate-ini-sqlite
.PHONY: test-e2e-mysql
test-e2e-mysql: playwright e2e.mysql.test generate-ini-mysql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./e2e.mysql.test
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./e2e.mysql.test -test.run TestE2e

.PHONY: test-e2e-mysql\#%
test-e2e-mysql\#%: playwright e2e.mysql.test generate-ini-mysql

@@ -647,7 +653,7 @@ test-e2e-mysql\#%: playwright e2e.mysql.test generate-ini-mysql
.PHONY: test-e2e-pgsql
test-e2e-pgsql: playwright e2e.pgsql.test generate-ini-pgsql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./e2e.pgsql.test
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./e2e.pgsql.test -test.run TestE2e

.PHONY: test-e2e-pgsql\#%
test-e2e-pgsql\#%: playwright e2e.pgsql.test generate-ini-pgsql

@@ -655,12 +661,17 @@ test-e2e-pgsql\#%: playwright e2e.pgsql.test generate-ini-pgsql
.PHONY: test-e2e-mssql
test-e2e-mssql: playwright e2e.mssql.test generate-ini-mssql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./e2e.mssql.test
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./e2e.mssql.test -test.run TestE2e

.PHONY: test-e2e-mssql\#%
test-e2e-mssql\#%: playwright e2e.mssql.test generate-ini-mssql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./e2e.mssql.test -test.run TestE2e/$*

.PHONY: test-e2e-debugserver
test-e2e-debugserver: e2e.sqlite.test generate-ini-sqlite
sed -i s/3003/3000/g tests/sqlite.ini
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./e2e.sqlite.test -test.run TestDebugserver -test.timeout 24h

.PHONY: bench-sqlite
bench-sqlite: integrations.sqlite.test generate-ini-sqlite
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .

@@ -997,7 +1008,7 @@ generate-gitignore:
.PHONY: generate-images
generate-images: | node_modules
npm install --no-save --no-package-lock fabric@5 imagemin-zopfli@7
npm install --no-save fabric@6.0.0-beta19 imagemin-zopfli@7
node build/generate-images.js $(TAGS)

.PHONY: generate-manpage

@@ -1015,3 +1026,8 @@ docker:
# This endif closes the if at the top of the file
endif

# Disable parallel execution because it would break some targets that don't
# specify exact dependencies like 'backend' which does currently not depend
# on 'frontend' to enable Node.js-less builds from source tarballs.
.NOTPARALLEL:
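The Makefile change derives FORGEJO_VERSION from `git describe --exclude '*-test' --tags --always`, strips the leading `v`, and appends `+$(GITEA_COMPATIBILITY)`. A minimal Go sketch of that string assembly, assuming an example `git describe` output:

```go
package main

import (
	"fmt"
	"strings"
)

// forgejoVersion reproduces the Makefile recipe as pure string manipulation:
// drop the leading "v" from the describe output and append "+<gitea compatibility>".
func forgejoVersion(describe, giteaCompatibility string) string {
	return strings.TrimPrefix(describe, "v") + "+" + giteaCompatibility
}

func main() {
	fmt.Println(forgejoVersion("v7.0.0-44-g1234abcd", "gitea-1.22.0"))
	// 7.0.0-44-g1234abcd+gitea-1.22.0
}
```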
@@ -4,6 +4,54 @@ A Forgejo release is published shortly after a Gitea release is published and th

The Forgejo admin should carefully read the required manual actions before upgrading. A point release (e.g. v1.21.1-0 or v1.21.2-0) does not require manual actions but others might (e.g. v1.20, v1.21).

## 1.21.6-0

The [complete list of commits](https://codeberg.org/forgejo/forgejo/commits/branch/v1.21/forgejo) included in the `Forgejo v1.21.6-0` release can be reviewed from the command line with:

```shell
$ git clone https://codeberg.org/forgejo/forgejo/
$ git -C forgejo log --oneline --no-merges v1.21.5-0..v1.21.6-0
```

This stable release contains bug fixes and a **security fix**, as explained in the [v1.21.6-0 companion blog post](https://forgejo.org/2024-02-release-v1-21-6-0/).

* Recommended Action

  We **strongly recommend** that all Forgejo installations are [upgraded](https://forgejo.org/docs/v1.21/admin/upgrade/) to the latest version as soon as possible.

* [Forgejo Semantic Version](https://forgejo.org/docs/v1.21/user/semver/)

  The semantic version was updated to `6.0.6+0-gitea-1.21.6`

* Security fix

  * [Fix XSS vulnerabilities](https://codeberg.org/forgejo/forgejo/pulls/2434). It enabled attackers to inject client-side scripts into web pages displayed to Forgejo visitors.

* Bug fixes

  The most prominent ones are described here, others can be found in the list of commits included in the release as described above.

  * [Always write proc-receive hook for all git versions](https://codeberg.org/forgejo/forgejo/commit/a1fb6a2346193439dafaee5acf071632246e6dd7).
  * [Fix debian InRelease Acquire-By-Hash newline](https://codeberg.org/forgejo/forgejo/commit/8a2c4e9ff2743f47a8d1f081b9e35dcc16431115).
  * [Fix missing link on outgoing new release notifications](https://codeberg.org/forgejo/forgejo/commit/3a061083d65bdfc9acf0cb5839b84f6a9c17a727).
  * [Workaround to clean up old reviews on creating a new one](https://codeberg.org/forgejo/forgejo/commit/8377ecbfe1f2b72ec7d65c46cbc9022ad0ccd75f).
  * [Fix push to create with capitalize repo name](https://codeberg.org/forgejo/forgejo/commit/8782275c9c66ad6fc7c44503d7df9dae7196aa65).
  * In Markdown [don't try to make the link absolute if the link has a schema that's defined in `[markdown].CUSTOM_URL_SCHEMES`](https://codeberg.org/forgejo/forgejo/commit/6c100083c29fb0ccf0cc52e8767e540a260d9468), because they can't be made absolute.
  * [Fix Ctrl+Enter on submitting review comment](https://codeberg.org/forgejo/forgejo/commit/1c3a31d85112d10fb948d6f0b763191ed6f68e90).
  * In Git version v2.43.1, the behavior of `GIT_FLUSH` was accidentially flipped. This causes Forgejo to hang on the `check-attr` command, because no output was being flushed. [Workaround this by detecting if Git v2.43.1 is used and set `GIT_FLUSH=0` thus getting the correct behavior](https://codeberg.org/forgejo/forgejo/commit/ff468ab5e426582b068586ce13d5a5348365e783).
  * [When setting `url.host` on a URL object with no port specified (like is the case of default port), the resulting URL's port will not change. Workaround this quirk in the URL standard by explicitly setting port for the http and https protocols](https://codeberg.org/forgejo/forgejo/commit/628e1036cfbcfae442cb6494249fe11410447056).
  * [Fix elasticsearch Request Entity Too Large](https://codeberg.org/forgejo/forgejo/commit/e6f59f6e1489d63d53de0da1de406a7a71a82adb).
  * [Do not send update/delete release notifications when it is in a draft state](https://codeberg.org/forgejo/forgejo/commit/3c54a1dbf62e56d948feb1008512900140033737).
  * [Do not run Forgejo Actions workflows synchronized events on the same commit as the one used to create a pull request](https://codeberg.org/forgejo/forgejo/commit/ce96379aef6e92cff2e9982031d5248ef8b01947).
  * [Fix a MySQL performance regression introduced in v1.21.4-0](https://codeberg.org/forgejo/forgejo/commit/af98a0a7c6f4cbb5340974958ebe4389e3bf4e9a).
  * [Fix Internal Server Error when resolving comments](https://codeberg.org/forgejo/forgejo/commit/ad67d9ef1a219b21309f811c14e7353cbc4982e3).
  * Packages
    * Swift: [fix a failure to resolve from package registry](https://codeberg.org/forgejo/forgejo/commit/fab6780fda5d8ded020a98253a793e87ed94f634).
    * Alpine: [if the APKINFO contains an install if condition, write it in the APKINDEX](https://codeberg.org/forgejo/forgejo/commit/7afbc62057b876fb6711ef58743f664a2509dde4).
  * org-mode files
    * [It is possible that the description of an `Regularlink` is `Text` and not another `Regularlink`](https://codeberg.org/forgejo/forgejo/commit/781d2a68ccb276bf13caf0b378b74d9efeab3d39).
    * [Fix relative links on orgmode](https://codeberg.org/forgejo/forgejo/commit/fa700333ba2649d14f1670dd2745957704a33b40).

## 1.21.5-0

The [complete list of commits](https://codeberg.org/forgejo/forgejo/commits/branch/v1.21/forgejo) included in the `Forgejo v1.21.5-0` release can be reviewed from the command line with:
assets/go-licenses.json (generated, 10 changed lines)

File diff suppressed because one or more lines are too long
@@ -1,20 +1,13 @@
#!/usr/bin/env node
import imageminZopfli from 'imagemin-zopfli';
import {optimize} from 'svgo';
import {fabric} from 'fabric';
import {loadSVGFromString, Canvas, Rect, util} from 'fabric/node';
import {readFile, writeFile} from 'node:fs/promises';
import {argv, exit} from 'node:process';

function exit(err) {
function doExit(err) {
if (err) console.error(err);
process.exit(err ? 1 : 0);
}

function loadSvg(svg) {
return new Promise((resolve) => {
fabric.loadSVGFromString(svg, (objects, options) => {
resolve({objects, options});
});
});
exit(err ? 1 : 0);
}

async function generate(svg, path, {size, bg}) {

@@ -35,14 +28,14 @@ async function generate(svg, path, {size, bg}) {
return;
}

const {objects, options} = await loadSvg(svg);
const canvas = new fabric.Canvas();
const {objects, options} = await loadSVGFromString(svg);
const canvas = new Canvas();
canvas.setDimensions({width: size, height: size});
const ctx = canvas.getContext('2d');
ctx.scale(options.width ? (size / options.width) : 1, options.height ? (size / options.height) : 1);

if (bg) {
canvas.add(new fabric.Rect({
canvas.add(new Rect({
left: 0,
top: 0,
height: size * (1 / (size / options.height)),

@@ -51,7 +44,7 @@ async function generate(svg, path, {size, bg}) {
}));
}

canvas.add(fabric.util.groupSVGElements(objects, options));
canvas.add(util.groupSVGElements(objects, options));
canvas.renderAll();

let png = Buffer.from([]);

@@ -64,7 +57,7 @@ async function generate(svg, path, {size, bg}) {
}

async function main() {
const gitea = process.argv.slice(2).includes('gitea');
const gitea = argv.slice(2).includes('gitea');
const logoSvg = await readFile(new URL('../assets/logo.svg', import.meta.url), 'utf8');
const faviconSvg = await readFile(new URL('../assets/favicon.svg', import.meta.url), 'utf8');

@@ -79,4 +72,8 @@ async function main() {
]);
}

main().then(exit).catch(exit);
try {
doExit(await main());
} catch (err) {
doExit(err);
}
@@ -4,15 +4,16 @@ import {optimize} from 'svgo';
import {parse} from 'node:path';
import {readFile, writeFile, mkdir} from 'node:fs/promises';
import {fileURLToPath} from 'node:url';
import {exit} from 'node:process';

const glob = (pattern) => fastGlob.sync(pattern, {
cwd: fileURLToPath(new URL('..', import.meta.url)),
absolute: true,
});

function exit(err) {
function doExit(err) {
if (err) console.error(err);
process.exit(err ? 1 : 0);
exit(err ? 1 : 0);
}

async function processFile(file, {prefix, fullName} = {}) {

@@ -59,8 +60,11 @@ async function main() {
await Promise.all([
...processFiles('node_modules/@primer/octicons/build/svg/*-16.svg', {prefix: 'octicon'}),
...processFiles('web_src/svg/*.svg'),
...processFiles('public/assets/img/gitea.svg', {fullName: 'gitea-gitea'}),
]);
}

main().then(exit).catch(exit);
try {
doExit(await main());
} catch (err) {
doExit(err);
}
@@ -10,8 +10,8 @@ import (
auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
pwd "code.gitea.io/gitea/modules/auth/password"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"

"github.com/urfave/cli/v2"
)

@@ -123,10 +123,10 @@ func runCreateUser(c *cli.Context) error {
changePassword = c.Bool("must-change-password")
}

restricted := util.OptionalBoolNone
restricted := optional.None[bool]()

if c.IsSet("restricted") {
restricted = util.OptionalBoolOf(c.Bool("restricted"))
restricted = optional.Some(c.Bool("restricted"))
}

// default user visibility in app.ini

@@ -142,7 +142,7 @@ func runCreateUser(c *cli.Context) error {
}

overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsActive: util.OptionalBoolTrue,
IsActive: optional.Some(true),
IsRestricted: restricted,
}
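This hunk migrates from the `util.OptionalBool*` helpers to the generic `optional` package (`optional.None[bool]()`, `optional.Some(...)`). The sketch below shows what such a generic option type can look like; it is a stand-in for illustration, not the actual `code.gitea.io/gitea/modules/optional` implementation.

```go
package main

import "fmt"

// Option is a minimal generic "maybe" value: either empty (None) or holding a value (Some).
type Option[T any] struct {
	value T
	has   bool
}

func None[T any]() Option[T]    { return Option[T]{} }
func Some[T any](v T) Option[T] { return Option[T]{value: v, has: true} }
func (o Option[T]) Has() bool   { return o.has }
func (o Option[T]) Value() T    { return o.value }

func main() {
	restricted := None[bool]()
	// mirrors: if c.IsSet("restricted") { restricted = optional.Some(c.Bool("restricted")) }
	restricted = Some(true)
	fmt.Println(restricted.Has(), restricted.Value()) // true true
}
```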
|
@ -35,7 +35,8 @@ func SubcmdActionsGenerateRunnerToken(ctx context.Context) *cli.Command {
|
|||
return &cli.Command{
|
||||
Name: "generate-runner-token",
|
||||
Usage: "Generate a new token for a runner to use to register with the server",
|
||||
Action: prepareWorkPathAndCustomConf(ctx, func(cliCtx *cli.Context) error { return RunGenerateActionsRunnerToken(ctx, cliCtx) }),
|
||||
Before: prepareWorkPathAndCustomConf(ctx),
|
||||
Action: func(cliCtx *cli.Context) error { return RunGenerateActionsRunnerToken(ctx, cliCtx) },
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "scope",
|
||||
|
@ -59,7 +60,8 @@ func SubcmdActionsRegister(ctx context.Context) *cli.Command {
|
|||
return &cli.Command{
|
||||
Name: "register",
|
||||
Usage: "Idempotent registration of a runner using a shared secret",
|
||||
Action: prepareWorkPathAndCustomConf(ctx, func(cliCtx *cli.Context) error { return RunRegister(ctx, cliCtx) }),
|
||||
Before: prepareWorkPathAndCustomConf(ctx),
|
||||
Action: func(cliCtx *cli.Context) error { return RunRegister(ctx, cliCtx) },
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "secret",
|
||||
|
@ -219,25 +221,3 @@ func RunGenerateActionsRunnerToken(ctx context.Context, cliCtx *cli.Context) err
|
|||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func prepareWorkPathAndCustomConf(ctx context.Context, action cli.ActionFunc) func(cliCtx *cli.Context) error {
|
||||
return func(cliCtx *cli.Context) error {
|
||||
if !ContextGetNoInit(ctx) {
|
||||
var args setting.ArgWorkPathAndCustomConf
|
||||
// from children to parent, check the global flags
|
||||
for _, curCtx := range cliCtx.Lineage() {
|
||||
if curCtx.IsSet("work-path") && args.WorkPath == "" {
|
||||
args.WorkPath = curCtx.String("work-path")
|
||||
}
|
||||
if curCtx.IsSet("custom-path") && args.CustomPath == "" {
|
||||
args.CustomPath = curCtx.String("custom-path")
|
||||
}
|
||||
if curCtx.IsSet("config") && args.CustomConf == "" {
|
||||
args.CustomConf = curCtx.String("config")
|
||||
}
|
||||
}
|
||||
setting.InitWorkPathAndCommonConfig(os.Getenv, args)
|
||||
}
|
||||
return action(cliCtx)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -145,3 +145,25 @@ func handleCliResponseExtra(ctx context.Context, extra private.ResponseExtra) er
|
|||
}
|
||||
return cli.Exit(extra.Error, 1)
|
||||
}
|
||||
|
||||
func prepareWorkPathAndCustomConf(ctx context.Context) func(c *cli.Context) error {
|
||||
return func(c *cli.Context) error {
|
||||
if !ContextGetNoInit(ctx) {
|
||||
var args setting.ArgWorkPathAndCustomConf
|
||||
// from children to parent, check the global flags
|
||||
for _, curCtx := range c.Lineage() {
|
||||
if curCtx.IsSet("work-path") && args.WorkPath == "" {
|
||||
args.WorkPath = curCtx.String("work-path")
|
||||
}
|
||||
if curCtx.IsSet("custom-path") && args.CustomPath == "" {
|
||||
args.CustomPath = curCtx.String("custom-path")
|
||||
}
|
||||
if curCtx.IsSet("config") && args.CustomConf == "" {
|
||||
args.CustomConf = curCtx.String("config")
|
||||
}
|
||||
}
|
||||
setting.InitWorkPathAndCommonConfig(os.Getenv, args)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
|
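The refactor in these two hunks moves the work-path/config initialization out of a wrapper around each `Action` and into the command's `Before` hook; in urfave/cli v2 `Before` runs before `Action` and both receive the same `*cli.Context`. A reduced sketch of the new wiring, with placeholder setup and run functions standing in for the real ones:

```go
package main

import (
	"fmt"
	"os"

	"github.com/urfave/cli/v2"
)

// setupPaths stands in for prepareWorkPathAndCustomConf(ctx): it runs once,
// before the action, and only does initialization.
func setupPaths(c *cli.Context) error {
	fmt.Println("init work path and custom config")
	return nil
}

// run stands in for RunRegister / RunGenerateActionsRunnerToken.
func run(c *cli.Context) error {
	fmt.Println("register runner")
	return nil
}

func main() {
	app := &cli.App{
		Commands: []*cli.Command{
			{
				Name:   "register",
				Before: setupPaths, // new style: Before hook, Action stays a plain function
				Action: run,
			},
		},
	}
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```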
|
@ -71,7 +71,7 @@ func runKeys(c *cli.Context) error {
|
|||
ctx, cancel := installSignals()
|
||||
defer cancel()
|
||||
|
||||
setup(ctx, false)
|
||||
setup(ctx, c.Bool("debug"))
|
||||
|
||||
authorizedString, extra := private.AuthorizedPublicKeyByContent(ctx, content)
|
||||
// do not use handleCliResponseExtra or cli.NewExitError, if it exists immediately, it breaks some tests like Test_CmdKeys
|
||||
|
|
cmd/serv.go (25 changed lines)

@@ -63,21 +63,10 @@ func setup(ctx context.Context, debug bool) {
setupConsoleLogger(log.FATAL, false, os.Stderr)
}
setting.MustInstalled()
if debug {
setting.RunMode = "dev"
}

// Check if setting.RepoRootPath exists. It could be the case that it doesn't exist, this can happen when
// `[repository]` `ROOT` is a relative path and $GITEA_WORK_DIR isn't passed to the SSH connection.
if _, err := os.Stat(setting.RepoRootPath); err != nil {
if os.IsNotExist(err) {
_ = fail(ctx, "Incorrect configuration, no repository directory.", "Directory `[repository].ROOT` %q was not found, please check if $GITEA_WORK_DIR is passed to the SSH connection or make `[repository].ROOT` an absolute value.", setting.RepoRootPath)
} else {
_ = fail(ctx, "Incorrect configuration, repository directory is inaccessible", "Directory `[repository].ROOT` %q is inaccessible. err: %v", setting.RepoRootPath, err)
}
_ = fail(ctx, "Unable to access repository path", "Unable to access repository path %q, err: %v", setting.RepoRootPath, err)
return
}

if err := git.InitSimple(context.Background()); err != nil {
_ = fail(ctx, "Failed to init git", "Failed to init git, err: %v", err)
}

@@ -216,16 +205,18 @@ func runServ(c *cli.Context) error {
}
}

// LowerCase and trim the repoPath as that's how they are stored.
repoPath = strings.ToLower(strings.TrimSpace(repoPath))

rr := strings.SplitN(repoPath, "/", 2)
if len(rr) != 2 {
return fail(ctx, "Invalid repository path", "Invalid repository path: %v", repoPath)
}

username := strings.ToLower(rr[0])
reponame := strings.ToLower(strings.TrimSuffix(rr[1], ".git"))
username := rr[0]
reponame := strings.TrimSuffix(rr[1], ".git")

// LowerCase and trim the repoPath as that's how they are stored.
// This should be done after splitting the repoPath into username and reponame
// so that username and reponame are not affected.
repoPath = strings.ToLower(strings.TrimSpace(repoPath))

if alphaDashDotPattern.MatchString(reponame) {
return fail(ctx, "Invalid repo name", "Invalid repo name: %s", reponame)
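The cmd/serv.go change splits `repoPath` into `username` and `reponame` before lowercasing, so both keep the case the client typed; only the path used for storage lookups is lowercased afterwards. A small sketch of the resulting behavior, using a made-up path for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// splitRepoPath mirrors the new ordering: split first, then lowercase only the
// lookup path, leaving username and reponame as typed by the client.
func splitRepoPath(repoPath string) (username, reponame, lookupPath string, err error) {
	rr := strings.SplitN(repoPath, "/", 2)
	if len(rr) != 2 {
		return "", "", "", fmt.Errorf("invalid repository path: %q", repoPath)
	}
	username = rr[0]
	reponame = strings.TrimSuffix(rr[1], ".git")
	lookupPath = strings.ToLower(strings.TrimSpace(repoPath))
	return username, reponame, lookupPath, nil
}

func main() {
	u, r, p, _ := splitRepoPath("Alice/My-Repo.git")
	fmt.Println(u, r, p) // Alice My-Repo alice/my-repo.git
}
```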
@@ -991,6 +991,9 @@ LEVEL = Info
;; Disable stars feature.
;DISABLE_STARS = false
;;
;; Disable repository forking.
;DISABLE_FORKS = false
;;
;; The default branch name of new repositories
;DEFAULT_BRANCH = main
;;

@@ -1061,7 +1064,7 @@ LEVEL = Info
;; List of keywords used in Pull Request comments to automatically reopen a related issue
;REOPEN_KEYWORDS = reopen,reopens,reopened
;;
;; Set default merge style for repository creating, valid options: merge, rebase, rebase-merge, squash
;; Set default merge style for repository creating, valid options: merge, rebase, rebase-merge, squash, fast-forward-only
;DEFAULT_MERGE_STYLE = merge
;;
;; In the default merge message for squash commits include at most this many commits

@@ -1489,6 +1492,9 @@ LEVEL = Info
;DEFAULT_EMAIL_NOTIFICATIONS = enabled
;; Send an email to all admins when a new user signs up to inform the admins about this act. Options: true, false
;SEND_NOTIFICATION_EMAIL_ON_NEW_USER = false
;; Disabled features for users, could be "deletion", more features can be disabled in future
;; - deletion: a user cannot delete their own account
;USER_DISABLED_FEATURES =

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -126,7 +126,7 @@ In addition, there is _`StaticRootPath`_ which can be set as a built-in at build
keywords used in Pull Request comments to automatically close a related issue
- `REOPEN_KEYWORDS`: **reopen**, **reopens**, **reopened**: List of keywords used in Pull Request comments to automatically reopen
a related issue
- `DEFAULT_MERGE_STYLE`: **merge**: Set default merge style for repository creating, valid options: `merge`, `rebase`, `rebase-merge`, `squash`
- `DEFAULT_MERGE_STYLE`: **merge**: Set default merge style for repository creating, valid options: `merge`, `rebase`, `rebase-merge`, `squash`, `fast-forward-only`
- `DEFAULT_MERGE_MESSAGE_COMMITS_LIMIT`: **50**: In the default merge message for squash commits include at most this many commits. Set to `-1` to include all commits
- `DEFAULT_MERGE_MESSAGE_SIZE`: **5120**: In the default merge message for squash commits limit the size of the commit messages. Set to `-1` to have no limit. Only used if `POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES` is `true`.
- `DEFAULT_MERGE_MESSAGE_ALL_AUTHORS`: **false**: In the default merge message for squash commits walk all commits to include all authors in the Co-authored-by otherwise just use those in the limited list
|
@ -29,7 +29,7 @@ menu:
|
|||
[ini](https://github.com/go-ini/ini/#recursive-values) 这里的说明。
|
||||
标注了 :exclamation: 的配置项表明除非你真的理解这个配置项的意义,否则最好使用默认值。
|
||||
|
||||
在下面的默认值中,`$XYZ`代表环境变量`XYZ`的值(详见:`enviroment-to-ini`)。 _`XxYyZz`_是指默认配置的一部分列出的值。这些在 app.ini 文件中不起作用,仅在此处列出作为文档说明。
|
||||
在下面的默认值中,`$XYZ`代表环境变量`XYZ`的值(详见:`environment-to-ini`)。 _`XxYyZz`_是指默认配置的一部分列出的值。这些在 app.ini 文件中不起作用,仅在此处列出作为文档说明。
|
||||
|
||||
包含`#`或者`;`的变量必须使用引号(`` ` ``或者`""""`)包裹,否则会被解析为注释。
|
||||
|
||||
|
@ -125,7 +125,7 @@ menu:
|
|||
- `CLOSE_KEYWORDS`: **close**, **closes**, **closed**, **fix**, **fixes**, **fixed**, **resolve**, **resolves**, **resolved**: 在拉取请求评论中用于自动关闭相关问题的关键词列表。
|
||||
- `REOPEN_KEYWORDS`: **reopen**, **reopens**, **reopened**: 在拉取请求评论中用于自动重新打开相关问题的
|
||||
关键词列表。
|
||||
- `DEFAULT_MERGE_STYLE`: **merge**: 设置创建仓库的默认合并方式,可选: `merge`, `rebase`, `rebase-merge`, `squash`
|
||||
- `DEFAULT_MERGE_STYLE`: **merge**: 设置创建仓库的默认合并方式,可选: `merge`, `rebase`, `rebase-merge`, `squash`, `fast-forward-only`
|
||||
- `DEFAULT_MERGE_MESSAGE_COMMITS_LIMIT`: **50**: 在默认合并消息中,对于`squash`提交,最多包括此数量的提交。设置为 -1 以包括所有提交。
|
||||
- `DEFAULT_MERGE_MESSAGE_SIZE`: **5120**: 在默认的合并消息中,对于`squash`提交,限制提交消息的大小。设置为 `-1`以取消限制。仅在`POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES`为`true`时使用。
|
||||
- `DEFAULT_MERGE_MESSAGE_ALL_AUTHORS`: **false**: 在默认合并消息中,对于`squash`提交,遍历所有提交以包括所有作者的`Co-authored-by`,否则仅使用限定列表中的作者。
|
||||
|
|
|
@ -101,6 +101,10 @@ i.e. `services/user`, `models/repository`.
|
|||
Since there are some packages which use the same package name, it is possible that you find packages like `modules/user`, `models/user`, and `services/user`. When these packages are imported in one Go file, it's difficult to know which package we are using and if it's a variable name or an import name. So, we always recommend to use import aliases. To differ from package variables which are commonly in camelCase, just use **snake_case** for import aliases.
|
||||
i.e. `import user_service "code.gitea.io/gitea/services/user"`
|
||||
|
||||
### Implementing `io.Closer`
|
||||
|
||||
If a type implements `io.Closer`, calling `Close` multiple times must not fail or `panic` but return an error or `nil`.
|
||||
|
||||
### Important Gotchas
|
||||
|
||||
- Never write `x.Update(exemplar)` without an explicit `WHERE` clause:
|
||||
|
|
|
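The new guideline only states the contract: `Close` must be safe to call more than once. One common way to satisfy it is a `sync.Once`; the sketch below is an illustration of that pattern, not code prescribed by the guideline.

```go
package main

import (
	"fmt"
	"sync"
)

// file is an example io.Closer whose Close can be called any number of times:
// the underlying release runs once, every call returns nil.
type file struct {
	once sync.Once
}

func (f *file) Close() error {
	f.once.Do(func() {
		fmt.Println("releasing resources")
	})
	return nil
}

func main() {
	f := &file{}
	fmt.Println(f.Close(), f.Close()) // <nil> <nil>, resources released once
}
```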
@@ -243,10 +243,10 @@ documentation using:
make generate-swagger
```

You should validate your generated Swagger file and spell-check it with:
You should validate your generated Swagger file:

```bash
make swagger-validate misspell-check
make swagger-validate
```

You should commit the changed swagger JSON file. The continuous integration
@@ -228,10 +228,10 @@ Gitea Logo的 PNG 和 SVG 版本是使用 `TAGS="gitea" make generate-images`
make generate-swagger
```

您应该验证生成的 Swagger 文件并使用以下命令对其进行拼写检查:
您应该验证生成的 Swagger 文件:

```bash
make swagger-validate misspell-check
make swagger-validate
```

您应该提交更改后的 swagger JSON 文件。持续集成服务器将使用以下方法检查是否已完成:
@@ -27,13 +27,7 @@ Next, [install Node.js with npm](https://nodejs.org/en/download/) which is
required to build the JavaScript and CSS files. The minimum supported Node.js
version is @minNodeVersion@ and the latest LTS version is recommended.

**Note**: When executing make tasks that require external tools, like
`make misspell-check`, Gitea will automatically download and build these as
necessary. To be able to use these, you must have the `"$GOPATH/bin"` directory
on the executable path. If you don't add the go bin directory to the
executable path, you will have to manage this yourself.

**Note 2**: Go version @minGoVersion@ or higher is required. However, it is recommended to
**Note**: Go version @minGoVersion@ or higher is required. However, it is recommended to
obtain the same version as our continuous integration, see the advice given in
[Hacking on Gitea](development/hacking-on-gitea.md)
|
@ -21,9 +21,7 @@ menu:
|
|||
|
||||
接下来,[安装 Node.js 和 npm](https://nodejs.org/zh-cn/download/), 这是构建 JavaScript 和 CSS 文件所需的。最低支持的 Node.js 版本是 @minNodeVersion@,建议使用最新的 LTS 版本。
|
||||
|
||||
**注意**:当执行需要外部工具的 make 任务(如`make misspell-check`)时,Gitea 将根据需要自动下载和构建这些工具。为了能够实现这个目的,你必须将`"$GOPATH/bin"`目录添加到可执行路径中。如果没有将 Go 的二进制目录添加到可执行路径中,你需要自行解决产生的问题。
|
||||
|
||||
**注意2**:需要 Go 版本 @minGoVersion@ 或更高版本。不过,建议获取与我们的持续集成(continuous integration, CI)相同的版本,请参阅在 [Hacking on Gitea](development/hacking-on-gitea.md) 中给出的建议。
|
||||
**注意**:需要 Go 版本 @minGoVersion@ 或更高版本。不过,建议获取与我们的持续集成(continuous integration, CI)相同的版本,请参阅在 [Hacking on Gitea](development/hacking-on-gitea.md) 中给出的建议。
|
||||
|
||||
## Download
|
||||
|
||||
|
|
|
@ -42,7 +42,7 @@ The following package managers are currently supported:
|
|||
| [PyPI](usage/packages/pypi.md) | Python | `pip`, `twine` |
|
||||
| [RPM](usage/packages/rpm.md) | - | `yum`, `dnf`, `zypper` |
|
||||
| [RubyGems](usage/packages/rubygems.md) | Ruby | `gem`, `Bundler` |
|
||||
| [Swift](usage/packages/rubygems.md) | Swift | `swift` |
|
||||
| [Swift](usage/packages/swift.md) | Swift | `swift` |
|
||||
| [Vagrant](usage/packages/vagrant.md) | - | `vagrant` |
|
||||
|
||||
**The following paragraphs only apply if Packages are not globally disabled!**
|
||||
|
|
|
@ -26,7 +26,8 @@ To work with the Swift package registry, you need to use [swift](https://www.swi
|
|||
To register the package registry and provide credentials, execute:
|
||||
|
||||
```shell
|
||||
swift package-registry set https://gitea.example.com/api/packages/{owner}/swift -login {username} -password {password}
|
||||
swift package-registry set https://gitea.example.com/api/packages/{owner}/swift
|
||||
swift package-registry login https://gitea.example.com/api/packages/{owner}/swift --username {username} --password {password}
|
||||
```
|
||||
|
||||
| Placeholder | Description |
|
||||
|
|
10
go.mod
|
@ -3,10 +3,11 @@ module code.gitea.io/gitea
|
|||
go 1.21
|
||||
|
||||
require (
|
||||
code.gitea.io/actions-proto-go v0.3.1
|
||||
code.gitea.io/actions-proto-go v0.4.0
|
||||
code.gitea.io/gitea-vet v0.2.3
|
||||
code.gitea.io/sdk/gitea v0.17.1
|
||||
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570
|
||||
connectrpc.com/connect v1.15.0
|
||||
gitea.com/go-chi/binding v0.0.0-20230415142243-04b515c6d669
|
||||
gitea.com/go-chi/cache v0.2.0
|
||||
gitea.com/go-chi/captcha v0.0.0-20230415143339-2c0754df4384
|
||||
|
@ -19,7 +20,6 @@ require (
|
|||
github.com/alecthomas/chroma/v2 v2.12.0
|
||||
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb
|
||||
github.com/blevesearch/bleve/v2 v2.3.10
|
||||
github.com/bufbuild/connect-go v1.10.0
|
||||
github.com/buildkite/terminal-to-html/v3 v3.10.1
|
||||
github.com/caddyserver/certmagic v0.20.0
|
||||
github.com/chi-middleware/proxy v1.1.1
|
||||
|
@ -102,11 +102,11 @@ require (
|
|||
github.com/yuin/goldmark v1.6.0
|
||||
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
|
||||
github.com/yuin/goldmark-meta v1.1.0
|
||||
golang.org/x/crypto v0.18.0
|
||||
golang.org/x/crypto v0.19.0
|
||||
golang.org/x/image v0.15.0
|
||||
golang.org/x/net v0.20.0
|
||||
golang.org/x/net v0.21.0
|
||||
golang.org/x/oauth2 v0.16.0
|
||||
golang.org/x/sys v0.16.0
|
||||
golang.org/x/sys v0.17.0
|
||||
golang.org/x/text v0.14.0
|
||||
golang.org/x/tools v0.17.0
|
||||
google.golang.org/grpc v1.60.1
|
||||
|
|
24
go.sum
|
@ -35,14 +35,16 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
|
|||
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
|
||||
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
|
||||
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
|
||||
code.gitea.io/actions-proto-go v0.3.1 h1:PMyiQtBKb8dNnpEO2R5rcZdXSis+UQZVo/SciMtR1aU=
|
||||
code.gitea.io/actions-proto-go v0.3.1/go.mod h1:00ys5QDo1iHN1tHNvvddAcy2W/g+425hQya1cCSvq9A=
|
||||
code.gitea.io/actions-proto-go v0.4.0 h1:OsPBPhodXuQnsspG1sQ4eRE1PeoZyofd7+i73zCwnsU=
|
||||
code.gitea.io/actions-proto-go v0.4.0/go.mod h1:mn7Wkqz6JbnTOHQpot3yDeHx+O5C9EGhMEE+htvHBas=
|
||||
code.gitea.io/gitea-vet v0.2.3 h1:gdFmm6WOTM65rE8FUBTRzeQZYzXePKSSB1+r574hWwI=
|
||||
code.gitea.io/gitea-vet v0.2.3/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
|
||||
code.gitea.io/sdk/gitea v0.17.1 h1:3jCPOG2ojbl8AcfaUCRYLT5MUcBMFwS0OSK2mA5Zok8=
|
||||
code.gitea.io/sdk/gitea v0.17.1/go.mod h1:aCnBqhHpoEWA180gMbaCtdX9Pl6BWBAuuP2miadoTNM=
|
||||
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570 h1:TXbikPqa7YRtfU9vS6QJBg77pUvbEb6StRdZO8t1bEY=
|
||||
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570/go.mod h1:IIAjsijsd8q1isWX8MACefDEgTQslQ4stk2AeeTt3kM=
|
||||
connectrpc.com/connect v1.15.0 h1:lFdeCbZrVVDydAqwr4xGV2y+ULn+0Z73s5JBj2LikWo=
|
||||
connectrpc.com/connect v1.15.0/go.mod h1:bQmjpDY8xItMnttnurVgOkHUBMRT9cpsNi2O4AjKhmA=
|
||||
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
|
||||
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
|
@ -173,8 +175,6 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
|||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/bufbuild/connect-go v1.10.0 h1:QAJ3G9A1OYQW2Jbk3DeoJbkCxuKArrvZgDt47mjdTbg=
|
||||
github.com/bufbuild/connect-go v1.10.0/go.mod h1:CAIePUgkDR5pAFaylSMtNK45ANQjp9JvpluG20rhpV8=
|
||||
github.com/buildkite/terminal-to-html/v3 v3.10.1 h1:znT9eD26LQ59dDJJEpMCwkP4wEptEAPi74hsTBuHdEo=
|
||||
github.com/buildkite/terminal-to-html/v3 v3.10.1/go.mod h1:qtuRyYs6/Sw3FS9jUyVEaANHgHGqZsGqMknPLyau5cQ=
|
||||
github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
|
||||
|
@ -898,8 +898,8 @@ golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2Uz
|
|||
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
||||
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
||||
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
|
@ -981,8 +981,8 @@ golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
|||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
|
||||
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
|
@ -1061,8 +1061,8 @@ golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
|
@ -1073,8 +1073,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
|||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
||||
golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE=
|
||||
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
|
||||
golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
|
11
main.go
|
@ -29,6 +29,8 @@ var (
|
|||
Version = "development" // program version for this build
|
||||
Tags = "" // the Golang build tags
|
||||
MakeVersion = "" // "make" program version if built with make
|
||||
|
||||
ReleaseVersion = ""
|
||||
)
|
||||
|
||||
var ForgejoVersion = "1.0.0"
|
||||
|
@ -54,11 +56,18 @@ func main() {
|
|||
log.GetManager().Close()
|
||||
os.Exit(code)
|
||||
}
|
||||
app := cmd.NewMainApp(Version, formatBuiltWith())
|
||||
app := cmd.NewMainApp(Version, formatReleaseVersion()+formatBuiltWith())
|
||||
_ = cmd.RunMainApp(app, os.Args...) // all errors should have been handled by the RunMainApp
|
||||
log.GetManager().Close()
|
||||
}
|
||||
|
||||
func formatReleaseVersion() string {
|
||||
if len(ReleaseVersion) > 0 {
|
||||
return " (release name " + ReleaseVersion + ")"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func formatBuiltWith() string {
|
||||
version := runtime.Version()
|
||||
if len(MakeVersion) > 0 {
|
||||
|
|
|
@ -26,6 +26,8 @@ const (
|
|||
ArtifactStatusUploadConfirmed // 2, ArtifactStatusUploadConfirmed is the status of an artifact upload that is confirmed
|
||||
ArtifactStatusUploadError // 3, ArtifactStatusUploadError is the status of an artifact upload that is errored
|
||||
ArtifactStatusExpired // 4, ArtifactStatusExpired is the status of an artifact that is expired
|
||||
ArtifactStatusPendingDeletion // 5, ArtifactStatusPendingDeletion is the status of an artifact that is pending deletion
|
||||
ArtifactStatusDeleted // 6, ArtifactStatusDeleted is the status of an artifact that is deleted
|
||||
)
|
||||
|
||||
func init() {
|
||||
|
@ -147,8 +149,28 @@ func ListNeedExpiredArtifacts(ctx context.Context) ([]*ActionArtifact, error) {
|
|||
Where("expired_unix < ? AND status = ?", timeutil.TimeStamp(time.Now().Unix()), ArtifactStatusUploadConfirmed).Find(&arts)
|
||||
}
|
||||
|
||||
// ListPendingDeleteArtifacts returns all artifacts in pending-delete status.
|
||||
// limit is the max number of artifacts to return.
|
||||
func ListPendingDeleteArtifacts(ctx context.Context, limit int) ([]*ActionArtifact, error) {
|
||||
arts := make([]*ActionArtifact, 0, limit)
|
||||
return arts, db.GetEngine(ctx).
|
||||
Where("status = ?", ArtifactStatusPendingDeletion).Limit(limit).Find(&arts)
|
||||
}
|
||||
|
||||
// SetArtifactExpired sets an artifact to expired
|
||||
func SetArtifactExpired(ctx context.Context, artifactID int64) error {
|
||||
_, err := db.GetEngine(ctx).Where("id=? AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusExpired)})
|
||||
return err
|
||||
}
|
||||
|
||||
// SetArtifactNeedDelete sets an artifact to need-delete, cron job will delete it
|
||||
func SetArtifactNeedDelete(ctx context.Context, runID int64, name string) error {
|
||||
_, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusPendingDeletion)})
|
||||
return err
|
||||
}
|
||||
|
||||
// SetArtifactDeleted sets an artifact to deleted
|
||||
func SetArtifactDeleted(ctx context.Context, artifactID int64) error {
|
||||
_, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusDeleted)})
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -97,7 +97,7 @@ func (r *ActionRunner) StatusName() string {
|
|||
}
|
||||
|
||||
func (r *ActionRunner) StatusLocaleName(lang translation.Locale) string {
|
||||
return lang.Tr("actions.runners.status." + r.StatusName())
|
||||
return lang.TrString("actions.runners.status." + r.StatusName())
|
||||
}
|
||||
|
||||
func (r *ActionRunner) IsOnline() bool {
|
||||
|
|
|
@ -41,7 +41,7 @@ func (s Status) String() string {
|
|||
|
||||
// LocaleString returns the locale string name of the Status
|
||||
func (s Status) LocaleString(lang translation.Locale) string {
|
||||
return lang.Tr("actions.status." + s.String())
|
||||
return lang.TrString("actions.status." + s.String())
|
||||
}
|
||||
|
||||
// IsDone returns whether the Status is final
|
||||
|
|
|
@ -7,6 +7,7 @@ package db
|
|||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
|
@ -145,6 +146,7 @@ func InitEngine(ctx context.Context) error {
|
|||
xormEngine.SetMaxOpenConns(setting.Database.MaxOpenConns)
|
||||
xormEngine.SetMaxIdleConns(setting.Database.MaxIdleConns)
|
||||
xormEngine.SetConnMaxLifetime(setting.Database.ConnMaxLifetime)
|
||||
xormEngine.SetConnMaxIdleTime(setting.Database.ConnMaxIdleTime)
|
||||
xormEngine.SetDefaultContext(ctx)
|
||||
|
||||
if setting.Database.SlowQueryThreshold > 0 {
|
||||
|
@ -342,7 +344,7 @@ func (ErrorQueryHook) BeforeProcess(c *contexts.ContextHook) (context.Context, e
|
|||
}
|
||||
|
||||
func (h *ErrorQueryHook) AfterProcess(c *contexts.ContextHook) error {
|
||||
if c.Err != nil {
|
||||
if c.Err != nil && !errors.Is(c.Err, context.Canceled) {
|
||||
h.Logger.Log(8, log.ERROR, "[Error SQL Query] %s %v - %v", c.SQL, c.Args, c.Err)
|
||||
}
|
||||
return nil
|
||||
|
|
|
@ -493,6 +493,23 @@ func (err ErrMergeUnrelatedHistories) Error() string {
|
|||
return fmt.Sprintf("Merge UnrelatedHistories Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
|
||||
}
|
||||
|
||||
// ErrMergeDivergingFastForwardOnly represents an error if a fast-forward-only merge fails because the branches diverge
|
||||
type ErrMergeDivergingFastForwardOnly struct {
|
||||
StdOut string
|
||||
StdErr string
|
||||
Err error
|
||||
}
|
||||
|
||||
// IsErrMergeDivergingFastForwardOnly checks if an error is a ErrMergeDivergingFastForwardOnly.
|
||||
func IsErrMergeDivergingFastForwardOnly(err error) bool {
|
||||
_, ok := err.(ErrMergeDivergingFastForwardOnly)
|
||||
return ok
|
||||
}
|
||||
|
||||
func (err ErrMergeDivergingFastForwardOnly) Error() string {
|
||||
return fmt.Sprintf("Merge DivergingFastForwardOnly Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
|
||||
}
|
||||
|
||||
// ErrRebaseConflicts represents an error if rebase fails with a conflict
|
||||
type ErrRebaseConflicts struct {
|
||||
Style repo_model.MergeStyle
|
||||
|
|
|
@ -135,3 +135,27 @@
|
|||
user_id: 31
|
||||
repo_id: 28
|
||||
mode: 4
|
||||
|
||||
-
|
||||
id: 24
|
||||
user_id: 38
|
||||
repo_id: 60
|
||||
mode: 2
|
||||
|
||||
-
|
||||
id: 25
|
||||
user_id: 38
|
||||
repo_id: 61
|
||||
mode: 1
|
||||
|
||||
-
|
||||
id: 26
|
||||
user_id: 39
|
||||
repo_id: 61
|
||||
mode: 1
|
||||
|
||||
-
|
||||
id: 27
|
||||
user_id: 40
|
||||
repo_id: 61
|
||||
mode: 4
|
||||
|
|
|
@ -45,3 +45,9 @@
|
|||
repo_id: 22
|
||||
user_id: 18
|
||||
mode: 2 # write
|
||||
|
||||
-
|
||||
id: 9
|
||||
repo_id: 60
|
||||
user_id: 38
|
||||
mode: 2 # write
|
||||
|
|
|
@ -293,3 +293,27 @@
|
|||
lower_email: user37@example.com
|
||||
is_activated: true
|
||||
is_primary: true
|
||||
|
||||
-
|
||||
id: 38
|
||||
uid: 38
|
||||
email: user38@example.com
|
||||
lower_email: user38@example.com
|
||||
is_activated: true
|
||||
is_primary: true
|
||||
|
||||
-
|
||||
id: 39
|
||||
uid: 39
|
||||
email: user39@example.com
|
||||
lower_email: user39@example.com
|
||||
is_activated: true
|
||||
is_primary: true
|
||||
|
||||
-
|
||||
id: 40
|
||||
uid: 40
|
||||
email: user40@example.com
|
||||
lower_email: user40@example.com
|
||||
is_activated: true
|
||||
is_primary: true
|
||||
|
|
|
@ -338,3 +338,37 @@
|
|||
created_unix: 978307210
|
||||
updated_unix: 978307210
|
||||
is_locked: false
|
||||
|
||||
-
|
||||
id: 21
|
||||
repo_id: 60
|
||||
index: 1
|
||||
poster_id: 39
|
||||
original_author_id: 0
|
||||
name: repo60 pull1
|
||||
content: content for the 1st issue
|
||||
milestone_id: 0
|
||||
priority: 0
|
||||
is_closed: false
|
||||
is_pull: true
|
||||
num_comments: 0
|
||||
created_unix: 1707270422
|
||||
updated_unix: 1707270422
|
||||
is_locked: false
|
||||
|
||||
-
|
||||
id: 22
|
||||
repo_id: 61
|
||||
index: 1
|
||||
poster_id: 40
|
||||
original_author_id: 0
|
||||
name: repo61 pull1
|
||||
content: content for the 1st issue
|
||||
milestone_id: 0
|
||||
priority: 0
|
||||
is_closed: false
|
||||
is_pull: true
|
||||
num_comments: 0
|
||||
created_unix: 1707270422
|
||||
updated_unix: 1707270422
|
||||
is_locked: false
|
||||
|
|
|
@ -99,3 +99,21 @@
|
|||
uid: 5
|
||||
org_id: 36
|
||||
is_public: true
|
||||
|
||||
-
|
||||
id: 18
|
||||
uid: 38
|
||||
org_id: 41
|
||||
is_public: true
|
||||
|
||||
-
|
||||
id: 19
|
||||
uid: 39
|
||||
org_id: 41
|
||||
is_public: true
|
||||
|
||||
-
|
||||
id: 20
|
||||
uid: 40
|
||||
org_id: 41
|
||||
is_public: true
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
head_branch: branch1
|
||||
base_branch: master
|
||||
merge_base: 4a357436d925b5c974181ff12a994538ddc5a269
|
||||
merged_commit_id: 1a8823cd1a9549fde083f992f6b9b87a7ab74fb3
|
||||
has_merged: true
|
||||
merger_id: 2
|
||||
|
||||
|
@ -98,3 +99,21 @@
|
|||
index: 1
|
||||
head_repo_id: 23
|
||||
base_repo_id: 23
|
||||
|
||||
-
|
||||
id: 9
|
||||
type: 0 # gitea pull request
|
||||
status: 2 # mergeable
|
||||
issue_id: 21
|
||||
index: 1
|
||||
head_repo_id: 60
|
||||
base_repo_id: 60
|
||||
|
||||
-
|
||||
id: 10
|
||||
type: 0 # gitea pull request
|
||||
status: 2 # mergeable
|
||||
issue_id: 22
|
||||
index: 1
|
||||
head_repo_id: 61
|
||||
base_repo_id: 61
|
||||
|
|
|
@ -708,3 +708,45 @@
|
|||
type: 1
|
||||
config: "{}"
|
||||
created_unix: 946684810
|
||||
|
||||
-
|
||||
id: 102
|
||||
repo_id: 60
|
||||
type: 1
|
||||
config: "{}"
|
||||
created_unix: 946684810
|
||||
|
||||
-
|
||||
id: 103
|
||||
repo_id: 60
|
||||
type: 2
|
||||
config: "{\"EnableTimetracker\":true,\"AllowOnlyContributorsToTrackTime\":true}"
|
||||
created_unix: 946684810
|
||||
|
||||
-
|
||||
id: 104
|
||||
repo_id: 60
|
||||
type: 3
|
||||
config: "{\"IgnoreWhitespaceConflicts\":false,\"AllowMerge\":true,\"AllowRebase\":true,\"AllowRebaseMerge\":true,\"AllowSquash\":true}"
|
||||
created_unix: 946684810
|
||||
|
||||
-
|
||||
id: 105
|
||||
repo_id: 61
|
||||
type: 1
|
||||
config: "{}"
|
||||
created_unix: 946684810
|
||||
|
||||
-
|
||||
id: 106
|
||||
repo_id: 61
|
||||
type: 2
|
||||
config: "{\"EnableTimetracker\":true,\"AllowOnlyContributorsToTrackTime\":true}"
|
||||
created_unix: 946684810
|
||||
|
||||
-
|
||||
id: 107
|
||||
repo_id: 61
|
||||
type: 3
|
||||
config: "{\"IgnoreWhitespaceConflicts\":false,\"AllowMerge\":true,\"AllowRebase\":true,\"AllowRebaseMerge\":true,\"AllowSquash\":true}"
|
||||
created_unix: 946684810
|
||||
|
|
|
@ -1720,3 +1720,65 @@
|
|||
is_private: true
|
||||
status: 0
|
||||
num_issues: 0
|
||||
|
||||
-
|
||||
id: 60
|
||||
owner_id: 40
|
||||
owner_name: user40
|
||||
lower_name: repo60
|
||||
name: repo60
|
||||
default_branch: main
|
||||
num_watches: 0
|
||||
num_stars: 0
|
||||
num_forks: 0
|
||||
num_issues: 0
|
||||
num_closed_issues: 0
|
||||
num_pulls: 1
|
||||
num_closed_pulls: 0
|
||||
num_milestones: 0
|
||||
num_closed_milestones: 0
|
||||
num_projects: 0
|
||||
num_closed_projects: 0
|
||||
is_private: false
|
||||
is_empty: false
|
||||
is_archived: false
|
||||
is_mirror: false
|
||||
status: 0
|
||||
is_fork: false
|
||||
fork_id: 0
|
||||
is_template: false
|
||||
template_id: 0
|
||||
size: 0
|
||||
is_fsck_enabled: true
|
||||
close_issues_via_commit_in_any_branch: false
|
||||
|
||||
-
|
||||
id: 61
|
||||
owner_id: 41
|
||||
owner_name: org41
|
||||
lower_name: repo61
|
||||
name: repo61
|
||||
default_branch: main
|
||||
num_watches: 0
|
||||
num_stars: 0
|
||||
num_forks: 0
|
||||
num_issues: 0
|
||||
num_closed_issues: 0
|
||||
num_pulls: 1
|
||||
num_closed_pulls: 0
|
||||
num_milestones: 0
|
||||
num_closed_milestones: 0
|
||||
num_projects: 0
|
||||
num_closed_projects: 0
|
||||
is_private: false
|
||||
is_empty: false
|
||||
is_archived: false
|
||||
is_mirror: false
|
||||
status: 0
|
||||
is_fork: false
|
||||
fork_id: 0
|
||||
is_template: false
|
||||
template_id: 0
|
||||
size: 0
|
||||
is_fsck_enabled: true
|
||||
close_issues_via_commit_in_any_branch: false
|
||||
|
|
|
@ -217,3 +217,25 @@
|
|||
num_members: 1
|
||||
includes_all_repositories: false
|
||||
can_create_org_repo: true
|
||||
|
||||
-
|
||||
id: 21
|
||||
org_id: 41
|
||||
lower_name: owners
|
||||
name: Owners
|
||||
authorize: 4 # owner
|
||||
num_repos: 1
|
||||
num_members: 1
|
||||
includes_all_repositories: true
|
||||
can_create_org_repo: true
|
||||
|
||||
-
|
||||
id: 22
|
||||
org_id: 41
|
||||
lower_name: team1
|
||||
name: Team1
|
||||
authorize: 1 # read
|
||||
num_repos: 1
|
||||
num_members: 2
|
||||
includes_all_repositories: false
|
||||
can_create_org_repo: false
|
||||
|
|
|
@ -63,3 +63,15 @@
|
|||
org_id: 17
|
||||
team_id: 9
|
||||
repo_id: 24
|
||||
|
||||
-
|
||||
id: 12
|
||||
org_id: 41
|
||||
team_id: 21
|
||||
repo_id: 61
|
||||
|
||||
-
|
||||
id: 13
|
||||
org_id: 41
|
||||
team_id: 22
|
||||
repo_id: 61
|
||||
|
|
|
@ -286,3 +286,39 @@
|
|||
team_id: 2
|
||||
type: 8
|
||||
access_mode: 2
|
||||
|
||||
-
|
||||
id: 49
|
||||
team_id: 21
|
||||
type: 1
|
||||
access_mode: 4
|
||||
|
||||
-
|
||||
id: 50
|
||||
team_id: 21
|
||||
type: 2
|
||||
access_mode: 4
|
||||
|
||||
-
|
||||
id: 51
|
||||
team_id: 21
|
||||
type: 3
|
||||
access_mode: 4
|
||||
|
||||
-
|
||||
id: 52
|
||||
team_id: 22
|
||||
type: 1
|
||||
access_mode: 1
|
||||
|
||||
-
|
||||
id: 53
|
||||
team_id: 22
|
||||
type: 2
|
||||
access_mode: 1
|
||||
|
||||
-
|
||||
id: 54
|
||||
team_id: 22
|
||||
type: 3
|
||||
access_mode: 1
|
||||
|
|
|
@ -129,3 +129,21 @@
|
|||
org_id: 17
|
||||
team_id: 9
|
||||
uid: 15
|
||||
|
||||
-
|
||||
id: 23
|
||||
org_id: 41
|
||||
team_id: 21
|
||||
uid: 40
|
||||
|
||||
-
|
||||
id: 24
|
||||
org_id: 41
|
||||
team_id: 22
|
||||
uid: 38
|
||||
|
||||
-
|
||||
id: 25
|
||||
org_id: 41
|
||||
team_id: 22
|
||||
uid: 39
|
||||
|
|
|
@ -1369,3 +1369,151 @@
|
|||
repo_admin_change_team_access: false
|
||||
theme: ""
|
||||
keep_activity_private: false
|
||||
|
||||
-
|
||||
id: 38
|
||||
lower_name: user38
|
||||
name: user38
|
||||
full_name: User38
|
||||
email: user38@example.com
|
||||
keep_email_private: false
|
||||
email_notifications_preference: enabled
|
||||
passwd: ZogKvWdyEx:password
|
||||
passwd_hash_algo: dummy
|
||||
must_change_password: false
|
||||
login_source: 0
|
||||
login_name: user38
|
||||
type: 0
|
||||
salt: ZogKvWdyEx
|
||||
max_repo_creation: -1
|
||||
is_active: true
|
||||
is_admin: false
|
||||
is_restricted: false
|
||||
allow_git_hook: false
|
||||
allow_import_local: false
|
||||
allow_create_organization: true
|
||||
prohibit_login: false
|
||||
avatar: avatar38
|
||||
avatar_email: user38@example.com
|
||||
use_custom_avatar: false
|
||||
num_followers: 0
|
||||
num_following: 0
|
||||
num_stars: 0
|
||||
num_repos: 0
|
||||
num_teams: 0
|
||||
num_members: 0
|
||||
visibility: 0
|
||||
repo_admin_change_team_access: false
|
||||
theme: ""
|
||||
keep_activity_private: false
|
||||
|
||||
-
|
||||
id: 39
|
||||
lower_name: user39
|
||||
name: user39
|
||||
full_name: User39
|
||||
email: user39@example.com
|
||||
keep_email_private: false
|
||||
email_notifications_preference: enabled
|
||||
passwd: ZogKvWdyEx:password
|
||||
passwd_hash_algo: dummy
|
||||
must_change_password: false
|
||||
login_source: 0
|
||||
login_name: user39
|
||||
type: 0
|
||||
salt: ZogKvWdyEx
|
||||
max_repo_creation: -1
|
||||
is_active: true
|
||||
is_admin: false
|
||||
is_restricted: false
|
||||
allow_git_hook: false
|
||||
allow_import_local: false
|
||||
allow_create_organization: true
|
||||
prohibit_login: false
|
||||
avatar: avatar39
|
||||
avatar_email: user39@example.com
|
||||
use_custom_avatar: false
|
||||
num_followers: 0
|
||||
num_following: 0
|
||||
num_stars: 0
|
||||
num_repos: 0
|
||||
num_teams: 0
|
||||
num_members: 0
|
||||
visibility: 0
|
||||
repo_admin_change_team_access: false
|
||||
theme: ""
|
||||
keep_activity_private: false
|
||||
|
||||
-
|
||||
id: 40
|
||||
lower_name: user40
|
||||
name: user40
|
||||
full_name: User40
|
||||
email: user40@example.com
|
||||
keep_email_private: false
|
||||
email_notifications_preference: onmention
|
||||
passwd: ZogKvWdyEx:password
|
||||
passwd_hash_algo: dummy
|
||||
must_change_password: false
|
||||
login_source: 0
|
||||
login_name: user40
|
||||
type: 0
|
||||
salt: ZogKvWdyEx
|
||||
max_repo_creation: -1
|
||||
is_active: true
|
||||
is_admin: false
|
||||
is_restricted: false
|
||||
allow_git_hook: false
|
||||
allow_import_local: false
|
||||
allow_create_organization: true
|
||||
prohibit_login: false
|
||||
avatar: avatar40
|
||||
avatar_email: user40@example.com
|
||||
use_custom_avatar: false
|
||||
num_followers: 0
|
||||
num_following: 0
|
||||
num_stars: 0
|
||||
num_repos: 1
|
||||
num_teams: 0
|
||||
num_members: 0
|
||||
visibility: 0
|
||||
repo_admin_change_team_access: false
|
||||
theme: ""
|
||||
keep_activity_private: false
|
||||
|
||||
-
|
||||
id: 41
|
||||
lower_name: org41
|
||||
name: org41
|
||||
full_name: Org41
|
||||
email: org41@example.com
|
||||
keep_email_private: false
|
||||
email_notifications_preference: onmention
|
||||
passwd: ZogKvWdyEx:password
|
||||
passwd_hash_algo: dummy
|
||||
must_change_password: false
|
||||
login_source: 0
|
||||
login_name: org41
|
||||
type: 1
|
||||
salt: ZogKvWdyEx
|
||||
max_repo_creation: -1
|
||||
is_active: false
|
||||
is_admin: false
|
||||
is_restricted: false
|
||||
allow_git_hook: false
|
||||
allow_import_local: false
|
||||
allow_create_organization: true
|
||||
prohibit_login: false
|
||||
avatar: avatar41
|
||||
avatar_email: org41@example.com
|
||||
use_custom_avatar: false
|
||||
num_followers: 0
|
||||
num_following: 0
|
||||
num_stars: 0
|
||||
num_repos: 1
|
||||
num_teams: 2
|
||||
num_members: 3
|
||||
visibility: 0
|
||||
repo_admin_change_team_access: false
|
||||
theme: ""
|
||||
keep_activity_private: false
|
||||
|
|
|
@ -9,7 +9,7 @@ import (
|
|||
"code.gitea.io/gitea/models/db"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/container"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
|
||||
"xorm.io/builder"
|
||||
)
|
||||
|
@ -67,7 +67,7 @@ type FindBranchOptions struct {
|
|||
db.ListOptions
|
||||
RepoID int64
|
||||
ExcludeBranchNames []string
|
||||
IsDeletedBranch util.OptionalBool
|
||||
IsDeletedBranch optional.Option[bool]
|
||||
OrderBy string
|
||||
Keyword string
|
||||
}
|
||||
|
@ -81,8 +81,8 @@ func (opts FindBranchOptions) ToConds() builder.Cond {
|
|||
if len(opts.ExcludeBranchNames) > 0 {
|
||||
cond = cond.And(builder.NotIn("name", opts.ExcludeBranchNames))
|
||||
}
|
||||
if !opts.IsDeletedBranch.IsNone() {
|
||||
cond = cond.And(builder.Eq{"is_deleted": opts.IsDeletedBranch.IsTrue()})
|
||||
if opts.IsDeletedBranch.Has() {
|
||||
cond = cond.And(builder.Eq{"is_deleted": opts.IsDeletedBranch.Value()})
|
||||
}
|
||||
if opts.Keyword != "" {
|
||||
cond = cond.And(builder.Like{"name", opts.Keyword})
|
||||
|
@ -92,7 +92,7 @@ func (opts FindBranchOptions) ToConds() builder.Cond {
|
|||
|
||||
func (opts FindBranchOptions) ToOrders() string {
|
||||
orderBy := opts.OrderBy
|
||||
if !opts.IsDeletedBranch.IsFalse() { // if deleted branch included, put them at the end
|
||||
if opts.IsDeletedBranch.ValueOrDefault(true) { // if deleted branch included, put them at the end
|
||||
if orderBy != "" {
|
||||
orderBy += ", "
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ import (
|
|||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
@ -49,7 +49,7 @@ func TestGetDeletedBranches(t *testing.T) {
|
|||
branches, err := db.Find[git_model.Branch](db.DefaultContext, git_model.FindBranchOptions{
|
||||
ListOptions: db.ListOptionsAll,
|
||||
RepoID: repo.ID,
|
||||
IsDeletedBranch: util.OptionalBoolTrue,
|
||||
IsDeletedBranch: optional.Some(true),
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, branches, 2)
|
||||
|
|
|
@ -194,7 +194,7 @@ func (status *CommitStatus) APIURL(ctx context.Context) string {
|
|||
|
||||
// LocaleString returns the locale string name of the Status
|
||||
func (status *CommitStatus) LocaleString(lang translation.Locale) string {
|
||||
return lang.Tr("repo.commitstatus." + status.State.String())
|
||||
return lang.TrString("repo.commitstatus." + status.State.String())
|
||||
}
|
||||
|
||||
// CalcCommitStatus returns commit status state via some status, the commit statues should order by id desc
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
"sort"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
|
||||
"github.com/gobwas/glob"
|
||||
)
|
||||
|
@ -56,7 +56,7 @@ func FindAllMatchedBranches(ctx context.Context, repoID int64, ruleName string)
|
|||
Page: page,
|
||||
},
|
||||
RepoID: repoID,
|
||||
IsDeletedBranch: util.OptionalBoolFalse,
|
||||
IsDeletedBranch: optional.Some(false),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
@ -210,12 +210,12 @@ const (
|
|||
|
||||
// LocaleString returns the locale string name of the role
|
||||
func (r RoleInRepo) LocaleString(lang translation.Locale) string {
|
||||
return lang.Tr("repo.issues.role." + string(r))
|
||||
return lang.TrString("repo.issues.role." + string(r))
|
||||
}
|
||||
|
||||
// LocaleHelper returns the locale tooltip of the role
|
||||
func (r RoleInRepo) LocaleHelper(lang translation.Locale) string {
|
||||
return lang.Tr("repo.issues.role." + string(r) + "_helper")
|
||||
return lang.TrString("repo.issues.role." + string(r) + "_helper")
|
||||
}
|
||||
|
||||
// Comment represents a comment in commit and issue page.
|
||||
|
@ -695,8 +695,15 @@ func (c *Comment) LoadReactions(ctx context.Context, repo *repo_model.Repository
|
|||
}
|
||||
|
||||
func (c *Comment) loadReview(ctx context.Context) (err error) {
|
||||
if c.ReviewID == 0 {
|
||||
return nil
|
||||
}
|
||||
if c.Review == nil {
|
||||
if c.Review, err = GetReviewByID(ctx, c.ReviewID); err != nil {
|
||||
// review request which has been replaced by actual reviews doesn't exist in database anymore, so ignore them.
|
||||
if c.Type == CommentTypeReviewRequest {
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
@ -856,6 +863,9 @@ func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment
|
|||
// Check comment type.
|
||||
switch opts.Type {
|
||||
case CommentTypeCode:
|
||||
if err = updateAttachments(ctx, opts, comment); err != nil {
|
||||
return err
|
||||
}
|
||||
if comment.ReviewID != 0 {
|
||||
if comment.Review == nil {
|
||||
if err := comment.loadReview(ctx); err != nil {
|
||||
|
@ -873,22 +883,9 @@ func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment
|
|||
}
|
||||
fallthrough
|
||||
case CommentTypeReview:
|
||||
// Check attachments
|
||||
attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, opts.Attachments)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", opts.Attachments, err)
|
||||
if err = updateAttachments(ctx, opts, comment); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i := range attachments {
|
||||
attachments[i].IssueID = opts.Issue.ID
|
||||
attachments[i].CommentID = comment.ID
|
||||
// No assign value could be 0, so ignore AllCols().
|
||||
if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Update(attachments[i]); err != nil {
|
||||
return fmt.Errorf("update attachment [%d]: %w", attachments[i].ID, err)
|
||||
}
|
||||
}
|
||||
|
||||
comment.Attachments = attachments
|
||||
case CommentTypeReopen, CommentTypeClose:
|
||||
if err = repo_model.UpdateRepoIssueNumbers(ctx, opts.Issue.RepoID, opts.Issue.IsPull, true); err != nil {
|
||||
return err
|
||||
|
@ -898,6 +895,23 @@ func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment
|
|||
return UpdateIssueCols(ctx, opts.Issue, "updated_unix")
|
||||
}
|
||||
|
||||
func updateAttachments(ctx context.Context, opts *CreateCommentOptions, comment *Comment) error {
|
||||
attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, opts.Attachments)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", opts.Attachments, err)
|
||||
}
|
||||
for i := range attachments {
|
||||
attachments[i].IssueID = opts.Issue.ID
|
||||
attachments[i].CommentID = comment.ID
|
||||
// No assign value could be 0, so ignore AllCols().
|
||||
if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Update(attachments[i]); err != nil {
|
||||
return fmt.Errorf("update attachment [%d]: %w", attachments[i].ID, err)
|
||||
}
|
||||
}
|
||||
comment.Attachments = attachments
|
||||
return nil
|
||||
}
|
||||
|
||||
func createDeadlineComment(ctx context.Context, doer *user_model.User, issue *Issue, newDeadlineUnix timeutil.TimeStamp) (*Comment, error) {
|
||||
var content string
|
||||
var commentType CommentType
|
||||
|
|
|
@ -14,15 +14,58 @@ import (
|
|||
"xorm.io/builder"
|
||||
)
|
||||
|
||||
// CodeConversation contains the comment of a given review
|
||||
type CodeConversation []*Comment
|
||||
|
||||
// CodeConversationsAtLine contains the conversations for a given line
|
||||
type CodeConversationsAtLine map[int64][]CodeConversation
|
||||
|
||||
// CodeConversationsAtLineAndTreePath contains the conversations for a given TreePath and line
|
||||
type CodeConversationsAtLineAndTreePath map[string]CodeConversationsAtLine
|
||||
|
||||
func newCodeConversationsAtLineAndTreePath(comments []*Comment) CodeConversationsAtLineAndTreePath {
|
||||
tree := make(CodeConversationsAtLineAndTreePath)
|
||||
for _, comment := range comments {
|
||||
tree.insertComment(comment)
|
||||
}
|
||||
return tree
|
||||
}
|
||||
|
||||
func (tree CodeConversationsAtLineAndTreePath) insertComment(comment *Comment) {
|
||||
// attempt to append comment to existing conversations (i.e. list of comments belonging to the same review)
|
||||
for i, conversation := range tree[comment.TreePath][comment.Line] {
|
||||
if conversation[0].ReviewID == comment.ReviewID {
|
||||
tree[comment.TreePath][comment.Line][i] = append(conversation, comment)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// no previous conversation was found at this line, create it
|
||||
if tree[comment.TreePath] == nil {
|
||||
tree[comment.TreePath] = make(map[int64][]CodeConversation)
|
||||
}
|
||||
|
||||
tree[comment.TreePath][comment.Line] = append(tree[comment.TreePath][comment.Line], CodeConversation{comment})
|
||||
}
|
||||
|
||||
// FetchCodeConversations will return a 2d-map: ["Path"]["Line"] = List of CodeConversation (one per review) for this line
|
||||
func FetchCodeConversations(ctx context.Context, issue *Issue, doer *user_model.User, showOutdatedComments bool) (CodeConversationsAtLineAndTreePath, error) {
|
||||
opts := FindCommentsOptions{
|
||||
Type: CommentTypeCode,
|
||||
IssueID: issue.ID,
|
||||
}
|
||||
comments, err := findCodeComments(ctx, opts, issue, doer, nil, showOutdatedComments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newCodeConversationsAtLineAndTreePath(comments), nil
|
||||
}
|
||||
|
||||
// CodeComments represents comments on code by using this structure: FILENAME -> LINE (+ == proposed; - == previous) -> COMMENTS
|
||||
type CodeComments map[string]map[int64][]*Comment
|
||||
|
||||
// FetchCodeComments will return a 2d-map: ["Path"]["Line"] = Comments at line
|
||||
func FetchCodeComments(ctx context.Context, issue *Issue, currentUser *user_model.User, showOutdatedComments bool) (CodeComments, error) {
|
||||
return fetchCodeCommentsByReview(ctx, issue, currentUser, nil, showOutdatedComments)
|
||||
}
|
||||
|
||||
func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, currentUser *user_model.User, review *Review, showOutdatedComments bool) (CodeComments, error) {
|
||||
func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, doer *user_model.User, review *Review, showOutdatedComments bool) (CodeComments, error) {
|
||||
pathToLineToComment := make(CodeComments)
|
||||
if review == nil {
|
||||
review = &Review{ID: 0}
|
||||
|
@ -33,7 +76,7 @@ func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, currentUser *u
|
|||
ReviewID: review.ID,
|
||||
}
|
||||
|
||||
comments, err := findCodeComments(ctx, opts, issue, currentUser, review, showOutdatedComments)
|
||||
comments, err := findCodeComments(ctx, opts, issue, doer, review, showOutdatedComments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -47,7 +90,7 @@ func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, currentUser *u
|
|||
return pathToLineToComment, nil
|
||||
}
|
||||
|
||||
func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issue, currentUser *user_model.User, review *Review, showOutdatedComments bool) ([]*Comment, error) {
|
||||
func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issue, doer *user_model.User, review *Review, showOutdatedComments bool) ([]*Comment, error) {
|
||||
var comments CommentList
|
||||
if review == nil {
|
||||
review = &Review{ID: 0}
|
||||
|
@ -91,7 +134,7 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
|
|||
if re, ok := reviews[comment.ReviewID]; ok && re != nil {
|
||||
// If the review is pending only the author can see the comments (except if the review is set)
|
||||
if review.ID == 0 && re.Type == ReviewTypePending &&
|
||||
(currentUser == nil || currentUser.ID != re.ReviewerID) {
|
||||
(doer == nil || doer.ID != re.ReviewerID) {
|
||||
continue
|
||||
}
|
||||
comment.Review = re
|
||||
|
@ -121,13 +164,14 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
|
|||
return comments[:n], nil
|
||||
}
|
||||
|
||||
// FetchCodeCommentsByLine fetches the code comments for a given treePath and line number
|
||||
func FetchCodeCommentsByLine(ctx context.Context, issue *Issue, currentUser *user_model.User, treePath string, line int64, showOutdatedComments bool) ([]*Comment, error) {
|
||||
// FetchCodeConversation fetches the code conversation of a given comment (same review, treePath and line number)
|
||||
func FetchCodeConversation(ctx context.Context, comment *Comment, doer *user_model.User) ([]*Comment, error) {
|
||||
opts := FindCommentsOptions{
|
||||
Type: CommentTypeCode,
|
||||
IssueID: issue.ID,
|
||||
TreePath: treePath,
|
||||
Line: line,
|
||||
IssueID: comment.IssueID,
|
||||
ReviewID: comment.ReviewID,
|
||||
TreePath: comment.TreePath,
|
||||
Line: comment.Line,
|
||||
}
|
||||
return findCodeComments(ctx, opts, issue, currentUser, nil, showOutdatedComments)
|
||||
return findCodeComments(ctx, opts, comment.Issue, doer, nil, true)
|
||||
}
|
||||
|
|
|
@ -225,6 +225,10 @@ func (comments CommentList) loadAssignees(ctx context.Context) error {
|
|||
|
||||
for _, comment := range comments {
|
||||
comment.Assignee = assignees[comment.AssigneeID]
|
||||
if comment.Assignee == nil {
|
||||
comment.AssigneeID = user_model.GhostUserID
|
||||
comment.Assignee = user_model.NewGhostUser()
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@ -430,7 +434,8 @@ func (comments CommentList) loadReviews(ctx context.Context) error {
|
|||
for _, comment := range comments {
|
||||
comment.Review = reviews[comment.ReviewID]
|
||||
if comment.Review == nil {
|
||||
if comment.ReviewID > 0 {
|
||||
// review request which has been replaced by actual reviews doesn't exist in database anymore, so don't log errors for them.
|
||||
if comment.ReviewID > 0 && comment.Type != CommentTypeReviewRequest {
|
||||
log.Error("comment with review id [%d] but has no review record", comment.ReviewID)
|
||||
}
|
||||
continue
|
||||
|
|
|
@ -46,20 +46,20 @@ func TestCreateComment(t *testing.T) {
|
|||
unittest.AssertInt64InRange(t, now, then, int64(updatedIssue.UpdatedUnix))
|
||||
}
|
||||
|
||||
func TestFetchCodeComments(t *testing.T) {
|
||||
func TestFetchCodeConversations(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
|
||||
issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
|
||||
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
|
||||
res, err := issues_model.FetchCodeComments(db.DefaultContext, issue, user, false)
|
||||
res, err := issues_model.FetchCodeConversations(db.DefaultContext, issue, user, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, res, "README.md")
|
||||
assert.Contains(t, res["README.md"], int64(4))
|
||||
assert.Len(t, res["README.md"][4], 1)
|
||||
assert.Equal(t, int64(4), res["README.md"][4][0].ID)
|
||||
assert.Equal(t, int64(4), res["README.md"][4][0][0].ID)
|
||||
|
||||
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
|
||||
res, err = issues_model.FetchCodeComments(db.DefaultContext, issue, user2, false)
|
||||
res, err = issues_model.FetchCodeConversations(db.DefaultContext, issue, user2, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, res, 1)
|
||||
}
|
||||
|
|
|
@ -161,22 +161,18 @@ func FetchIssueContentHistoryList(dbCtx context.Context, issueID, commentID int6
|
|||
}
|
||||
|
||||
for _, item := range res {
|
||||
item.UserAvatarLink = avatars.GenerateUserAvatarFastLink(item.UserName, 0)
|
||||
if item.UserID > 0 {
|
||||
item.UserAvatarLink = avatars.GenerateUserAvatarFastLink(item.UserName, 0)
|
||||
} else {
|
||||
item.UserAvatarLink = avatars.DefaultAvatarLink()
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// HasIssueContentHistory check if a ContentHistory entry exists
|
||||
func HasIssueContentHistory(dbCtx context.Context, issueID, commentID int64) (bool, error) {
|
||||
exists, err := db.GetEngine(dbCtx).Cols("id").Exist(&ContentHistory{
|
||||
IssueID: issueID,
|
||||
CommentID: commentID,
|
||||
})
|
||||
if err != nil {
|
||||
log.Error("can not fetch issue content history. err=%v", err)
|
||||
return false, err
|
||||
}
|
||||
return exists, err
|
||||
return db.GetEngine(dbCtx).Where("issue_id = ? AND comment_id = ?", issueID, commentID).Exist(new(ContentHistory))
|
||||
}
|
||||
|
||||
// SoftDeleteIssueContentHistory soft delete
|
||||
|
|
|
@ -78,3 +78,16 @@ func TestContentHistory(t *testing.T) {
|
|||
assert.EqualValues(t, 7, list2[1].HistoryID)
|
||||
assert.EqualValues(t, 4, list2[2].HistoryID)
|
||||
}
|
||||
|
||||
func TestHasIssueContentHistory(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
|
||||
// Ensures that comment_id is taken into account even if it's zero.
|
||||
_ = issues_model.SaveIssueContentHistory(db.DefaultContext, 1, 11, 100, timeutil.TimeStampNow(), "c-a", true)
|
||||
_ = issues_model.SaveIssueContentHistory(db.DefaultContext, 1, 11, 100, timeutil.TimeStampNow().Add(5), "c-b", false)
|
||||
|
||||
hasHistory1, _ := issues_model.HasIssueContentHistory(db.DefaultContext, 11, 0)
|
||||
assert.False(t, hasHistory1)
|
||||
hasHistory2, _ := issues_model.HasIssueContentHistory(db.DefaultContext, 11, 100)
|
||||
assert.True(t, hasHistory2)
|
||||
}
|
||||
|
|
|
@ -381,7 +381,7 @@ func TestCountIssues(t *testing.T) {
|
|||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
count, err := issues_model.CountIssues(db.DefaultContext, &issues_model.IssuesOptions{})
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 20, count)
|
||||
assert.EqualValues(t, 22, count)
|
||||
}
|
||||
|
||||
func TestIssueLoadAttributes(t *testing.T) {
|
||||
|
|
|
@ -46,10 +46,10 @@ func neuterCrossReferences(ctx context.Context, issueID, commentID int64) error
|
|||
for i, c := range active {
|
||||
ids[i] = c.ID
|
||||
}
|
||||
return neuterCrossReferencesIds(ctx, nil, ids)
|
||||
return neuterCrossReferencesIDs(ctx, nil, ids)
|
||||
}
|
||||
|
||||
func neuterCrossReferencesIds(stdCtx context.Context, ctx *crossReferencesContext, ids []int64) error {
|
||||
func neuterCrossReferencesIDs(stdCtx context.Context, ctx *crossReferencesContext, ids []int64) error {
|
||||
sess := db.GetEngine(stdCtx).In("id", ids).Cols("`ref_action`")
|
||||
if ctx != nil && ctx.OrigIssue.NoAutoTime {
|
||||
sess.SetExpr("updated_unix", ctx.OrigIssue.UpdatedUnix).NoAutoTime()
|
||||
|
@ -104,7 +104,7 @@ func (issue *Issue) createCrossReferences(stdCtx context.Context, ctx *crossRefe
|
|||
}
|
||||
}
|
||||
if len(ids) > 0 {
|
||||
if err = neuterCrossReferencesIds(stdCtx, ctx, ids); err != nil {
|
||||
if err = neuterCrossReferencesIDs(stdCtx, ctx, ids); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
|
@ -652,6 +652,35 @@ func GetPullRequestByIssueID(ctx context.Context, issueID int64) (*PullRequest,
|
|||
return pr, pr.LoadAttributes(ctx)
|
||||
}
|
||||
|
||||
// GetPullRequestByBaseHeadInfo returns the pull request by the given base and head
|
||||
func GetPullRequestByBaseHeadInfo(ctx context.Context, baseID, headID int64, base, head string) (*PullRequest, error) {
|
||||
pr := &PullRequest{}
|
||||
sess := db.GetEngine(ctx).
|
||||
Join("INNER", "issue", "issue.id = pull_request.issue_id").
|
||||
Where("base_repo_id = ? AND base_branch = ? AND head_repo_id = ? AND head_branch = ?", baseID, base, headID, head)
|
||||
has, err := sess.Get(pr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !has {
|
||||
return nil, ErrPullRequestNotExist{
|
||||
HeadRepoID: headID,
|
||||
BaseRepoID: baseID,
|
||||
HeadBranch: head,
|
||||
BaseBranch: base,
|
||||
}
|
||||
}
|
||||
|
||||
if err = pr.LoadAttributes(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = pr.LoadIssue(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return pr, nil
|
||||
}
|
||||
|
||||
// GetAllUnmergedAgitPullRequestByPoster get all unmerged agit flow pull request
|
||||
// By poster id.
|
||||
func GetAllUnmergedAgitPullRequestByPoster(ctx context.Context, uid int64) ([]*PullRequest, error) {
|
||||
|
@ -893,7 +922,14 @@ func PullRequestCodeOwnersReview(ctx context.Context, pull *Issue, pr *PullReque
|
|||
}
|
||||
|
||||
rules, _ := GetCodeOwnersFromContent(ctx, data)
|
||||
changedFiles, err := repo.GetFilesChangedBetween(git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
|
||||
|
||||
prInfo, err := repo.GetCompareInfo(repo.Path, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Use the merge base as the base instead of the main branch to avoid problems
|
||||
// if the pull request is out of date with the base branch.
|
||||
changedFiles, err := repo.GetFilesChangedBetween(prInfo.MergeBase, pr.HeadCommitID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -1093,3 +1129,23 @@ func InsertPullRequests(ctx context.Context, prs ...*PullRequest) error {
|
|||
}
|
||||
return committer.Commit()
|
||||
}
|
||||
|
||||
// GetPullRequestByMergedCommit returns a merged pull request by the given commit
|
||||
func GetPullRequestByMergedCommit(ctx context.Context, repoID int64, sha string) (*PullRequest, error) {
|
||||
pr := new(PullRequest)
|
||||
has, err := db.GetEngine(ctx).Where("base_repo_id = ? AND merged_commit_id = ?", repoID, sha).Get(pr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if !has {
|
||||
return nil, ErrPullRequestNotExist{0, 0, 0, repoID, "", ""}
|
||||
}
|
||||
|
||||
if err = pr.LoadAttributes(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = pr.LoadIssue(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return pr, nil
|
||||
}
|
||||
|
|
|
@ -425,6 +425,18 @@ func TestGetApprovers(t *testing.T) {
|
|||
assert.EqualValues(t, expected, approvers)
|
||||
}
|
||||
|
||||
func TestGetPullRequestByMergedCommit(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
pr, err := issues_model.GetPullRequestByMergedCommit(db.DefaultContext, 1, "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3")
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 1, pr.ID)
|
||||
|
||||
_, err = issues_model.GetPullRequestByMergedCommit(db.DefaultContext, 0, "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3")
|
||||
assert.ErrorAs(t, err, &issues_model.ErrPullRequestNotExist{})
|
||||
_, err = issues_model.GetPullRequestByMergedCommit(db.DefaultContext, 1, "")
|
||||
assert.ErrorAs(t, err, &issues_model.ErrPullRequestNotExist{})
|
||||
}
|
||||
|
||||
func TestMigrate_InsertPullRequests(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
reponame := "repo1"
|
||||
|
|
|
@ -159,6 +159,14 @@ func (r *Review) LoadReviewer(ctx context.Context) (err error) {
|
|||
return err
|
||||
}
|
||||
r.Reviewer, err = user_model.GetPossibleUserByID(ctx, r.ReviewerID)
|
||||
if err != nil {
|
||||
if !user_model.IsErrUserNotExist(err) {
|
||||
return fmt.Errorf("GetPossibleUserByID [%d]: %w", r.ReviewerID, err)
|
||||
}
|
||||
r.ReviewerID = user_model.GhostUserID
|
||||
r.Reviewer = user_model.NewGhostUser()
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
|
@ -284,8 +292,14 @@ func IsOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organizatio
|
|||
|
||||
// CreateReview creates a new review based on opts
|
||||
func CreateReview(ctx context.Context, opts CreateReviewOptions) (*Review, error) {
|
||||
ctx, committer, err := db.TxContext(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer committer.Close()
|
||||
sess := db.GetEngine(ctx)
|
||||
|
||||
review := &Review{
|
||||
Type: opts.Type,
|
||||
Issue: opts.Issue,
|
||||
IssueID: opts.Issue.ID,
|
||||
Reviewer: opts.Reviewer,
|
||||
|
@ -295,15 +309,39 @@ func CreateReview(ctx context.Context, opts CreateReviewOptions) (*Review, error
|
|||
CommitID: opts.CommitID,
|
||||
Stale: opts.Stale,
|
||||
}
|
||||
|
||||
if opts.Reviewer != nil {
|
||||
review.Type = opts.Type
|
||||
review.ReviewerID = opts.Reviewer.ID
|
||||
} else {
|
||||
if review.Type != ReviewTypeRequest {
|
||||
review.Type = ReviewTypeRequest
|
||||
|
||||
reviewCond := builder.Eq{"reviewer_id": opts.Reviewer.ID, "issue_id": opts.Issue.ID}
|
||||
// make sure user review requests are cleared
|
||||
if opts.Type != ReviewTypePending {
|
||||
if _, err := sess.Where(reviewCond.And(builder.Eq{"type": ReviewTypeRequest})).Delete(new(Review)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
// make sure if the created review gets dismissed no old review surface
|
||||
// other types can be ignored, as they don't affect branch protection
|
||||
if opts.Type == ReviewTypeApprove || opts.Type == ReviewTypeReject {
|
||||
if _, err := sess.Where(reviewCond.And(builder.In("type", ReviewTypeApprove, ReviewTypeReject))).
|
||||
Cols("dismissed").Update(&Review{Dismissed: true}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
} else if opts.ReviewerTeam != nil {
|
||||
review.Type = ReviewTypeRequest
|
||||
review.ReviewerTeamID = opts.ReviewerTeam.ID
|
||||
|
||||
} else {
|
||||
return nil, fmt.Errorf("provide either reviewer or reviewer team")
|
||||
}
|
||||
return review, db.Insert(ctx, review)
|
||||
|
||||
if _, err := sess.Insert(review); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return review, committer.Commit()
|
||||
}
|
||||
|
||||
// GetCurrentReview returns the current pending review of reviewer for given issue
|
||||
|
@ -621,6 +659,9 @@ func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_mo
|
|||
return nil, err
|
||||
}
|
||||
|
||||
// func caller use the created comment to retrieve created review too.
|
||||
comment.Review = review
|
||||
|
||||
return comment, committer.Commit()
|
||||
}
|
||||
|
||||
|
|
|
@ -18,11 +18,11 @@ type ReviewList []*Review
|
|||
|
||||
// LoadReviewers loads reviewers
|
||||
func (reviews ReviewList) LoadReviewers(ctx context.Context) error {
|
||||
reviewerIds := make([]int64, len(reviews))
|
||||
reviewerIDs := make([]int64, len(reviews))
|
||||
for i := 0; i < len(reviews); i++ {
|
||||
reviewerIds[i] = reviews[i].ReviewerID
|
||||
reviewerIDs[i] = reviews[i].ReviewerID
|
||||
}
|
||||
reviewers, err := user_model.GetPossibleUserByIDs(ctx, reviewerIds)
|
||||
reviewers, err := user_model.GetPossibleUserByIDs(ctx, reviewerIDs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -38,12 +38,12 @@ func (reviews ReviewList) LoadReviewers(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (reviews ReviewList) LoadIssues(ctx context.Context) error {
|
||||
issueIds := container.Set[int64]{}
|
||||
issueIDs := container.Set[int64]{}
|
||||
for i := 0; i < len(reviews); i++ {
|
||||
issueIds.Add(reviews[i].IssueID)
|
||||
issueIDs.Add(reviews[i].IssueID)
|
||||
}
|
||||
|
||||
issues, err := GetIssuesByIDs(ctx, issueIds.Values())
|
||||
issues, err := GetIssuesByIDs(ctx, issueIDs.Values())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -594,9 +594,7 @@ func GetOrgByID(ctx context.Context, id int64) (*Organization, error) {
|
|||
return nil, err
|
||||
} else if !has {
|
||||
return nil, user_model.ErrUserNotExist{
|
||||
UID: id,
|
||||
Name: "",
|
||||
KeyID: 0,
|
||||
UID: id,
|
||||
}
|
||||
}
|
||||
return u, nil
|
||||
|
|
|
@ -356,7 +356,6 @@ func HasAccessUnit(ctx context.Context, user *user_model.User, repo *repo_model.
|
|||
|
||||
// CanBeAssigned return true if user can be assigned to issue or pull requests in repo
|
||||
// Currently any write access (code, issues or pr's) is assignable, to match assignee list in user interface.
|
||||
// FIXME: user could send PullRequest also could be assigned???
|
||||
func CanBeAssigned(ctx context.Context, user *user_model.User, repo *repo_model.Repository, _ bool) (bool, error) {
|
||||
if user.IsOrganization() {
|
||||
return false, fmt.Errorf("Organization can't be added as assignee [user_id: %d, repo_id: %d]", user.ID, repo.ID)
|
||||
|
@ -365,7 +364,8 @@ func CanBeAssigned(ctx context.Context, user *user_model.User, repo *repo_model.
|
|||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return perm.CanAccessAny(perm_model.AccessModeWrite, unit.TypeCode, unit.TypeIssues, unit.TypePullRequests), nil
|
||||
return perm.CanAccessAny(perm_model.AccessModeWrite, unit.AllRepoUnitTypes...) ||
|
||||
perm.CanAccessAny(perm_model.AccessModeRead, unit.TypePullRequests), nil
|
||||
}
|
||||
|
||||
// HasAccess returns true if user has access to repo
|
||||
|
|
|
@@ -21,6 +21,8 @@ const (
 	MergeStyleRebaseMerge MergeStyle = "rebase-merge"
 	// MergeStyleSquash squash commits into single commit before merging
 	MergeStyleSquash MergeStyle = "squash"
+	// MergeStyleFastForwardOnly fast-forward merge if possible, otherwise fail
+	MergeStyleFastForwardOnly MergeStyle = "fast-forward-only"
 	// MergeStyleManuallyMerged pr has been merged manually, just mark it as merged directly
 	MergeStyleManuallyMerged MergeStyle = "manually-merged"
 	// MergeStyleRebaseUpdate not a merge style, used to update pull head by rebase

@@ -449,6 +449,31 @@ func (repo *Repository) GetUnit(ctx context.Context, tp unit.Type) (*RepoUnit, e
 	return nil, ErrUnitTypeNotExist{tp}
 }

+// AllUnitsEnabled returns true if all units are enabled for the repo.
+func (repo *Repository) AllUnitsEnabled(ctx context.Context) bool {
+	hasAnyUnitEnabled := func(unitGroup []unit.Type) bool {
+		// Loop over the group of units
+		for _, unit := range unitGroup {
+			// If *any* of them is enabled, return true.
+			if repo.UnitEnabled(ctx, unit) {
+				return true
+			}
+		}
+
+		// If none are enabled, return false.
+		return false
+	}
+
+	for _, unitGroup := range unit.AllowedRepoUnitGroups {
+		// If any disabled unit is found, return false immediately.
+		if !hasAnyUnitEnabled(unitGroup) {
+			return false
+		}
+	}
+
+	return true
+}
+
 // LoadOwner loads owner user
 func (repo *Repository) LoadOwner(ctx context.Context) (err error) {
 	if repo.Owner != nil {

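
The new AllUnitsEnabled helper treats a repository as fully enabled only when every allowed unit group contains at least one enabled unit. A minimal sketch of a caller, assuming a loaded *repo_model.Repository and a request context; the helper name and the badge strings are illustrative, not part of this diff:

package example

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
)

// showAllUnitsBadge is an illustrative caller: it reports whether every
// allowed unit group (code, issues or external tracker, wiki or external
// wiki, pull requests, projects, packages, actions) has at least one
// enabled unit on this repository.
func showAllUnitsBadge(ctx context.Context, repo *repo_model.Repository) string {
	if repo.AllUnitsEnabled(ctx) {
		return "all units enabled"
	}
	return "some units disabled"
}
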
@ -138,12 +138,12 @@ func getTestCases() []struct {
|
|||
{
|
||||
name: "AllPublic/PublicRepositoriesOfUserIncludingCollaborative",
|
||||
opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: util.OptionalBoolFalse},
|
||||
count: 32,
|
||||
count: 34,
|
||||
},
|
||||
{
|
||||
name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
|
||||
opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: util.OptionalBoolFalse},
|
||||
count: 37,
|
||||
count: 39,
|
||||
},
|
||||
{
|
||||
name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName",
|
||||
|
@ -158,7 +158,7 @@ func getTestCases() []struct {
|
|||
{
|
||||
name: "AllPublic/PublicRepositoriesOfOrganization",
|
||||
opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: util.OptionalBoolFalse, Template: util.OptionalBoolFalse},
|
||||
count: 32,
|
||||
count: 34,
|
||||
},
|
||||
{
|
||||
name: "AllTemplates",
|
||||
|
|
|
@@ -153,6 +153,7 @@ type PullRequestsConfig struct {
 	AllowRebase bool
 	AllowRebaseMerge bool
 	AllowSquash bool
+	AllowFastForwardOnly bool
 	AllowManualMerge bool
 	AutodetectManualMerge bool
 	AllowRebaseUpdate bool

@@ -179,6 +180,7 @@ func (cfg *PullRequestsConfig) IsMergeStyleAllowed(mergeStyle MergeStyle) bool {
 		mergeStyle == MergeStyleRebase && cfg.AllowRebase ||
 		mergeStyle == MergeStyleRebaseMerge && cfg.AllowRebaseMerge ||
 		mergeStyle == MergeStyleSquash && cfg.AllowSquash ||
+		mergeStyle == MergeStyleFastForwardOnly && cfg.AllowFastForwardOnly ||
 		mergeStyle == MergeStyleManuallyMerged && cfg.AllowManualMerge
 }

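
Fast-forward-only merges are gated the same way as the other styles: the repository's pull request unit config must switch them on. A small hedged sketch of that check; the helper name is invented and the repo model import path is assumed:

package example

import (
	repo_model "code.gitea.io/gitea/models/repo"
)

// canUseFastForwardOnly is an invented guard: the style is usable only when
// AllowFastForwardOnly is set on the repository's PullRequestsConfig, which
// is exactly what IsMergeStyleAllowed checks for MergeStyleFastForwardOnly.
func canUseFastForwardOnly(cfg *repo_model.PullRequestsConfig) bool {
	return cfg.IsMergeStyleAllowed(repo_model.MergeStyleFastForwardOnly)
}
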
@ -8,6 +8,7 @@ import (
|
|||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/models/perm"
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/container"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
|
@ -78,7 +79,8 @@ func GetRepoAssignees(ctx context.Context, repo *Repository) (_ []*user_model.Us
|
|||
if err = e.Table("team_user").
|
||||
Join("INNER", "team_repo", "`team_repo`.team_id = `team_user`.team_id").
|
||||
Join("INNER", "team_unit", "`team_unit`.team_id = `team_user`.team_id").
|
||||
Where("`team_repo`.repo_id = ? AND `team_unit`.access_mode >= ?", repo.ID, perm.AccessModeWrite).
|
||||
Where("`team_repo`.repo_id = ? AND (`team_unit`.access_mode >= ? OR (`team_unit`.access_mode = ? AND `team_unit`.`type` = ?))",
|
||||
repo.ID, perm.AccessModeWrite, perm.AccessModeRead, unit.TypePullRequests).
|
||||
Distinct("`team_user`.uid").
|
||||
Select("`team_user`.uid").
|
||||
Find(&additionalUserIDs); err != nil {
|
||||
|
|
|
@ -17,13 +17,13 @@ const (
|
|||
func (o OwnerType) LocaleString(locale translation.Locale) string {
|
||||
switch o {
|
||||
case OwnerTypeSystemGlobal:
|
||||
return locale.Tr("concept_system_global")
|
||||
return locale.TrString("concept_system_global")
|
||||
case OwnerTypeIndividual:
|
||||
return locale.Tr("concept_user_individual")
|
||||
return locale.TrString("concept_user_individual")
|
||||
case OwnerTypeRepository:
|
||||
return locale.Tr("concept_code_repository")
|
||||
return locale.TrString("concept_code_repository")
|
||||
case OwnerTypeOrganization:
|
||||
return locale.Tr("concept_user_organization")
|
||||
return locale.TrString("concept_user_organization")
|
||||
}
|
||||
return locale.Tr("unknown")
|
||||
return locale.TrString("unknown")
|
||||
}
|
||||
|
|
|
@ -108,6 +108,10 @@ var (
|
|||
|
||||
// DisabledRepoUnits contains the units that have been globally disabled
|
||||
DisabledRepoUnits = []Type{}
|
||||
|
||||
// AllowedRepoUnitGroups contains the units that have been globally enabled,
|
||||
// with mutually exclusive units grouped together.
|
||||
AllowedRepoUnitGroups = [][]Type{}
|
||||
)
|
||||
|
||||
// Get valid set of default repository units from settings
|
||||
|
@ -162,6 +166,45 @@ func LoadUnitConfig() error {
|
|||
if len(DefaultForkRepoUnits) == 0 {
|
||||
return errors.New("no default fork repository units found")
|
||||
}
|
||||
|
||||
// Collect the allowed repo unit groups. Mutually exclusive units are
|
||||
// grouped together.
|
||||
AllowedRepoUnitGroups = [][]Type{}
|
||||
for _, unit := range []Type{
|
||||
TypeCode,
|
||||
TypePullRequests,
|
||||
TypeProjects,
|
||||
TypePackages,
|
||||
TypeActions,
|
||||
} {
|
||||
// If unit is globally disabled, ignore it.
|
||||
if unit.UnitGlobalDisabled() {
|
||||
continue
|
||||
}
|
||||
|
||||
// If it is allowed, add it to the group list.
|
||||
AllowedRepoUnitGroups = append(AllowedRepoUnitGroups, []Type{unit})
|
||||
}
|
||||
|
||||
addMutuallyExclusiveGroup := func(unit1, unit2 Type) {
|
||||
var list []Type
|
||||
|
||||
if !unit1.UnitGlobalDisabled() {
|
||||
list = append(list, unit1)
|
||||
}
|
||||
|
||||
if !unit2.UnitGlobalDisabled() {
|
||||
list = append(list, unit2)
|
||||
}
|
||||
|
||||
if len(list) > 0 {
|
||||
AllowedRepoUnitGroups = append(AllowedRepoUnitGroups, list)
|
||||
}
|
||||
}
|
||||
|
||||
addMutuallyExclusiveGroup(TypeIssues, TypeExternalTracker)
|
||||
addMutuallyExclusiveGroup(TypeWiki, TypeExternalWiki)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
|
@ -44,12 +44,12 @@ func fatalTestError(fmtStr string, args ...any) {
|
|||
}
|
||||
|
||||
// InitSettings initializes config provider and load common settings for tests
|
||||
func InitSettings(extraConfigs ...string) {
|
||||
func InitSettings() {
|
||||
if setting.CustomConf == "" {
|
||||
setting.CustomConf = filepath.Join(setting.CustomPath, "conf/app-unittest-tmp.ini")
|
||||
_ = os.Remove(setting.CustomConf)
|
||||
}
|
||||
setting.InitCfgProvider(setting.CustomConf, strings.Join(extraConfigs, "\n"))
|
||||
setting.InitCfgProvider(setting.CustomConf)
|
||||
setting.LoadCommonSettings()
|
||||
|
||||
if err := setting.PrepareAppDataPath(); err != nil {
|
||||
|
|
|
@@ -131,8 +131,8 @@ func AssertSuccessfulInsert(t assert.TestingT, beans ...any) {
 }

 // AssertCount assert the count of a bean
-func AssertCount(t assert.TestingT, bean, expected any) {
-	assert.EqualValues(t, expected, GetCount(t, bean))
+func AssertCount(t assert.TestingT, bean, expected any) bool {
+	return assert.EqualValues(t, expected, GetCount(t, bean))
 }

 // AssertInt64InRange assert value is in range [low, high]

@@ -150,7 +150,7 @@ func GetCountByCond(t assert.TestingT, tableName string, cond builder.Cond) int6
 }

 // AssertCountByCond test the count of database entries matching bean
-func AssertCountByCond(t assert.TestingT, tableName string, cond builder.Cond, expected int) {
-	assert.EqualValues(t, expected, GetCountByCond(t, tableName, cond),
+func AssertCountByCond(t assert.TestingT, tableName string, cond builder.Cond, expected int) bool {
+	return assert.EqualValues(t, expected, GetCountByCond(t, tableName, cond),
 		"Failed consistency test, the counted bean (of table %s) was %+v", tableName, cond)
 }

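
Because AssertCount and AssertCountByCond now return the bool from assert.EqualValues, a test can stop early when the count is already wrong instead of piling up follow-up failures. A hedged sketch; the test name and the User bean are illustrative:

package example

import (
	"testing"

	"code.gitea.io/gitea/models/unittest"
	user_model "code.gitea.io/gitea/models/user"
)

func TestCreatesExactlyOneUser(t *testing.T) {
	// ... the code under test would run here ...

	// If the count is already wrong, stop instead of asserting against an
	// inconsistent database state; AssertCount now returns that bool.
	if !unittest.AssertCount(t, &user_model.User{}, 1) {
		return
	}
}
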
@@ -31,9 +31,8 @@ func (err ErrUserAlreadyExist) Unwrap() error {

 // ErrUserNotExist represents a "UserNotExist" kind of error.
 type ErrUserNotExist struct {
-	UID   int64
-	Name  string
-	KeyID int64
+	UID  int64
+	Name string
 }

 // IsErrUserNotExist checks if an error is a ErrUserNotExist.

@@ -43,7 +42,7 @@ func IsErrUserNotExist(err error) bool {
 }

 func (err ErrUserNotExist) Error() string {
-	return fmt.Sprintf("user does not exist [uid: %d, name: %s, keyid: %d]", err.UID, err.Name, err.KeyID)
+	return fmt.Sprintf("user does not exist [uid: %d, name: %s]", err.UID, err.Name)
 }

 // Unwrap unwraps this error as a ErrNotExist error

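
With KeyID gone, ErrUserNotExist is built with named fields and carries only what the caller knows. A sketch of the lookup-and-check pattern using GetUserByName and IsErrUserNotExist from this diff; the wrapper function itself is invented:

package example

import (
	"context"
	"fmt"

	user_model "code.gitea.io/gitea/models/user"
)

// findUser is an invented wrapper around the lookup shown in this diff.
func findUser(ctx context.Context, name string) (*user_model.User, error) {
	u, err := user_model.GetUserByName(ctx, name)
	if user_model.IsErrUserNotExist(err) {
		// The error now carries only UID and Name; key lookups use their
		// own error type rather than the removed KeyID field.
		return nil, fmt.Errorf("no such user %q: %w", name, err)
	}
	return u, err
}
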
@ -26,6 +26,7 @@ import (
|
|||
"code.gitea.io/gitea/modules/container"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/timeutil"
|
||||
|
@@ -442,14 +443,14 @@ func (u *User) GetDisplayName() string {
 }

 // GetCompleteName returns the the full name and username in the form of
-// "Full Name (@username)" if full name is not empty, otherwise it returns
-// "@username".
+// "Full Name (username)" if full name is not empty, otherwise it returns
+// "username".
 func (u *User) GetCompleteName() string {
 	trimmedFullName := strings.TrimSpace(u.FullName)
 	if len(trimmedFullName) > 0 {
-		return fmt.Sprintf("%s (@%s)", trimmedFullName, u.Name)
+		return fmt.Sprintf("%s (%s)", trimmedFullName, u.Name)
 	}
-	return fmt.Sprintf("@%s", u.Name)
+	return u.Name
 }

 func gitSafeName(name string) string {

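
The visible effect of the GetCompleteName change, with made-up values (not from the fixtures touched elsewhere in this merge):

package example

import (
	"fmt"

	user_model "code.gitea.io/gitea/models/user"
)

func printCompleteNames() {
	u := &user_model.User{Name: "jane", FullName: "Jane Doe"}
	fmt.Println(u.GetCompleteName()) // was "Jane Doe (@jane)", now "Jane Doe (jane)"

	anon := &user_model.User{Name: "jane"}
	fmt.Println(anon.GetCompleteName()) // was "@jane", now "jane"
}
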
@@ -591,14 +592,14 @@ func IsUsableUsername(name string) error {

 // CreateUserOverwriteOptions are an optional options who overwrite system defaults on user creation
 type CreateUserOverwriteOptions struct {
-	KeepEmailPrivate util.OptionalBool
+	KeepEmailPrivate optional.Option[bool]
 	Visibility *structs.VisibleType
-	AllowCreateOrganization util.OptionalBool
+	AllowCreateOrganization optional.Option[bool]
 	EmailNotificationsPreference *string
 	MaxRepoCreation *int
 	Theme *string
-	IsRestricted util.OptionalBool
-	IsActive util.OptionalBool
+	IsRestricted optional.Option[bool]
+	IsActive optional.Option[bool]
 }

 // CreateUser creates record of a new user.

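
These fields move from util.OptionalBool, a three-state value read with IsNone/IsTrue, to optional.Option[bool], read with Has/Value as the CreateUser hunk below shows. A hedged construction example; optional.Some is assumed to be the Option[bool] constructor used elsewhere in the codebase, and the helper is illustrative:

package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/optional"
)

// createRestrictedUser sketches the new call shape. Fields left at their
// zero value report Has() == false, so system defaults still apply to them.
func createRestrictedUser(ctx context.Context, u *user_model.User) error {
	return user_model.CreateUser(ctx, u, &user_model.CreateUserOverwriteOptions{
		IsActive:     optional.Some(true),
		IsRestricted: optional.Some(true),
	})
}
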
@ -625,14 +626,14 @@ func CreateUser(ctx context.Context, u *User, overwriteDefault ...*CreateUserOve
|
|||
// overwrite defaults if set
|
||||
if len(overwriteDefault) != 0 && overwriteDefault[0] != nil {
|
||||
overwrite := overwriteDefault[0]
|
||||
if !overwrite.KeepEmailPrivate.IsNone() {
|
||||
u.KeepEmailPrivate = overwrite.KeepEmailPrivate.IsTrue()
|
||||
if overwrite.KeepEmailPrivate.Has() {
|
||||
u.KeepEmailPrivate = overwrite.KeepEmailPrivate.Value()
|
||||
}
|
||||
if overwrite.Visibility != nil {
|
||||
u.Visibility = *overwrite.Visibility
|
||||
}
|
||||
if !overwrite.AllowCreateOrganization.IsNone() {
|
||||
u.AllowCreateOrganization = overwrite.AllowCreateOrganization.IsTrue()
|
||||
if overwrite.AllowCreateOrganization.Has() {
|
||||
u.AllowCreateOrganization = overwrite.AllowCreateOrganization.Value()
|
||||
}
|
||||
if overwrite.EmailNotificationsPreference != nil {
|
||||
u.EmailNotificationsPreference = *overwrite.EmailNotificationsPreference
|
||||
|
@ -643,11 +644,11 @@ func CreateUser(ctx context.Context, u *User, overwriteDefault ...*CreateUserOve
|
|||
if overwrite.Theme != nil {
|
||||
u.Theme = *overwrite.Theme
|
||||
}
|
||||
if !overwrite.IsRestricted.IsNone() {
|
||||
u.IsRestricted = overwrite.IsRestricted.IsTrue()
|
||||
if overwrite.IsRestricted.Has() {
|
||||
u.IsRestricted = overwrite.IsRestricted.Value()
|
||||
}
|
||||
if !overwrite.IsActive.IsNone() {
|
||||
u.IsActive = overwrite.IsActive.IsTrue()
|
||||
if overwrite.IsActive.Has() {
|
||||
u.IsActive = overwrite.IsActive.Value()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -864,7 +865,7 @@ func GetUserByID(ctx context.Context, id int64) (*User, error) {
|
|||
if err != nil {
|
||||
return nil, err
|
||||
} else if !has {
|
||||
return nil, ErrUserNotExist{id, "", 0}
|
||||
return nil, ErrUserNotExist{UID: id}
|
||||
}
|
||||
return u, nil
|
||||
}
|
||||
|
@ -914,14 +915,14 @@ func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
|||
// GetUserByNameCtx returns user by given name.
|
||||
func GetUserByName(ctx context.Context, name string) (*User, error) {
|
||||
if len(name) == 0 {
|
||||
return nil, ErrUserNotExist{0, name, 0}
|
||||
return nil, ErrUserNotExist{Name: name}
|
||||
}
|
||||
u := &User{LowerName: strings.ToLower(name), Type: UserTypeIndividual}
|
||||
has, err := db.GetEngine(ctx).Get(u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if !has {
|
||||
return nil, ErrUserNotExist{0, name, 0}
|
||||
return nil, ErrUserNotExist{Name: name}
|
||||
}
|
||||
return u, nil
|
||||
}
|
||||
|
@ -1062,7 +1063,7 @@ func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) []
|
|||
// GetUserByEmail returns the user object by given e-mail if exists.
|
||||
func GetUserByEmail(ctx context.Context, email string) (*User, error) {
|
||||
if len(email) == 0 {
|
||||
return nil, ErrUserNotExist{0, email, 0}
|
||||
return nil, ErrUserNotExist{Name: email}
|
||||
}
|
||||
|
||||
email = strings.ToLower(email)
|
||||
|
@ -1089,7 +1090,7 @@ func GetUserByEmail(ctx context.Context, email string) (*User, error) {
|
|||
}
|
||||
}
|
||||
|
||||
return nil, ErrUserNotExist{0, email, 0}
|
||||
return nil, ErrUserNotExist{Name: email}
|
||||
}
|
||||
|
||||
// GetUser checks if a user already exists
|
||||
|
@ -1100,7 +1101,7 @@ func GetUser(ctx context.Context, user *User) (bool, error) {
|
|||
// GetUserByOpenID returns the user object by given OpenID if exists.
|
||||
func GetUserByOpenID(ctx context.Context, uri string) (*User, error) {
|
||||
if len(uri) == 0 {
|
||||
return nil, ErrUserNotExist{0, uri, 0}
|
||||
return nil, ErrUserNotExist{Name: uri}
|
||||
}
|
||||
|
||||
uri, err := openid.Normalize(uri)
|
||||
|
@ -1120,7 +1121,7 @@ func GetUserByOpenID(ctx context.Context, uri string) (*User, error) {
|
|||
return GetUserByID(ctx, oid.UID)
|
||||
}
|
||||
|
||||
return nil, ErrUserNotExist{0, uri, 0}
|
||||
return nil, ErrUserNotExist{Name: uri}
|
||||
}
|
||||
|
||||
// GetAdminUser returns the first administrator
|
||||
|
|
|
@ -98,7 +98,7 @@ func TestSearchUsers(t *testing.T) {
|
|||
[]int64{19, 25})
|
||||
|
||||
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}},
|
||||
[]int64{26})
|
||||
[]int64{26, 41})
|
||||
|
||||
testOrgSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
|
||||
[]int64{})
|
||||
|
@ -110,13 +110,13 @@ func TestSearchUsers(t *testing.T) {
|
|||
}
|
||||
|
||||
testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}},
|
||||
[]int64{1, 2, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37})
|
||||
[]int64{1, 2, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40})
|
||||
|
||||
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: util.OptionalBoolFalse},
|
||||
[]int64{9})
|
||||
|
||||
testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: util.OptionalBoolTrue},
|
||||
[]int64{1, 2, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37})
|
||||
[]int64{1, 2, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40})
|
||||
|
||||
testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: util.OptionalBoolTrue},
|
||||
[]int64{1, 10, 11, 12, 13, 14, 15, 16, 18})
|
||||
|
|
|
@ -25,6 +25,45 @@ const (
|
|||
GithubEventSchedule = "schedule"
|
||||
)
|
||||
|
||||
// IsDefaultBranchWorkflow returns true if the event only triggers workflows on the default branch
|
||||
func IsDefaultBranchWorkflow(triggedEvent webhook_module.HookEventType) bool {
|
||||
switch triggedEvent {
|
||||
case webhook_module.HookEventDelete:
|
||||
// GitHub "delete" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#delete
|
||||
return true
|
||||
case webhook_module.HookEventFork:
|
||||
// GitHub "fork" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#fork
|
||||
return true
|
||||
case webhook_module.HookEventIssueComment:
|
||||
// GitHub "issue_comment" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment
|
||||
return true
|
||||
case webhook_module.HookEventPullRequestComment:
|
||||
// GitHub "pull_request_comment" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
|
||||
return true
|
||||
case webhook_module.HookEventWiki:
|
||||
// GitHub "gollum" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#gollum
|
||||
return true
|
||||
case webhook_module.HookEventSchedule:
|
||||
// GitHub "schedule" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule
|
||||
return true
|
||||
case webhook_module.HookEventIssues,
|
||||
webhook_module.HookEventIssueAssign,
|
||||
webhook_module.HookEventIssueLabel,
|
||||
webhook_module.HookEventIssueMilestone:
|
||||
// Github "issues" event
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issues
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// canGithubEventMatch check if the input Github event can match any Gitea event.
|
||||
func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEventType) bool {
|
||||
switch eventName {
|
||||
|
@ -55,7 +94,9 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
|
|||
case webhook_module.HookEventPullRequest,
|
||||
webhook_module.HookEventPullRequestSync,
|
||||
webhook_module.HookEventPullRequestAssign,
|
||||
webhook_module.HookEventPullRequestLabel:
|
||||
webhook_module.HookEventPullRequestLabel,
|
||||
webhook_module.HookEventPullRequestReviewRequest,
|
||||
webhook_module.HookEventPullRequestMilestone:
|
||||
return true
|
||||
|
||||
default:
|
||||
|
@ -73,6 +114,11 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
|
|||
return false
|
||||
}
|
||||
|
||||
case GithubEventIssueComment:
|
||||
// https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
|
||||
return triggedEvent == webhook_module.HookEventIssueComment ||
|
||||
triggedEvent == webhook_module.HookEventPullRequestComment
|
||||
|
||||
default:
|
||||
return eventName == string(triggedEvent)
|
||||
}
|
||||
|
|
|
@ -103,6 +103,12 @@ func TestCanGithubEventMatch(t *testing.T) {
|
|||
webhook_module.HookEventCreate,
|
||||
true,
|
||||
},
|
||||
{
|
||||
"create pull request comment",
|
||||
GithubEventIssueComment,
|
||||
webhook_module.HookEventPullRequestComment,
|
||||
true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
|
|
|
@ -186,7 +186,9 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
|
|||
webhook_module.HookEventPullRequest,
|
||||
webhook_module.HookEventPullRequestSync,
|
||||
webhook_module.HookEventPullRequestAssign,
|
||||
webhook_module.HookEventPullRequestLabel:
|
||||
webhook_module.HookEventPullRequestLabel,
|
||||
webhook_module.HookEventPullRequestReviewRequest,
|
||||
webhook_module.HookEventPullRequestMilestone:
|
||||
return matchPullRequestEvent(gitRepo, commit, payload.(*api.PullRequestPayload), evt)
|
||||
|
||||
case // pull_request_review
|
||||
|
@ -362,13 +364,13 @@ func matchPullRequestEvent(gitRepo *git.Repository, commit *git.Commit, prPayloa
|
|||
} else {
|
||||
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
// Actions with the same name:
|
||||
// opened, edited, closed, reopened, assigned, unassigned
|
||||
// opened, edited, closed, reopened, assigned, unassigned, review_requested, review_request_removed, milestoned, demilestoned
|
||||
// Actions need to be converted:
|
||||
// synchronized -> synchronize
|
||||
// label_updated -> labeled
|
||||
// label_cleared -> unlabeled
|
||||
// Unsupported activity types:
|
||||
// converted_to_draft, ready_for_review, locked, unlocked, review_requested, review_request_removed, auto_merge_enabled, auto_merge_disabled
|
||||
// converted_to_draft, ready_for_review, locked, unlocked, auto_merge_enabled, auto_merge_disabled, enqueued, dequeued
|
||||
|
||||
action := prPayload.Action
|
||||
switch action {
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"context"
|
||||
"crypto/rand"
|
||||
"errors"
|
||||
"html/template"
|
||||
"math/big"
|
||||
"strings"
|
||||
"sync"
|
||||
|
@ -121,15 +122,15 @@ func Generate(n int) (string, error) {
|
|||
}
|
||||
|
||||
// BuildComplexityError builds the error message when password complexity checks fail
|
||||
func BuildComplexityError(locale translation.Locale) string {
|
||||
func BuildComplexityError(locale translation.Locale) template.HTML {
|
||||
var buffer bytes.Buffer
|
||||
buffer.WriteString(locale.Tr("form.password_complexity"))
|
||||
buffer.WriteString(locale.TrString("form.password_complexity"))
|
||||
buffer.WriteString("<ul>")
|
||||
for _, c := range requiredList {
|
||||
buffer.WriteString("<li>")
|
||||
buffer.WriteString(locale.Tr(c.TrNameOne))
|
||||
buffer.WriteString(locale.TrString(c.TrNameOne))
|
||||
buffer.WriteString("</li>")
|
||||
}
|
||||
buffer.WriteString("</ul>")
|
||||
return buffer.String()
|
||||
return template.HTML(buffer.String())
|
||||
}
|
||||
|
|
|
@ -5,11 +5,13 @@ package base
|
|||
|
||||
import (
|
||||
"math/big"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// NaturalSortLess compares two strings so that they could be sorted in natural order
|
||||
func NaturalSortLess(s1, s2 string) bool {
|
||||
s1, s2 = strings.ToLower(s1), strings.ToLower(s2)
|
||||
var i1, i2 int
|
||||
for {
|
||||
rune1, j1, end1 := getNextRune(s1, i1)
|
||||
|
|
|
@@ -20,4 +20,10 @@ func TestNaturalSortLess(t *testing.T) {
 	test("a-1-a", "a-1-b", true)
 	test("2", "12", true)
 	test("a", "ab", true)
+
+	// Test for case insensitive.
+	test("A", "ab", true)
+	test("B", "ab", false)
+	test("a", "AB", true)
+	test("b", "AB", false)
 }

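
NaturalSortLess now lower-cases both strings before comparing, which the new test cases above exercise. A hedged example of using it as a comparator; the tag list and helper are invented:

package example

import (
	"sort"

	"code.gitea.io/gitea/modules/base"
)

// sortTagsNaturally sorts "v2" before "v12", and mixed-case names such as
// "A" and "ab" now interleave instead of splitting by case.
func sortTagsNaturally(tags []string) {
	sort.Slice(tags, func(i, j int) bool {
		return base.NaturalSortLess(tags[i], tags[j])
	})
}
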
@ -115,7 +115,7 @@ func CreateTimeLimitCode(data string, minutes int, startInf any) string {
|
|||
|
||||
// create sha1 encode string
|
||||
sh := sha1.New()
|
||||
_, _ = sh.Write([]byte(fmt.Sprintf("%s%s%s%s%d", data, setting.SecretKey, startStr, endStr, minutes)))
|
||||
_, _ = sh.Write([]byte(fmt.Sprintf("%s%s%s%s%d", data, hex.EncodeToString(setting.GetGeneralTokenSigningSecret()), startStr, endStr, minutes)))
|
||||
encoded := hex.EncodeToString(sh.Sum(nil))
|
||||
|
||||
code := fmt.Sprintf("%s%06d%s", startStr, minutes, encoded)
|
||||
|
@ -174,7 +174,7 @@ func Int64sToStrings(ints []int64) []string {
|
|||
func EntryIcon(entry *git.TreeEntry) string {
|
||||
switch {
|
||||
case entry.IsLink():
|
||||
te, err := entry.FollowLink()
|
||||
te, _, err := entry.FollowLink()
|
||||
if err != nil {
|
||||
log.Debug(err.Error())
|
||||
return "file-symlink-file"
|
||||
|
|
|
@ -10,6 +10,7 @@ package charset
|
|||
import (
|
||||
"html/template"
|
||||
"io"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
|
@ -20,16 +21,29 @@ import (
|
|||
// RuneNBSP is the codepoint for NBSP
|
||||
const RuneNBSP = 0xa0
|
||||
|
||||
type escapeContext string
|
||||
|
||||
// Keep this consistent with the documentation of [ui].SKIP_ESCAPE_CONTEXTS
|
||||
// Defines the different contexts that could be used to escape in.
|
||||
const (
|
||||
// Wiki pages.
|
||||
WikiContext escapeContext = "wiki"
|
||||
// Rendered content (except markup), source code and blames.
|
||||
FileviewContext escapeContext = "file-view"
|
||||
// Commits or pull requet's diff.
|
||||
DiffContext escapeContext = "diff"
|
||||
)
|
||||
|
||||
// EscapeControlHTML escapes the unicode control sequences in a provided html document
|
||||
func EscapeControlHTML(html template.HTML, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, output template.HTML) {
|
||||
func EscapeControlHTML(html template.HTML, locale translation.Locale, context escapeContext, allowed ...rune) (escaped *EscapeStatus, output template.HTML) {
|
||||
sb := &strings.Builder{}
|
||||
escaped, _ = EscapeControlReader(strings.NewReader(string(html)), sb, locale, allowed...) // err has been handled in EscapeControlReader
|
||||
escaped, _ = EscapeControlReader(strings.NewReader(string(html)), sb, locale, context, allowed...) // err has been handled in EscapeControlReader
|
||||
return escaped, template.HTML(sb.String())
|
||||
}
|
||||
|
||||
// EscapeControlReader escapes the unicode control sequences in a provided reader of HTML content and writer in a locale and returns the findings as an EscapeStatus
|
||||
func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, err error) {
|
||||
if !setting.UI.AmbiguousUnicodeDetection {
|
||||
func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation.Locale, context escapeContext, allowed ...rune) (escaped *EscapeStatus, err error) {
|
||||
if !setting.UI.AmbiguousUnicodeDetection || slices.Contains(setting.UI.SkipEscapeContexts, string(context)) {
|
||||
_, err = io.Copy(writer, reader)
|
||||
return &EscapeStatus{}, err
|
||||
}
|
||||
|
|
|
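
Escaping is now context-aware: callers state what they are rendering, and administrators can exempt whole contexts through [ui].SKIP_ESCAPE_CONTEXTS. A sketch of the new call shape; the wrapper function is invented, while WikiContext, FileviewContext and DiffContext come from this diff:

package example

import (
	"html/template"

	"code.gitea.io/gitea/modules/charset"
	"code.gitea.io/gitea/modules/translation"
)

// escapeWikiPage passes the "wiki" context, so escaping is skipped entirely
// when an admin lists wiki in SKIP_ESCAPE_CONTEXTS.
func escapeWikiPage(rendered template.HTML, locale translation.Locale) template.HTML {
	_, out := charset.EscapeControlHTML(rendered, locale, charset.WikiContext)
	return out
}
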
@ -173,7 +173,7 @@ func (e *escapeStreamer) ambiguousRune(r, c rune) error {
|
|||
Val: "ambiguous-code-point",
|
||||
}, html.Attribute{
|
||||
Key: "data-tooltip-content",
|
||||
Val: e.locale.Tr("repo.ambiguous_character", r, c),
|
||||
Val: e.locale.TrString("repo.ambiguous_character", r, c),
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
package charset
|
||||
|
||||
import (
|
||||
"html/template"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
|
@ -14,6 +15,8 @@ import (
|
|||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
var testContext = escapeContext("test")
|
||||
|
||||
type escapeControlTest struct {
|
||||
name string
|
||||
text string
|
||||
|
@ -159,7 +162,7 @@ func TestEscapeControlReader(t *testing.T) {
|
|||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
output := &strings.Builder{}
|
||||
status, err := EscapeControlReader(strings.NewReader(tt.text), output, &translation.MockLocale{})
|
||||
status, err := EscapeControlReader(strings.NewReader(tt.text), output, &translation.MockLocale{}, testContext)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.status, *status)
|
||||
assert.Equal(t, tt.result, output.String())
|
||||
|
@ -169,9 +172,22 @@ func TestEscapeControlReader(t *testing.T) {
|
|||
|
||||
func TestSettingAmbiguousUnicodeDetection(t *testing.T) {
|
||||
defer test.MockVariableValue(&setting.UI.AmbiguousUnicodeDetection, true)()
|
||||
_, out := EscapeControlHTML("a test", &translation.MockLocale{})
|
||||
|
||||
_, out := EscapeControlHTML("a test", &translation.MockLocale{}, testContext)
|
||||
assert.EqualValues(t, `a<span class="escaped-code-point" data-escaped="[U+00A0]"><span class="char"> </span></span>test`, out)
|
||||
setting.UI.AmbiguousUnicodeDetection = false
|
||||
_, out = EscapeControlHTML("a test", &translation.MockLocale{})
|
||||
_, out = EscapeControlHTML("a test", &translation.MockLocale{}, testContext)
|
||||
assert.EqualValues(t, `a test`, out)
|
||||
}
|
||||
|
||||
func TestAmbiguousUnicodeDetectionContext(t *testing.T) {
|
||||
defer test.MockVariableValue(&setting.UI.SkipEscapeContexts, []string{"test"})()
|
||||
|
||||
input := template.HTML("a test")
|
||||
|
||||
_, out := EscapeControlHTML(input, &translation.MockLocale{}, escapeContext("not-test"))
|
||||
assert.EqualValues(t, `a<span class="escaped-code-point" data-escaped="[U+00A0]"><span class="char"> </span></span>test`, out)
|
||||
|
||||
_, out = EscapeControlHTML(input, &translation.MockLocale{}, testContext)
|
||||
assert.EqualValues(t, input, out)
|
||||
}
|
||||
|
|
|
@ -247,7 +247,7 @@ func APIContexter() func(http.Handler) http.Handler {
|
|||
// NotFound handles 404s for APIContext
|
||||
// String will replace message, errors will be added to a slice
|
||||
func (ctx *APIContext) NotFound(objs ...any) {
|
||||
message := ctx.Tr("error.not_found")
|
||||
message := ctx.Locale.TrString("error.not_found")
|
||||
var errors []string
|
||||
for _, obj := range objs {
|
||||
// Ignore nil
|
||||
|
@ -309,12 +309,6 @@ func RepoRefForAPI(next http.Handler) http.Handler {
|
|||
return
|
||||
}
|
||||
|
||||
objectFormat, err := ctx.Repo.GitRepo.GetObjectFormat()
|
||||
if err != nil {
|
||||
ctx.Error(http.StatusInternalServerError, "GetCommit", err)
|
||||
return
|
||||
}
|
||||
|
||||
if ref := ctx.FormTrim("ref"); len(ref) > 0 {
|
||||
commit, err := ctx.Repo.GitRepo.GetCommit(ref)
|
||||
if err != nil {
|
||||
|
@ -333,6 +327,7 @@ func RepoRefForAPI(next http.Handler) http.Handler {
|
|||
}
|
||||
|
||||
refName := getRefName(ctx.Base, ctx.Repo, RepoRefAny)
|
||||
var err error
|
||||
|
||||
if ctx.Repo.GitRepo.IsBranchExist(refName) {
|
||||
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
|
||||
|
@ -348,7 +343,7 @@ func RepoRefForAPI(next http.Handler) http.Handler {
|
|||
return
|
||||
}
|
||||
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
|
||||
} else if len(refName) == objectFormat.FullLength() {
|
||||
} else if len(refName) == ctx.Repo.GetObjectFormat().FullLength() {
|
||||
ctx.Repo.CommitID = refName
|
||||
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
|
||||
if err != nil {
|
||||
|
|
|
@ -6,6 +6,7 @@ package context
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
@ -286,11 +287,11 @@ func (b *Base) cleanUp() {
|
|||
}
|
||||
}
|
||||
|
||||
func (b *Base) Tr(msg string, args ...any) string {
|
||||
func (b *Base) Tr(msg string, args ...any) template.HTML {
|
||||
return b.Locale.Tr(msg, args...)
|
||||
}
|
||||
|
||||
func (b *Base) TrN(cnt any, key1, keyN string, args ...any) string {
|
||||
func (b *Base) TrN(cnt any, key1, keyN string, args ...any) template.HTML {
|
||||
return b.Locale.TrN(cnt, key1, keyN, args...)
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,8 @@ package context
|
|||
|
||||
import (
|
||||
"context"
|
||||
"html"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"net/http"
|
||||
|
@ -71,16 +72,6 @@ func init() {
|
|||
})
|
||||
}
|
||||
|
||||
// TrHTMLEscapeArgs runs ".Locale.Tr()" but pre-escapes all arguments with html.EscapeString.
|
||||
// This is useful if the locale message is intended to only produce HTML content.
|
||||
func (ctx *Context) TrHTMLEscapeArgs(msg string, args ...string) string {
|
||||
trArgs := make([]any, len(args))
|
||||
for i, arg := range args {
|
||||
trArgs[i] = html.EscapeString(arg)
|
||||
}
|
||||
return ctx.Locale.Tr(msg, trArgs...)
|
||||
}
|
||||
|
||||
type webContextKeyType struct{}
|
||||
|
||||
var WebContextKey = webContextKeyType{}
|
||||
|
@ -134,7 +125,7 @@ func NewWebContext(base *Base, render Render, session session.Store) *Context {
|
|||
func Contexter() func(next http.Handler) http.Handler {
|
||||
rnd := templates.HTMLRenderer()
|
||||
csrfOpts := CsrfOptions{
|
||||
Secret: setting.SecretKey,
|
||||
Secret: hex.EncodeToString(setting.GetGeneralTokenSigningSecret()),
|
||||
Cookie: setting.CSRFCookieName,
|
||||
SetCookie: true,
|
||||
Secure: setting.SessionConfig.Secure,
|
||||
|
@ -207,6 +198,7 @@ func Contexter() func(next http.Handler) http.Handler {
|
|||
// FIXME: do we really always need these setting? There should be someway to have to avoid having to always set these
|
||||
ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations
|
||||
ctx.Data["DisableStars"] = setting.Repository.DisableStars
|
||||
ctx.Data["DisableForks"] = setting.Repository.DisableForks
|
||||
ctx.Data["EnableActions"] = setting.Actions.Enabled
|
||||
|
||||
ctx.Data["ManifestData"] = setting.ManifestData
|
||||
|
@@ -253,6 +245,13 @@ func (ctx *Context) JSONOK() {
 	ctx.JSON(http.StatusOK, map[string]any{"ok": true}) // this is only a dummy response, frontend seldom uses it
 }

-func (ctx *Context) JSONError(msg string) {
-	ctx.JSON(http.StatusBadRequest, map[string]any{"errorMessage": msg})
+func (ctx *Context) JSONError(msg any) {
+	switch v := msg.(type) {
+	case string:
+		ctx.JSON(http.StatusBadRequest, map[string]any{"errorMessage": v, "renderFormat": "text"})
+	case template.HTML:
+		ctx.JSON(http.StatusBadRequest, map[string]any{"errorMessage": v, "renderFormat": "html"})
+	default:
+		panic(fmt.Sprintf("unsupported type: %T", msg))
+	}
 }

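
JSONError now tells the frontend whether the message is plain text or pre-rendered HTML (for example the template.HTML that locale.Tr returns after this merge) via renderFormat. A hedged handler sketch; the translation key is invented and the modules/context import path is an assumption for this tree:

package example

import (
	gitea_context "code.gitea.io/gitea/modules/context"
)

// rejectEmptyTitle is an invented validation helper showing one branch of
// the new JSONError: a template.HTML message (from ctx.Tr) is sent with
// renderFormat "html", while a plain string would be sent with "text".
func rejectEmptyTitle(ctx *gitea_context.Context, title string) bool {
	if title == "" {
		ctx.JSONError(ctx.Tr("repo.issues.new.title_empty"))
		return false
	}
	return true
}
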
@@ -90,6 +90,20 @@ func (ctx *Context) HTML(status int, name base.TplName) {
 	}
 }

+// JSONTemplate renders the template as JSON response
+// keep in mind that the template is processed in HTML context, so JSON-things should be handled carefully, eg: by JSEscape
+func (ctx *Context) JSONTemplate(tmpl base.TplName) {
+	t, err := ctx.Render.TemplateLookup(string(tmpl), nil)
+	if err != nil {
+		ctx.ServerError("unable to find template", err)
+		return
+	}
+	ctx.Resp.Header().Set("Content-Type", "application/json")
+	if err = t.Execute(ctx.Resp, ctx.Data); err != nil {
+		ctx.ServerError("unable to execute template", err)
+	}
+}
+
 // RenderToString renders the template content to a string
 func (ctx *Context) RenderToString(name base.TplName, data map[string]any) (string, error) {
 	var buf strings.Builder

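
JSONTemplate lets a handler keep filling ctx.Data as usual and emit it through a template that produces JSON rather than HTML. A hedged usage sketch; the template name, data key and import paths are assumptions, not taken from this diff:

package example

import (
	"code.gitea.io/gitea/modules/base"
	gitea_context "code.gitea.io/gitea/modules/context"
)

// searchSuggestions is an invented handler: the hypothetical template
// "repo/search_suggestions" would render ctx.Data as a JSON document.
func searchSuggestions(ctx *gitea_context.Context) {
	ctx.Data["Keyword"] = ctx.FormTrim("q")
	ctx.JSONTemplate(base.TplName("repo/search_suggestions"))
}
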
@ -98,12 +112,11 @@ func (ctx *Context) RenderToString(name base.TplName, data map[string]any) (stri
|
|||
}
|
||||
|
||||
// RenderWithErr used for page has form validation but need to prompt error to users.
|
||||
func (ctx *Context) RenderWithErr(msg string, tpl base.TplName, form any) {
|
||||
func (ctx *Context) RenderWithErr(msg any, tpl base.TplName, form any) {
|
||||
if form != nil {
|
||||
middleware.AssignForm(form, ctx.Data)
|
||||
}
|
||||
ctx.Flash.ErrorMsg = msg
|
||||
ctx.Data["Flash"] = ctx.Flash
|
||||
ctx.Flash.Error(msg, true)
|
||||
ctx.HTML(http.StatusOK, tpl)
|
||||
}
|
||||
|
||||
|
|
|
@ -5,10 +5,7 @@ package context
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
var _ context.Context = TemplateContext(nil)
|
||||
|
@ -36,14 +33,3 @@ func (c TemplateContext) Err() error {
|
|||
func (c TemplateContext) Value(key any) any {
|
||||
return c.parentContext().Value(key)
|
||||
}
|
||||
|
||||
// DataRaceCheck checks whether the template context function "ctx()" returns the consistent context
|
||||
// as the current template's rendering context (request context), to help to find data race issues as early as possible.
|
||||
// When the code is proven to be correct and stable, this function should be removed.
|
||||
func (c TemplateContext) DataRaceCheck(dataCtx context.Context) (string, error) {
|
||||
if c.parentContext() != dataCtx {
|
||||
log.Error("TemplateContext.DataRaceCheck: parent context mismatch\n%s", log.Stack(2))
|
||||
return "", errors.New("parent context mismatch")
|
||||
}
|
||||
return "", nil
|
||||
}
|
||||
|
|
|
@ -11,6 +11,8 @@ import (
|
|||
"code.gitea.io/gitea/models/perm"
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/markup"
|
||||
"code.gitea.io/gitea/modules/markup/markdown"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
)
|
||||
|
@ -255,6 +257,19 @@ func HandleOrgAssignment(ctx *Context, args ...bool) {
|
|||
ctx.Data["CanReadProjects"] = ctx.Org.CanReadUnit(ctx, unit.TypeProjects)
|
||||
ctx.Data["CanReadPackages"] = ctx.Org.CanReadUnit(ctx, unit.TypePackages)
|
||||
ctx.Data["CanReadCode"] = ctx.Org.CanReadUnit(ctx, unit.TypeCode)
|
||||
|
||||
ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
|
||||
if len(ctx.ContextUser.Description) != 0 {
|
||||
content, err := markdown.RenderString(&markup.RenderContext{
|
||||
Metas: map[string]string{"mode": "document"},
|
||||
Ctx: ctx,
|
||||
}, ctx.ContextUser.Description)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
return
|
||||
}
|
||||
ctx.Data["RenderedDescription"] = content
|
||||
}
|
||||
}
|
||||
|
||||
// OrgAssignment returns a middleware to handle organization assignment
|
||||
|
|
Some files were not shown because too many files have changed in this diff.