Mirror of https://codeberg.org/forgejo/forgejo.git (synced 2024-11-08 18:04:14 +01:00)
Merge pull request '[gitea] v1.21 cherry-pick' (#2621) from earl-warren/forgejo:wip-v1.21-gitea-cherry-pick into v1.21/forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/2621
Reviewed-by: oliverpool <oliverpool@noreply.codeberg.org>
Reviewed-by: Otto <otto@codeberg.org>
Commit: bd71bdda26
53 changed files with 516 additions and 156 deletions
@@ -395,10 +395,14 @@ func (a *Action) GetCreate() time.Time {
 	return a.CreatedUnix.AsTime()
 }
 
-// GetIssueInfos returns a list of issues associated with
-// the action.
+// GetIssueInfos returns a list of associated information with the action.
 func (a *Action) GetIssueInfos() []string {
-	return strings.SplitN(a.Content, "|", 3)
+	// make sure it always returns 3 elements, because there are some access to the a[1] and a[2] without checking the length
+	ret := strings.SplitN(a.Content, "|", 3)
+	for len(ret) < 3 {
+		ret = append(ret, "")
+	}
+	return ret
 }
 
 // GetIssueTitle returns the title of first issue associated with the action.

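To illustrate what the patched GetIssueInfos guarantees, here is a small standalone Go sketch (not part of the patch; the helper name is made up): after padding, reading the second and third elements is safe even when the action content has fewer fields.

package main

import (
	"fmt"
	"strings"
)

// getIssueInfos mirrors the patched GetIssueInfos: split on "|" into at most
// three parts and pad with empty strings so a[1] and a[2] always exist.
func getIssueInfos(content string) []string {
	ret := strings.SplitN(content, "|", 3)
	for len(ret) < 3 {
		ret = append(ret, "")
	}
	return ret
}

func main() {
	info := getIssueInfos("42") // only one field present
	fmt.Printf("%q\n", info)    // ["42" "" ""] -- info[1] and info[2] are safe to read
}
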
@@ -70,16 +70,26 @@ type PackageFileDescriptor struct {
 	Properties PackagePropertyList
 }
 
-// PackageWebLink returns the package web link
+// PackageWebLink returns the relative package web link
 func (pd *PackageDescriptor) PackageWebLink() string {
 	return fmt.Sprintf("%s/-/packages/%s/%s", pd.Owner.HomeLink(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName))
 }
 
-// FullWebLink returns the package version web link
-func (pd *PackageDescriptor) FullWebLink() string {
+// VersionWebLink returns the relative package version web link
+func (pd *PackageDescriptor) VersionWebLink() string {
 	return fmt.Sprintf("%s/%s", pd.PackageWebLink(), url.PathEscape(pd.Version.LowerVersion))
 }
 
+// PackageHTMLURL returns the absolute package HTML URL
+func (pd *PackageDescriptor) PackageHTMLURL() string {
+	return fmt.Sprintf("%s/-/packages/%s/%s", pd.Owner.HTMLURL(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName))
+}
+
+// VersionHTMLURL returns the absolute package version HTML URL
+func (pd *PackageDescriptor) VersionHTMLURL() string {
+	return fmt.Sprintf("%s/%s", pd.PackageHTMLURL(), url.PathEscape(pd.Version.LowerVersion))
+}
+
 // CalculateBlobSize returns the total blobs size in bytes
 func (pd *PackageDescriptor) CalculateBlobSize() int64 {
 	size := int64(0)

@@ -9,6 +9,7 @@ import (
 	"strings"
 
 	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/util"
 

@@ -30,6 +31,8 @@ type SearchUserOptions struct {
 	Actor *User // The user doing the search
 	SearchByEmail bool // Search by email as well as username/full name
 
+	SupportedSortOrders container.Set[string] // if not nil, only allow to use the sort orders in this set
+
 	IsActive util.OptionalBool
 	IsAdmin util.OptionalBool
 	IsRestricted util.OptionalBool

@@ -35,6 +35,9 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
 	} else if task.Status.IsDone() {
 		preStep.Stopped = task.Stopped
 		preStep.Status = actions_model.StatusFailure
+		if task.Status.IsSkipped() {
+			preStep.Status = actions_model.StatusSkipped
+		}
 	}
 	logIndex += preStep.LogLength
 

@@ -118,7 +118,15 @@ func (g *Manager) start(ctx context.Context) {
 	defer close(startupDone)
 	// Wait till we're done getting all of the listeners and then close
 	// the unused ones
+	func() {
+		// FIXME: there is a fundamental design problem of the "manager" and the "wait group".
+		// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned
+		// There is no clear solution besides a complete rewriting of the "manager"
+		defer func() {
+			_ = recover()
+		}()
 		g.createServerWaitGroup.Wait()
+	}()
 	// Ignore the error here there's not much we can do with it
 	// They're logged in the CloseProvidedListeners function
 	_ = CloseProvidedListeners()

@@ -227,8 +227,16 @@ func (g *Manager) awaitServer(limit time.Duration) bool {
 	c := make(chan struct{})
 	go func() {
 		defer close(c)
+		func() {
+			// FIXME: there is a fundamental design problem of the "manager" and the "wait group".
+			// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned
+			// There is no clear solution besides a complete rewriting of the "manager"
+			defer func() {
+				_ = recover()
+			}()
 			g.createServerWaitGroup.Wait()
 		}()
+	}()
 	if limit > 0 {
 		select {
 		case <-c:

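Both hunks above apply the same Go idiom: call a possibly panicking WaitGroup.Wait inside an immediately invoked closure with a deferred recover. A minimal standalone sketch of that idiom, assuming nothing about the manager itself:

package main

import (
	"fmt"
	"sync"
)

// waitQuietly waits on wg but swallows a potential sync.WaitGroup misuse panic
// ("WaitGroup is reused before previous Wait has returned"), mirroring the
// recover pattern used in the graceful manager hunks above.
func waitQuietly(wg *sync.WaitGroup) {
	func() {
		defer func() {
			_ = recover()
		}()
		wg.Wait()
	}()
}

func main() {
	var wg sync.WaitGroup
	wg.Add(1)
	go func() { defer wg.Done() }()
	waitQuietly(&wg)
	fmt.Println("wait finished (or panic swallowed)")
}
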
@@ -22,8 +22,12 @@ type Result struct {
 	UpdatedUnix timeutil.TimeStamp
 	Language string
 	Color string
-	LineNumbers []int
-	FormattedLines template.HTML
+	Lines []ResultLine
+}
+
+type ResultLine struct {
+	Num int
+	FormattedContent template.HTML
 }
 
 type SearchResultLanguages = internal.SearchResultLanguages

@@ -70,7 +74,7 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
 	var formattedLinesBuffer bytes.Buffer
 
 	contentLines := strings.SplitAfter(result.Content[startIndex:endIndex], "\n")
-	lineNumbers := make([]int, len(contentLines))
+	lines := make([]ResultLine, 0, len(contentLines))
 	index := startIndex
 	for i, line := range contentLines {
 		var err error

@@ -93,11 +97,20 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
 			return nil, err
 		}
 
-		lineNumbers[i] = startLineNum + i
+		lines = append(lines, ResultLine{Num: startLineNum + i})
 		index += len(line)
 	}
 
-	highlighted, _ := highlight.Code(result.Filename, "", formattedLinesBuffer.String())
+	// we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting
+	hl, _ := highlight.Code(result.Filename, "", formattedLinesBuffer.String())
+	highlightedLines := strings.Split(string(hl), "\n")
+
+	// The lines outputted by highlight.Code might not match the original lines, because "highlight" removes the last `\n`
+	lines = lines[:min(len(highlightedLines), len(lines))]
+	highlightedLines = highlightedLines[:len(lines)]
+	for i := 0; i < len(lines); i++ {
+		lines[i].FormattedContent = template.HTML(highlightedLines[i])
+	}
 
 	return &Result{
 		RepoID: result.RepoID,

@@ -106,8 +119,7 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
 		UpdatedUnix: result.UpdatedUnix,
 		Language: result.Language,
 		Color: result.Color,
-		LineNumbers: lineNumbers,
-		FormattedLines: highlighted,
+		Lines: lines,
 	}, nil
 }
 

@@ -93,9 +93,11 @@ func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Wri
 		if _, err := tmpBlock.WriteString(html.EscapeString(string(rawBytes))); err != nil {
 			return err
 		}
-		_, err = tmpBlock.WriteString("</pre>")
+		if _, err := tmpBlock.WriteString("</pre>"); err != nil {
 			return err
 		}
+		return tmpBlock.Flush()
+	}
 
 	rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, bytes.NewReader(rawBytes))
 	if err != nil {

@@ -60,6 +60,9 @@ func (q *WorkerPoolQueue[T]) doDispatchBatchToWorker(wg *workerGroup[T], flushCh
 		full = true
 	}
 
+	// TODO: the logic could be improved in the future, to avoid a data-race between "doStartNewWorker" and "workerNum"
+	// The root problem is that if we skip "doStartNewWorker" here, the "workerNum" might be decreased by other workers later
+	// So ideally, it should check whether there are enough workers by some approaches, and start new workers if necessary.
 	q.workerNumMu.Lock()
 	noWorker := q.workerNum == 0
 	if full || noWorker {

@@ -143,7 +146,11 @@ func (q *WorkerPoolQueue[T]) doStartNewWorker(wp *workerGroup[T]) {
 	log.Debug("Queue %q starts new worker", q.GetName())
 	defer log.Debug("Queue %q stops idle worker", q.GetName())
 
+	atomic.AddInt32(&q.workerStartedCounter, 1) // Only increase counter, used for debugging
+
 	t := time.NewTicker(workerIdleDuration)
+	defer t.Stop()
+
 	keepWorking := true
 	stopWorking := func() {
 		q.workerNumMu.Lock()

@@ -158,13 +165,18 @@ func (q *WorkerPoolQueue[T]) doStartNewWorker(wp *workerGroup[T]) {
 		case batch, ok := <-q.batchChan:
 			if !ok {
 				stopWorking()
-			} else {
+				continue
+			}
 			q.doWorkerHandle(batch)
+			// reset the idle ticker, and drain the tick after reset in case a tick is already triggered
 			t.Reset(workerIdleDuration)
+			select {
+			case <-t.C:
+			default:
 			}
 		case <-t.C:
 			q.workerNumMu.Lock()
-			keepWorking = q.workerNum <= 1
+			keepWorking = q.workerNum <= 1 // keep the last worker running
 			if !keepWorking {
 				q.workerNum--
 			}

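The "drain the tick" comment above is the key detail: time.Ticker.Reset does not clear a tick that already fired, so without the non-blocking receive a stale tick could make a busy worker look idle. A short standalone sketch of the pattern (durations are arbitrary, nothing here is part of the queue code):

package main

import (
	"fmt"
	"time"
)

func main() {
	const idle = 50 * time.Millisecond
	t := time.NewTicker(idle)
	defer t.Stop()

	time.Sleep(60 * time.Millisecond) // pretend we just handled a batch; a tick is already pending

	// Restart the idle countdown, then drain the tick that may have fired
	// before the Reset so it is not mistaken for a fresh idle period.
	t.Reset(idle)
	select {
	case <-t.C:
	default:
	}

	fmt.Println("idle ticker restarted without a stale tick")
}
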
@@ -40,6 +40,8 @@ type WorkerPoolQueue[T any] struct {
 	workerMaxNum int
 	workerActiveNum int
 	workerNumMu sync.Mutex
+
+	workerStartedCounter int32
 }
 
 type flushType chan struct{}

@@ -11,6 +11,7 @@ import (
 	"time"
 
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/test"
 
 	"github.com/stretchr/testify/assert"
 )

@@ -175,11 +176,7 @@ func testWorkerPoolQueuePersistence(t *testing.T, queueSetting setting.QueueSett
 }
 
 func TestWorkerPoolQueueActiveWorkers(t *testing.T) {
-	oldWorkerIdleDuration := workerIdleDuration
-	workerIdleDuration = 300 * time.Millisecond
-	defer func() {
-		workerIdleDuration = oldWorkerIdleDuration
-	}()
+	defer test.MockVariableValue(&workerIdleDuration, 300*time.Millisecond)()
 
 	handler := func(items ...int) (unhandled []int) {
 		time.Sleep(100 * time.Millisecond)

@@ -250,3 +247,25 @@ func TestWorkerPoolQueueShutdown(t *testing.T) {
 	q, _ = newWorkerPoolQueueForTest("test-workpoolqueue", qs, handler, false)
 	assert.EqualValues(t, 20, q.GetQueueItemNumber())
 }
+
+func TestWorkerPoolQueueWorkerIdleReset(t *testing.T) {
+	defer test.MockVariableValue(&workerIdleDuration, 10*time.Millisecond)()
+
+	handler := func(items ...int) (unhandled []int) {
+		time.Sleep(50 * time.Millisecond)
+		return nil
+	}
+
+	q, _ := newWorkerPoolQueueForTest("test-workpoolqueue", setting.QueueSettings{Type: "channel", BatchLength: 1, MaxWorkers: 2, Length: 100}, handler, false)
+	stop := runWorkerPoolQueue(q)
+	for i := 0; i < 20; i++ {
+		assert.NoError(t, q.Push(i))
+	}
+
+	time.Sleep(500 * time.Millisecond)
+	assert.EqualValues(t, 2, q.GetWorkerNumber())
+	assert.EqualValues(t, 2, q.GetWorkerActiveNumber())
+	// when the queue never becomes empty, the existing workers should keep working
+	assert.EqualValues(t, 2, q.workerStartedCounter)
+	stop()
+}

@@ -31,9 +31,9 @@ var (
 	// mentionPattern matches all mentions in the form of "@user" or "@org/team"
 	mentionPattern = regexp.MustCompile(`(?:\s|^|\(|\[)(@[0-9a-zA-Z-_]+|@[0-9a-zA-Z-_]+\/?[0-9a-zA-Z-_]+|@[0-9a-zA-Z-_][0-9a-zA-Z-_.]+\/?[0-9a-zA-Z-_.]+[0-9a-zA-Z-_])(?:\s|[:,;.?!]\s|[:,;.?!]?$|\)|\])`)
 	// issueNumericPattern matches string that references to a numeric issue, e.g. #1287
-	issueNumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[|\')([#!][0-9]+)(?:\s|$|\)|\]|[:;,.?!]\s|[:;,.?!]$)`)
+	issueNumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[|\'|\")([#!][0-9]+)(?:\s|$|\)|\]|\'|\"|[:;,.?!]\s|[:;,.?!]$)`)
 	// issueAlphanumericPattern matches string that references to an alphanumeric issue, e.g. ABC-1234
-	issueAlphanumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([A-Z]{1,10}-[1-9][0-9]*)(?:\s|$|\)|\]|:|\.(\s|$))`)
+	issueAlphanumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[|\"|\')([A-Z]{1,10}-[1-9][0-9]*)(?:\s|$|\)|\]|:|\.(\s|$)|\"|\')`)
 	// crossReferenceIssueNumericPattern matches string that references a numeric issue in a different repository
 	// e.g. org/repo#12345
 	crossReferenceIssueNumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-zA-Z-_\.]+/[0-9a-zA-Z-_\.]+[#!][0-9]+)(?:\s|$|\)|\]|[:;,.?!]\s|[:;,.?!]$)`)

@@ -429,6 +429,8 @@ func TestRegExp_issueNumericPattern(t *testing.T) {
 		" #12",
 		"#12:",
 		"ref: #12: msg",
+		"\"#1234\"",
+		"'#1234'",
 	}
 	falseTestCases := []string{
 		"# 1234",

@@ -459,6 +461,8 @@ func TestRegExp_issueAlphanumericPattern(t *testing.T) {
 		"(ABC-123)",
 		"[ABC-123]",
 		"ABC-123:",
+		"\"ABC-123\"",
+		"'ABC-123'",
 	}
 	falseTestCases := []string{
 		"RC-08",

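To see what the widened patterns change, the following standalone check (not part of the patch) runs the new issueNumericPattern from the hunk above against quoted and unquoted references:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// The updated issueNumericPattern from this change: it now also accepts
	// references wrapped in single or double quotes, e.g. "#1234" or '#1234'.
	issueNumericPattern := regexp.MustCompile(`(?:\s|^|\(|\[|\'|\")([#!][0-9]+)(?:\s|$|\)|\]|\'|\"|[:;,.?!]\s|[:;,.?!]$)`)

	for _, s := range []string{`"#1234"`, `'#1234'`, `(#1234)`, `# 1234`} {
		m := issueNumericPattern.FindStringSubmatch(s)
		if m != nil {
			fmt.Printf("%-10s -> matched %q\n", s, m[1])
		} else {
			fmt.Printf("%-10s -> no match\n", s)
		}
	}
}
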
@@ -21,7 +21,7 @@ var SessionConfig = struct {
 	ProviderConfig string
 	// Cookie name to save session ID. Default is "MacaronSession".
 	CookieName string
-	// Cookie path to store. Default is "/". HINT: there was a bug, the old value doesn't have trailing slash, and could be empty "".
+	// Cookie path to store. Default is "/".
 	CookiePath string
 	// GC interval time in seconds. Default is 3600.
 	Gclifetime int64

@@ -49,7 +49,10 @@ func loadSessionFrom(rootCfg ConfigProvider) {
 		SessionConfig.ProviderConfig = path.Join(AppWorkPath, SessionConfig.ProviderConfig)
 	}
 	SessionConfig.CookieName = sec.Key("COOKIE_NAME").MustString("i_like_gitea")
-	SessionConfig.CookiePath = AppSubURL + "/" // there was a bug, old code only set CookePath=AppSubURL, no trailing slash
+	SessionConfig.CookiePath = AppSubURL
+	if SessionConfig.CookiePath == "" {
+		SessionConfig.CookiePath = "/"
+	}
 	SessionConfig.Secure = sec.Key("COOKIE_SECURE").MustBool(strings.HasPrefix(strings.ToLower(AppURL), "https://"))
 	SessionConfig.Gclifetime = sec.Key("GC_INTERVAL_TIME").MustInt64(86400)
 	SessionConfig.Maxlifetime = sec.Key("SESSION_LIFE_TIME").MustInt64(86400)

@@ -574,6 +574,8 @@ enterred_invalid_repo_name = The repository name you entered is incorrect.
 enterred_invalid_org_name = The organization name you entered is incorrect.
 enterred_invalid_owner_name = The new owner name is not valid.
 enterred_invalid_password = The password you entered is incorrect.
+unset_password = The login user has not set the password.
+unsupported_login_type = The login type is not supported to delete account.
 user_not_exist = The user does not exist.
 team_not_exist = The team does not exist.
 last_org_owner = You cannot remove the last user from the 'owners' team. There must be at least one owner for an organization.

@@ -1802,9 +1804,9 @@ pulls.unrelated_histories = Merge Failed: The merge head and base do not share a
 pulls.merge_out_of_date = Merge Failed: Whilst generating the merge, the base was updated. Hint: Try again.
 pulls.head_out_of_date = Merge Failed: Whilst generating the merge, the head was updated. Hint: Try again.
 pulls.has_merged = Failed: The pull request has been merged, you cannot merge again or change the target branch.
-pulls.push_rejected = Merge Failed: The push was rejected. Review the Git Hooks for this repository.
+pulls.push_rejected = Push Failed: The push was rejected. Review the Git Hooks for this repository.
 pulls.push_rejected_summary = Full Rejection Message
-pulls.push_rejected_no_message = Merge Failed: The push was rejected but there was no remote message.<br>Review the Git Hooks for this repository
+pulls.push_rejected_no_message = Push Failed: The push was rejected but there was no remote message. Review the Git Hooks for this repository
 pulls.open_unmerged_pull_exists = `You cannot perform a reopen operation because there is a pending pull request (#%d) with identical properties.`
 pulls.status_checking = Some checks are pending
 pulls.status_checks_success = All checks were successful

public/assets/img/svg/gitea-twitter.svg (generated, 2 changed lines)
@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" fill-rule="evenodd" stroke-linejoin="round" stroke-miterlimit="2" aria-hidden="true" class="gitea-twitter__svg gitea-twitter__gitea-twitter svg gitea-twitter" clip-rule="evenodd" viewBox="-89.009 -46.884 643.937 446.884" width="16" height="16"><path fill="#1da1f2" fill-rule="nonzero" d="M154.729 400c185.669 0 287.205-153.876 287.205-287.312 0-4.37-.089-8.72-.286-13.052A205.304 205.304 0 0 0 492 47.346c-18.087 8.044-37.55 13.458-57.968 15.899 20.841-12.501 36.84-32.278 44.389-55.852a202.42 202.42 0 0 1-64.098 24.511C395.903 12.276 369.679 0 340.641 0c-55.744 0-100.948 45.222-100.948 100.965 0 7.925.887 15.631 2.619 23.025-83.895-4.223-158.287-44.405-208.074-105.504A100.739 100.739 0 0 0 20.57 69.24c0 35.034 17.82 65.961 44.92 84.055a100.172 100.172 0 0 1-45.716-12.63c-.015.424-.015.837-.015 1.29 0 48.903 34.794 89.734 80.982 98.986a101.036 101.036 0 0 1-26.617 3.553c-6.493 0-12.821-.639-18.971-1.82 12.851 40.122 50.115 69.319 94.296 70.135-34.549 27.089-78.07 43.224-125.371 43.224A204.9 204.9 0 0 1 0 354.634c44.674 28.645 97.72 45.359 154.734 45.359"/></svg>
+<svg viewBox="0 0 24 24" class="svg gitea-twitter" xmlns="http://www.w3.org/2000/svg" width="16" height="16" aria-hidden="true"><path d="M14.095 10.316 22.286 1h-1.94L13.23 9.088 7.551 1H1l8.59 12.231L1 23h1.94l7.51-8.543 6 8.543H23l-8.905-12.684zm-2.658 3.022-.872-1.218L3.64 2.432h2.98l5.59 7.821.869 1.219 7.265 10.166h-2.982l-5.926-8.3z"/></svg>
(Size before: 1.1 KiB, after: 349 B)

@@ -12,6 +12,7 @@ import (
 
 	packages_model "code.gitea.io/gitea/models/packages"
 	npm_module "code.gitea.io/gitea/modules/packages/npm"
+	"code.gitea.io/gitea/modules/setting"
 )
 
 func createPackageMetadataResponse(registryURL string, pds []*packages_model.PackageDescriptor) *npm_module.PackageMetadata {

@@ -98,7 +99,7 @@ func createPackageSearchResponse(pds []*packages_model.PackageDescriptor, total
 			Maintainers: []npm_module.User{}, // npm cli needs this field
 			Keywords: metadata.Keywords,
 			Links: &npm_module.PackageSearchPackageLinks{
-				Registry: pd.FullWebLink(),
+				Registry: setting.AppURL + "api/packages/" + pd.Owner.Name + "/npm",
 				Homepage: metadata.ProjectURL,
 			},
 		},

@@ -654,6 +654,7 @@ func UpdateFile(ctx *context.APIContext) {
 	apiOpts := web.GetForm(ctx).(*api.UpdateFileOptions)
 	if ctx.Repo.Repository.IsEmpty {
 		ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", fmt.Errorf("repo is empty"))
+		return
 	}
 
 	if apiOpts.BranchName == "" {

@@ -4,6 +4,7 @@
 package repo
 
 import (
+	"fmt"
 	"net/http"
 
 	"code.gitea.io/gitea/models"

@@ -221,6 +222,10 @@ func CreateRelease(ctx *context.APIContext) {
 	//   "409":
 	//     "$ref": "#/responses/error"
 	form := web.GetForm(ctx).(*api.CreateReleaseOption)
+	if ctx.Repo.Repository.IsEmpty {
+		ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", fmt.Errorf("repo is empty"))
+		return
+	}
 	rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, form.TagName)
 	if err != nil {
 		if !repo_model.IsErrReleaseNotExist(err) {

@@ -38,6 +38,7 @@ func ProtocolMiddlewares() (handlers []any) {
 		})
 	})
 
+	// wrap the request and response, use the process context and add it to the process manager
 	handlers = append(handlers, func(next http.Handler) http.Handler {
 		return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
 			ctx, _, finished := process.GetManager().AddTypedContext(req.Context(), fmt.Sprintf("%s: %s", req.Method, req.RequestURI), process.RequestProcessType, true)

@@ -6,6 +6,7 @@ package explore
 import (
 	"code.gitea.io/gitea/models/db"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/structs"

@@ -24,8 +25,16 @@ func Organizations(ctx *context.Context) {
 		visibleTypes = append(visibleTypes, structs.VisibleTypeLimited, structs.VisibleTypePrivate)
 	}
 
-	if ctx.FormString("sort") == "" {
-		ctx.SetFormString("sort", UserSearchDefaultSortType)
+	supportedSortOrders := container.SetOf(
+		"newest",
+		"oldest",
+		"alphabetically",
+		"reversealphabetically",
+	)
+	sortOrder := ctx.FormString("sort")
+	if sortOrder == "" {
+		sortOrder = "newest"
+		ctx.SetFormString("sort", sortOrder)
 	}
 
 	RenderUserSearch(ctx, &user_model.SearchUserOptions{

@@ -33,5 +42,7 @@ func Organizations(ctx *context.Context) {
 		Type: user_model.UserTypeOrganization,
 		ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum},
 		Visible: visibleTypes,
+
+		SupportedSortOrders: supportedSortOrders,
 	}, tplExploreUsers)
 }

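The SupportedSortOrders set is essentially an allowlist checked later in RenderUserSearch. A self-contained sketch of the same idea using a plain map instead of gitea's container.Set (illustrative only, not the project's API):

package main

import "fmt"

func main() {
	// Allowlist of sort orders the explore pages accept, mirroring the
	// SupportedSortOrders set introduced in this change.
	supported := map[string]bool{
		"newest":                true,
		"oldest":                true,
		"alphabetically":        true,
		"reversealphabetically": true,
	}

	for _, sort := range []string{"", "newest", "recentupdate"} {
		if sort == "" {
			sort = "newest" // default when no sort parameter is given
		}
		if !supported[sort] {
			fmt.Printf("%q -> rejected (unsupported sort order)\n", sort)
			continue
		}
		fmt.Printf("%q -> accepted\n", sort)
	}
}
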
@@ -10,6 +10,7 @@ import (
 	"code.gitea.io/gitea/models/db"
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/base"
+	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"

@@ -60,8 +61,8 @@ func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions,
 
 	// we can not set orderBy to `models.SearchOrderByXxx`, because there may be a JOIN in the statement, different tables may have the same name columns
 
-	ctx.Data["SortType"] = ctx.FormString("sort")
-	switch ctx.FormString("sort") {
+	sortOrder := ctx.FormString("sort")
+	switch sortOrder {
 	case "newest":
 		orderBy = "`user`.id DESC"
 	case "oldest":

@@ -80,9 +81,15 @@ func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions,
 		fallthrough
 	default:
 		// in case the sortType is not valid, we set it to recentupdate
-		ctx.Data["SortType"] = "recentupdate"
+		sortOrder = "recentupdate"
 		orderBy = "`user`.updated_unix DESC"
 	}
+	ctx.Data["SortType"] = sortOrder
+
+	if opts.SupportedSortOrders != nil && !opts.SupportedSortOrders.Contains(sortOrder) {
+		ctx.NotFound("unsupported sort order", nil)
+		return
+	}
 
 	opts.Keyword = ctx.FormTrim("q")
 	opts.OrderBy = orderBy

@@ -133,8 +140,16 @@ func Users(ctx *context.Context) {
 	ctx.Data["PageIsExploreUsers"] = true
 	ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
 
-	if ctx.FormString("sort") == "" {
-		ctx.SetFormString("sort", UserSearchDefaultSortType)
+	supportedSortOrders := container.SetOf(
+		"newest",
+		"oldest",
+		"alphabetically",
+		"reversealphabetically",
+	)
+	sortOrder := ctx.FormString("sort")
+	if sortOrder == "" {
+		sortOrder = "newest"
+		ctx.SetFormString("sort", sortOrder)
 	}
 
 	RenderUserSearch(ctx, &user_model.SearchUserOptions{

@@ -143,5 +158,7 @@ func Users(ctx *context.Context) {
 		ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum},
 		IsActive: util.OptionalBoolTrue,
 		Visible: []structs.VisibleType{structs.VisibleTypePublic, structs.VisibleTypeLimited, structs.VisibleTypePrivate},
+
+		SupportedSortOrders: supportedSortOrders,
 	}, tplExploreUsers)
 }

@@ -12,6 +12,7 @@ import (
 	"io"
 	"net/http"
 	"net/url"
+	"strconv"
 	"strings"
 	"time"
 

@@ -260,10 +261,14 @@ func ViewPost(ctx *context_module.Context) {
 }
 
 // Rerun will rerun jobs in the given run
-// jobIndex = 0 means rerun all jobs
+// If jobIndexStr is a blank string, it means rerun all jobs
 func Rerun(ctx *context_module.Context) {
 	runIndex := ctx.ParamsInt64("run")
-	jobIndex := ctx.ParamsInt64("job")
+	jobIndexStr := ctx.Params("job")
+	var jobIndex int64
+	if jobIndexStr != "" {
+		jobIndex, _ = strconv.ParseInt(jobIndexStr, 10, 64)
+	}
 
 	run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
 	if err != nil {

@@ -284,7 +289,7 @@ func Rerun(ctx *context_module.Context) {
 		return
 	}
 
-	if jobIndex != 0 {
+	if jobIndexStr != "" {
 		jobs = []*actions_model.ActionRunJob{job}
 	}
 

@@ -1443,7 +1443,7 @@ func CompareAndPullRequestPost(ctx *context.Context) {
 			return
 		}
 		ctx.Flash.Error(flashError)
-		ctx.JSONRedirect(pullIssue.Link()) // FIXME: it's unfriendly, and will make the content lost
+		ctx.JSONRedirect(ctx.Link + "?" + ctx.Req.URL.RawQuery) // FIXME: it's unfriendly, and will make the content lost
 		return
 	}
 	ctx.ServerError("NewPullRequest", err)

@@ -161,7 +161,7 @@ func RedirectToLastVersion(ctx *context.Context) {
 		return
 	}
 
-	ctx.Redirect(pd.FullWebLink())
+	ctx.Redirect(pd.VersionWebLink())
 }
 
 // ViewPackageVersion displays a single package version

@@ -19,6 +19,8 @@ import (
 	"code.gitea.io/gitea/modules/timeutil"
 	"code.gitea.io/gitea/modules/web"
 	"code.gitea.io/gitea/services/auth"
+	"code.gitea.io/gitea/services/auth/source/db"
+	"code.gitea.io/gitea/services/auth/source/smtp"
 	"code.gitea.io/gitea/services/forms"
 	"code.gitea.io/gitea/services/mailer"
 	"code.gitea.io/gitea/services/user"

@@ -245,11 +247,24 @@ func DeleteAccount(ctx *context.Context) {
 	ctx.Data["PageIsSettingsAccount"] = true
 
 	if _, _, err := auth.UserSignIn(ctx, ctx.Doer.Name, ctx.FormString("password")); err != nil {
-		if user_model.IsErrUserNotExist(err) {
+		switch {
+		case user_model.IsErrUserNotExist(err):
+			loadAccountData(ctx)
+
+			ctx.RenderWithErr(ctx.Tr("form.user_not_exist"), tplSettingsAccount, nil)
+		case errors.Is(err, smtp.ErrUnsupportedLoginType):
+			loadAccountData(ctx)
+
+			ctx.RenderWithErr(ctx.Tr("form.unsupported_login_type"), tplSettingsAccount, nil)
+		case errors.As(err, &db.ErrUserPasswordNotSet{}):
+			loadAccountData(ctx)
+
+			ctx.RenderWithErr(ctx.Tr("form.unset_password"), tplSettingsAccount, nil)
+		case errors.As(err, &db.ErrUserPasswordInvalid{}):
 			loadAccountData(ctx)
 
 			ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_password"), tplSettingsAccount, nil)
-		} else {
+		default:
 			ctx.ServerError("UserSignIn", err)
 		}
 		return

@@ -7,12 +7,14 @@ import (
 	"context"
 	"errors"
 	"fmt"
+	"strings"
 
 	actions_model "code.gitea.io/gitea/models/actions"
 	"code.gitea.io/gitea/models/db"
 	"code.gitea.io/gitea/modules/graceful"
 	"code.gitea.io/gitea/modules/queue"
 
+	"github.com/nektos/act/pkg/jobparser"
 	"xorm.io/builder"
 )
 

@@ -76,12 +78,15 @@ func checkJobsOfRun(ctx context.Context, runID int64) error {
 type jobStatusResolver struct {
 	statuses map[int64]actions_model.Status
 	needs map[int64][]int64
+	jobMap map[int64]*actions_model.ActionRunJob
 }
 
 func newJobStatusResolver(jobs actions_model.ActionJobList) *jobStatusResolver {
 	idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
+	jobMap := make(map[int64]*actions_model.ActionRunJob)
 	for _, job := range jobs {
 		idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
+		jobMap[job.ID] = job
 	}
 
 	statuses := make(map[int64]actions_model.Status, len(jobs))

@@ -97,6 +102,7 @@ func newJobStatusResolver(jobs actions_model.ActionJobList) *jobStatusResolver {
 	return &jobStatusResolver{
 		statuses: statuses,
 		needs: needs,
+		jobMap: jobMap,
 	}
 }
 

@@ -134,10 +140,23 @@ func (r *jobStatusResolver) resolve() map[int64]actions_model.Status {
 		if allDone {
 			if allSucceed {
 				ret[id] = actions_model.StatusWaiting
+			} else {
+				// If a job's "if" condition is "always()", the job should always run even if some of its dependencies did not succeed.
+				// See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idneeds
+				always := false
+				if wfJobs, _ := jobparser.Parse(r.jobMap[id].WorkflowPayload); len(wfJobs) == 1 {
+					_, wfJob := wfJobs[0].Job()
+					expr := strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(wfJob.If.Value, "${{"), "}}"))
+					always = expr == "always()"
+				}
+
+				if always {
+					ret[id] = actions_model.StatusWaiting
 				} else {
 					ret[id] = actions_model.StatusSkipped
 				}
 			}
 		}
+	}
 	return ret
 }

@@ -70,6 +70,62 @@ func Test_jobStatusResolver_Resolve(t *testing.T) {
 			},
 			want: map[int64]actions_model.Status{},
 		},
+		{
+			name: "with ${{ always() }} condition",
+			jobs: actions_model.ActionJobList{
+				{ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+				{ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+					`
+name: test
+on: push
+jobs:
+  job2:
+    runs-on: ubuntu-latest
+    needs: job1
+    if: ${{ always() }}
+    steps:
+      - run: echo "always run"
+`)},
+			},
+			want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
+		},
+		{
+			name: "with always() condition",
+			jobs: actions_model.ActionJobList{
+				{ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+				{ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+					`
+name: test
+on: push
+jobs:
+  job2:
+    runs-on: ubuntu-latest
+    needs: job1
+    if: always()
+    steps:
+      - run: echo "always run"
+`)},
+			},
+			want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
+		},
+		{
+			name: "without always() condition",
+			jobs: actions_model.ActionJobList{
+				{ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+				{ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+					`
+name: test
+on: push
+jobs:
+  job2:
+    runs-on: ubuntu-latest
+    needs: job1
+    steps:
+      - run: echo "not always run"
+`)},
+			},
+			want: map[int64]actions_model.Status{2: actions_model.StatusSkipped},
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {

@@ -114,6 +114,9 @@ func notify(ctx context.Context, input *notifyInput) error {
 		log.Debug("ignore executing %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
 		return nil
 	}
+	if input.Repo.IsEmpty {
+		return nil
+	}
 	if unit_model.TypeActions.UnitGlobalDisabled() {
 		return nil
 	}

@@ -35,7 +35,7 @@ func ToPackage(ctx context.Context, pd *packages.PackageDescriptor, doer *user_m
 		Name: pd.Package.Name,
 		Version: pd.Version.Version,
 		CreatedAt: pd.Version.CreatedUnix.AsTime(),
-		HTMLURL: pd.FullWebLink(),
+		HTMLURL: pd.VersionHTMLURL(),
 	}, nil
 }
 

@@ -34,9 +34,9 @@ func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus,
 		}
 	}
 
-	for _, commitStatus := range commitStatuses {
-		var targetStatus structs.CommitStatusState
 	for _, gp := range requiredContextsGlob {
+		var targetStatus structs.CommitStatusState
+		for _, commitStatus := range commitStatuses {
 			if gp.Match(commitStatus.Context) {
 				targetStatus = commitStatus.State
 				matchedCount++

@@ -44,13 +44,21 @@ func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus,
 			}
 		}
 
-		if targetStatus != "" && targetStatus.NoBetterThan(returnedStatus) {
+		// If required rule not match any action, then it is pending
+		if targetStatus == "" {
+			if structs.CommitStatusPending.NoBetterThan(returnedStatus) {
+				returnedStatus = structs.CommitStatusPending
+			}
+			break
+		}
+
+		if targetStatus.NoBetterThan(returnedStatus) {
 			returnedStatus = targetStatus
 			}
 		}
 	}
 
-	if matchedCount == 0 {
+	if matchedCount == 0 && returnedStatus == structs.CommitStatusSuccess {
 		status := git_model.CalcCommitStatus(commitStatuses)
 		if status != nil {
 			return status.State

services/pull/commit_status_test.go (new file, 65 lines)
@@ -0,0 +1,65 @@
+// Copyright 2024 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+	"testing"
+
+	git_model "code.gitea.io/gitea/models/git"
+	"code.gitea.io/gitea/modules/structs"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestMergeRequiredContextsCommitStatus(t *testing.T) {
+	testCases := [][]*git_model.CommitStatus{
+		{
+			{Context: "Build 1", State: structs.CommitStatusSuccess},
+			{Context: "Build 2", State: structs.CommitStatusSuccess},
+			{Context: "Build 3", State: structs.CommitStatusSuccess},
+		},
+		{
+			{Context: "Build 1", State: structs.CommitStatusSuccess},
+			{Context: "Build 2", State: structs.CommitStatusSuccess},
+			{Context: "Build 2t", State: structs.CommitStatusPending},
+		},
+		{
+			{Context: "Build 1", State: structs.CommitStatusSuccess},
+			{Context: "Build 2", State: structs.CommitStatusSuccess},
+			{Context: "Build 2t", State: structs.CommitStatusFailure},
+		},
+		{
+			{Context: "Build 1", State: structs.CommitStatusSuccess},
+			{Context: "Build 2", State: structs.CommitStatusSuccess},
+			{Context: "Build 2t", State: structs.CommitStatusSuccess},
+		},
+		{
+			{Context: "Build 1", State: structs.CommitStatusSuccess},
+			{Context: "Build 2", State: structs.CommitStatusSuccess},
+			{Context: "Build 2t", State: structs.CommitStatusSuccess},
+		},
+	}
+	testCasesRequiredContexts := [][]string{
+		{"Build*"},
+		{"Build*", "Build 2t*"},
+		{"Build*", "Build 2t*"},
+		{"Build*", "Build 2t*", "Build 3*"},
+		{"Build*", "Build *", "Build 2t*", "Build 1*"},
+	}
+
+	testCasesExpected := []structs.CommitStatusState{
+		structs.CommitStatusSuccess,
+		structs.CommitStatusPending,
+		structs.CommitStatusFailure,
+		structs.CommitStatusPending,
+		structs.CommitStatusSuccess,
+	}
+
+	for i, commitStatuses := range testCases {
+		if MergeRequiredContextsCommitStatus(commitStatuses, testCasesRequiredContexts[i]) != testCasesExpected[i] {
+			assert.Fail(t, "Test case failed", "Test case %d failed", i+1)
+		}
+	}
+}

@@ -22,20 +22,7 @@
 				<a role="button" class="ui basic tiny button" rel="nofollow" href="{{$repo.Link}}/src/commit/{{$result.CommitID | PathEscape}}/{{.Filename | PathEscapeSegments}}">{{ctx.Locale.Tr "repo.diff.view_file"}}</a>
 			</h4>
 			<div class="ui attached table segment">
-				<div class="file-body file-code code-view">
-					<table>
-						<tbody>
-							<tr>
-								<td class="lines-num">
-									{{range .LineNumbers}}
-										<a href="{{$repo.Link}}/src/commit/{{$result.CommitID | PathEscape}}/{{$result.Filename | PathEscapeSegments}}#L{{.}}"><span>{{.}}</span></a>
-									{{end}}
-								</td>
-								<td class="lines-code chroma"><code class="code-inner">{{.FormattedLines}}</code></td>
-							</tr>
-						</tbody>
-					</table>
-				</div>
+				{{template "shared/searchfile" dict "RepoLink" $repo.Link "SearchResult" .}}
 			</div>
 			{{template "shared/searchbottom" dict "root" $ "result" .}}
 		</div>

@@ -16,8 +16,6 @@
 			<a class="{{if eq .SortType "oldest"}}active {{end}}item" href="{{$.Link}}?sort=oldest&q={{$.Keyword}}">{{ctx.Locale.Tr "repo.issues.filter_sort.oldest"}}</a>
 			<a class="{{if eq .SortType "alphabetically"}}active {{end}}item" href="{{$.Link}}?sort=alphabetically&q={{$.Keyword}}">{{ctx.Locale.Tr "repo.issues.label.filter_sort.alphabetically"}}</a>
 			<a class="{{if eq .SortType "reversealphabetically"}}active {{end}}item" href="{{$.Link}}?sort=reversealphabetically&q={{$.Keyword}}">{{ctx.Locale.Tr "repo.issues.label.filter_sort.reverse_alphabetically"}}</a>
-			<a class="{{if eq .SortType "recentupdate"}}active {{end}}item" href="{{$.Link}}?sort=recentupdate&q={{$.Keyword}}">{{ctx.Locale.Tr "repo.issues.filter_sort.recentupdate"}}</a>
-			<a class="{{if eq .SortType "leastupdate"}}active {{end}}item" href="{{$.Link}}?sort=leastupdate&q={{$.Keyword}}">{{ctx.Locale.Tr "repo.issues.filter_sort.leastupdate"}}</a>
 		</div>
 	</div>
 </div>

@@ -24,14 +24,22 @@
 	const btn = isSSH ? sshBtn : httpsBtn;
 	if (!btn) return;
 
-	let link = btn.getAttribute('data-link');
-	if (link.startsWith('http://') || link.startsWith('https://')) {
-		// use current protocol/host as the clone link
-		const url = new URL(link);
-		url.protocol = window.location.protocol;
-		url.host = window.location.host;
-		link = url.toString();
+	// NOTE: Keep this function in sync with the one in the js folder
+	function toOriginUrl(urlStr) {
+		try {
+			if (urlStr.startsWith('http://') || urlStr.startsWith('https://') || urlStr.startsWith('/')) {
+				const {origin, protocol, hostname, port} = window.location;
+				const url = new URL(urlStr, origin);
+				url.protocol = protocol;
+				url.hostname = hostname;
+				url.port = port || (protocol === 'https:' ? '443' : '80');
+				return url.toString();
 			}
+		} catch {}
+		return urlStr;
+	}
+	const link = toOriginUrl(btn.getAttribute('data-link'));
+
 	for (const el of document.getElementsByClassName('js-clone-url')) {
 		el[el.nodeName === 'INPUT' ? 'value' : 'textContent'] = link;
 	}

@@ -33,7 +33,7 @@
 	<div class="comment-header-right actions gt-df gt-ac">
 		{{if .Invalidated}}
 			{{$referenceUrl := printf "%s#%s" $.root.Issue.Link .HashTag}}
-			<a href="{{AppSubUrl}}{{$referenceUrl}}" class="ui label basic small" data-tooltip-content="{{ctx.Locale.Tr "repo.issues.review.outdated_description"}}">
+			<a href="{{$referenceUrl}}" class="ui label basic small" data-tooltip-content="{{ctx.Locale.Tr "repo.issues.review.outdated_description"}}">
 				{{ctx.Locale.Tr "repo.issues.review.outdated"}}
 			</a>
 		{{end}}

@@ -11,14 +11,6 @@
 		{{ctx.Locale.Tr "action.compare_commits_general"}}
 	{{end}}
 </h2>
-{{if .Flash.WarningMsg}}
-	{{/*
-		There's already an importing of alert.tmpl in new_form.tmpl,
-		but only the negative message will be displayed within forms for some reasons, see semantic.css:10659.
-		To avoid repeated negative messages, the importing here if for .Flash.WarningMsg only.
-	*/}}
-	{{template "base/alert" .}}
-{{end}}
 {{$BaseCompareName := $.BaseName -}}
 {{- $HeadCompareName := $.HeadRepo.OwnerName -}}
 {{- if and (eq $.BaseName $.HeadRepo.OwnerName) (ne $.Repository.Name $.HeadRepo.Name) -}}

@@ -14,7 +14,7 @@
 		We only handle the case $resolved=true and $invalid=true in this template because if the comment is not resolved it has the outdated label in the comments area (not the header above).
 		The case $resolved=false and $invalid=true is handled in repo/diff/comments.tmpl
 	-->
-	<a href="{{AppSubUrl}}{{$referenceUrl}}" class="ui label basic small gt-ml-3" data-tooltip-content="{{ctx.Locale.Tr "repo.issues.review.outdated_description"}}">
+	<a href="{{$referenceUrl}}" class="ui label basic small gt-ml-3" data-tooltip-content="{{ctx.Locale.Tr "repo.issues.review.outdated_description"}}">
 		{{ctx.Locale.Tr "repo.issues.review.outdated"}}
 	</a>
 {{end}}

@@ -2,14 +2,6 @@
 <div role="main" aria-label="{{.Title}}" class="page-content repository new issue">
   {{template "repo/header" .}}
   <div class="ui container">
-    {{if .Flash.WarningMsg}}
-      {{/*
-        There's already an importing of alert.tmpl in new_form.tmpl,
-        but only the negative message will be displayed within forms for some reasons, see semantic.css:10659.
-        To avoid repeated negative messages, the importing here if for .Flash.WarningMsg only.
-      */}}
-      {{template "base/alert" .}}
-    {{end}}
     {{template "repo/issue/new_form" .}}
   </div>
 </div>
@@ -1,10 +1,8 @@
+{{if .Flash}}
+  {{template "base/alert" .}}
+{{end}}
 <form class="issue-content ui comment form form-fetch-action" id="new-issue" action="{{.Link}}" method="post">
   {{.CsrfTokenHtml}}
-  {{if .Flash}}
-    <div class="sixteen wide column">
-      {{template "base/alert" .}}
-    </div>
-  {{end}}
   <div class="issue-content-left">
     <div class="ui comments">
       <div class="comment">
@@ -44,20 +44,7 @@
     <a role="button" class="ui basic tiny button" rel="nofollow" href="{{$.SourcePath}}/src/commit/{{PathEscape $result.CommitID}}/{{PathEscapeSegments .Filename}}">{{ctx.Locale.Tr "repo.diff.view_file"}}</a>
   </h4>
   <div class="ui attached table segment">
-    <div class="file-body file-code code-view">
-      <table>
-        <tbody>
-          <tr>
-            <td class="lines-num">
-              {{range .LineNumbers}}
-                <a href="{{$.SourcePath}}/src/commit/{{PathEscape $result.CommitID}}/{{PathEscapeSegments $result.Filename}}#L{{.}}"><span>{{.}}</span></a>
-              {{end}}
-            </td>
-            <td class="lines-code chroma"><code class="code-inner">{{.FormattedLines}}</code></td>
-          </tr>
-        </tbody>
-      </table>
-    </div>
+    {{template "shared/searchfile" dict "RepoLink" $.SourcePath "SearchResult" .}}
   </div>
   {{template "shared/searchbottom" dict "root" $ "result" .}}
 </div>
templates/shared/searchfile.tmpl (new file, 14 lines)
@@ -0,0 +1,14 @@
+<div class="file-body file-code code-view">
+  <table>
+    <tbody>
+      {{range .SearchResult.Lines}}
+        <tr>
+          <td class="lines-num">
+            <a href="{{$.RepoLink}}/src/commit/{{PathEscape $.SearchResult.CommitID}}/{{PathEscapeSegments $.SearchResult.Filename}}#L{{.Num}}"><span>{{.Num}}</span></a>
+          </td>
+          <td class="lines-code chroma"><code class="code-inner">{{.FormattedContent}}</code></td>
+        </tr>
+      {{end}}
+    </tbody>
+  </table>
+</div>
@@ -83,6 +83,7 @@
 {{if .GetOpType.InActions "commit_repo" "mirror_sync_push"}}
   {{$push := ActionContent2Commits .}}
   {{$repoLink := (.GetRepoLink ctx)}}
+  {{$repo := .Repo}}
   <div class="gt-df gt-fc gt-gap-2">
     {{range $push.Commits}}
       {{$commitLink := printf "%s/commit/%s" $repoLink .Sha1}}
@@ -90,7 +91,7 @@
         <img class="ui avatar" src="{{$push.AvatarLink $.Context .AuthorEmail}}" title="{{.AuthorName}}" width="16" height="16">
         <a class="ui sha label" href="{{$commitLink}}">{{ShortSha .Sha1}}</a>
         <span class="text truncate">
-          {{RenderCommitMessage $.Context .Message $.ComposeMetas}}
+          {{RenderCommitMessage $.Context .Message $repo.ComposeMetas}}
         </span>
       </div>
     {{end}}
tests/integration/explore_user_test.go (new file, 45 lines)
@@ -0,0 +1,45 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package integration
+
+import (
+	"net/http"
+	"testing"
+
+	"code.gitea.io/gitea/tests"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestExploreUser(t *testing.T) {
+	defer tests.PrepareTestEnv(t)()
+
+	cases := []struct{ sortOrder, expected string }{
+		{"", "/explore/users?sort=newest&q="},
+		{"newest", "/explore/users?sort=newest&q="},
+		{"oldest", "/explore/users?sort=oldest&q="},
+		{"alphabetically", "/explore/users?sort=alphabetically&q="},
+		{"reversealphabetically", "/explore/users?sort=reversealphabetically&q="},
+	}
+	for _, c := range cases {
+		req := NewRequest(t, "GET", "/explore/users?sort="+c.sortOrder)
+		resp := MakeRequest(t, req, http.StatusOK)
+		h := NewHTMLParser(t, resp.Body)
+		href, _ := h.Find(`.ui.dropdown .menu a.active.item[href^="/explore/users"]`).Attr("href")
+		assert.Equal(t, c.expected, href)
+	}
+
+	// these sort orders shouldn't be supported, to avoid leaking user activity
+	cases404 := []string{
+		"/explore/users?sort=lastlogin",
+		"/explore/users?sort=reverselastlogin",
+		"/explore/users?sort=leastupdate",
+		"/explore/users?sort=reverseleastupdate",
+	}
+	for _, c := range cases404 {
+		req := NewRequest(t, "GET", c)
+		req.Header.Get("Accept: text/html")
+		MakeRequest(t, req, http.StatusNotFound)
+	}
+}
@@ -1784,7 +1784,7 @@ a.ui.active.label:hover {
 .lines-commit {
   vertical-align: top;
-  color: var(--color-grey);
+  color: var(--color-text-light-2);
   padding: 0 !important;
   background: var(--color-code-sidebar-bg);
   width: 1%;
@@ -1827,6 +1827,10 @@ a.ui.active.label:hover {
   border-top: 1px solid var(--color-secondary);
 }
 
+.code-view tr.top-line-blame:first-of-type {
+  border-top: none;
+}
+
 .lines-code .bottom-line,
 .lines-commit .bottom-line {
   border-bottom: 1px solid var(--color-secondary);
web_src/js/bootstrap.js (vendored, 57 lines changed)
@@ -1,5 +1,6 @@
 // DO NOT IMPORT window.config HERE!
-// to make sure the error handler always works, we should never import `window.config`, because some user's custom template breaks it.
+// to make sure the error handler always works, we should never import `window.config`, because
+// some user's custom template breaks it.
 
 // This sets up the URL prefix used in webpack's chunk loading.
 // This file must be imported before any lazy-loading is being attempted.
@@ -26,29 +27,42 @@ export function showGlobalErrorMessage(msg) {
 }
 
 /**
- * @param {ErrorEvent} e
+ * @param {ErrorEvent|PromiseRejectionEvent} event - Event
+ * @param {string} event.message - Only present on ErrorEvent
+ * @param {string} event.error - Only present on ErrorEvent
+ * @param {string} event.type - Only present on ErrorEvent
+ * @param {string} event.filename - Only present on ErrorEvent
+ * @param {number} event.lineno - Only present on ErrorEvent
+ * @param {number} event.colno - Only present on ErrorEvent
+ * @param {string} event.reason - Only present on PromiseRejectionEvent
+ * @param {number} event.promise - Only present on PromiseRejectionEvent
  */
-function processWindowErrorEvent(e) {
-  const err = e.error ?? e.reason;
+function processWindowErrorEvent({error, reason, message, type, filename, lineno, colno}) {
+  const err = error ?? reason;
   const assetBaseUrl = String(new URL(__webpack_public_path__, window.location.origin));
+  const {runModeIsProd} = window.config ?? {};
 
-  // error is likely from browser extension or inline script. Do not show these in production builds.
-  if (!err.stack?.includes(assetBaseUrl) && window.config?.runModeIsProd) return;
-
-  let message;
-  if (e.type === 'unhandledrejection') {
-    message = `JavaScript promise rejection: ${err.message}.`;
-  } else {
-    message = `JavaScript error: ${e.message} (${e.filename} @ ${e.lineno}:${e.colno}).`;
+  // `error` and `reason` are not guaranteed to be errors. If the value is falsy, it is likly a
+  // non-critical event from the browser. We log them but don't show them to users. Examples:
+  // - https://developer.mozilla.org/en-US/docs/Web/API/ResizeObserver#observation_errors
+  // - https://github.com/mozilla-mobile/firefox-ios/issues/10817
+  // - https://github.com/go-gitea/gitea/issues/20240
+  if (!err) {
+    if (message) console.error(new Error(message));
+    if (runModeIsProd) return;
   }
 
-  if (!e.error && e.lineno === 0 && e.colno === 0 && e.filename === '' && window.navigator.userAgent.includes('FxiOS/')) {
-    // At the moment, Firefox (iOS) (10x) has an engine bug. See https://github.com/go-gitea/gitea/issues/20240
-    // If a script inserts a newly created (and content changed) element into DOM, there will be a nonsense error event reporting: Script error: line 0, col 0.
-    return; // ignore such nonsense error event
+  // If the error stack trace does not include the base URL of our script assets, it likely came
+  // from a browser extension or inline script. Do not show such errors in production.
+  if (err instanceof Error && !err.stack?.includes(assetBaseUrl) && runModeIsProd) {
+    return;
   }
 
-  showGlobalErrorMessage(`${message} Open browser console to see more details.`);
+  let msg = err?.message ?? message;
+  if (lineno) msg += ` (${filename} @ ${lineno}:${colno})`;
+  const dot = msg.endsWith('.') ? '' : '.';
+  const renderedType = type === 'unhandledrejection' ? 'promise rejection' : type;
+  showGlobalErrorMessage(`JavaScript ${renderedType}: ${msg}${dot} Open browser console to see more details.`);
 }
 
 function initGlobalErrorHandler() {
@@ -59,13 +73,14 @@ function initGlobalErrorHandler() {
   if (!window.config) {
     showGlobalErrorMessage(`Gitea JavaScript code couldn't run correctly, please check your custom templates`);
   }
-  // we added an event handler for window error at the very beginning of <script> of page head
-  // the handler calls `_globalHandlerErrors.push` (array method) to record all errors occur before this init
-  // then in this init, we can collect all error events and show them
+  // we added an event handler for window error at the very beginning of <script> of page head the
+  // handler calls `_globalHandlerErrors.push` (array method) to record all errors occur before
+  // this init then in this init, we can collect all error events and show them.
   for (const e of window._globalHandlerErrors || []) {
     processWindowErrorEvent(e);
   }
-  // then, change _globalHandlerErrors to an object with push method, to process further error events directly
+  // then, change _globalHandlerErrors to an object with push method, to process further error
+  // events directly
   window._globalHandlerErrors = {_inited: true, push: (e) => processWindowErrorEvent(e)};
 }
 
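To make the replayed-error comments above concrete, here is a rough sketch of the early-capture pattern they describe; the inline head script shown is hypothetical, not the actual page-head template. A tiny script pushes error events into a plain array, and initGlobalErrorHandler() later drains that array and replaces it with an object whose push() handles events immediately:

// Hypothetical inline script placed in <head> before bootstrap.js loads:
window._globalHandlerErrors = [];
window.addEventListener('error', (e) => window._globalHandlerErrors.push(e));
window.addEventListener('unhandledrejection', (e) => window._globalHandlerErrors.push(e));

// bootstrap.js then replays whatever was collected and takes over, as in the hunk above:
for (const e of window._globalHandlerErrors || []) processWindowErrorEvent(e);
window._globalHandlerErrors = {_inited: true, push: (e) => processWindowErrorEvent(e)};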
@@ -101,10 +101,12 @@ async function fetchActionDoRequest(actionElem, url, opt) {
       showErrorToast(`server error: ${resp.status}`);
     }
   } catch (e) {
-    console.error('error when doRequest', e);
     actionElem.classList.remove('is-loading', 'small-loading-icon');
-    showErrorToast(i18n.network_error);
+    if (e.name !== 'AbortError') {
+      console.error('error when doRequest', e);
+      showErrorToast(i18n.network_error);
+    }
   }
 }
 
 async function formFetchAction(e) {
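The new e.name !== 'AbortError' guard matters because a fetch cancelled through an AbortController rejects with a DOMException named 'AbortError', a deliberate cancellation rather than a network failure. A minimal sketch, with an illustrative endpoint:

const controller = new AbortController();
const pending = fetch('/some/endpoint', {signal: controller.signal}); // hypothetical URL
controller.abort();
pending.catch((e) => {
  console.log(e.name); // 'AbortError', so no error toast should be shown for it
});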
@@ -112,7 +112,7 @@ export function initNotificationCount() {
       type: 'close',
     });
     worker.port.close();
-    window.location.href = appSubUrl;
+    window.location.href = `${appSubUrl}/`;
   } else if (event.data.type === 'close') {
     worker.port.postMessage({
       type: 'close',
@@ -44,9 +44,11 @@ export function initViewedCheckboxListenerFor() {
   // Mark the file as viewed visually - will especially change the background
   if (this.checked) {
     form.classList.add(viewedStyleClass);
+    checkbox.setAttribute('checked', '');
     prReview.numberOfViewedFiles++;
   } else {
     form.classList.remove(viewedStyleClass);
+    checkbox.removeAttribute('checked');
     prReview.numberOfViewedFiles--;
   }
 
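A short aside on why the added setAttribute/removeAttribute calls are presumably not redundant with this.checked: toggling a checkbox updates only the checked property, while the checked attribute (what serialized HTML and attribute selectors see) keeps its initial value unless set explicitly. An illustrative sketch:

const checkbox = document.createElement('input');
checkbox.type = 'checkbox';
checkbox.checked = true;                       // the property changes...
console.log(checkbox.hasAttribute('checked')); // false: ...but the attribute does not follow
checkbox.setAttribute('checked', '');          // explicitly sync the attribute, as the hunk above does
console.log(checkbox.outerHTML);               // <input type="checkbox" checked="">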
@@ -74,7 +74,7 @@ export function initStopwatch() {
       type: 'close',
     });
     worker.port.close();
-    window.location.href = appSubUrl;
+    window.location.href = `${appSubUrl}/`;
   } else if (event.data.type === 'close') {
     worker.port.postMessage({
       type: 'close',
@@ -2,6 +2,7 @@ import {svg} from '../svg.js';
 
 const headingSelector = '.markup h1, .markup h2, .markup h3, .markup h4, .markup h5, .markup h6';
 
+// scroll to anchor while respecting the `user-content` prefix that exists on the target
 function scrollToAnchor(hash, initial) {
   // abort if the browser has already scrolled to another anchor during page load
   if (initial && document.querySelector(':target')) return;
@@ -19,6 +20,7 @@ function scrollToAnchor(hash, initial) {
 export function initMarkupAnchors() {
   if (!document.querySelector('.markup')) return;
 
+  // create link icons for markup headings, the resulting link href will remove `user-content-`
   for (const heading of document.querySelectorAll(headingSelector)) {
     const originalId = heading.id.replace(/^user-content-/, '');
     const a = document.createElement('a');
@@ -31,5 +33,18 @@ export function initMarkupAnchors() {
     heading.prepend(a);
   }
 
+  // handle user-defined `name` anchors like `[Link](#link)` linking to `<a name="link"></a>Link`
+  for (const a of document.querySelectorAll('.markup a[href^="#"]')) {
+    const href = a.getAttribute('href');
+    if (!href.startsWith('#user-content-')) continue;
+    const originalId = href.replace(/^#user-content-/, '');
+    a.setAttribute('href', `#${encodeURIComponent(originalId)}`);
+    if (a.closest('.markup').querySelectorAll(`a[name="${originalId}"]`).length !== 1) {
+      a.addEventListener('click', (e) => {
+        scrollToAnchor(e.currentTarget.getAttribute('href'), false);
+      });
+    }
+  }
+
   scrollToAnchor(window.location.hash, true);
 }
@@ -1,7 +1,8 @@
-// Convert an absolute or relative URL to an absolute URL with the current origin
+// Convert an absolute or relative URL to an absolute URL with the current origin. It only
+// processes absolute HTTP/HTTPS URLs or relative URLs like '/xxx' or '//host/xxx'.
+// NOTE: Keep this function in sync with clone_script.tmpl
 export function toOriginUrl(urlStr) {
   try {
-    // only process absolute HTTP/HTTPS URL or relative URLs ('/xxx' or '//host/xxx')
     if (urlStr.startsWith('http://') || urlStr.startsWith('https://') || urlStr.startsWith('/')) {
       const {origin, protocol, hostname, port} = window.location;
       const url = new URL(urlStr, origin);
@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" fill-rule="evenodd" stroke-linejoin="round" stroke-miterlimit="2" clip-rule="evenodd" viewBox="-89.009 -46.884 643.937 446.884" class="svg gitea-twitter" width="16" height="16" aria-hidden="true"><path fill="#1da1f2" fill-rule="nonzero" d="M154.729 400c185.669 0 287.205-153.876 287.205-287.312 0-4.37-.089-8.72-.286-13.052A205.304 205.304 0 0 0 492 47.346c-18.087 8.044-37.55 13.458-57.968 15.899 20.841-12.501 36.84-32.278 44.389-55.852a202.42 202.42 0 0 1-64.098 24.511C395.903 12.276 369.679 0 340.641 0c-55.744 0-100.948 45.222-100.948 100.965 0 7.925.887 15.631 2.619 23.025-83.895-4.223-158.287-44.405-208.074-105.504A100.739 100.739 0 0 0 20.57 69.24c0 35.034 17.82 65.961 44.92 84.055a100.172 100.172 0 0 1-45.716-12.63c-.015.424-.015.837-.015 1.29 0 48.903 34.794 89.734 80.982 98.986a101.036 101.036 0 0 1-26.617 3.553c-6.493 0-12.821-.639-18.971-1.82 12.851 40.122 50.115 69.319 94.296 70.135-34.549 27.089-78.07 43.224-125.371 43.224A204.9 204.9 0 0 1 0 354.634c44.674 28.645 97.72 45.359 154.734 45.359"/></svg>
+<svg viewBox="0 0 24 24"><path d="M14.095 10.316 22.286 1h-1.94L13.23 9.088 7.551 1H1l8.59 12.231L1 23h1.94l7.51-8.543 6 8.543H23l-8.905-12.684zm-2.658 3.022-.872-1.218L3.64 2.432h2.98l5.59 7.821.869 1.219 7.265 10.166h-2.982l-5.926-8.3z"/></svg>
Size before: 1 KiB, after: 246 B