[v15.0/forgejo]: chore: add modernizer linter (#11949)

**Backport: !11936**

- Go has a suite of small linters that help with modernizing Go code by using newer functions and catching small mistakes, https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize.
- Enable this linter in golangci-lint.
- There's also [`go fix`](https://go.dev/blog/gofix), which is not yet released as a linter in golangci-lint: https://github.com/golangci/golangci-lint/pull/6385

Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/11949
Reviewed-by: Mathieu Fenniak <mfenniak@noreply.codeberg.org>
Co-authored-by: Gusted <postmaster@gusted.xyz>
Co-committed-by: Gusted <postmaster@gusted.xyz>
This commit is contained in:
Gusted 2026-04-02 16:54:46 +02:00 committed by Mathieu Fenniak
parent a32804bebe
commit 607d031069
247 changed files with 650 additions and 1001 deletions

View file

@ -7,6 +7,7 @@ import (
"bytes"
"fmt"
"io"
"slices"
"strings"
actions_model "forgejo.org/models/actions"
@ -609,11 +610,8 @@ func matchPullRequestReviewEvent(prPayload *api.PullRequestPayload, evt *jobpars
matched := false
for _, val := range vals {
for _, action := range actions {
if glob.MustCompile(val, '/').Match(action) {
matched = true
break
}
if slices.ContainsFunc(actions, glob.MustCompile(val, '/').Match) {
matched = true
}
if matched {
break
@ -658,11 +656,8 @@ func matchPullRequestReviewCommentEvent(prPayload *api.PullRequestPayload, evt *
matched := false
for _, val := range vals {
for _, action := range actions {
if glob.MustCompile(val, '/').Match(action) {
matched = true
break
}
if slices.ContainsFunc(actions, glob.MustCompile(val, '/').Match) {
matched = true
}
if matched {
break

View file

@ -101,7 +101,7 @@ func Generate(n int) (string, error) {
buffer := make([]byte, n)
max := big.NewInt(int64(len(validChars)))
for {
for j := 0; j < n; j++ {
for j := range n {
rnd, err := rand.Int(rand.Reader, max)
if err != nil {
return "", err

View file

@ -51,7 +51,7 @@ func TestComplexity_Generate(t *testing.T) {
test := func(t *testing.T, modes []string) {
testComplextity(modes)
for i := 0; i < maxCount; i++ {
for range maxCount {
pwd, err := Generate(pwdLen)
require.NoError(t, err)
assert.Len(t, pwd, pwdLen)

View file

@ -101,7 +101,7 @@ func (c *Client) CheckPassword(pw string, padding bool) (int, error) {
}
defer resp.Body.Close()
for _, pair := range strings.Split(string(body), "\n") {
for pair := range strings.SplitSeq(string(body), "\n") {
parts := strings.Split(pair, ":")
if len(parts) != 2 {
continue

View file

@ -24,8 +24,8 @@ func drawBlock(img *image.Paletted, x, y, size, angle int, points []int) {
rotate(points, m, m, angle)
}
for i := 0; i < size; i++ {
for j := 0; j < size; j++ {
for i := range size {
for j := range size {
if pointInPolygon(i, j, points) {
img.SetColorIndex(x+i, y+j, 1)
}

View file

@ -134,7 +134,7 @@ func drawBlocks(p *image.Paletted, size int, c, b1, b2 blockFunc, b1Angle, b2Ang
// then we make it left-right mirror, so we didn't draw 3/6/9 before
for x := 0; x < size/2; x++ {
for y := 0; y < size; y++ {
for y := range size {
p.SetColorIndex(size-x, y, p.ColorIndexAt(x, y))
}
}

View file

@ -164,7 +164,7 @@ func DetectEncoding(content []byte) (string, error) {
}
times := 1024 / len(content)
detectContent = make([]byte, 0, times*len(content))
for i := 0; i < times; i++ {
for range times {
detectContent = append(detectContent, content...)
}
} else {

View file

@ -243,7 +243,7 @@ func stringMustEndWith(t *testing.T, expected, value string) {
func TestToUTF8WithFallbackReader(t *testing.T) {
resetDefaultCharsetsOrder()
for testLen := 0; testLen < 2048; testLen++ {
for testLen := range 2048 {
pattern := " test { () }\n"
input := ""
for len(input) < testLen {

View file

@ -6,6 +6,7 @@ package forgefed
import (
"fmt"
"net/url"
"slices"
"strconv"
"strings"
@ -107,12 +108,7 @@ func newActorID(uri string) (ActorID, error) {
}
func containsEmptyString(ar []string) bool {
for _, elem := range ar {
if elem == "" {
return true
}
}
return false
return slices.Contains(ar, "")
}
func removeEmptyStrings(ls []string) []string {

View file

@ -88,7 +88,7 @@ func ToRepository(it ap.Item) (*Repository, error) {
return (*Repository)(unsafe.Pointer(&i)), nil
default:
// NOTE(marius): this is an ugly way of dealing with the interface conversion error: types from different scopes
typ := reflect.TypeOf(new(Repository))
typ := reflect.TypeFor[*Repository]()
if i, ok := reflect.ValueOf(it).Convert(typ).Interface().(*Repository); ok {
return i, nil
}

View file

@ -269,8 +269,8 @@ func NewSearchCommitsOptions(searchString string, forAllRefs bool) SearchCommits
var keywords, authors, committers []string
var after, before string
fields := strings.Fields(searchString)
for _, k := range fields {
fields := strings.FieldsSeq(searchString)
for k := range fields {
switch {
case strings.HasPrefix(k, "author:"):
authors = append(authors, strings.TrimPrefix(k, "author:"))

View file

@ -7,6 +7,7 @@ import (
"context"
"fmt"
"io"
"maps"
"path"
"sort"
@ -45,9 +46,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
return nil, nil, err
}
for pth, found := range commits {
revs[pth] = found
}
maps.Copy(revs, commits)
}
} else {
sort.Strings(entryPaths)

View file

@ -75,9 +75,9 @@ func (f Format) Parser(r io.Reader) *Parser {
// hexEscaped produces hex-escaped characters from a string. For example, "\n\0"
// would turn into "%0a%00".
func (f Format) hexEscaped(delim []byte) string {
escaped := ""
for i := 0; i < len(delim); i++ {
escaped += "%" + hex.EncodeToString([]byte{delim[i]})
var escaped strings.Builder
for i := range delim {
escaped.WriteString("%" + hex.EncodeToString([]byte{delim[i]}))
}
return escaped
return escaped.String()
}

View file

@ -9,6 +9,7 @@ import (
"os"
"path"
"path/filepath"
"slices"
"strings"
"forgejo.org/modules/log"
@ -27,12 +28,7 @@ var ErrNotValidHook = errors.New("not a valid Git hook")
// IsValidHookName returns true if given name is a valid Git hook.
func IsValidHookName(name string) bool {
for _, hn := range hookNames {
if hn == name {
return true
}
}
return false
return slices.Contains(hookNames, name)
}
// Hook represents a Git hook.

View file

@ -21,7 +21,7 @@ type Cache interface {
}
func getCacheKey(repoPath, commitID, entryPath string) string {
hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%s:%s:%s", repoPath, commitID, entryPath)))
hashBytes := sha256.Sum256(fmt.Appendf(nil, "%s:%s:%s", repoPath, commitID, entryPath))
return fmt.Sprintf("last_commit:%x", hashBytes)
}

View file

@ -346,10 +346,7 @@ func WalkGitLog(ctx context.Context, repo *Repository, head *Commit, treepath st
results := make([]string, len(paths))
remaining := len(paths)
nextRestart := (len(paths) * 3) / 4
if nextRestart > 70 {
nextRestart = 70
}
nextRestart := min((len(paths)*3)/4, 70)
lastEmptyParent := head.ID.String()
commitSinceLastEmptyParent := uint64(0)
commitSinceNextRestart := uint64(0)

View file

@ -8,6 +8,7 @@ import (
"context"
"io"
"os"
"strings"
"forgejo.org/modules/log"
)
@ -33,7 +34,7 @@ func GetNote(ctx context.Context, repo *Repository, commitID string) (*Note, err
return nil, err
}
path := ""
var path strings.Builder
tree := &notes.Tree
log.Trace("Found tree with ID %q while searching for git note corresponding to the commit %q", tree.ID, commitID)
@ -43,12 +44,12 @@ func GetNote(ctx context.Context, repo *Repository, commitID string) (*Note, err
for len(commitID) > 2 {
entry, err = tree.GetTreeEntryByPath(commitID)
if err == nil {
path += commitID
path.WriteString(commitID)
break
}
if IsErrNotExist(err) {
tree, err = tree.SubTree(commitID[0:2])
path += commitID[0:2] + "/"
path.WriteString(commitID[0:2] + "/")
commitID = commitID[2:]
}
if err != nil {
@ -80,9 +81,9 @@ func GetNote(ctx context.Context, repo *Repository, commitID string) (*Note, err
_ = dataRc.Close()
closed = true
lastCommit, err := repo.getCommitByPathWithID(notes.ID, path)
lastCommit, err := repo.getCommitByPathWithID(notes.ID, path.String())
if err != nil {
log.Error("Unable to get the commit for the path %q. Error: %v", path, err)
log.Error("Unable to get the commit for the path %q. Error: %v", path.String(), err)
return nil, err
}

View file

@ -33,16 +33,16 @@ func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) {
posEnd += pos
}
line := data[pos:posEnd]
posTab := bytes.IndexByte(line, '\t')
if posTab == -1 {
before, after, ok := bytes.Cut(line, []byte{'\t'})
if !ok {
return nil, fmt.Errorf("invalid ls-tree output (no tab): %q", line)
}
entry := new(TreeEntry)
entry.ptree = ptree
entryAttrs := line[:posTab]
entryName := line[posTab+1:]
entryAttrs := before
entryName := after
entryMode, entryAttrs, _ := bytes.Cut(entryAttrs, sepSpace)
_ /* entryType */, entryAttrs, _ = bytes.Cut(entryAttrs, sepSpace) // the type is not used, the mode is enough to determine the type

View file

@ -52,7 +52,7 @@ func NewFromMap(o *map[string]string) Interface {
func (o *gitPushOptions) ReadEnv() Interface {
if pushCount, err := strconv.Atoi(os.Getenv(EnvCount)); err == nil {
for idx := 0; idx < pushCount; idx++ {
for idx := range pushCount {
_ = o.Parse(os.Getenv(fmt.Sprintf(EnvFormat, idx)))
}
}

View file

@ -105,8 +105,8 @@ func (ref RefName) IsFor() bool {
}
func (ref RefName) nameWithoutPrefix(prefix string) string {
if strings.HasPrefix(string(ref), prefix) {
return strings.TrimPrefix(string(ref), prefix)
if after, ok := strings.CutPrefix(string(ref), prefix); ok {
return after
}
return ""
}

View file

@ -46,9 +46,9 @@ func (repo *Repository) parsePrettyFormatLogToList(logs []byte) ([]*Commit, erro
return commits, nil
}
parts := bytes.Split(logs, []byte{'\n'})
parts := bytes.SplitSeq(logs, []byte{'\n'})
for _, commitID := range parts {
for commitID := range parts {
commit, err := repo.GetCommit(string(commitID))
if err != nil {
return nil, err

View file

@ -96,8 +96,8 @@ func (ca GitAttribute) String() string {
// sometimes used within gitlab-language: https://docs.gitlab.com/ee/user/project/highlighting.html#override-syntax-highlighting-for-a-file-type
func (ca GitAttribute) Prefix() string {
s := ca.String()
if i := strings.IndexByte(s, '?'); i >= 0 {
return s[:i]
if before, _, ok := strings.Cut(s, "?"); ok {
return before
}
return s
}

View file

@ -95,7 +95,7 @@ func (repo *Repository) LsFiles(filenames ...string) ([]string, error) {
return nil, err
}
filelist := make([]string, 0, len(filenames))
for _, line := range bytes.Split(res, []byte{'\000'}) {
for line := range bytes.SplitSeq(res, []byte{'\000'}) {
filelist = append(filelist, string(line))
}

View file

@ -42,8 +42,8 @@ func (repo *Repository) GetTagNameBySHA(sha string) (string, error) {
return "", err
}
tagRefs := strings.Split(stdout, "\n")
for _, tagRef := range tagRefs {
tagRefs := strings.SplitSeq(stdout, "\n")
for tagRef := range tagRefs {
if len(strings.TrimSpace(tagRef)) > 0 {
fields := strings.Fields(tagRef)
if strings.HasPrefix(fields[0], sha) && strings.HasPrefix(fields[1], TagPrefix) {
@ -65,7 +65,7 @@ func (repo *Repository) GetTagID(name string) (string, error) {
return "", err
}
// Make sure exact match is used: "v1" != "release/v1"
for _, line := range strings.Split(stdout, "\n") {
for line := range strings.SplitSeq(stdout, "\n") {
fields := strings.Fields(line)
if len(fields) == 2 && fields[1] == "refs/tags/"+name {
return fields[0], nil

View file

@ -170,7 +170,7 @@ func (repo *Repository) LsTree(ref string, filenames ...string) ([]string, error
return nil, err
}
filelist := make([]string, 0, len(filenames))
for _, line := range bytes.Split(res, []byte{'\000'}) {
for line := range bytes.SplitSeq(res, []byte{'\000'}) {
filelist = append(filelist, string(line))
}

View file

@ -171,7 +171,7 @@ func (te *TreeEntry) FollowLinks() (*TreeEntry, string, error) {
}
entry := te
entryLink := ""
for i := 0; i < 999; i++ {
for range 999 {
if entry.IsLink() {
next, link, err := entry.FollowLink()
entryLink = link

View file

@ -20,7 +20,7 @@ func TestSubTree_Issue29101(t *testing.T) {
require.NoError(t, err)
// old code could produce a different error if called multiple times
for i := 0; i < 10; i++ {
for range 10 {
_, err = commit.SubTree("file1.txt")
require.Error(t, err)
assert.True(t, IsErrNotExist(err))

View file

@ -6,6 +6,7 @@ package hostmatcher
import (
"net"
"path/filepath"
"slices"
"strings"
)
@ -38,7 +39,7 @@ func isBuiltin(s string) bool {
// ParseHostMatchList parses the host list HostMatchList
func ParseHostMatchList(settingKeyHint, hostList string) *HostMatchList {
hl := &HostMatchList{SettingKeyHint: settingKeyHint, SettingValue: hostList}
for _, s := range strings.Split(hostList, ",") {
for s := range strings.SplitSeq(hostList, ",") {
s = strings.ToLower(strings.TrimSpace(s))
if s == "" {
continue
@ -61,7 +62,7 @@ func ParseSimpleMatchList(settingKeyHint, matchList string) *HostMatchList {
SettingKeyHint: settingKeyHint,
SettingValue: matchList,
}
for _, s := range strings.Split(matchList, ",") {
for s := range strings.SplitSeq(matchList, ",") {
s = strings.ToLower(strings.TrimSpace(s))
if s == "" {
continue
@ -98,10 +99,8 @@ func (hl *HostMatchList) checkPattern(host string) bool {
}
func (hl *HostMatchList) checkIP(ip net.IP) bool {
for _, pattern := range hl.patterns {
if pattern == "*" {
return true
}
if slices.Contains(hl.patterns, "*") {
return true
}
for _, builtin := range hl.builtins {
switch builtin {

View file

@ -59,7 +59,7 @@ func HandleGenericETagCache(req *http.Request, w http.ResponseWriter, etag strin
func checkIfNoneMatchIsValid(req *http.Request, etag string) bool {
ifNoneMatch := req.Header.Get("If-None-Match")
if len(ifNoneMatch) > 0 {
for _, item := range strings.Split(ifNoneMatch, ",") {
for item := range strings.SplitSeq(ifNoneMatch, ",") {
item = strings.TrimPrefix(strings.TrimSpace(item), "W/") // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag#directives
if item == etag {
return true

View file

@ -8,6 +8,7 @@ import (
"errors"
"fmt"
"io"
"maps"
"net/http"
"net/url"
"path"
@ -86,9 +87,7 @@ func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) {
}
if opts.AdditionalHeaders != nil {
for k, v := range opts.AdditionalHeaders {
header[k] = v
}
maps.Copy(header, opts.AdditionalHeaders)
}
}

View file

@ -129,8 +129,8 @@ func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revisio
changes.Updates = append(changes.Updates, updates...)
return nil
}
lines := strings.Split(stdout, "\n")
for _, line := range lines {
lines := strings.SplitSeq(stdout, "\n")
for line := range lines {
line = strings.TrimSpace(line)
if len(line) == 0 {
continue

View file

@ -8,6 +8,7 @@ import (
"fmt"
"net/url"
"regexp"
"slices"
"strconv"
"strings"
@ -447,12 +448,7 @@ func (o *valuedOption) IsChecked() bool {
case api.IssueFormFieldTypeDropdown:
checks := strings.Split(o.field.Get(fmt.Sprintf("form-field-%s", o.field.ID)), ",")
idx := strconv.Itoa(o.index)
for _, v := range checks {
if v == idx {
return true
}
}
return false
return slices.Contains(checks, idx)
case api.IssueFormFieldTypeCheckboxes:
return o.field.Get(fmt.Sprintf("form-field-%s-%d", o.field.ID, o.index)) == "on"
}

View file

@ -72,7 +72,7 @@ func parseYamlFormat(fileName string, data []byte) ([]*Label, error) {
func parseLegacyFormat(fileName string, data []byte) ([]*Label, error) {
lines := strings.Split(string(data), "\n")
list := make([]*Label, 0, len(lines))
for i := 0; i < len(lines); i++ {
for i := range lines {
line := strings.TrimSpace(lines[i])
if len(line) == 0 {
continue
@ -108,7 +108,7 @@ func LoadTemplateDescription(fileName string) (string, error) {
return "", err
}
for i := 0; i < len(list); i++ {
for i := range list {
if i > 0 {
buf.WriteString(", ")
}

View file

@ -208,7 +208,7 @@ func EventFormatTextMessage(mode *WriterMode, event *Event, msgFormat string, ms
}
}
if hasColorValue {
msg = []byte(fmt.Sprintf(msgFormat, msgArgs...))
msg = fmt.Appendf(nil, msgFormat, msgArgs...)
}
}
// try to reuse the pre-formatted simple text message
@ -227,8 +227,8 @@ func EventFormatTextMessage(mode *WriterMode, event *Event, msgFormat string, ms
buf = append(buf, msg...)
if event.Stacktrace != "" && mode.StacktraceLevel <= event.Level {
lines := bytes.Split([]byte(event.Stacktrace), []byte("\n"))
for _, line := range lines {
lines := bytes.SplitSeq([]byte(event.Stacktrace), []byte("\n"))
for line := range lines {
buf = append(buf, "\n\t"...)
buf = append(buf, line...)
}

View file

@ -63,11 +63,9 @@ func TestConnLogger(t *testing.T) {
}
expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.Filename, event.Line, event.Caller, strings.ToUpper(event.Level.String())[0], event.MsgSimpleText)
var wg sync.WaitGroup
wg.Add(1)
go func() {
defer wg.Done()
wg.Go(func() {
listenReadAndClose(t, l, expected)
}()
})
logger.SendLogEvent(&event)
wg.Wait()

View file

@ -124,7 +124,7 @@ func FlagsFromString(from string, def ...uint32) Flags {
return Flags{defined: true, flags: def[0]}
}
flags := uint32(0)
for _, flag := range strings.Split(strings.ToLower(from), ",") {
for flag := range strings.SplitSeq(strings.ToLower(from), ",") {
flags |= flagFromString[strings.TrimSpace(flag)]
}
return Flags{defined: true, flags: flags}

View file

@ -33,11 +33,11 @@ func TestLevelMarshalUnmarshalJSON(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, INFO, testLevel.Level)
err = json.Unmarshal([]byte(fmt.Sprintf(`{"level":%d}`, 2)), &testLevel)
err = json.Unmarshal(fmt.Appendf(nil, `{"level":%d}`, 2), &testLevel)
require.NoError(t, err)
assert.Equal(t, INFO, testLevel.Level)
err = json.Unmarshal([]byte(fmt.Sprintf(`{"level":%d}`, 10012)), &testLevel)
err = json.Unmarshal(fmt.Appendf(nil, `{"level":%d}`, 10012), &testLevel)
require.NoError(t, err)
assert.Equal(t, INFO, testLevel.Level)
@ -52,5 +52,5 @@ func TestLevelMarshalUnmarshalJSON(t *testing.T) {
}
func makeTestLevelBytes(level string) []byte {
return []byte(fmt.Sprintf(`{"level":"%s"}`, level))
return fmt.Appendf(nil, `{"level":"%s"}`, level)
}

View file

@ -80,8 +80,8 @@ func newFilePreview(ctx *RenderContext, node *html.Node, locale translation.Loca
filePath := node.Data[m[6]:m[7]]
hash := node.Data[m[8]:m[9]]
urlFullSource := urlFull
if strings.HasSuffix(filePath, "?display=source") {
filePath = strings.TrimSuffix(filePath, "?display=source")
if before, ok := strings.CutSuffix(filePath, "?display=source"); ok {
filePath = before
} else if Type(filePath) != "" {
urlFullSource = node.Data[m[0]:m[6]] + filePath + "?display=source#" + hash
}

View file

@ -11,6 +11,7 @@ import (
"path"
"path/filepath"
"regexp"
"slices"
"strings"
"sync"
@ -124,13 +125,7 @@ func CustomLinkURLSchemes(schemes []string) {
if !validScheme.MatchString(s) {
continue
}
without := false
for _, sna := range xurls.SchemesNoAuthority {
if s == sna {
without = true
break
}
}
without := slices.Contains(xurls.SchemesNoAuthority, s)
if without {
s += ":"
} else {
@ -675,9 +670,9 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
// It makes page handling terrible, but we prefer GitHub syntax
// And fall back to MediaWiki only when it is obvious from the look
// Of text and link contents
sl := strings.Split(content, "|")
for _, v := range sl {
if equalPos := strings.IndexByte(v, '='); equalPos == -1 {
sl := strings.SplitSeq(content, "|")
for v := range sl {
if found := strings.Contains(v, "="); !found {
// There is no equal in this argument; this is a mandatory arg
if props["name"] == "" {
if IsLinkStr(v) {
@ -1148,7 +1143,7 @@ func comparePatternProcessor(ctx *RenderContext, node *html.Node) {
}
// Ensure that every group (m[0]...m[9]) has a match
for i := 0; i < 10; i++ {
for i := range 10 {
if m[i] == -1 {
return
}

View file

@ -182,10 +182,7 @@ func actualRender(ctx *markup.RenderContext, input io.Reader, output io.Writer)
}
buf, _ = ExtractMetadataBytes(buf, rc)
metaLength := bufWithMetadataLength - len(buf)
if metaLength < 0 {
metaLength = 0
}
metaLength := max(bufWithMetadataLength-len(buf), 0)
rc.metaLength = metaLength
pc.Set(markdownutil.RenderConfigKey, rc)

View file

@ -319,7 +319,7 @@ func TestTotal_RenderWiki(t *testing.T) {
answers := testAnswers(util.URLJoin(FullURL, "wiki"), util.URLJoin(FullURL, "wiki", "raw"))
for i := 0; i < len(sameCases); i++ {
for i := range sameCases {
line, err := markdown.RenderString(&markup.RenderContext{
Ctx: git.DefaultContext,
Links: markup.Links{
@ -363,7 +363,7 @@ func TestTotal_RenderString(t *testing.T) {
answers := testAnswers(util.URLJoin(FullURL, "src", "master"), util.URLJoin(FullURL, "media", "master"))
for i := 0; i < len(sameCases); i++ {
for i := range sameCases {
line, err := markdown.RenderString(&markup.RenderContext{
Ctx: git.DefaultContext,
Links: markup.Links{

View file

@ -24,7 +24,7 @@ func (r *BlockRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
func (r *BlockRenderer) writeLines(w util.BufWriter, source []byte, n gast.Node) {
l := n.Lines().Len()
for i := 0; i < l; i++ {
for i := range l {
line := n.Lines().At(i)
_, _ = w.Write(util.EscapeHTML(line.Value(source)))
}

View file

@ -63,7 +63,7 @@ func TestExtractMetadata(t *testing.T) {
func TestExtractMetadataBytes(t *testing.T) {
t.Run("ValidFrontAndBody", func(t *testing.T) {
var meta IssueTemplate
body, err := ExtractMetadataBytes([]byte(fmt.Sprintf("%s\n%s\n%s\n%s", sepTest, frontTest, sepTest, bodyTest)), &meta)
body, err := ExtractMetadataBytes(fmt.Appendf(nil, "%s\n%s\n%s\n%s", sepTest, frontTest, sepTest, bodyTest), &meta)
require.NoError(t, err)
assert.Equal(t, bodyTest, string(body))
assert.Equal(t, metaTest, meta)
@ -72,19 +72,19 @@ func TestExtractMetadataBytes(t *testing.T) {
t.Run("NoFirstSeparator", func(t *testing.T) {
var meta IssueTemplate
_, err := ExtractMetadataBytes([]byte(fmt.Sprintf("%s\n%s\n%s", frontTest, sepTest, bodyTest)), &meta)
_, err := ExtractMetadataBytes(fmt.Appendf(nil, "%s\n%s\n%s", frontTest, sepTest, bodyTest), &meta)
require.Error(t, err)
})
t.Run("NoLastSeparator", func(t *testing.T) {
var meta IssueTemplate
_, err := ExtractMetadataBytes([]byte(fmt.Sprintf("%s\n%s\n%s", sepTest, frontTest, bodyTest)), &meta)
_, err := ExtractMetadataBytes(fmt.Appendf(nil, "%s\n%s\n%s", sepTest, frontTest, bodyTest), &meta)
require.Error(t, err)
})
t.Run("NoBody", func(t *testing.T) {
var meta IssueTemplate
body, err := ExtractMetadataBytes([]byte(fmt.Sprintf("%s\n%s\n%s", sepTest, frontTest, sepTest)), &meta)
body, err := ExtractMetadataBytes(fmt.Appendf(nil, "%s\n%s\n%s", sepTest, frontTest, sepTest), &meta)
require.NoError(t, err)
assert.Empty(t, string(body))
assert.Equal(t, metaTest, meta)

View file

@ -44,7 +44,7 @@ func createTOCNode(toc []markup.Header, lang string, detailsAttrs map[string]str
}
li := ast.NewListItem(currentLevel * 2)
a := ast.NewLink()
a.Destination = []byte(fmt.Sprintf("#%s", url.QueryEscape(header.ID)))
a.Destination = fmt.Appendf(nil, "#%s", url.QueryEscape(header.ID))
a.AppendChild(a, ast.NewString([]byte(header.Text)))
li.AppendChild(li, a)
ul.AppendChild(ul, li)

View file

@ -17,7 +17,7 @@ import (
func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) {
for _, attr := range v.Attributes() {
if _, ok := attr.Value.([]byte); !ok {
v.SetAttribute(attr.Name, []byte(fmt.Sprintf("%v", attr.Value)))
v.SetAttribute(attr.Name, fmt.Appendf(nil, "%v", attr.Value))
}
}
txt := mdutil.Text(v, reader.Source())

View file

@ -319,23 +319,19 @@ func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Wr
_ = pw2.Close()
}()
wg.Add(1)
go func() {
wg.Go(func() {
err = donotpanic.SafeFuncWithError(func() error { return SanitizeReader(pr2, renderer.Name(), output) })
_ = pr2.Close()
wg.Done()
}()
})
} else {
pw2 = nopCloser{output}
}
wg.Add(1)
go func() {
wg.Go(func() {
err = donotpanic.SafeFuncWithError(func() error { return postProcessOrCopy(ctx, renderer, pr, pw2) })
_ = pr.Close()
_ = pw2.Close()
wg.Done()
}()
})
if err1 := renderer.Render(ctx, input, pw); err1 != nil {
return err1

View file

@ -58,7 +58,7 @@ type PackageMetadata struct {
Time map[string]time.Time `json:"time,omitempty"`
Homepage string `json:"homepage,omitempty"`
Keywords []string `json:"keywords,omitempty"`
Repository Repository `json:"repository,omitempty"`
Repository Repository `json:"repository"`
Author User `json:"author"`
ReadmeFilename string `json:"readmeFilename,omitempty"`
Users map[string]bool `json:"users,omitempty"`
@ -75,7 +75,7 @@ type PackageMetadataVersion struct {
Author User `json:"author"`
Homepage string `json:"homepage,omitempty"`
License string `json:"license,omitempty"`
Repository Repository `json:"repository,omitempty"`
Repository Repository `json:"repository"`
Keywords []string `json:"keywords,omitempty"`
Dependencies map[string]string `json:"dependencies,omitempty"`
BundleDependencies []string `json:"bundleDependencies,omitempty"`

View file

@ -22,5 +22,5 @@ type Metadata struct {
OptionalDependencies map[string]string `json:"optional_dependencies,omitempty"`
Bin map[string]string `json:"bin,omitempty"`
Readme string `json:"readme,omitempty"`
Repository Repository `json:"repository,omitempty"`
Repository Repository `json:"repository"`
}

View file

@ -142,8 +142,8 @@ func ParseDebugHeaderID(r io.ReadSeeker) (string, error) {
if _, err := r.Read(b); err != nil {
return "", err
}
if i := bytes.IndexByte(b, 0); i != -1 {
buf.Write(b[:i])
if before, _, ok := bytes.Cut(b, []byte{0}); ok {
buf.Write(before)
return buf.String(), nil
}
buf.Write(b)

View file

@ -91,7 +91,7 @@ func (e *MarshalEncoder) marshal(v any) error {
val := reflect.ValueOf(v)
typ := reflect.TypeOf(v)
if typ.Kind() == reflect.Ptr {
if typ.Kind() == reflect.Pointer {
val = val.Elem()
typ = typ.Elem()
}
@ -250,7 +250,7 @@ func (e *MarshalEncoder) marshalArray(arr reflect.Value) error {
return err
}
for i := 0; i < length; i++ {
for i := range length {
if err := e.marshal(arr.Index(i).Interface()); err != nil {
return err
}

View file

@ -47,7 +47,7 @@ type Metadata struct {
Keywords []string `json:"keywords,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"`
License string `json:"license,omitempty"`
Author Person `json:"author,omitempty"`
Author Person `json:"author"`
Manifests map[string]*Manifest `json:"manifests,omitempty"`
}

View file

@ -7,6 +7,7 @@ import (
"context"
"fmt"
"net/url"
"strings"
asymkey_model "forgejo.org/models/asymkey"
"forgejo.org/models/perm"
@ -47,17 +48,18 @@ type ServCommandResults struct {
// ServCommand preps for a serv call
func ServCommand(ctx context.Context, keyID int64, ownerName, repoName string, mode perm.AccessMode, verbs ...string) (*ServCommandResults, ResponseExtra) {
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/serv/command/%d/%s/%s?mode=%d",
var reqURL strings.Builder
reqURL.WriteString(setting.LocalURL + fmt.Sprintf("api/internal/serv/command/%d/%s/%s?mode=%d",
keyID,
url.PathEscape(ownerName),
url.PathEscape(repoName),
mode,
)
))
for _, verb := range verbs {
if verb != "" {
reqURL += fmt.Sprintf("&verb=%s", url.QueryEscape(verb))
fmt.Fprintf(&reqURL, "&verb=%s", url.QueryEscape(verb))
}
}
req := newInternalRequest(ctx, reqURL, "GET")
req := newInternalRequest(ctx, reqURL.String(), "GET")
return requestJSONResp(req, &ServCommandResults{})
}

View file

@ -45,7 +45,7 @@ func FileHandlerFunc() http.HandlerFunc {
func parseAcceptEncoding(val string) container.Set[string] {
parts := strings.Split(val, ";")
types := make(container.Set[string])
for _, v := range strings.Split(parts[0], ",") {
for v := range strings.SplitSeq(parts[0], ",") {
types.Add(strings.TrimSpace(v))
}
return types

View file

@ -83,7 +83,7 @@ func prepareLevelDB(cfg *BaseConfig) (conn string, db *leveldb.DB, err error) {
}
conn = cfg.ConnStr
}
for i := 0; i < 10; i++ {
for range 10 {
if db, err = nosql.GetManager().GetLevelDB(conn); err == nil {
break
}

View file

@ -49,7 +49,7 @@ func newBaseRedisGeneric(cfg *BaseConfig, unique bool, client nosql.RedisClient)
}
var err error
for i := 0; i < 10; i++ {
for range 10 {
err = client.Ping(graceful.GetManager().ShutdownContext()).Err()
if err == nil {
break

View file

@ -88,7 +88,7 @@ func testQueueBasic(t *testing.T, newFn func(cfg *BaseConfig) (baseQueue, error)
// test blocking push if queue is full
for i := 0; i < cfg.Length; i++ {
err = q.PushItem(ctx, []byte(fmt.Sprintf("item-%d", i)))
err = q.PushItem(ctx, fmt.Appendf(nil, "item-%d", i))
require.NoError(t, err)
}
ctxTimed, cancel = context.WithTimeout(ctx, 10*time.Millisecond)

View file

@ -5,6 +5,7 @@ package queue
import (
"context"
"maps"
"sync"
"time"
@ -68,9 +69,7 @@ func (m *Manager) ManagedQueues() map[int64]ManagedWorkerPoolQueue {
defer m.mu.Unlock()
queues := make(map[int64]ManagedWorkerPoolQueue, len(m.Queues))
for k, v := range m.Queues {
queues[k] = v
}
maps.Copy(queues, m.Queues)
return queues
}

View file

@ -142,11 +142,7 @@ func (q *WorkerPoolQueue[T]) basePushForShutdown(items ...T) bool {
// doStartNewWorker starts a new worker for the queue, the worker reads from worker's channel and handles the items.
func (q *WorkerPoolQueue[T]) doStartNewWorker(wp *workerGroup[T]) {
wp.wg.Add(1)
go func() {
defer wp.wg.Done()
wp.wg.Go(func() {
log.Debug("Queue %q starts new worker", q.GetName())
defer log.Debug("Queue %q stops idle worker", q.GetName())
@ -187,7 +183,7 @@ func (q *WorkerPoolQueue[T]) doStartNewWorker(wp *workerGroup[T]) {
q.workerNumMu.Unlock()
}
}
}()
})
}
// doFlush flushes the queue: it tries to read all items from the queue and handles them.

View file

@ -78,17 +78,17 @@ func TestWorkerPoolQueueUnhandled(t *testing.T) {
runCount := 2 // we can run these tests even hundreds times to see its stability
t.Run("1/1", func(t *testing.T) {
for i := 0; i < runCount; i++ {
for range runCount {
test(t, setting.QueueSettings{BatchLength: 1, MaxWorkers: 1})
}
})
t.Run("3/1", func(t *testing.T) {
for i := 0; i < runCount; i++ {
for range runCount {
test(t, setting.QueueSettings{BatchLength: 3, MaxWorkers: 1})
}
})
t.Run("4/5", func(t *testing.T) {
for i := 0; i < runCount; i++ {
for range runCount {
test(t, setting.QueueSettings{BatchLength: 4, MaxWorkers: 5})
}
})
@ -97,17 +97,17 @@ func TestWorkerPoolQueueUnhandled(t *testing.T) {
func TestWorkerPoolQueuePersistence(t *testing.T) {
runCount := 2 // we can run these tests even hundreds times to see its stability
t.Run("1/1", func(t *testing.T) {
for i := 0; i < runCount; i++ {
for range runCount {
testWorkerPoolQueuePersistence(t, setting.QueueSettings{BatchLength: 1, MaxWorkers: 1, Length: 100})
}
})
t.Run("3/1", func(t *testing.T) {
for i := 0; i < runCount; i++ {
for range runCount {
testWorkerPoolQueuePersistence(t, setting.QueueSettings{BatchLength: 3, MaxWorkers: 1, Length: 100})
}
})
t.Run("4/5", func(t *testing.T) {
for i := 0; i < runCount; i++ {
for range runCount {
testWorkerPoolQueuePersistence(t, setting.QueueSettings{BatchLength: 4, MaxWorkers: 5, Length: 100})
}
})
@ -142,7 +142,7 @@ func testWorkerPoolQueuePersistence(t *testing.T, queueSetting setting.QueueSett
q, _ := newWorkerPoolQueueForTest("pr_patch_checker_test", queueSetting, testHandler, true)
stop := runWorkerPoolQueue(q)
for i := 0; i < testCount; i++ {
for i := range testCount {
_ = q.Push("task-" + strconv.Itoa(i))
}
close(startWhenAllReady)
@ -187,7 +187,7 @@ func TestWorkerPoolQueueActiveWorkers(t *testing.T) {
q, _ := newWorkerPoolQueueForTest("test-workpoolqueue", setting.QueueSettings{Type: "channel", BatchLength: 1, MaxWorkers: 1, Length: 100}, handler, false)
stop := runWorkerPoolQueue(q)
for i := 0; i < 5; i++ {
for i := range 5 {
require.NoError(t, q.Push(i))
}
@ -203,7 +203,7 @@ func TestWorkerPoolQueueActiveWorkers(t *testing.T) {
q, _ = newWorkerPoolQueueForTest("test-workpoolqueue", setting.QueueSettings{Type: "channel", BatchLength: 1, MaxWorkers: 3, Length: 100}, handler, false)
stop = runWorkerPoolQueue(q)
for i := 0; i < 15; i++ {
for i := range 15 {
require.NoError(t, q.Push(i))
}
@ -264,12 +264,12 @@ func TestWorkerPoolQueueWorkerIdleReset(t *testing.T) {
stop := runWorkerPoolQueue(q)
const workloadSize = 12
for i := 0; i < workloadSize; i++ {
for i := range workloadSize {
require.NoError(t, q.Push(i))
}
workerIDs := make(map[string]struct{})
for i := 0; i < workloadSize; i++ {
for i := range workloadSize {
c := <-chGoroutineIDs
workerIDs[c] = struct{}{}
t.Logf("%d workers: overall=%d current=%d", i, len(workerIDs), q.GetWorkerNumber())

View file

@ -152,7 +152,7 @@ func InitializeLabels(ctx context.Context, id int64, labelTemplate string, isOrg
}
labels := make([]*issues_model.Label, len(list))
for i := 0; i < len(list); i++ {
for i := range list {
labels[i] = &issues_model.Label{
Name: list[i].Name,
Exclusive: list[i].Exclusive,

View file

@ -4,6 +4,7 @@
package setting
import (
"strings"
"sync"
"forgejo.org/modules/log"
@ -23,11 +24,11 @@ type OpenWithEditorApp struct {
type OpenWithEditorAppsType []OpenWithEditorApp
func (t OpenWithEditorAppsType) ToTextareaString() string {
ret := ""
var ret strings.Builder
for _, app := range t {
ret += app.DisplayName + " = " + app.OpenURL + "\n"
ret.WriteString(app.DisplayName + " = " + app.OpenURL + "\n")
}
return ret
return ret.String()
}
func DefaultOpenWithEditorApps() OpenWithEditorAppsType {

View file

@ -51,10 +51,10 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
for _, unescapeIdx := range escapeStringIndices {
preceding := encoded[last:unescapeIdx[0]]
if !inKey {
if splitter := strings.Index(preceding, "__"); splitter > -1 {
section += preceding[:splitter]
if before, after, ok := strings.Cut(preceding, "__"); ok {
section += before
inKey = true
key += preceding[splitter+2:]
key += after
} else {
section += preceding
}
@ -77,9 +77,9 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
}
remaining := encoded[last:]
if !inKey {
if splitter := strings.Index(remaining, "__"); splitter > -1 {
section += remaining[:splitter]
key += remaining[splitter+2:]
if before, after, ok := strings.Cut(remaining, "__"); ok {
section += before
key += after
} else {
section += remaining
}
@ -113,25 +113,24 @@ func decodeEnvironmentKey(prefixRegexp *regexp.Regexp, suffixFile, envKey string
func EnvironmentToConfig(cfg ConfigProvider, envs []string) (changed bool) {
prefixRegexp := regexp.MustCompile(EnvConfigKeyPrefixGitea)
for _, kv := range envs {
idx := strings.IndexByte(kv, '=')
if idx < 0 {
before, after, ok0 := strings.Cut(kv, "=")
if !ok0 {
continue
}
// parse the environment variable to config section name and key name
envKey := kv[:idx]
envValue := kv[idx+1:]
envKey := before
keyValue := after
ok, sectionName, keyName, useFileValue := decodeEnvironmentKey(prefixRegexp, EnvConfigKeySuffixFile, envKey)
if !ok {
continue
}
// use environment value as config value, or read the file content as value if the key indicates a file
keyValue := envValue
if useFileValue {
fileContent, err := os.ReadFile(envValue)
fileContent, err := os.ReadFile(keyValue)
if err != nil {
log.Error("Error reading file for %s : %v", envKey, envValue, err)
log.Error("Error reading file for %s : %v", envKey, keyValue, err)
continue
}
if bytes.HasSuffix(fileContent, []byte("\r\n")) {

View file

@ -108,7 +108,7 @@ func loadIndexerFrom(rootCfg ConfigProvider) {
// IndexerGlobFromString parses a comma separated list of patterns and returns a glob.Glob slice suited for repo indexing
func IndexerGlobFromString(globstr string) []Glob {
extarr := make([]Glob, 0, 10)
for _, expr := range strings.Split(strings.ToLower(globstr), ",") {
for expr := range strings.SplitSeq(strings.ToLower(globstr), ",") {
expr = strings.TrimSpace(expr)
if expr != "" {
if g, err := glob.Compile(expr, '.', '/'); err != nil {

View file

@ -269,8 +269,8 @@ func initLoggerByName(manager *log.LoggerManager, rootCfg ConfigProvider, logger
}
var eventWriters []log.EventWriter
modes := strings.Split(modeVal, ",")
for _, modeName := range modes {
modes := strings.SplitSeq(modeVal, ",")
for modeName := range modes {
modeName = strings.TrimSpace(modeName)
if modeName == "" {
continue

View file

@ -85,8 +85,8 @@ func loadMarkupFrom(rootCfg ConfigProvider) {
func newMarkupSanitizer(name string, sec ConfigSection) {
rule, ok := createMarkupSanitizerRule(name, sec)
if ok {
if strings.HasPrefix(name, "sanitizer.") {
names := strings.SplitN(strings.TrimPrefix(name, "sanitizer."), ".", 2)
if after, ok0 := strings.CutPrefix(name, "sanitizer."); ok0 {
names := strings.SplitN(after, ".", 2)
name = names[0]
}
for _, renderer := range ExternalMarkupRenderers {

View file

@ -48,11 +48,7 @@ func loadMirrorFrom(rootCfg ConfigProvider) {
Mirror.MinInterval = 1 * time.Minute
}
if Mirror.DefaultInterval < Mirror.MinInterval {
if time.Hour*8 < Mirror.MinInterval {
Mirror.DefaultInterval = Mirror.MinInterval
} else {
Mirror.DefaultInterval = time.Hour * 8
}
Mirror.DefaultInterval = max(time.Hour*8, Mirror.MinInterval)
log.Warn("Mirror.DefaultInterval is less than Mirror.MinInterval, set to %s", Mirror.DefaultInterval.String())
}
}

View file

@ -7,6 +7,7 @@ import (
"errors"
"fmt"
"path/filepath"
"slices"
"strings"
)
@ -27,12 +28,7 @@ var storageTypes = []StorageType{
// IsValidStorageType returns true if the given storage type is valid
func IsValidStorageType(storageType StorageType) bool {
for _, t := range storageTypes {
if t == storageType {
return true
}
}
return false
return slices.Contains(storageTypes, storageType)
}
// MinioStorageConfig represents the configuration for a minio storage

View file

@ -70,13 +70,13 @@ type ActionRun struct {
// the current status of this run
Status string `json:"status"`
// when the action run was started
Started time.Time `json:"started,omitempty"`
Started time.Time `json:"started"`
// when the action run was stopped
Stopped time.Time `json:"stopped,omitempty"`
Stopped time.Time `json:"stopped"`
// when the action run was created
Created time.Time `json:"created,omitempty"`
Created time.Time `json:"created"`
// when the action run was last updated
Updated time.Time `json:"updated,omitempty"`
Updated time.Time `json:"updated"`
// how long the action run ran for
Duration time.Duration `json:"duration,omitempty"`
// the url of this action run

View file

@ -204,7 +204,7 @@ func (l *IssueTemplateLabels) UnmarshalYAML(value *yaml.Node) error {
if err != nil {
return err
}
for _, v := range strings.Split(str, ",") {
for v := range strings.SplitSeq(str, ",") {
if v = strings.TrimSpace(v); v == "" {
continue
}

View file

@ -118,7 +118,7 @@ type Repository struct {
// enum: ["sha1", "sha256"]
ObjectFormatName string `json:"object_format_name"`
// swagger:strfmt date-time
MirrorUpdated time.Time `json:"mirror_updated,omitempty"`
MirrorUpdated time.Time `json:"mirror_updated"`
RepoTransfer *RepoTransfer `json:"repo_transfer"`
Topics []string `json:"topics"`
}

View file

@ -34,9 +34,9 @@ type User struct {
// Is the user an administrator
IsAdmin bool `json:"is_admin"`
// swagger:strfmt date-time
LastLogin time.Time `json:"last_login,omitempty"`
LastLogin time.Time `json:"last_login"`
// swagger:strfmt date-time
Created time.Time `json:"created,omitempty"`
Created time.Time `json:"created"`
// Is user restricted
Restricted bool `json:"restricted"`
// Is user active

View file

@ -21,9 +21,9 @@ type GPGKey struct {
CanCertify bool `json:"can_certify"`
Verified bool `json:"verified"`
// swagger:strfmt date-time
Created time.Time `json:"created_at,omitempty"`
Created time.Time `json:"created_at"`
// swagger:strfmt date-time
Expires time.Time `json:"expires_at,omitempty"`
Expires time.Time `json:"expires_at"`
}
// GPGKeyEmail an email attached to a GPGKey

View file

@ -15,7 +15,7 @@ type PublicKey struct {
Title string `json:"title,omitempty"`
Fingerprint string `json:"fingerprint,omitempty"`
// swagger:strfmt date-time
Created time.Time `json:"created_at,omitempty"`
Created time.Time `json:"created_at"`
Owner *User `json:"user,omitempty"`
ReadOnly bool `json:"read_only,omitempty"`
KeyType string `json:"key_type,omitempty"`

View file

@ -13,7 +13,7 @@ import (
)
func tokens(s string) (a []any) {
for _, v := range strings.Fields(s) {
for v := range strings.FieldsSeq(s) {
a = append(a, v)
}
return a

View file

@ -248,7 +248,7 @@ func extractErrorLine(code []byte, lineNum, posNum int, target string) string {
b := bufio.NewReader(bytes.NewReader(code))
var line []byte
var err error
for i := 0; i < lineNum; i++ {
for i := range lineNum {
if line, err = b.ReadBytes('\n'); err != nil {
if i == lineNum-1 && errors.Is(err, io.EOF) {
err = nil

View file

@ -7,6 +7,7 @@ import (
"fmt"
"html/template"
"io"
"maps"
"reflect"
"sync"
texttemplate "text/template"
@ -40,9 +41,7 @@ func (t *ScopedTemplate) Funcs(funcMap template.FuncMap) {
panic("cannot add new functions to frozen template set")
}
t.all.Funcs(funcMap)
for k, v := range funcMap {
t.parseFuncs[k] = v
}
maps.Copy(t.parseFuncs, funcMap)
}
func (t *ScopedTemplate) New(name string) *template.Template {
@ -159,9 +158,7 @@ func newScopedTemplateSet(all *template.Template, name string) (*scopedTemplateS
textTmplPtr.muFuncs.Lock()
ts.execFuncs = map[string]reflect.Value{}
for k, v := range textTmplPtr.execFuncs {
ts.execFuncs[k] = v
}
maps.Copy(ts.execFuncs, textTmplPtr.execFuncs)
textTmplPtr.muFuncs.Unlock()
var collectTemplates func(nodes []parse.Node)
@ -220,9 +217,7 @@ func (ts *scopedTemplateSet) newExecutor(funcMap map[string]any) TemplateExecuto
tmpl := texttemplate.New("")
tmplPtr := ptr[textTemplate](tmpl)
tmplPtr.execFuncs = map[string]reflect.Value{}
for k, v := range ts.execFuncs {
tmplPtr.execFuncs[k] = v
}
maps.Copy(tmplPtr.execFuncs, ts.execFuncs)
if funcMap != nil {
tmpl.Funcs(funcMap)
}

View file

@ -246,7 +246,8 @@ func RenderMarkdownToHtml(ctx context.Context, input string) template.HTML { //n
}
func RenderLabels(ctx *Context, labels []*issues_model.Label, repoLink string, isPull bool) template.HTML {
htmlCode := `<span class="labels-list">`
var htmlCode strings.Builder
htmlCode.WriteString(`<span class="labels-list">`)
for _, label := range labels {
// Protect against nil value in labels - shouldn't happen but would cause a panic if so
if label == nil {
@ -257,11 +258,11 @@ func RenderLabels(ctx *Context, labels []*issues_model.Label, repoLink string, i
if isPull {
issuesOrPull = "pulls"
}
htmlCode += fmt.Sprintf("<a href='%s/%s?labels=%d' rel='nofollow'>%s</a> ",
fmt.Fprintf(&htmlCode, "<a href='%s/%s?labels=%d' rel='nofollow'>%s</a> ",
repoLink, issuesOrPull, label.ID, RenderLabel(ctx, label))
}
htmlCode += "</span>"
return template.HTML(htmlCode)
htmlCode.WriteString("</span>")
return template.HTML(htmlCode.String())
}
func RenderUser(ctx context.Context, user user_model.User) template.HTML {

View file

@ -53,11 +53,11 @@ func (lc *LogChecker) checkLogEvent(event *log.EventFormatted) {
}
}
var checkerIndex int64
var checkerIndex atomic.Int64
func NewLogChecker(namePrefix string, level log.Level) (logChecker *LogChecker, cancel func()) {
logger := log.GetManager().GetLogger(namePrefix)
newCheckerIndex := atomic.AddInt64(&checkerIndex, 1)
newCheckerIndex := checkerIndex.Add(1)
writerName := namePrefix + "-" + fmt.Sprint(newCheckerIndex)
lc := &LogChecker{}

View file

@ -501,7 +501,7 @@ func PrintCurrentTest(t testing.TB, skip ...int) func() {
// Printf takes a format and args and prints the string to os.Stdout
func Printf(format string, args ...any) {
if log.CanColorStdout {
for i := 0; i < len(args); i++ {
for i := range args {
args[i] = log.NewColoredValue(args[i])
}
}

View file

@ -60,9 +60,9 @@ func getVersionDNS(domainEndpoint string) (version string, err error) {
}
for _, record := range records {
if strings.HasPrefix(record, "forgejo_versions=") {
if after, ok := strings.CutPrefix(record, "forgejo_versions="); ok {
// Get all supported versions, separated by a comma.
supportedVersions := strings.Split(strings.TrimPrefix(record, "forgejo_versions="), ",")
supportedVersions := strings.Split(after, ",")
// For now always return the latest supported version.
return supportedVersions[len(supportedVersions)-1], nil
}

View file

@ -12,7 +12,7 @@ import (
// Remove removes the named file or (empty) directory with at most 5 attempts.
func Remove(name string) error {
var err error
for i := 0; i < 5; i++ {
for range 5 {
err = os.Remove(name)
if err == nil {
break
@ -35,7 +35,7 @@ func Remove(name string) error {
// RemoveAll removes the named file or (empty) directory with at most 5 attempts.
func RemoveAll(name string) error {
var err error
for i := 0; i < 5; i++ {
for range 5 {
err = os.RemoveAll(name)
if err == nil {
break
@ -58,7 +58,7 @@ func RemoveAll(name string) error {
// Rename renames (moves) oldpath to newpath with at most 5 attempts.
func Rename(oldpath, newpath string) error {
var err error
for i := 0; i < 5; i++ {
for i := range 5 {
err = os.Rename(oldpath, newpath)
if err == nil {
break

View file

@ -24,7 +24,7 @@ func TestCompressOldFile(t *testing.T) {
ng, err := os.OpenFile(nonGzip, os.O_CREATE|os.O_WRONLY, 0o660)
require.NoError(t, err)
for i := 0; i < 999; i++ {
for range 999 {
f.WriteString("This is a test file\n")
ng.WriteString("This is a test file\n")
}

View file

@ -12,19 +12,19 @@ import (
)
func TestDebounce(t *testing.T) {
var c int64
var c atomic.Int64
d := Debounce(50 * time.Millisecond)
d(func() { atomic.AddInt64(&c, 1) })
assert.EqualValues(t, 0, atomic.LoadInt64(&c))
d(func() { atomic.AddInt64(&c, 1) })
d(func() { atomic.AddInt64(&c, 1) })
d(func() { c.Add(1) })
assert.EqualValues(t, 0, c.Load())
d(func() { c.Add(1) })
d(func() { c.Add(1) })
time.Sleep(100 * time.Millisecond)
assert.EqualValues(t, 1, atomic.LoadInt64(&c))
d(func() { atomic.AddInt64(&c, 1) })
assert.EqualValues(t, 1, atomic.LoadInt64(&c))
d(func() { atomic.AddInt64(&c, 1) })
d(func() { atomic.AddInt64(&c, 1) })
d(func() { atomic.AddInt64(&c, 1) })
assert.EqualValues(t, 1, c.Load())
d(func() { c.Add(1) })
assert.EqualValues(t, 1, c.Load())
d(func() { c.Add(1) })
d(func() { c.Add(1) })
d(func() { c.Add(1) })
time.Sleep(100 * time.Millisecond)
assert.EqualValues(t, 2, atomic.LoadInt64(&c))
assert.EqualValues(t, 2, c.Load())
}

View file

@ -47,7 +47,7 @@ func SplitTrimSpace(input, sep string) []string {
input = strings.ReplaceAll(input, "\r\n", "\n")
var stringList []string
for _, s := range strings.Split(input, sep) {
for s := range strings.SplitSeq(input, sep) {
// trim leading and trailing space
stringList = append(stringList, strings.TrimSpace(s))
}

View file

@ -243,7 +243,7 @@ func TestGeneratingEd25519Keypair(t *testing.T) {
// And another 32 bytes are required, which is included as random value
// in the OpenSSH format.
b := make([]byte, 64)
for i := 0; i < 64; i++ {
for i := range 64 {
b[i] = byte(i)
}
rand.Reader = bytes.NewReader(b)

View file

@ -266,17 +266,17 @@ func addEmailBindingRules() {
}
func portOnly(hostport string) string {
colon := strings.IndexByte(hostport, ':')
if colon == -1 {
_, after, ok := strings.Cut(hostport, ":")
if !ok {
return ""
}
if i := strings.Index(hostport, "]:"); i != -1 {
return hostport[i+len("]:"):]
if _, after, ok := strings.Cut(hostport, "]:"); ok {
return after
}
if strings.Contains(hostport, "]") {
return ""
}
return hostport[colon+len(":"):]
return after
}
func validPort(p string) bool {

View file

@ -7,6 +7,7 @@ import (
"net"
"net/url"
"regexp"
"slices"
"strings"
"forgejo.org/modules/setting"
@ -40,12 +41,7 @@ func IsValidSiteURL(uri string) bool {
return false
}
for _, scheme := range setting.Service.ValidSiteURLSchemes {
if scheme == u.Scheme {
return true
}
}
return false
return slices.Contains(setting.Service.ValidSiteURLSchemes, u.Scheme)
}
// IsAPIURL checks if URL is current Gitea instance API URL

View file

@ -6,6 +6,7 @@ package validation
import (
"fmt"
"reflect"
"slices"
"strings"
"unicode/utf8"
@ -87,10 +88,8 @@ func ValidateMaxLen(value string, maxLen int, name string) []string {
}
func ValidateOneOf(value any, allowed []any, name string) []string {
for _, allowedElem := range allowed {
if value == allowedElem {
return []string{}
}
if slices.Contains(allowed, value) {
return []string{}
}
return []string{fmt.Sprintf("Field %s contains the value %v, which is not in allowed subset %v", name, value, allowed)}
}

View file

@ -17,7 +17,7 @@ import (
var responseStatusProviders = map[reflect.Type]func(req *http.Request) types.ResponseStatusProvider{}
func RegisterResponseStatusProvider[T any](fn func(req *http.Request) types.ResponseStatusProvider) {
responseStatusProviders[reflect.TypeOf((*T)(nil)).Elem()] = fn
responseStatusProviders[reflect.TypeFor[T]()] = fn
}
// responseWriter is a wrapper of http.ResponseWriter, to check whether the response has been written
@ -49,9 +49,9 @@ func (r *responseWriter) WriteHeader(statusCode int) {
}
var (
httpReqType = reflect.TypeOf((*http.Request)(nil))
respWriterType = reflect.TypeOf((*http.ResponseWriter)(nil)).Elem()
cancelFuncType = reflect.TypeOf((*goctx.CancelFunc)(nil)).Elem()
httpReqType = reflect.TypeFor[*http.Request]()
respWriterType = reflect.TypeFor[http.ResponseWriter]()
cancelFuncType = reflect.TypeFor[goctx.CancelFunc]()
)
// preCheckHandler checks whether the handler is valid, developers could get first-time feedback, all mistakes could be found at startup

View file

@ -30,7 +30,7 @@ func AssignForm(form any, data map[string]any) {
typ := reflect.TypeOf(form)
val := reflect.ValueOf(form)
for typ.Kind() == reflect.Ptr {
for typ.Kind() == reflect.Pointer {
typ = typ.Elem()
val = val.Elem()
}
@ -51,7 +51,7 @@ func AssignForm(form any, data map[string]any) {
}
func getRuleBody(field reflect.StructField, prefix string) string {
for _, rule := range strings.Split(field.Tag.Get("binding"), ";") {
for rule := range strings.SplitSeq(field.Tag.Get("binding"), ";") {
if strings.HasPrefix(rule, prefix) {
return rule[len(prefix) : len(rule)-1]
}
@ -99,7 +99,7 @@ func Validate(errs binding.Errors, data map[string]any, f any, l translation.Loc
typ := reflect.TypeOf(f)
if typ.Kind() == reflect.Ptr {
if typ.Kind() == reflect.Pointer {
typ = typ.Elem()
}

View file

@ -5,6 +5,7 @@ package middleware
import (
"context"
"maps"
"time"
"forgejo.org/modules/setting"
@ -22,9 +23,7 @@ func (ds ContextData) GetData() ContextData {
}
func (ds ContextData) MergeFrom(other ContextData) ContextData {
for k, v := range other {
ds[k] = v
}
maps.Copy(ds, other)
return ds
}

View file

@ -107,8 +107,8 @@ func (r *Route) Methods(methods, pattern string, h ...any) {
middlewares, handlerFunc := r.wrapMiddlewareAndHandler(h)
fullPattern := r.getPattern(pattern)
if strings.Contains(methods, ",") {
methods := strings.Split(methods, ",")
for _, method := range methods {
methods := strings.SplitSeq(methods, ",")
for method := range methods {
r.R.With(middlewares...).Method(strings.TrimSpace(method), fullPattern, handlerFunc)
}
} else {