Mirror of https://github.com/go-gitea/gitea, synced 2026-02-07 13:11:14 +00:00

Compare commits: 26 commits
99053ce4fa, e818de179e, 0a87bf9016, 86d99e2f38, 7bfb7567b2, 7619808137, b854930a96, 935f5e0ad5, 08c6ea6728, 67977f0b1c, 78fbcf35ad, 8f5b1d27d4, 89c99a4dcb, 3c7e7a19dd, 8313b5d998, 6ca73bf662, 5e10def7f7, 1b8efb6fc7, 8f89e1e174, cbc595b9d9, cc5ccf44dc, f91e35b8b7, f52ed422dc, 0266ee5de7, ac03e65cf4, f3e6672c09
@@ -31,7 +31,7 @@ jobs:
    minio:
      # as github actions doesn't support "entrypoint", we need to use a non-official image
      # that has a custom entrypoint set to "minio server /data"
-     image: bitnami/minio:2023.8.31
+     image: bitnamilegacy/minio:2023.8.31
      env:
        MINIO_ROOT_USER: 123456
        MINIO_ROOT_PASSWORD: 12345678

@@ -113,7 +113,7 @@ jobs:
      ports:
        - 6379:6379
    minio:
-     image: bitnami/minio:2021.3.17
+     image: bitnamilegacy/minio:2021.3.17
      env:
        MINIO_ACCESS_KEY: 123456
        MINIO_SECRET_KEY: 12345678

@@ -155,7 +155,7 @@ jobs:
    services:
      mysql:
        # the bitnami mysql image has more options than the official one, it's easier to customize
-       image: bitnami/mysql:8.0
+       image: bitnamilegacy/mysql:8.0
        env:
          ALLOW_EMPTY_PASSWORD: true
          MYSQL_DATABASE: testgitea
@@ -4,6 +4,37 @@ This changelog goes through the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.com).

## [1.24.7](https://github.com/go-gitea/gitea/releases/tag/1.24.7) - 2025-10-24

* SECURITY
  * Refactor legacy code (#35708) (#35713)
  * Fixing issue #35530: Password Leak in Log Messages (#35584) (#35665)
  * Fix a bug missed return (#35655) (#35671)
* BUGFIXES
  * Fix inputting review comment will remove reviewer (#35591) (#35664)
* TESTING
  * Mock external service in hcaptcha TestCaptcha (#35604) (#35663)
  * Fix build (#35669)

## [1.24.6](https://github.com/go-gitea/gitea/releases/tag/1.24.6) - 2025-09-10

* SECURITY
  * Upgrade xz to v0.5.15 (#35385)
* BUGFIXES
  * Fix a compare page 404 bug when the pull request disabled (#35441) (#35453)
  * Fix bug when issue disabled, pull request number in the commit message cannot be redirected (#35420) (#35442)
  * Add author.name field to Swift Package Registry API response (#35410) (#35431)
  * Remove usernames when empty in discord webhook (#35412) (#35417)
  * Allow foreachref parser to grow its buffer (#35365) (#35376)
  * Allow deleting comment with content via API like web did (#35346) (#35354)
  * Fix atom/rss mixed error (#35345) (#35347)
  * Fix review request webhook bug (#35339)
  * Remove duplicate html IDs (#35210) (#35325)
  * Fix LFS range size header response (#35277) (#35293)
  * Fix GitHub release assets URL validation (#35287) (#35290)
  * Fix token lifetime, closes #35230 (#35271) (#35281)
  * Fix push commits comments when changing the pull request target branch (#35386) (#35443)

## [1.24.5](https://github.com/go-gitea/gitea/releases/tag/v1.24.5) - 2025-08-12

* BUGFIXES
+2 -25
@@ -13,7 +13,6 @@ import (
 	"path/filepath"
 	"strconv"
 	"strings"
-	"time"
 	"unicode"

 	asymkey_model "code.gitea.io/gitea/models/asymkey"
@@ -31,7 +30,6 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/services/lfs"

-	"github.com/golang-jwt/jwt/v5"
 	"github.com/kballard/go-shellquote"
 	"github.com/urfave/cli/v2"
 )
@@ -131,27 +129,6 @@ func getAccessMode(verb, lfsVerb string) perm.AccessMode {
 	return perm.AccessModeNone
 }

-func getLFSAuthToken(ctx context.Context, lfsVerb string, results *private.ServCommandResults) (string, error) {
-	now := time.Now()
-	claims := lfs.Claims{
-		RegisteredClaims: jwt.RegisteredClaims{
-			ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
-			NotBefore: jwt.NewNumericDate(now),
-		},
-		RepoID: results.RepoID,
-		Op:     lfsVerb,
-		UserID: results.UserID,
-	}
-	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
-
-	// Sign and get the complete encoded token as a string using the secret
-	tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
-	if err != nil {
-		return "", fail(ctx, "Failed to sign JWT Token", "Failed to sign JWT token: %v", err)
-	}
-	return "Bearer " + tokenString, nil
-}
-
 func runServ(c *cli.Context) error {
 	ctx, cancel := installSignals()
 	defer cancel()
@@ -284,7 +261,7 @@ func runServ(c *cli.Context) error {

 	// LFS SSH protocol
 	if verb == git.CmdVerbLfsTransfer {
-		token, err := getLFSAuthToken(ctx, lfsVerb, results)
+		token, err := lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{Op: lfsVerb, UserID: results.UserID, RepoID: results.RepoID})
 		if err != nil {
 			return err
 		}
@@ -295,7 +272,7 @@ func runServ(c *cli.Context) error {
 	if verb == git.CmdVerbLfsAuthenticate {
 		url := fmt.Sprintf("%s%s/%s.git/info/lfs", setting.AppURL, url.PathEscape(results.OwnerName), url.PathEscape(results.RepoName))

-		token, err := getLFSAuthToken(ctx, lfsVerb, results)
+		token, err := lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{Op: lfsVerb, UserID: results.UserID, RepoID: results.RepoID})
 		if err != nil {
 			return err
 		}
@@ -109,7 +109,7 @@ require (
 	github.com/stretchr/testify v1.10.0
 	github.com/syndtr/goleveldb v1.0.0
 	github.com/tstranex/u2f v1.0.0
-	github.com/ulikunitz/xz v0.5.12
+	github.com/ulikunitz/xz v0.5.15
 	github.com/urfave/cli/v2 v2.27.6
 	github.com/wneessen/go-mail v0.6.2
 	github.com/xeipuuv/gojsonschema v1.2.0

@@ -757,8 +757,8 @@ github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGB
 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
 github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
 github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
-github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
-github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
+github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
+github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
 github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
 github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
 github.com/urfave/cli/v2 v2.27.6 h1:VdRdS98FNhKZ8/Az8B7MTyGQmpIr36O1EHybx/LaZ4g=
@@ -67,13 +67,6 @@ func (key *PublicKey) OmitEmail() string {
 	return strings.Join(strings.Split(key.Content, " ")[:2], " ")
 }

-// AuthorizedString returns formatted public key string for authorized_keys file.
-//
-// TODO: Consider dropping this function
-func (key *PublicKey) AuthorizedString() string {
-	return AuthorizedStringForKey(key)
-}
-
 func addKey(ctx context.Context, key *PublicKey) (err error) {
 	if len(key.Fingerprint) == 0 {
 		key.Fingerprint, err = CalcFingerprint(key.Content)
@@ -17,29 +17,13 @@ import (
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/util"

 	"golang.org/x/crypto/ssh"
 )

-// (removed: ASCII-art "Authorized Keys" banner, 12 comment lines)
-//
-// This file contains functions for creating authorized_keys files
-//
-// There is a dependence on the database within RegeneratePublicKeys however most of these functions probably belong in a module
-
-const (
-	tplCommentPrefix = `# gitea public key`
-	tplPublicKey     = tplCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s` + "\n"
-)
+// AuthorizedStringCommentPrefix is a magic tag
+// some functions like RegeneratePublicKeys needs this tag to skip the keys generated by Gitea, while keep other keys
+const AuthorizedStringCommentPrefix = `# gitea public key`

 var sshOpLocker sync.Mutex

@@ -50,17 +34,45 @@ func WithSSHOpLocker(f func() error) error {
 }

 // AuthorizedStringForKey creates the authorized keys string appropriate for the provided key
-func AuthorizedStringForKey(key *PublicKey) string {
+func AuthorizedStringForKey(key *PublicKey) (string, error) {
 	sb := &strings.Builder{}
-	_ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]any{
+	_, err := writeAuthorizedStringForKey(key, sb)
+	return sb.String(), err
+}
+
+// WriteAuthorizedStringForValidKey writes the authorized key for the provided key. If the key is invalid, it does nothing.
+func WriteAuthorizedStringForValidKey(key *PublicKey, w io.Writer) error {
+	validKey, err := writeAuthorizedStringForKey(key, w)
+	if !validKey {
+		log.Debug("WriteAuthorizedStringForValidKey: key %s is not valid: %v", key, err)
+		return nil
+	}
+	return err
+}
+
+func writeAuthorizedStringForKey(key *PublicKey, w io.Writer) (keyValid bool, err error) {
+	const tpl = AuthorizedStringCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s %s` + "\n"
+	pubKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(key.Content))
+	if err != nil {
+		return false, err
+	}
+	// now the key is valid, the code below could only return template/IO related errors
+	sbCmd := &strings.Builder{}
+	err = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sbCmd, map[string]any{
 		"AppPath":     util.ShellEscape(setting.AppPath),
 		"AppWorkPath": util.ShellEscape(setting.AppWorkPath),
 		"CustomConf":  util.ShellEscape(setting.CustomConf),
 		"CustomPath":  util.ShellEscape(setting.CustomPath),
 		"Key":         key,
 	})

-	return fmt.Sprintf(tplPublicKey, util.ShellEscape(sb.String()), key.Content)
+	if err != nil {
+		return true, err
+	}
+	sshCommandEscaped := util.ShellEscape(sbCmd.String())
+	sshKeyMarshalled := strings.TrimSpace(string(ssh.MarshalAuthorizedKey(pubKey)))
+	sshKeyComment := fmt.Sprintf("user-%d", key.OwnerID)
+	_, err = fmt.Fprintf(w, tpl, sshCommandEscaped, sshKeyMarshalled, sshKeyComment)
+	return true, err
 }

 // appendAuthorizedKeysToFile appends new SSH keys' content to authorized_keys file.
@@ -112,7 +124,7 @@ func appendAuthorizedKeysToFile(keys ...*PublicKey) error {
 		if key.Type == KeyTypePrincipal {
 			continue
 		}
-		if _, err = f.WriteString(key.AuthorizedString()); err != nil {
+		if err = WriteAuthorizedStringForValidKey(key, f); err != nil {
 			return err
 		}
 	}
@@ -120,10 +132,9 @@ func appendAuthorizedKeysToFile(keys ...*PublicKey) error {
 }

 // RegeneratePublicKeys regenerates the authorized_keys file
-func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
+func RegeneratePublicKeys(ctx context.Context, t io.Writer) error {
 	if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
-		_, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
-		return err
+		return WriteAuthorizedStringForValidKey(bean.(*PublicKey), t)
 	}); err != nil {
 		return err
 	}
@@ -144,11 +155,11 @@ func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
 	scanner := bufio.NewScanner(f)
 	for scanner.Scan() {
 		line := scanner.Text()
-		if strings.HasPrefix(line, tplCommentPrefix) {
+		if strings.HasPrefix(line, AuthorizedStringCommentPrefix) {
 			scanner.Scan()
 			continue
 		}
-		_, err = t.WriteString(line + "\n")
+		_, err = io.WriteString(t, line+"\n")
 		if err != nil {
 			return err
 		}
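For orientation, here is a minimal sketch of the shape of a single entry the template above writes into authorized_keys: a marker comment line, then a restricted command= line carrying the marshalled key and a "user-<OwnerID>" comment. The serv command path and key material below are placeholders, not values taken from this diff.

```go
package main

import "fmt"

func main() {
	// Sketch only: mirrors the tpl constant in writeAuthorizedStringForKey.
	const tpl = "# gitea public key\n" +
		"command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s %s\n"
	sshCommand := `"/usr/local/bin/gitea --config=/etc/gitea/app.ini serv key-42"`   // hypothetical, already shell-escaped
	marshalledKey := "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPlaceholderKeyMaterial" // placeholder key content
	fmt.Printf(tpl, sshCommand, marshalledKey, "user-42")
}
```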
@@ -173,7 +173,7 @@ func GetReviewsByIssueID(ctx context.Context, issueID int64) (latestReviews, mig
 	reviewersMap := make(map[int64][]*Review)         // key is reviewer id
 	originalReviewersMap := make(map[int64][]*Review) // key is original author id
 	reviewTeamsMap := make(map[int64][]*Review)       // key is reviewer team id
-	countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest}
+	countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest, ReviewTypeComment}
 	for _, review := range reviews {
 		if review.ReviewerTeamID == 0 && slices.Contains(countedReivewTypes, review.Type) && !review.Dismissed {
 			if review.OriginalAuthorID != 0 {
@@ -123,6 +123,7 @@ func TestGetReviewersByIssueID(t *testing.T) {
 	assert.NoError(t, unittest.PrepareTestDatabase())

 	issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 3})
+	user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
 	user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
 	org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
 	user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
@@ -130,6 +131,12 @@ func TestGetReviewersByIssueID(t *testing.T) {

 	expectedReviews := []*issues_model.Review{}
 	expectedReviews = append(expectedReviews,
+		&issues_model.Review{
+			ID:          5,
+			Reviewer:    user1,
+			Type:        issues_model.ReviewTypeComment,
+			UpdatedUnix: 946684810,
+		},
 		&issues_model.Review{
 			ID:       7,
 			Reviewer: org3,
@@ -168,8 +175,9 @@ func TestGetReviewersByIssueID(t *testing.T) {
 	for _, review := range allReviews {
 		assert.NoError(t, review.LoadReviewer(db.DefaultContext))
 	}
-	if assert.Len(t, allReviews, 5) {
+	if assert.Len(t, allReviews, 6) {
 		for i, review := range allReviews {
 			assert.Equal(t, expectedReviews[i].ID, review.ID)
 			assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
 			assert.Equal(t, expectedReviews[i].Type, review.Type)
 			assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
@@ -348,10 +348,8 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
 	for _, u := range repo.Units {
 		for _, team := range teams {
-			unitAccessMode := minAccessMode
-			if teamMode, exist := team.UnitAccessModeEx(ctx, u.Type); exist {
-				unitAccessMode = max(perm.unitsMode[u.Type], unitAccessMode, teamMode)
-			}
+			teamMode, _ := team.UnitAccessModeEx(ctx, u.Type)
+			unitAccessMode := max(perm.unitsMode[u.Type], minAccessMode, teamMode)
 			perm.unitsMode[u.Type] = unitAccessMode
 		}
 	}
@@ -197,4 +197,37 @@ func TestGetUserRepoPermission(t *testing.T) {
 		assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeCode])
 		assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues])
 	})

 	repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) // org private repo, same org as repo 32
 	require.NoError(t, repo3.LoadOwner(ctx))
 	require.True(t, repo3.Owner.IsOrganization())
 	require.NoError(t, db.TruncateBeans(ctx, &organization.TeamUnit{}, &Access{})) // The user has access set of that repo, remove it, it is useless for our test
 	require.NoError(t, db.Insert(ctx, &organization.TeamRepo{OrgID: org.ID, TeamID: team.ID, RepoID: repo3.ID}))
 	t.Run("DoerWithNoopTeamOnPrivateRepo", func(t *testing.T) {
 		perm, err := GetUserRepoPermission(ctx, repo3, user)
 		require.NoError(t, err)
 		assert.Equal(t, perm_model.AccessModeNone, perm.AccessMode)
 		assert.Equal(t, perm_model.AccessModeNone, perm.unitsMode[unit.TypeCode])
 		assert.Equal(t, perm_model.AccessModeNone, perm.unitsMode[unit.TypeIssues])
 	})

 	require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeCode, AccessMode: perm_model.AccessModeNone}))
 	require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeIssues, AccessMode: perm_model.AccessModeRead}))
 	t.Run("DoerWithReadIssueTeamOnPrivateRepo", func(t *testing.T) {
 		perm, err := GetUserRepoPermission(ctx, repo3, user)
 		require.NoError(t, err)
 		assert.Equal(t, perm_model.AccessModeNone, perm.AccessMode)
 		assert.Equal(t, perm_model.AccessModeNone, perm.unitsMode[unit.TypeCode])
 		assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues])
 	})

 	require.NoError(t, db.Insert(ctx, repo_model.Collaboration{RepoID: repo3.ID, UserID: user.ID, Mode: perm_model.AccessModeWrite}))
 	require.NoError(t, db.Insert(ctx, Access{RepoID: repo3.ID, UserID: user.ID, Mode: perm_model.AccessModeWrite}))
 	t.Run("DoerWithReadIssueTeamAndWriteCollaboratorOnPrivateRepo", func(t *testing.T) {
 		perm, err := GetUserRepoPermission(ctx, repo3, user)
 		require.NoError(t, err)
 		assert.Equal(t, perm_model.AccessModeWrite, perm.AccessMode)
 		assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeCode])
 		assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeIssues])
 	})
 }
@@ -85,8 +85,8 @@ func TestRepository_ChangeCollaborationAccessMode(t *testing.T) {
 	assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, unittest.NonexistentID, perm.AccessModeAdmin))

-	// Disvard invalid input.
-	assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessMode(unittest.NonexistentID)))
+	// Discard invalid input.
+	assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessMode(-1)))

 	unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
 }
@@ -137,16 +137,9 @@ func DeleteUploads(ctx context.Context, uploads ...*Upload) (err error) {

 	for _, upload := range uploads {
 		localPath := upload.LocalPath()
-		isFile, err := util.IsFile(localPath)
-		if err != nil {
-			log.Error("Unable to check if %s is a file. Error: %v", localPath, err)
-		}
-		if !isFile {
-			continue
-		}
-
 		if err := util.Remove(localPath); err != nil {
-			return fmt.Errorf("remove upload: %w", err)
+			// just continue, don't fail the whole operation if a file is missing (removed by others)
+			log.Error("unable to remove upload file %s: %v", localPath, err)
 		}
 	}
@@ -30,6 +30,10 @@ type Parser struct {
 func NewParser(r io.Reader, format Format) *Parser {
 	scanner := bufio.NewScanner(r)

+	// default MaxScanTokenSize = 64 kiB may be too small for some references,
+	// so allow the buffer to grow up to 4x if needed
+	scanner.Buffer(nil, 4*bufio.MaxScanTokenSize)
+
 	// in addition to the reference delimiter we specified in the --format,
 	// `git for-each-ref` will always add a newline after every reference.
 	refDelim := make([]byte, 0, len(format.refDelim)+1)
@@ -70,6 +74,9 @@ func NewParser(r io.Reader, format Format) *Parser {
 // { "objecttype": "tag", "refname:short": "v1.16.4", "object": "f460b7543ed500e49c133c2cd85c8c55ee9dbe27" }
 func (p *Parser) Next() map[string]string {
 	if !p.scanner.Scan() {
+		if err := p.scanner.Err(); err != nil {
+			p.err = err
+		}
 		return nil
 	}
 	fields, err := p.parseRef(p.scanner.Text())
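As background for the buffer change above, a small self-contained sketch (not Gitea code) of how bufio.Scanner behaves at the default 64 KiB token limit versus a raised cap:

```go
package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	longLine := strings.Repeat("x", 100*1024) // 100 KiB, larger than the default token limit

	s := bufio.NewScanner(strings.NewReader(longLine))
	fmt.Println(s.Scan(), s.Err()) // false bufio.Scanner: token too long

	s = bufio.NewScanner(strings.NewReader(longLine))
	s.Buffer(nil, 4*bufio.MaxScanTokenSize) // let the internal buffer grow up to 256 KiB
	fmt.Println(s.Scan(), s.Err())          // true <nil>
}
```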
+4 -18
@@ -51,30 +51,16 @@ func GetHook(repoPath, name string) (*Hook, error) {
 		name: name,
 		path: filepath.Join(repoPath, "hooks", name+".d", name),
 	}
-	isFile, err := util.IsFile(h.path)
-	if err != nil {
-		return nil, err
-	}
-	if isFile {
-		data, err := os.ReadFile(h.path)
-		if err != nil {
-			return nil, err
-		}
+	if data, err := os.ReadFile(h.path); err == nil {
 		h.IsActive = true
 		h.Content = string(data)
 		return h, nil
+	} else if !os.IsNotExist(err) {
+		return nil, err
 	}

 	samplePath := filepath.Join(repoPath, "hooks", name+".sample")
-	isFile, err = util.IsFile(samplePath)
-	if err != nil {
-		return nil, err
-	}
-	if isFile {
-		data, err := os.ReadFile(samplePath)
-		if err != nil {
-			return nil, err
-		}
+	if data, err := os.ReadFile(samplePath); err == nil {
 		h.Sample = string(data)
 	}
 	return h, nil
@@ -34,12 +34,12 @@ func TestParseGitURLs(t *testing.T) {
 			},
 		},
 		{
-			kase: "git@[fe80:14fc:cec5:c174:d88%2510]:go-gitea/gitea.git",
+			kase: "git@[fe80::14fc:cec5:c174:d88%2510]:go-gitea/gitea.git",
 			expected: &GitURL{
 				URL: &url.URL{
 					Scheme: "ssh",
 					User:   url.User("git"),
-					Host:   "[fe80:14fc:cec5:c174:d88%10]",
+					Host:   "[fe80::14fc:cec5:c174:d88%10]",
 					Path:   "go-gitea/gitea.git",
 				},
 				extraMark: 1,
@@ -137,11 +137,11 @@ func TestParseGitURLs(t *testing.T) {
 			},
 		},
 		{
-			kase: "https://[fe80:14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git",
+			kase: "https://[fe80::14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git",
 			expected: &GitURL{
 				URL: &url.URL{
 					Scheme: "https",
-					Host:   "[fe80:14fc:cec5:c174:d88%10]:20",
+					Host:   "[fe80::14fc:cec5:c174:d88%10]:20",
 					Path:   "/go-gitea/gitea.git",
 				},
 				extraMark: 0,
@@ -4,7 +4,10 @@
 package hcaptcha

 import (
+	"errors"
+	"io"
 	"net/http"
+	"net/url"
 	"os"
 	"strings"
 	"testing"
@@ -21,6 +24,33 @@ func TestMain(m *testing.M) {
 	os.Exit(m.Run())
 }

+type mockTransport struct{}
+
+func (mockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+	if req.URL.String() != verifyURL {
+		return nil, errors.New("unsupported url")
+	}
+
+	body, err := io.ReadAll(req.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	bodyValues, err := url.ParseQuery(string(body))
+	if err != nil {
+		return nil, err
+	}
+
+	var responseText string
+	if bodyValues.Get("response") == dummyToken {
+		responseText = `{"success":true,"credit":false,"hostname":"dummy-key-pass","challenge_ts":"2025-10-08T16:02:56.136Z"}`
+	} else {
+		responseText = `{"success":false,"error-codes":["invalid-input-response"]}`
+	}
+
+	return &http.Response{Request: req, Body: io.NopCloser(strings.NewReader(responseText))}, nil
+}
+
 func TestCaptcha(t *testing.T) {
 	tt := []struct {
 		Name string
@@ -54,7 +84,8 @@ func TestCaptcha(t *testing.T) {
 	for _, tc := range tt {
 		t.Run(tc.Name, func(t *testing.T) {
 			client, err := New(tc.Secret, WithHTTP(&http.Client{
-				Timeout: time.Second * 5,
+				Timeout:   time.Second * 5,
+				Transport: mockTransport{},
 			}))
 			if err != nil {
 				// The only error that can be returned from creating a client
@@ -22,6 +22,7 @@ import (
 	"code.gitea.io/gitea/modules/process"
 	"code.gitea.io/gitea/modules/queue"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"
 )

 var (
@@ -166,12 +167,12 @@ func Init() {
 			log.Fatal("PID: %d Unable to initialize the bleve Repository Indexer at path: %s Error: %v", os.Getpid(), setting.Indexer.RepoPath, err)
 		}
 	case "elasticsearch":
-		log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), setting.Indexer.RepoConnStr)
+		log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr))
 		defer func() {
 			if err := recover(); err != nil {
 				log.Error("PANIC whilst initializing repository indexer: %v\nStacktrace: %s", err, log.Stack(2))
 				log.Error("The indexer files are likely corrupted and may need to be deleted")
-				log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", setting.Indexer.RepoConnStr)
+				log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr))
 			}
 		}()
@@ -181,7 +182,7 @@ func Init() {
 			cancel()
 			(*globalIndexer.Load()).Close()
 			close(waitChannel)
-			log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), setting.Indexer.RepoConnStr, err)
+			log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr), err)
 		}

 	default:
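For illustration only, a rough stand-in for what sanitizing credentials in a connection string achieves before it reaches the logs. The real util.SanitizeCredentialURLs also handles text that is not a plain URL; this sketch assumes a parseable URL.

```go
package main

import (
	"fmt"
	"net/url"
)

// sanitizeCredentialURL is a hypothetical helper, not the Gitea implementation:
// it masks the password part of a URL so secrets never end up in log output.
func sanitizeCredentialURL(raw string) string {
	u, err := url.Parse(raw)
	if err != nil || u.User == nil {
		return raw
	}
	if _, hasPassword := u.User.Password(); hasPassword {
		u.User = url.UserPassword(u.User.Username(), "xxxxx")
	}
	return u.String()
}

func main() {
	fmt.Println(sanitizeCredentialURL("http://elastic:secret@localhost:9200"))
	// http://elastic:xxxxx@localhost:9200
}
```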
@@ -25,6 +25,7 @@ import (
 	"code.gitea.io/gitea/modules/process"
 	"code.gitea.io/gitea/modules/queue"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"
 )

 // IndexerMetadata is used to send data to the queue, so it contains only the ids.
@@ -100,7 +101,7 @@ func InitIssueIndexer(syncReindex bool) {
 		issueIndexer = elasticsearch.NewIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueIndexerName)
 		existed, err = issueIndexer.Init(ctx)
 		if err != nil {
-			log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
+			log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", util.SanitizeCredentialURLs(setting.Indexer.IssueConnStr), err)
 		}
 	case "db":
 		issueIndexer = db.GetIndexer()
@@ -108,7 +109,7 @@ func InitIssueIndexer(syncReindex bool) {
 		issueIndexer = meilisearch.NewIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueConnAuth, setting.Indexer.IssueIndexerName)
 		existed, err = issueIndexer.Init(ctx)
 		if err != nil {
-			log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
+			log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", util.SanitizeCredentialURLs(setting.Indexer.IssueConnStr), err)
 		}
 	default:
 		log.Fatal("Unknown issue indexer type: %s", setting.Indexer.IssueType)
@@ -82,6 +82,7 @@ type ProgrammingLanguage struct {
 // https://schema.org/Person
 type Person struct {
+	Type       string `json:"@type,omitempty"`
 	Name       string `json:"name,omitempty"` // inherited from https://schema.org/Thing
 	GivenName  string `json:"givenName,omitempty"`
 	MiddleName string `json:"middleName,omitempty"`
 	FamilyName string `json:"familyName,omitempty"`
@@ -184,11 +185,17 @@ func ParsePackage(sr io.ReaderAt, size int64, mr io.Reader) (*Package, error) {
 		p.Metadata.Description = ssc.Description
 		p.Metadata.Keywords = ssc.Keywords
 		p.Metadata.License = ssc.License
-		p.Metadata.Author = Person{
+		author := Person{
 			Name:       ssc.Author.Name,
 			GivenName:  ssc.Author.GivenName,
 			MiddleName: ssc.Author.MiddleName,
 			FamilyName: ssc.Author.FamilyName,
 		}
+		// If Name is not provided, generate it from individual name components
+		if author.Name == "" {
+			author.Name = author.String()
+		}
+		p.Metadata.Author = author

 		p.Metadata.RepositoryURL = ssc.CodeRepository
 		if !validation.IsValidURL(p.Metadata.RepositoryURL) {
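The auto-generated Name relies on Person.String(); the TestPersonNameString cases further down pin its behaviour. A minimal stand-alone sketch of that joining rule, assuming it simply concatenates the non-empty components with single spaces:

```go
package main

import (
	"fmt"
	"strings"
)

// joinPersonName mimics the behaviour the tests expect from Person.String():
// keep only the non-empty name components and join them with spaces.
func joinPersonName(givenName, middleName, familyName string) string {
	parts := make([]string, 0, 3)
	for _, p := range []string{givenName, middleName, familyName} {
		if p != "" {
			parts = append(parts, p)
		}
	}
	return strings.Join(parts, " ")
}

func main() {
	fmt.Println(joinPersonName("John", "Q", "Doe")) // John Q Doe
	fmt.Println(joinPersonName("", "Q", "Doe"))     // Q Doe
	fmt.Println(joinPersonName("", "", ""))         // (empty string)
}
```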
@@ -97,10 +97,49 @@ func TestParsePackage(t *testing.T) {
 		assert.Equal(t, packageDescription, p.Metadata.Description)
 		assert.ElementsMatch(t, []string{"swift", "package"}, p.Metadata.Keywords)
 		assert.Equal(t, packageLicense, p.Metadata.License)
+		assert.Equal(t, packageAuthor, p.Metadata.Author.Name)
 		assert.Equal(t, packageAuthor, p.Metadata.Author.GivenName)
 		assert.Equal(t, packageRepositoryURL, p.Metadata.RepositoryURL)
 		assert.ElementsMatch(t, []string{packageRepositoryURL}, p.RepositoryURLs)
 	})

 	t.Run("WithExplicitNameField", func(t *testing.T) {
 		data := createArchive(map[string][]byte{
 			"Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"),
 		})

 		authorName := "John Doe"
 		p, err := ParsePackage(
 			data,
 			data.Size(),
 			strings.NewReader(`{"name":"`+packageName+`","version":"`+packageVersion+`","description":"`+packageDescription+`","author":{"name":"`+authorName+`","givenName":"John","familyName":"Doe"}}`),
 		)
 		assert.NotNil(t, p)
 		assert.NoError(t, err)

 		assert.Equal(t, authorName, p.Metadata.Author.Name)
 		assert.Equal(t, "John", p.Metadata.Author.GivenName)
 		assert.Equal(t, "Doe", p.Metadata.Author.FamilyName)
 	})

 	t.Run("NameFieldGeneration", func(t *testing.T) {
 		data := createArchive(map[string][]byte{
 			"Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"),
 		})

 		// Test with only individual name components - Name should be auto-generated
 		p, err := ParsePackage(
 			data,
 			data.Size(),
 			strings.NewReader(`{"author":{"givenName":"John","middleName":"Q","familyName":"Doe"}}`),
 		)
 		assert.NotNil(t, p)
 		assert.NoError(t, err)
 		assert.Equal(t, "John Q Doe", p.Metadata.Author.Name)
 		assert.Equal(t, "John", p.Metadata.Author.GivenName)
 		assert.Equal(t, "Q", p.Metadata.Author.MiddleName)
 		assert.Equal(t, "Doe", p.Metadata.Author.FamilyName)
 	})
 }

 func TestTrimmedVersionString(t *testing.T) {
@@ -142,3 +181,43 @@ func TestTrimmedVersionString(t *testing.T) {
 		assert.Equal(t, c.Expected, TrimmedVersionString(c.Version))
 	}
 }

 func TestPersonNameString(t *testing.T) {
 	cases := []struct {
 		Name     string
 		Person   Person
 		Expected string
 	}{
 		{
 			Name:     "GivenNameOnly",
 			Person:   Person{GivenName: "John"},
 			Expected: "John",
 		},
 		{
 			Name:     "GivenAndFamily",
 			Person:   Person{GivenName: "John", FamilyName: "Doe"},
 			Expected: "John Doe",
 		},
 		{
 			Name:     "FullName",
 			Person:   Person{GivenName: "John", MiddleName: "Q", FamilyName: "Doe"},
 			Expected: "John Q Doe",
 		},
 		{
 			Name:     "MiddleAndFamily",
 			Person:   Person{MiddleName: "Q", FamilyName: "Doe"},
 			Expected: "Q Doe",
 		},
 		{
 			Name:     "Empty",
 			Person:   Person{},
 			Expected: "",
 		},
 	}

 	for _, c := range cases {
 		t.Run(c.Name, func(t *testing.T) {
 			assert.Equal(t, c.Expected, c.Person.String())
 		})
 	}
 }
@@ -202,11 +202,11 @@ func NewConfigProviderFromFile(file string) (ConfigProvider, error) {
 	loadedFromEmpty := true

 	if file != "" {
-		isFile, err := util.IsFile(file)
+		isExist, err := util.IsExist(file)
 		if err != nil {
-			return nil, fmt.Errorf("unable to check if %q is a file. Error: %v", file, err)
+			return nil, fmt.Errorf("unable to check if %q exists: %v", file, err)
 		}
-		if isFile {
+		if isExist {
 			if err = cfg.Append(file); err != nil {
 				return nil, fmt.Errorf("failed to load config file %q: %v", file, err)
 			}
@@ -13,6 +13,6 @@ func TestCountFmt(t *testing.T) {
 	assert.Equal(t, "125", countFmt(125))
 	assert.Equal(t, "1.3k", countFmt(int64(1317)))
 	assert.Equal(t, "21.3M", countFmt(21317675))
-	assert.Equal(t, "45.7G", countFmt(45721317675))
+	assert.Equal(t, "45.7G", countFmt(int64(45721317675)))
 	assert.Empty(t, countFmt("test"))
 }
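A brief note on the int64 cast above: countFmt takes an empty-interface argument, so an untyped constant like 45721317675 defaults to int and fails to compile on 32-bit platforms; the explicit conversion keeps the test portable. A minimal sketch under that assumption (the helper here is a simplified stand-in):

```go
package main

import "fmt"

func countFmt(v any) string { // simplified stand-in for the real helper
	return fmt.Sprintf("%v", v)
}

func main() {
	// countFmt(45721317675) would not compile for GOARCH=386/arm:
	// "constant 45721317675 overflows int". The explicit conversion avoids that.
	fmt.Println(countFmt(int64(45721317675)))
}
```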
@@ -115,15 +115,10 @@ func IsDir(dir string) (bool, error) {
 	return false, err
 }

-// IsFile returns true if given path is a file,
-// or returns false when it's a directory or does not exist.
-func IsFile(filePath string) (bool, error) {
-	f, err := os.Stat(filePath)
+func IsRegularFile(filePath string) (bool, error) {
+	f, err := os.Lstat(filePath)
 	if err == nil {
-		return !f.IsDir(), nil
-	}
-	if os.IsNotExist(err) {
-		return false, nil
+		return f.Mode().IsRegular(), nil
 	}
 	return false, err
 }
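A quick, self-contained illustration of the Stat-to-Lstat switch (it assumes a platform where creating symlinks is permitted): Lstat reports on the link itself, so a symlink is no longer treated as a regular file, while Stat would have followed it to the target.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir, _ := os.MkdirTemp("", "demo")
	defer os.RemoveAll(dir)

	file := filepath.Join(dir, "file.txt")
	link := filepath.Join(dir, "link.txt")
	_ = os.WriteFile(file, []byte("hi"), 0o600)
	_ = os.Symlink(file, link)

	fi, _ := os.Lstat(link)
	fmt.Println(fi.Mode().IsRegular()) // false: the link itself is not a regular file

	fi, _ = os.Stat(link)
	fmt.Println(fi.Mode().IsRegular()) // true: Stat follows the link to file.txt
}
```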
Generated +18 -18
@@ -35,7 +35,7 @@
 		"jquery": "3.7.1",
 		"katex": "0.16.22",
 		"license-checker-webpack-plugin": "0.2.1",
-		"mermaid": "11.6.0",
+		"mermaid": "11.10.0",
 		"mini-css-extract-plugin": "2.9.2",
 		"minimatch": "10.0.1",
 		"monaco-editor": "0.52.2",
@@ -1540,9 +1540,9 @@
 			}
 		},
 		"node_modules/@mermaid-js/parser": {
-			"version": "0.4.0",
-			"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.4.0.tgz",
-			"integrity": "sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==",
+			"version": "0.6.2",
+			"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.2.tgz",
+			"integrity": "sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==",
 			"license": "MIT",
 			"dependencies": {
 				"langium": "3.3.1"
@@ -6154,9 +6154,9 @@
 			}
 		},
 		"node_modules/dompurify": {
-			"version": "3.2.4",
-			"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.4.tgz",
-			"integrity": "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==",
+			"version": "3.2.6",
+			"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.6.tgz",
+			"integrity": "sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==",
 			"license": "(MPL-2.0 OR Apache-2.0)",
 			"optionalDependencies": {
 				"@types/trusted-types": "^2.0.7"
@@ -9249,14 +9249,14 @@
 			}
 		},
 		"node_modules/mermaid": {
-			"version": "11.6.0",
-			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.6.0.tgz",
-			"integrity": "sha512-PE8hGUy1LDlWIHWBP05SFdqUHGmRcCcK4IzpOKPE35eOw+G9zZgcnMpyunJVUEOgb//KBORPjysKndw8bFLuRg==",
+			"version": "11.10.0",
+			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.10.0.tgz",
+			"integrity": "sha512-oQsFzPBy9xlpnGxUqLbVY8pvknLlsNIJ0NWwi8SUJjhbP1IT0E0o1lfhU4iYV3ubpy+xkzkaOyDUQMn06vQElQ==",
 			"license": "MIT",
 			"dependencies": {
 				"@braintree/sanitize-url": "^7.0.4",
 				"@iconify/utils": "^2.1.33",
-				"@mermaid-js/parser": "^0.4.0",
+				"@mermaid-js/parser": "^0.6.2",
 				"@types/d3": "^7.4.3",
 				"cytoscape": "^3.29.3",
 				"cytoscape-cose-bilkent": "^4.1.0",
@@ -9265,11 +9265,11 @@
 				"d3-sankey": "^0.12.3",
 				"dagre-d3-es": "7.0.11",
 				"dayjs": "^1.11.13",
-				"dompurify": "^3.2.4",
-				"katex": "^0.16.9",
+				"dompurify": "^3.2.5",
+				"katex": "^0.16.22",
 				"khroma": "^2.1.0",
 				"lodash-es": "^4.17.21",
-				"marked": "^15.0.7",
+				"marked": "^16.0.0",
 				"roughjs": "^4.6.6",
 				"stylis": "^4.3.6",
 				"ts-dedent": "^2.2.0",
@@ -9277,15 +9277,15 @@
 			}
 		},
 		"node_modules/mermaid/node_modules/marked": {
-			"version": "15.0.7",
-			"resolved": "https://registry.npmjs.org/marked/-/marked-15.0.7.tgz",
-			"integrity": "sha512-dgLIeKGLx5FwziAnsk4ONoGwHwGPJzselimvlVskE9XLN4Orv9u2VA3GWw/lYUqjfA0rUT/6fqKwfZJapP9BEg==",
+			"version": "16.2.0",
+			"resolved": "https://registry.npmjs.org/marked/-/marked-16.2.0.tgz",
+			"integrity": "sha512-LbbTuye+0dWRz2TS9KJ7wsnD4KAtpj0MVkWc90XvBa6AslXsT0hTBVH5k32pcSyHH1fst9XEFJunXHktVy0zlg==",
 			"license": "MIT",
 			"bin": {
 				"marked": "bin/marked.js"
 			},
 			"engines": {
-				"node": ">= 18"
+				"node": ">= 20"
 			}
 		},
 		"node_modules/micromark": {
+1 -1
@@ -34,7 +34,7 @@
 	"jquery": "3.7.1",
 	"katex": "0.16.22",
 	"license-checker-webpack-plugin": "0.2.1",
-	"mermaid": "11.6.0",
+	"mermaid": "11.10.0",
 	"mini-css-extract-plugin": "2.9.2",
 	"minimatch": "10.0.1",
 	"monaco-editor": "0.52.2",
@@ -230,6 +230,7 @@ func PackageVersionMetadata(ctx *context.Context) {
 		},
 		Author: swift_module.Person{
+			Type:       "Person",
 			Name:       metadata.Author.String(),
 			GivenName:  metadata.Author.GivenName,
 			MiddleName: metadata.Author.MiddleName,
 			FamilyName: metadata.Author.FamilyName,
@@ -721,8 +721,8 @@ func deleteIssueComment(ctx *context.APIContext) {
 	if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
 		ctx.Status(http.StatusForbidden)
 		return
-	} else if comment.Type != issues_model.CommentTypeComment {
-		ctx.Status(http.StatusNoContent)
+	} else if !comment.Type.HasContentSupport() {
+		ctx.Status(http.StatusBadRequest)
 		return
 	}
+11 -2
@@ -45,7 +45,7 @@ func UpdatePublicKeyInRepo(ctx *context.PrivateContext) {
 	ctx.PlainText(http.StatusOK, "success")
 }

-// AuthorizedPublicKeyByContent searches content as prefix (leak e-mail part)
+// AuthorizedPublicKeyByContent searches content as prefix (without comment part)
 // and returns public key found.
 func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) {
 	content := ctx.FormString("content")
@@ -57,5 +57,14 @@ func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) {
 		})
 		return
 	}
-	ctx.PlainText(http.StatusOK, publicKey.AuthorizedString())
+
+	authorizedString, err := asymkey_model.AuthorizedStringForKey(publicKey)
+	if err != nil {
+		ctx.JSON(http.StatusInternalServerError, private.Response{
+			Err:     err.Error(),
+			UserMsg: "invalid public key",
+		})
+		return
+	}
+	ctx.PlainText(http.StatusOK, authorizedString)
 }
@@ -639,6 +639,7 @@ func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm, s
 			ErrorCode:        oauth2_provider.AccessTokenErrorCodeInvalidRequest,
 			ErrorDescription: "cannot proceed your request",
 		})
+		return
 	}
 	resp, tokenErr := oauth2_provider.NewAccessTokenResponse(ctx, authorizationCode.Grant, serverKey, clientKey)
 	if tokenErr != nil {
@@ -8,11 +8,18 @@ import (
 )

 // RenderBranchFeed render format for branch or file
-func RenderBranchFeed(ctx *context.Context) {
-	_, showFeedType := GetFeedType(ctx.PathParam("reponame"), ctx.Req)
+func RenderBranchFeed(ctx *context.Context, feedType string) {
 	if ctx.Repo.TreePath == "" {
-		ShowBranchFeed(ctx, ctx.Repo.Repository, showFeedType)
+		ShowBranchFeed(ctx, ctx.Repo.Repository, feedType)
 	} else {
-		ShowFileFeed(ctx, ctx.Repo.Repository, showFeedType)
+		ShowFileFeed(ctx, ctx.Repo.Repository, feedType)
 	}
 }
+
+func RenderBranchFeedRSS(ctx *context.Context) {
+	RenderBranchFeed(ctx, "rss")
+}
+
+func RenderBranchFeedAtom(ctx *context.Context) {
+	RenderBranchFeed(ctx, "atom")
+}
@@ -523,7 +523,7 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {

 	// Treat as pull request if both references are branches
 	if ctx.Data["PageIsComparePull"] == nil {
-		ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch
+		ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch && permBase.CanReadIssuesOrPulls(true)
 	}

 	if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) {
@@ -735,6 +735,7 @@ func CompareDiff(ctx *context.Context) {
 		return
 	}

+	ctx.Data["PageIsViewCode"] = true
 	ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
 	ctx.Data["DirectComparison"] = ci.DirectComparison
 	ctx.Data["OtherCompareSeparator"] = ".."
+6 -5
@@ -1217,10 +1217,11 @@ func registerWebRoutes(m *web.Router) {
 	// end "/{username}/{reponame}": view milestone, label, issue, pull, etc

 	m.Group("/{username}/{reponame}/{type:issues}", func() {
+		// these handlers also check unit permissions internally
 		m.Get("", repo.Issues)
-		m.Get("/{index}", repo.ViewIssue)
-	}, optSignIn, context.RepoAssignment, context.RequireUnitReader(unit.TypeIssues, unit.TypeExternalTracker))
-	// end "/{username}/{reponame}": issue/pull list, issue/pull view, external tracker
+		m.Get("/{index}", repo.ViewIssue) // also do pull-request redirection (".../issues/{PR-number}" -> ".../pulls/{PR-number}")
+	}, optSignIn, context.RepoAssignment, context.RequireUnitReader(unit.TypeIssues, unit.TypePullRequests, unit.TypeExternalTracker))
+	// end "/{username}/{reponame}": issue list, issue view (pull-request redirection), external tracker

 	m.Group("/{username}/{reponame}", func() { // edit issues, pulls, labels, milestones, etc
 		m.Group("/issues", func() {
@@ -1592,8 +1593,8 @@ func registerWebRoutes(m *web.Router) {
 		m.Get("/cherry-pick/{sha:([a-f0-9]{7,64})$}", repo.SetEditorconfigIfExists, context.RepoRefByDefaultBranch(), repo.CherryPick)
 	}, repo.MustBeNotEmpty)

-	m.Get("/rss/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeed)
-	m.Get("/atom/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeed)
+	m.Get("/rss/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeedRSS)
+	m.Get("/atom/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeedAtom)

 	m.Group("/src", func() {
 		m.Get("", func(ctx *context.Context) { ctx.Redirect(ctx.Repo.RepoLink) }) // there is no "{owner}/{repo}/src" page, so redirect to "{owner}/{repo}" to avoid 404
@@ -53,7 +53,7 @@ func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {

 	claims := actionsClaims{
 		RegisteredClaims: jwt.RegisteredClaims{
-			ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
+			ExpiresAt: jwt.NewNumericDate(now.Add(1*time.Hour + setting.Actions.EndlessTaskTimeout)),
 			NotBefore: jwt.NewNumericDate(now),
 		},
 		Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
@@ -250,7 +250,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
 		if err != nil {
 			return nil, fmt.Errorf("failed to load pull issue. Error: %w", err)
 		}
-		comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i])
+		comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i], forcePush.Value())
 		if err == nil && comment != nil {
 			notify_service.PullRequestPushCommits(ctx, pusher, pr, comment)
 		}
@@ -25,10 +25,7 @@ import (
 // There is a dependence on the database within RewriteAllPrincipalKeys & RegeneratePrincipalKeys
 // The sshOpLocker is used from ssh_key_authorized_keys.go

-const (
-	authorizedPrincipalsFile = "authorized_principals"
-	tplCommentPrefix         = `# gitea public key`
-)
+const authorizedPrincipalsFile = "authorized_principals"

 // RewriteAllPrincipalKeys removes any authorized principal and rewrite all keys from database again.
 // Note: db.GetEngine(ctx).Iterate does not get latest data after insert/delete, so we have to call this function
@@ -90,10 +87,9 @@ func rewriteAllPrincipalKeys(ctx context.Context) error {
 	return util.Rename(tmpPath, fPath)
 }

-func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
+func regeneratePrincipalKeys(ctx context.Context, t io.Writer) error {
 	if err := db.GetEngine(ctx).Where("type = ?", asymkey_model.KeyTypePrincipal).Iterate(new(asymkey_model.PublicKey), func(idx int, bean any) (err error) {
-		_, err = t.WriteString((bean.(*asymkey_model.PublicKey)).AuthorizedString())
-		return err
+		return asymkey_model.WriteAuthorizedStringForValidKey(bean.(*asymkey_model.PublicKey), t)
 	}); err != nil {
 		return err
 	}
@@ -114,11 +110,11 @@ func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
 	scanner := bufio.NewScanner(f)
 	for scanner.Scan() {
 		line := scanner.Text()
-		if strings.HasPrefix(line, tplCommentPrefix) {
+		if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
 			scanner.Scan()
 			continue
 		}
-		_, err = t.WriteString(line + "\n")
+		_, err = io.WriteString(t, line+"\n")
 		if err != nil {
 			return err
 		}
@@ -20,8 +20,6 @@ import (
 	asymkey_service "code.gitea.io/gitea/services/asymkey"
 )

-const tplCommentPrefix = `# gitea public key`
-
 func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) error {
 	if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
 		return nil
@@ -47,7 +45,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
 	scanner := bufio.NewScanner(f)
 	for scanner.Scan() {
 		line := scanner.Text()
-		if strings.HasPrefix(line, tplCommentPrefix) {
+		if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
 			continue
 		}
 		linesInAuthorizedKeys.Add(line)
@@ -67,7 +65,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
 	scanner = bufio.NewScanner(regenerated)
 	for scanner.Scan() {
 		line := scanner.Text()
-		if strings.HasPrefix(line, tplCommentPrefix) {
+		if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
 			continue
 		}
 		if linesInAuthorizedKeys.Contains(line) {
+30 -5
@@ -17,6 +17,7 @@ import (
 	"regexp"
 	"strconv"
 	"strings"
+	"time"

 	actions_model "code.gitea.io/gitea/models/actions"
 	auth_model "code.gitea.io/gitea/models/auth"
@@ -51,6 +52,33 @@ type Claims struct {
 	jwt.RegisteredClaims
 }

+type AuthTokenOptions struct {
+	Op     string
+	UserID int64
+	RepoID int64
+}
+
+func GetLFSAuthTokenWithBearer(opts AuthTokenOptions) (string, error) {
+	now := time.Now()
+	claims := Claims{
+		RegisteredClaims: jwt.RegisteredClaims{
+			ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
+			NotBefore: jwt.NewNumericDate(now),
+		},
+		RepoID: opts.RepoID,
+		Op:     opts.Op,
+		UserID: opts.UserID,
+	}
+	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+	// Sign and get the complete encoded token as a string using the secret
+	tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
+	if err != nil {
+		return "", fmt.Errorf("failed to sign LFS JWT token: %w", err)
+	}
+	return "Bearer " + tokenString, nil
+}
+
 // DownloadLink builds a URL to download the object.
 func (rc *requestContext) DownloadLink(p lfs_module.Pointer) string {
 	return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid))
@@ -111,7 +139,7 @@ func DownloadHandler(ctx *context.Context) {
 		}
 	}

-	ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size-fromByte))
+	ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size))
 	ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Range")
 	}
 }
@@ -557,9 +585,6 @@ func authenticate(ctx *context.Context, repository *repo_model.Repository, autho
 }

 func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repository, mode perm_model.AccessMode) (*user_model.User, error) {
-	if !strings.Contains(tokenSHA, ".") {
-		return nil, nil
-	}
 	token, err := jwt.ParseWithClaims(tokenSHA, &Claims{}, func(t *jwt.Token) (any, error) {
 		if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
 			return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
@@ -567,7 +592,7 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo
 		return setting.LFS.JWTSecretBytes, nil
 	})
 	if err != nil {
-		return nil, nil
+		return nil, errors.New("invalid token")
 	}

 	claims, claimsOk := token.Claims.(*Claims)
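One detail worth spelling out about the Content-Range fix above: in the HTTP range spec the value after the slash is the complete length of the resource, not the remaining length, so the denominator must be the full object size. A small self-contained illustration:

```go
package main

import "fmt"

func main() {
	// Serving bytes 100-199 of a 512-byte LFS object. RFC 9110 defines the header
	// as "bytes <first>-<last>/<complete-length>", so the total is the full object
	// size, not size minus the starting offset.
	fromByte, toByte, size := int64(100), int64(199), int64(512)
	fmt.Printf("Content-Range: bytes %d-%d/%d\n", fromByte, toByte, size)
	// Content-Range: bytes 100-199/512
}
```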
@@ -0,0 +1,51 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package lfs

import (
	"strings"
	"testing"

	perm_model "code.gitea.io/gitea/models/perm"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/models/unittest"
	"code.gitea.io/gitea/services/contexttest"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestMain(m *testing.M) {
	unittest.MainTest(m)
}

func TestAuthenticate(t *testing.T) {
	require.NoError(t, unittest.PrepareTestDatabase())
	repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})

	token2, _ := GetLFSAuthTokenWithBearer(AuthTokenOptions{Op: "download", UserID: 2, RepoID: 1})
	_, token2, _ = strings.Cut(token2, " ")
	ctx, _ := contexttest.MockContext(t, "/")

	t.Run("handleLFSToken", func(t *testing.T) {
		u, err := handleLFSToken(ctx, "", repo1, perm_model.AccessModeRead)
		require.Error(t, err)
		assert.Nil(t, u)

		u, err = handleLFSToken(ctx, "invalid", repo1, perm_model.AccessModeRead)
		require.Error(t, err)
		assert.Nil(t, u)

		u, err = handleLFSToken(ctx, token2, repo1, perm_model.AccessModeRead)
		require.NoError(t, err)
		assert.EqualValues(t, 2, u.ID)
	})

	t.Run("authenticate", func(t *testing.T) {
		const prefixBearer = "Bearer "
		assert.False(t, authenticate(ctx, repo1, "", true, false))
		assert.False(t, authenticate(ctx, repo1, prefixBearer+"invalid", true, false))
		assert.True(t, authenticate(ctx, repo1, prefixBearer+token2, true, false))
	})
}
@@ -354,7 +354,8 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith

 	// Prevent open redirect
 	if !hasBaseURL(redirectURL, g.baseURL) &&
-		!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
+		!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") &&
+		!hasBaseURL(redirectURL, "https://release-assets.githubusercontent.com/") {
 		WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)

 		return io.NopCloser(strings.NewReader(redirectURL)), nil
+22
-34
@@ -14,42 +14,28 @@ import (
|
||||
)
|
||||
|
||||
// getCommitIDsFromRepo get commit IDs from repo in between oldCommitID and newCommitID
|
||||
// isForcePush will be true if oldCommit isn't on the branch
|
||||
// Commit on baseBranch will skip
|
||||
func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, isForcePush bool, err error) {
|
||||
func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, err error) {
|
||||
gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
defer closer.Close()
|
||||
|
||||
oldCommit, err := gitRepo.GetCommit(oldCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newCommit, err := gitRepo.GetCommit(newCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
isForcePush, err = newCommit.IsForcePush(oldCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
if isForcePush {
|
||||
commitIDs = make([]string, 2)
|
||||
commitIDs[0] = oldCommitID
|
||||
commitIDs[1] = newCommitID
|
||||
|
||||
return commitIDs, isForcePush, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find commits between new and old commit excluding base branch commits
|
||||
commits, err := gitRepo.CommitsBetweenNotBase(newCommit, oldCommit, baseBranch)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
commitIDs = make([]string, 0, len(commits))
|
||||
@@ -57,38 +43,40 @@ func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldC
|
||||
commitIDs = append(commitIDs, commits[i].ID.String())
|
||||
}
|
||||
|
||||
return commitIDs, isForcePush, err
|
||||
return commitIDs, err
|
||||
}
|
||||
|
||||
// CreatePushPullComment create push code to pull base comment
|
||||
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (comment *issues_model.Comment, err error) {
|
||||
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string, isForcePush bool) (comment *issues_model.Comment, err error) {
|
||||
if pr.HasMerged || oldCommitID == "" || newCommitID == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
ops := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: pusher,
|
||||
Repo: pr.BaseRepo,
|
||||
opts := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: pusher,
|
||||
Repo: pr.BaseRepo,
|
||||
IsForcePush: isForcePush,
|
||||
Issue: pr.Issue,
|
||||
}
|
||||
|
||||
var data issues_model.PushActionContent
|
||||
|
||||
data.CommitIDs, data.IsForcePush, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if opts.IsForcePush {
|
||||
data.CommitIDs = []string{oldCommitID, newCommitID}
|
||||
} else {
|
||||
data.CommitIDs, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
ops.Issue = pr.Issue
|
||||
|
||||
dataJSON, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ops.Content = string(dataJSON)
|
||||
|
||||
comment, err = issues_model.CreateComment(ctx, ops)
|
||||
opts.Content = string(dataJSON)
|
||||
comment, err = issues_model.CreateComment(ctx, opts)
|
||||
|
||||
return comment, err
|
||||
}
|
||||
|
||||
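A side note on the refactor above: force-push detection now happens in the caller, so CreatePushPullComment only branches on the flag it receives instead of computing it inside getCommitIDsFromRepo. The following stand-alone sketch (simplified stand-in names, not the actual Gitea types) illustrates the resulting control flow:

package main

import "fmt"

// pushActionContent is a stand-in for the PushActionContent struct used above.
type pushActionContent struct {
	IsForcePush bool
	CommitIDs   []string
}

// buildPushData mirrors the new branching: for a force push only the boundary
// commits are recorded, otherwise the commit list is computed by the callback.
func buildPushData(isForcePush bool, oldID, newID string, listCommits func() []string) pushActionContent {
	data := pushActionContent{IsForcePush: isForcePush}
	if isForcePush {
		data.CommitIDs = []string{oldID, newID}
	} else {
		data.CommitIDs = listCommits()
	}
	return data
}

func main() {
	fmt.Println(buildPushData(true, "abc123", "def456", nil)) // {true [abc123 def456]}
}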
+49 -57
@@ -28,7 +28,6 @@ import (
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/globallock"
|
||||
"code.gitea.io/gitea/modules/graceful"
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
@@ -142,36 +141,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
|
||||
return err
|
||||
}
|
||||
|
||||
compareInfo, err := baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(),
|
||||
git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(compareInfo.Commits) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
data := issues_model.PushActionContent{IsForcePush: false}
|
||||
data.CommitIDs = make([]string, 0, len(compareInfo.Commits))
|
||||
for i := len(compareInfo.Commits) - 1; i >= 0; i-- {
|
||||
data.CommitIDs = append(data.CommitIDs, compareInfo.Commits[i].ID.String())
|
||||
}
|
||||
|
||||
dataJSON, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ops := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: issue.Poster,
|
||||
Repo: repo,
|
||||
Issue: pr.Issue,
|
||||
IsForcePush: false,
|
||||
Content: string(dataJSON),
|
||||
}
|
||||
|
||||
if _, err = issues_model.CreateComment(ctx, ops); err != nil {
|
||||
if _, err := CreatePushPullComment(ctx, issue.Poster, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -193,6 +163,20 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
|
||||
|
||||
issue_service.ReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifiers)
|
||||
|
||||
// Request reviews; these should be requested before other notifications because they add review request records
|
||||
// to the database
|
||||
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
|
||||
for _, reviewer := range opts.Reviewers {
|
||||
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for _, teamReviewer := range opts.TeamReviewers {
|
||||
if _, err = issue_service.TeamReviewRequest(ctx, pr.Issue, issue.Poster, teamReviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, issue.Poster, issue.Content)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -211,17 +195,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
|
||||
}
|
||||
notify_service.IssueChangeAssignee(ctx, issue.Poster, issue, assignee, false, assigneeCommentMap[assigneeID])
|
||||
}
|
||||
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
|
||||
for _, reviewer := range opts.Reviewers {
|
||||
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for _, teamReviewer := range opts.TeamReviewers {
|
||||
if _, err = issue_service.TeamReviewRequest(ctx, pr.Issue, issue.Poster, teamReviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -332,24 +306,42 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer
|
||||
pr.CommitsAhead = divergence.Ahead
|
||||
pr.CommitsBehind = divergence.Behind
|
||||
|
||||
if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
|
||||
// add the first push-code comment
|
||||
baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer baseGitRepo.Close()
|
||||
|
||||
// Create comment
|
||||
options := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypeChangeTargetBranch,
|
||||
Doer: doer,
|
||||
Repo: pr.Issue.Repo,
|
||||
Issue: pr.Issue,
|
||||
OldRef: oldBranch,
|
||||
NewRef: targetBranch,
|
||||
}
|
||||
if _, err = issues_model.CreateComment(ctx, options); err != nil {
|
||||
return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
|
||||
}
|
||||
return db.WithTx(ctx, func(ctx context.Context) error {
|
||||
if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
// Create comment
|
||||
options := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypeChangeTargetBranch,
|
||||
Doer: doer,
|
||||
Repo: pr.Issue.Repo,
|
||||
Issue: pr.Issue,
|
||||
OldRef: oldBranch,
|
||||
NewRef: targetBranch,
|
||||
}
|
||||
if _, err = issues_model.CreateComment(ctx, options); err != nil {
|
||||
return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
|
||||
}
|
||||
|
||||
// Delete all old push comments and insert new push comments
|
||||
if _, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID).
|
||||
And("type = ?", issues_model.CommentTypePullRequestPush).
|
||||
NoAutoCondition().
|
||||
Delete(new(issues_model.Comment)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = CreatePushPullComment(ctx, doer, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
func checkForInvalidation(ctx context.Context, requests issues_model.PullRequestList, repoID int64, doer *user_model.User, branch string) error {
|
||||
@@ -410,7 +402,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) {
|
||||
}
|
||||
|
||||
StartPullRequestCheckImmediately(ctx, pr)
|
||||
comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID)
|
||||
comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID, opts.IsForcePush)
|
||||
if err == nil && comment != nil {
|
||||
notify_service.PullRequestPushCommits(ctx, opts.Doer, pr, comment)
|
||||
}
|
||||
|
||||
+112 -85
@@ -13,6 +13,7 @@ import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
git_model "code.gitea.io/gitea/models/git"
|
||||
@@ -39,29 +40,41 @@ type expansion struct {
|
||||
Transformers []transformer
|
||||
}
|
||||
|
||||
var defaultTransformers = []transformer{
|
||||
{Name: "SNAKE", Transform: xstrings.ToSnakeCase},
|
||||
{Name: "KEBAB", Transform: xstrings.ToKebabCase},
|
||||
{Name: "CAMEL", Transform: xstrings.ToCamelCase},
|
||||
{Name: "PASCAL", Transform: xstrings.ToPascalCase},
|
||||
{Name: "LOWER", Transform: strings.ToLower},
|
||||
{Name: "UPPER", Transform: strings.ToUpper},
|
||||
{Name: "TITLE", Transform: util.ToTitleCase},
|
||||
}
|
||||
var globalVars = sync.OnceValue(func() (ret struct {
|
||||
defaultTransformers []transformer
|
||||
fileNameSanitizeRegexp *regexp.Regexp
|
||||
},
|
||||
) {
|
||||
ret.defaultTransformers = []transformer{
|
||||
{Name: "SNAKE", Transform: xstrings.ToSnakeCase},
|
||||
{Name: "KEBAB", Transform: xstrings.ToKebabCase},
|
||||
{Name: "CAMEL", Transform: xstrings.ToCamelCase},
|
||||
{Name: "PASCAL", Transform: xstrings.ToPascalCase},
|
||||
{Name: "LOWER", Transform: strings.ToLower},
|
||||
{Name: "UPPER", Transform: strings.ToUpper},
|
||||
{Name: "TITLE", Transform: util.ToTitleCase},
|
||||
}
|
||||
|
||||
func generateExpansion(ctx context.Context, src string, templateRepo, generateRepo *repo_model.Repository, sanitizeFileName bool) string {
|
||||
// invalid filename contents, based on https://github.com/sindresorhus/filename-reserved-regex
|
||||
// "COM10" needs to be opened with UNC "\\.\COM10" on Windows, so itself is valid
|
||||
ret.fileNameSanitizeRegexp = regexp.MustCompile(`(?i)[<>:"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`)
|
||||
return ret
|
||||
})
|
||||
|
||||
func generateExpansion(ctx context.Context, src string, templateRepo, generateRepo *repo_model.Repository) string {
|
||||
transformers := globalVars().defaultTransformers
|
||||
year, month, day := time.Now().Date()
|
||||
expansions := []expansion{
|
||||
{Name: "YEAR", Value: strconv.Itoa(year), Transformers: nil},
|
||||
{Name: "MONTH", Value: fmt.Sprintf("%02d", int(month)), Transformers: nil},
|
||||
{Name: "MONTH_ENGLISH", Value: month.String(), Transformers: defaultTransformers},
|
||||
{Name: "MONTH_ENGLISH", Value: month.String(), Transformers: transformers},
|
||||
{Name: "DAY", Value: fmt.Sprintf("%02d", day), Transformers: nil},
|
||||
{Name: "REPO_NAME", Value: generateRepo.Name, Transformers: defaultTransformers},
|
||||
{Name: "TEMPLATE_NAME", Value: templateRepo.Name, Transformers: defaultTransformers},
|
||||
{Name: "REPO_NAME", Value: generateRepo.Name, Transformers: transformers},
|
||||
{Name: "TEMPLATE_NAME", Value: templateRepo.Name, Transformers: transformers},
|
||||
{Name: "REPO_DESCRIPTION", Value: generateRepo.Description, Transformers: nil},
|
||||
{Name: "TEMPLATE_DESCRIPTION", Value: templateRepo.Description, Transformers: nil},
|
||||
{Name: "REPO_OWNER", Value: generateRepo.OwnerName, Transformers: defaultTransformers},
|
||||
{Name: "TEMPLATE_OWNER", Value: templateRepo.OwnerName, Transformers: defaultTransformers},
|
||||
{Name: "REPO_OWNER", Value: generateRepo.OwnerName, Transformers: transformers},
|
||||
{Name: "TEMPLATE_OWNER", Value: templateRepo.OwnerName, Transformers: transformers},
|
||||
{Name: "REPO_LINK", Value: generateRepo.Link(), Transformers: nil},
|
||||
{Name: "TEMPLATE_LINK", Value: templateRepo.Link(), Transformers: nil},
|
||||
{Name: "REPO_HTTPS_URL", Value: generateRepo.CloneLinkGeneral(ctx).HTTPS, Transformers: nil},
|
||||
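A side note on the globalVars rewrite above: the package-level transformer list and sanitize regexp are now built lazily, exactly once, via sync.OnceValue (requires Go 1.21+). A minimal stand-alone sketch of that pattern, not the Gitea code itself:

package main

import (
	"fmt"
	"regexp"
	"sync"
)

// vars is a globalVars-style lazy initializer: the function body runs once,
// on the first call to vars(), and its result is cached for later calls.
var vars = sync.OnceValue(func() (ret struct {
	sanitize *regexp.Regexp
},
) {
	ret.sanitize = regexp.MustCompile(`[<>:"/\\|?*]`) // simplified example pattern
	return ret
})

func main() {
	fmt.Println(vars().sanitize.ReplaceAllString(`a<b>c`, "_")) // a_b_c
}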
@@ -79,32 +92,23 @@ func generateExpansion(ctx context.Context, src string, templateRepo, generateRe
|
||||
}
|
||||
|
||||
return os.Expand(src, func(key string) string {
|
||||
if expansion, ok := expansionMap[key]; ok {
|
||||
if sanitizeFileName {
|
||||
return fileNameSanitize(expansion)
|
||||
}
|
||||
return expansion
|
||||
if val, ok := expansionMap[key]; ok {
|
||||
return val
|
||||
}
|
||||
return key
|
||||
})
|
||||
}
|
||||
|
||||
// GiteaTemplate holds information about a .gitea/template file
|
||||
type GiteaTemplate struct {
|
||||
Path string
|
||||
Content []byte
|
||||
|
||||
globs []glob.Glob
|
||||
// giteaTemplateFileMatcher holds information about a .gitea/template file
|
||||
type giteaTemplateFileMatcher struct {
|
||||
LocalFullPath string
|
||||
globs []glob.Glob
|
||||
}
|
||||
|
||||
// Globs parses the .gitea/template globs or returns them if they were already parsed
|
||||
func (gt *GiteaTemplate) Globs() []glob.Glob {
|
||||
if gt.globs != nil {
|
||||
return gt.globs
|
||||
}
|
||||
|
||||
func newGiteaTemplateFileMatcher(fullPath string, content []byte) *giteaTemplateFileMatcher {
|
||||
gt := &giteaTemplateFileMatcher{LocalFullPath: fullPath}
|
||||
gt.globs = make([]glob.Glob, 0)
|
||||
scanner := bufio.NewScanner(bytes.NewReader(gt.Content))
|
||||
scanner := bufio.NewScanner(bytes.NewReader(content))
|
||||
for scanner.Scan() {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
if line == "" || strings.HasPrefix(line, "#") {
|
||||
@@ -112,73 +116,91 @@ func (gt *GiteaTemplate) Globs() []glob.Glob {
|
||||
}
|
||||
g, err := glob.Compile(line, '/')
|
||||
if err != nil {
|
||||
log.Info("Invalid glob expression '%s' (skipped): %v", line, err)
|
||||
log.Debug("Invalid glob expression '%s' (skipped): %v", line, err)
|
||||
continue
|
||||
}
|
||||
gt.globs = append(gt.globs, g)
|
||||
}
|
||||
return gt.globs
|
||||
return gt
|
||||
}
|
||||
|
||||
func readGiteaTemplateFile(tmpDir string) (*GiteaTemplate, error) {
|
||||
gtPath := filepath.Join(tmpDir, ".gitea", "template")
|
||||
if _, err := os.Stat(gtPath); os.IsNotExist(err) {
|
||||
func (gt *giteaTemplateFileMatcher) HasRules() bool {
|
||||
return len(gt.globs) != 0
|
||||
}
|
||||
|
||||
func (gt *giteaTemplateFileMatcher) Match(s string) bool {
|
||||
for _, g := range gt.globs {
|
||||
if g.Match(s) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func readGiteaTemplateFile(tmpDir string) (*giteaTemplateFileMatcher, error) {
|
||||
localPath := filepath.Join(tmpDir, ".gitea", "template")
|
||||
if _, err := os.Stat(localPath); os.IsNotExist(err) {
|
||||
return nil, nil
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
content, err := os.ReadFile(gtPath)
|
||||
content, err := os.ReadFile(localPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &GiteaTemplate{Path: gtPath, Content: content}, nil
|
||||
return newGiteaTemplateFileMatcher(localPath, content), nil
|
||||
}
|
||||
|
||||
func processGiteaTemplateFile(ctx context.Context, tmpDir string, templateRepo, generateRepo *repo_model.Repository, giteaTemplateFile *GiteaTemplate) error {
|
||||
if err := util.Remove(giteaTemplateFile.Path); err != nil {
|
||||
return fmt.Errorf("remove .giteatemplate: %w", err)
|
||||
func substGiteaTemplateFile(ctx context.Context, tmpDir, tmpDirSubPath string, templateRepo, generateRepo *repo_model.Repository) error {
|
||||
tmpFullPath := filepath.Join(tmpDir, tmpDirSubPath)
|
||||
if ok, err := util.IsRegularFile(tmpFullPath); !ok {
|
||||
return err
|
||||
}
|
||||
if len(giteaTemplateFile.Globs()) == 0 {
|
||||
|
||||
content, err := os.ReadFile(tmpFullPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := util.Remove(tmpFullPath); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
generatedContent := generateExpansion(ctx, string(content), templateRepo, generateRepo)
|
||||
substSubPath := filepath.Clean(filePathSanitize(generateExpansion(ctx, tmpDirSubPath, templateRepo, generateRepo)))
|
||||
newLocalPath := filepath.Join(tmpDir, substSubPath)
|
||||
regular, err := util.IsRegularFile(newLocalPath)
|
||||
if canWrite := regular || os.IsNotExist(err); !canWrite {
|
||||
return nil
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Dir(newLocalPath), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.WriteFile(newLocalPath, []byte(generatedContent), 0o644)
|
||||
}
|
||||
|
||||
func processGiteaTemplateFile(ctx context.Context, tmpDir string, templateRepo, generateRepo *repo_model.Repository, fileMatcher *giteaTemplateFileMatcher) error {
|
||||
if err := util.Remove(fileMatcher.LocalFullPath); err != nil {
|
||||
return fmt.Errorf("unable to remove .gitea/template: %w", err)
|
||||
}
|
||||
if !fileMatcher.HasRules() {
|
||||
return nil // Avoid walking tree if there are no globs
|
||||
}
|
||||
tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
|
||||
return filepath.WalkDir(tmpDirSlash, func(path string, d os.DirEntry, walkErr error) error {
|
||||
|
||||
return filepath.WalkDir(tmpDir, func(fullPath string, d os.DirEntry, walkErr error) error {
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
|
||||
if d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
|
||||
for _, g := range giteaTemplateFile.Globs() {
|
||||
if g.Match(base) {
|
||||
content, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
generatedContent := []byte(generateExpansion(ctx, string(content), templateRepo, generateRepo, false))
|
||||
if err := os.WriteFile(path, generatedContent, 0o644); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
substPath := filepath.FromSlash(filepath.Join(tmpDirSlash, generateExpansion(ctx, base, templateRepo, generateRepo, true)))
|
||||
|
||||
// Create parent subdirectories if needed or continue silently if it exists
|
||||
if err = os.MkdirAll(filepath.Dir(substPath), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Substitute filename variables
|
||||
if err = os.Rename(path, substPath); err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
tmpDirSubPath, err := filepath.Rel(tmpDir, fullPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fileMatcher.Match(filepath.ToSlash(tmpDirSubPath)) {
|
||||
return substGiteaTemplateFile(ctx, tmpDir, tmpDirSubPath, templateRepo, generateRepo)
|
||||
}
|
||||
return nil
|
||||
}) // end: WalkDir
|
||||
@@ -218,13 +240,13 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r
|
||||
}
|
||||
|
||||
// Variable expansion
|
||||
giteaTemplateFile, err := readGiteaTemplateFile(tmpDir)
|
||||
fileMatcher, err := readGiteaTemplateFile(tmpDir)
|
||||
if err != nil {
|
||||
return fmt.Errorf("readGiteaTemplateFile: %w", err)
|
||||
}
|
||||
|
||||
if giteaTemplateFile != nil {
|
||||
err = processGiteaTemplateFile(ctx, tmpDir, templateRepo, generateRepo, giteaTemplateFile)
|
||||
if fileMatcher != nil {
|
||||
err = processGiteaTemplateFile(ctx, tmpDir, templateRepo, generateRepo, fileMatcher)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -321,12 +343,17 @@ func (gro GenerateRepoOptions) IsValid() bool {
|
||||
gro.IssueLabels || gro.ProtectedBranch // or other items as they are added
|
||||
}
|
||||
|
||||
var fileNameSanitizeRegexp = regexp.MustCompile(`(?i)\.\.|[<>:\"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`)
|
||||
|
||||
// Sanitize user input to valid OS filenames
|
||||
//
|
||||
// Based on https://github.com/sindresorhus/filename-reserved-regex
|
||||
// Adds ".." to prevent directory traversal
|
||||
func fileNameSanitize(s string) string {
|
||||
return strings.TrimSpace(fileNameSanitizeRegexp.ReplaceAllString(s, "_"))
|
||||
func filePathSanitize(s string) string {
|
||||
fields := strings.Split(filepath.ToSlash(s), "/")
|
||||
for i, field := range fields {
|
||||
field = strings.TrimSpace(strings.TrimSpace(globalVars().fileNameSanitizeRegexp.ReplaceAllString(field, "_")))
|
||||
if strings.HasPrefix(field, "..") {
|
||||
field = "__" + field[2:]
|
||||
}
|
||||
if strings.EqualFold(field, ".git") {
|
||||
field = "_" + field[1:]
|
||||
}
|
||||
fields[i] = field
|
||||
}
|
||||
return filepath.FromSlash(strings.Join(fields, "/"))
|
||||
}
|
||||
|
||||
@@ -4,13 +4,18 @@
|
||||
package repository
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var giteaTemplate = []byte(`
|
||||
func TestGiteaTemplate(t *testing.T) {
|
||||
giteaTemplate := []byte(`
|
||||
# Header
|
||||
|
||||
# All .go files
|
||||
@@ -23,48 +28,153 @@ text/*.txt
|
||||
**/modules/*
|
||||
`)
|
||||
|
||||
func TestGiteaTemplate(t *testing.T) {
|
||||
gt := GiteaTemplate{Content: giteaTemplate}
|
||||
assert.Len(t, gt.Globs(), 3)
|
||||
gt := newGiteaTemplateFileMatcher("", giteaTemplate)
|
||||
assert.Len(t, gt.globs, 3)
|
||||
|
||||
tt := []struct {
|
||||
Path string
|
||||
Match bool
|
||||
}{
|
||||
{Path: "main.go", Match: true},
|
||||
{Path: "a/b/c/d/e.go", Match: true},
|
||||
{Path: "main.txt", Match: false},
|
||||
{Path: "a/b.txt", Match: false},
|
||||
{Path: "sub/sub/foo.go", Match: true},
|
||||
|
||||
{Path: "a.txt", Match: false},
|
||||
{Path: "text/a.txt", Match: true},
|
||||
{Path: "text/b.txt", Match: true},
|
||||
{Path: "text/c.json", Match: false},
|
||||
{Path: "sub/text/a.txt", Match: false},
|
||||
{Path: "text/a.json", Match: false},
|
||||
|
||||
{Path: "a/b/c/modules/README.md", Match: true},
|
||||
{Path: "a/b/c/modules/d/README.md", Match: false},
|
||||
}
|
||||
|
||||
for _, tc := range tt {
|
||||
t.Run(tc.Path, func(t *testing.T) {
|
||||
match := false
|
||||
for _, g := range gt.Globs() {
|
||||
if g.Match(tc.Path) {
|
||||
match = true
|
||||
break
|
||||
}
|
||||
}
|
||||
assert.Equal(t, tc.Match, match)
|
||||
})
|
||||
assert.Equal(t, tc.Match, gt.Match(tc.Path), "path: %s", tc.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileNameSanitize(t *testing.T) {
|
||||
assert.Equal(t, "test_CON", fileNameSanitize("test_CON"))
|
||||
assert.Equal(t, "test CON", fileNameSanitize("test CON "))
|
||||
assert.Equal(t, "__traverse__", fileNameSanitize("../traverse/.."))
|
||||
assert.Equal(t, "http___localhost_3003_user_test.git", fileNameSanitize("http://localhost:3003/user/test.git"))
|
||||
assert.Equal(t, "_", fileNameSanitize("CON"))
|
||||
assert.Equal(t, "_", fileNameSanitize("con"))
|
||||
assert.Equal(t, "_", fileNameSanitize("\u0000"))
|
||||
assert.Equal(t, "目标", fileNameSanitize("目标"))
|
||||
func TestFilePathSanitize(t *testing.T) {
|
||||
assert.Equal(t, "test_CON", filePathSanitize("test_CON"))
|
||||
assert.Equal(t, "test CON", filePathSanitize("test CON "))
|
||||
assert.Equal(t, "__/traverse/__", filePathSanitize(".. /traverse/ .."))
|
||||
assert.Equal(t, "./__/a/_git/b_", filePathSanitize("./../a/.git/ b: "))
|
||||
assert.Equal(t, "_", filePathSanitize("CoN"))
|
||||
assert.Equal(t, "_", filePathSanitize("LpT1"))
|
||||
assert.Equal(t, "_", filePathSanitize("CoM1"))
|
||||
assert.Equal(t, "_", filePathSanitize("\u0000"))
|
||||
assert.Equal(t, "目标", filePathSanitize("目标"))
|
||||
// unlike filepath.Clean, it only sanitizes, doesn't change the separator layout
|
||||
assert.Equal(t, "", filePathSanitize("")) //nolint:testifylint // for easy reading
|
||||
assert.Equal(t, ".", filePathSanitize("."))
|
||||
assert.Equal(t, "/", filePathSanitize("/"))
|
||||
}
|
||||
|
||||
func TestProcessGiteaTemplateFile(t *testing.T) {
|
||||
tmpDir := filepath.Join(t.TempDir(), "gitea-template-test")
|
||||
|
||||
assertFileContent := func(path, expected string) {
|
||||
data, err := os.ReadFile(filepath.Join(tmpDir, path))
|
||||
if expected == "" {
|
||||
assert.ErrorIs(t, err, os.ErrNotExist)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, expected, string(data), "file content mismatch for %s", path)
|
||||
}
|
||||
|
||||
assertSymLink := func(path, expected string) {
|
||||
link, err := os.Readlink(filepath.Join(tmpDir, path))
|
||||
if expected == "" {
|
||||
assert.ErrorIs(t, err, os.ErrNotExist)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, expected, link, "symlink target mismatch for %s", path)
|
||||
}
|
||||
|
||||
require.NoError(t, os.MkdirAll(tmpDir+"/.gitea", 0o755))
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/.gitea/template", []byte("*\ninclude/**"), 0o644))
|
||||
require.NoError(t, os.MkdirAll(tmpDir+"/sub", 0o755))
|
||||
require.NoError(t, os.MkdirAll(tmpDir+"/include/foo/bar", 0o755))
|
||||
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/sub/link-target", []byte("link target content from ${TEMPLATE_NAME}"), 0o644))
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/include/foo/bar/test.txt", []byte("include subdir ${TEMPLATE_NAME}"), 0o644))
|
||||
|
||||
// case-1
|
||||
{
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/normal", []byte("normal content"), 0o644))
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/template", []byte("template from ${TEMPLATE_NAME}"), 0o644))
|
||||
}
|
||||
|
||||
// case-2
|
||||
{
|
||||
require.NoError(t, os.Symlink(tmpDir+"/sub/link-target", tmpDir+"/link"))
|
||||
}
|
||||
|
||||
// case-3
|
||||
{
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/subst-${REPO_NAME}", []byte("dummy subst repo name"), 0o644))
|
||||
}
|
||||
|
||||
// case-4
|
||||
assertSubstTemplateName := func(normalContent, toLinkContent, fromLinkContent string) {
|
||||
assertFileContent("subst-${TEMPLATE_NAME}-normal", normalContent)
|
||||
assertFileContent("subst-${TEMPLATE_NAME}-to-link", toLinkContent)
|
||||
assertFileContent("subst-${TEMPLATE_NAME}-from-link", fromLinkContent)
|
||||
}
|
||||
{
|
||||
// will succeed
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/subst-${TEMPLATE_NAME}-normal", []byte("dummy subst template name normal"), 0o644))
|
||||
// will skip if the path subst result is a link
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/subst-${TEMPLATE_NAME}-to-link", []byte("dummy subst template name to link"), 0o644))
|
||||
require.NoError(t, os.Symlink(tmpDir+"/sub/link-target", tmpDir+"/subst-TemplateRepoName-to-link"))
|
||||
// will be skipped since the source is a symlink
|
||||
require.NoError(t, os.Symlink(tmpDir+"/sub/link-target", tmpDir+"/subst-${TEMPLATE_NAME}-from-link"))
|
||||
// pre-check
|
||||
assertSubstTemplateName("dummy subst template name normal", "dummy subst template name to link", "link target content from ${TEMPLATE_NAME}")
|
||||
}
|
||||
|
||||
// process the template files
|
||||
{
|
||||
templateRepo := &repo_model.Repository{Name: "TemplateRepoName"}
|
||||
generatedRepo := &repo_model.Repository{Name: "/../.gIt/name"}
|
||||
fileMatcher, _ := readGiteaTemplateFile(tmpDir)
|
||||
err := processGiteaTemplateFile(t.Context(), tmpDir, templateRepo, generatedRepo, fileMatcher)
|
||||
require.NoError(t, err)
|
||||
assertFileContent("include/foo/bar/test.txt", "include subdir TemplateRepoName")
|
||||
}
|
||||
|
||||
// the link target should never be modified, and since it is in a subdirectory, it is not affected by the template either
|
||||
assertFileContent("sub/link-target", "link target content from ${TEMPLATE_NAME}")
|
||||
|
||||
// case-1
|
||||
{
|
||||
assertFileContent("no-such", "")
|
||||
assertFileContent("normal", "normal content")
|
||||
assertFileContent("template", "template from TemplateRepoName")
|
||||
}
|
||||
|
||||
// case-2
|
||||
{
|
||||
// symlinks with template variables should be preserved (neither read nor written)
|
||||
assertSymLink("link", tmpDir+"/sub/link-target")
|
||||
}
|
||||
|
||||
// case-3
|
||||
{
|
||||
assertFileContent("subst-${REPO_NAME}", "")
|
||||
assertFileContent("subst-/__/_gIt/name", "dummy subst repo name")
|
||||
}
|
||||
|
||||
// case-4
|
||||
{
|
||||
// the paths with template variables should have been removed; subst to a regular file succeeds, and the link is preserved
|
||||
assertSubstTemplateName("", "", "link target content from ${TEMPLATE_NAME}")
|
||||
assertFileContent("subst-TemplateRepoName-normal", "dummy subst template name normal")
|
||||
// subst to a link, skip, and the target is unchanged
|
||||
assertSymLink("subst-TemplateRepoName-to-link", tmpDir+"/sub/link-target")
|
||||
// subst from a link, skip, and the target is unchanged
|
||||
assertSymLink("subst-${TEMPLATE_NAME}-from-link", tmpDir+"/sub/link-target")
|
||||
}
|
||||
}
|
||||
|
||||
func TestTransformers(t *testing.T) {
|
||||
@@ -82,9 +192,9 @@ func TestTransformers(t *testing.T) {
|
||||
}
|
||||
|
||||
input := "Abc_Def-XYZ"
|
||||
assert.Len(t, defaultTransformers, len(cases))
|
||||
assert.Len(t, globalVars().defaultTransformers, len(cases))
|
||||
for i, c := range cases {
|
||||
tf := defaultTransformers[i]
|
||||
tf := globalVars().defaultTransformers[i]
|
||||
require.Equal(t, c.name, tf.Name)
|
||||
assert.Equal(t, c.expected, tf.Transform(input), "case %s", c.name)
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ type (
|
||||
DiscordPayload struct {
|
||||
Wait bool `json:"wait"`
|
||||
Content string `json:"content"`
|
||||
Username string `json:"username"`
|
||||
Username string `json:"username,omitempty"`
|
||||
AvatarURL string `json:"avatar_url,omitempty"`
|
||||
TTS bool `json:"tts"`
|
||||
Embeds []DiscordEmbed `json:"embeds"`
|
||||
|
||||
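A side note on the Username field change above: with omitempty, an empty username is dropped from the JSON sent to Discord instead of being serialized as an empty string. A minimal stand-alone illustration (simplified struct, not the real webhook payload):

package main

import (
	"encoding/json"
	"fmt"
)

// payload is a simplified stand-in for the DiscordPayload struct above.
type payload struct {
	Content  string `json:"content"`
	Username string `json:"username,omitempty"` // omitted from the JSON when empty
}

func main() {
	b, _ := json.Marshal(payload{Content: "hello"})
	fmt.Println(string(b)) // {"content":"hello"} — no empty "username" field
}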
@@ -119,7 +119,7 @@
|
||||
{{range $idx, $code := .FileContent}}
|
||||
{{$line := Eval $idx "+" 1}}
|
||||
<tr>
|
||||
<td id="L{{$line}}" class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
|
||||
<td class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
|
||||
{{if $.EscapeStatus.Escaped}}
|
||||
<td class="lines-escape">{{if (index $.LineEscapeStatus $idx).Escaped}}<button class="toggle-escape-button btn interact-bg" title="{{if (index $.LineEscapeStatus $idx).HasInvisible}}{{ctx.Locale.Tr "repo.invisible_runes_line"}} {{end}}{{if (index $.LineEscapeStatus $idx).HasAmbiguous}}{{ctx.Locale.Tr "repo.ambiguous_runes_line"}}{{end}}"></button>{{end}}</td>
|
||||
{{end}}
|
||||
|
||||
@@ -355,6 +355,7 @@ func TestPackageSwift(t *testing.T) {
|
||||
assert.Equal(t, packageVersion, result.Metadata.Version)
|
||||
assert.Equal(t, packageDescription, result.Metadata.Description)
|
||||
assert.Equal(t, "Swift", result.Metadata.ProgrammingLanguage.Name)
|
||||
assert.Equal(t, packageAuthor, result.Metadata.Author.Name)
|
||||
assert.Equal(t, packageAuthor, result.Metadata.Author.GivenName)
|
||||
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s/%s.json", url, packageScope, packageName, packageVersion)).
|
||||
|
||||
@@ -30,7 +30,7 @@ func Test_CmdKeys(t *testing.T) {
|
||||
"with_key",
|
||||
[]string{"keys", "-e", "git", "-u", "git", "-t", "ssh-rsa", "-k", "AAAAB3NzaC1yc2EAAAADAQABAAABgQDWVj0fQ5N8wNc0LVNA41wDLYJ89ZIbejrPfg/avyj3u/ZohAKsQclxG4Ju0VirduBFF9EOiuxoiFBRr3xRpqzpsZtnMPkWVWb+akZwBFAx8p+jKdy4QXR/SZqbVobrGwip2UjSrri1CtBxpJikojRIZfCnDaMOyd9Jp6KkujvniFzUWdLmCPxUE9zhTaPu0JsEP7MW0m6yx7ZUhHyfss+NtqmFTaDO+QlMR7L2QkDliN2Jl3Xa3PhuWnKJfWhdAq1Cw4oraKUOmIgXLkuiuxVQ6mD3AiFupkmfqdHq6h+uHHmyQqv3gU+/sD8GbGAhf6ftqhTsXjnv1Aj4R8NoDf9BS6KRkzkeun5UisSzgtfQzjOMEiJtmrep2ZQrMGahrXa+q4VKr0aKJfm+KlLfwm/JztfsBcqQWNcTURiCFqz+fgZw0Ey/de0eyMzldYTdXXNRYCKjs9bvBK+6SSXRM7AhftfQ0ZuoW5+gtinPrnmoOaSCEJbAiEiTO/BzOHgowiM="},
|
||||
false,
|
||||
"# gitea public key\ncommand=\"" + setting.AppPath + " --config=" + util.ShellEscape(setting.CustomConf) + " serv key-1\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDWVj0fQ5N8wNc0LVNA41wDLYJ89ZIbejrPfg/avyj3u/ZohAKsQclxG4Ju0VirduBFF9EOiuxoiFBRr3xRpqzpsZtnMPkWVWb+akZwBFAx8p+jKdy4QXR/SZqbVobrGwip2UjSrri1CtBxpJikojRIZfCnDaMOyd9Jp6KkujvniFzUWdLmCPxUE9zhTaPu0JsEP7MW0m6yx7ZUhHyfss+NtqmFTaDO+QlMR7L2QkDliN2Jl3Xa3PhuWnKJfWhdAq1Cw4oraKUOmIgXLkuiuxVQ6mD3AiFupkmfqdHq6h+uHHmyQqv3gU+/sD8GbGAhf6ftqhTsXjnv1Aj4R8NoDf9BS6KRkzkeun5UisSzgtfQzjOMEiJtmrep2ZQrMGahrXa+q4VKr0aKJfm+KlLfwm/JztfsBcqQWNcTURiCFqz+fgZw0Ey/de0eyMzldYTdXXNRYCKjs9bvBK+6SSXRM7AhftfQ0ZuoW5+gtinPrnmoOaSCEJbAiEiTO/BzOHgowiM= user2@localhost\n",
|
||||
"# gitea public key\ncommand=\"" + setting.AppPath + " --config=" + util.ShellEscape(setting.CustomConf) + " serv key-1\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDWVj0fQ5N8wNc0LVNA41wDLYJ89ZIbejrPfg/avyj3u/ZohAKsQclxG4Ju0VirduBFF9EOiuxoiFBRr3xRpqzpsZtnMPkWVWb+akZwBFAx8p+jKdy4QXR/SZqbVobrGwip2UjSrri1CtBxpJikojRIZfCnDaMOyd9Jp6KkujvniFzUWdLmCPxUE9zhTaPu0JsEP7MW0m6yx7ZUhHyfss+NtqmFTaDO+QlMR7L2QkDliN2Jl3Xa3PhuWnKJfWhdAq1Cw4oraKUOmIgXLkuiuxVQ6mD3AiFupkmfqdHq6h+uHHmyQqv3gU+/sD8GbGAhf6ftqhTsXjnv1Aj4R8NoDf9BS6KRkzkeun5UisSzgtfQzjOMEiJtmrep2ZQrMGahrXa+q4VKr0aKJfm+KlLfwm/JztfsBcqQWNcTURiCFqz+fgZw0Ey/de0eyMzldYTdXXNRYCKjs9bvBK+6SSXRM7AhftfQ0ZuoW5+gtinPrnmoOaSCEJbAiEiTO/BzOHgowiM= user-2\n",
|
||||
},
|
||||
{"invalid", []string{"keys", "--not-a-flag=git"}, true, "Incorrect Usage: flag provided but not defined: -not-a-flag\n\n"},
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"code.gitea.io/gitea/models/db"
|
||||
issues_model "code.gitea.io/gitea/models/issues"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/indexer/issues"
|
||||
@@ -471,19 +472,38 @@ func TestIssueRedirect(t *testing.T) {
|
||||
session := loginUser(t, "user2")
|
||||
|
||||
// Test external tracker where style is not set (defaults to numeric)
|
||||
req := NewRequest(t, "GET", path.Join("org26", "repo_external_tracker", "issues", "1"))
|
||||
req := NewRequest(t, "GET", "/org26/repo_external_tracker/issues/1")
|
||||
resp := session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "https://tracker.com/org26/repo_external_tracker/issues/1", test.RedirectURL(resp))
|
||||
|
||||
// Test external tracker with numeric style
|
||||
req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_numeric", "issues", "1"))
|
||||
req = NewRequest(t, "GET", "/org26/repo_external_tracker_numeric/issues/1")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "https://tracker.com/org26/repo_external_tracker_numeric/issues/1", test.RedirectURL(resp))
|
||||
|
||||
// Test external tracker with alphanumeric style (for a pull request)
|
||||
req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_alpha", "issues", "1"))
|
||||
req = NewRequest(t, "GET", "/org26/repo_external_tracker_alpha/issues/1")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/"+path.Join("org26", "repo_external_tracker_alpha", "pulls", "1"), test.RedirectURL(resp))
|
||||
assert.Equal(t, "/org26/repo_external_tracker_alpha/pulls/1", test.RedirectURL(resp))
|
||||
|
||||
// test to check that the PR redirection works if the issue unit is disabled
|
||||
// repo1 is a normal repository with the issue unit enabled; visiting issue 2 (which is a pull request)
|
||||
// will redirect to pulls
|
||||
req = NewRequest(t, "GET", "/user2/repo1/issues/2")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/user2/repo1/pulls/2", test.RedirectURL(resp))
|
||||
|
||||
repoUnit := unittest.AssertExistsAndLoadBean(t, &repo_model.RepoUnit{RepoID: 1, Type: unit.TypeIssues})
|
||||
|
||||
// disable the issue unit; it will be reset
|
||||
_, err := db.DeleteByID[repo_model.RepoUnit](t.Context(), repoUnit.ID)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// even if the issue unit is disabled, visiting an issue which is a pull request
|
||||
// will still redirect to pull request
|
||||
req = NewRequest(t, "GET", "/user2/repo1/issues/2")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/user2/repo1/pulls/2", test.RedirectURL(resp))
|
||||
}
|
||||
|
||||
func TestSearchIssues(t *testing.T) {
|
||||
|
||||
@@ -105,7 +105,15 @@ func TestPullCompare_EnableAllowEditsFromMaintainer(t *testing.T) {
|
||||
|
||||
// user4 creates a new branch and a PR
|
||||
testEditFileToNewBranch(t, user4Session, "user4", forkedRepoName, "master", "user4/update-readme", "README.md", "Hello, World\n(Edited by user4)\n")
|
||||
resp := testPullCreateDirectly(t, user4Session, repo3.OwnerName, repo3.Name, "master", "user4", forkedRepoName, "user4/update-readme", "PR for user4 forked repo3")
|
||||
resp := testPullCreateDirectly(t, user4Session, createPullRequestOptions{
|
||||
BaseRepoOwner: repo3.OwnerName,
|
||||
BaseRepoName: repo3.Name,
|
||||
BaseBranch: "master",
|
||||
HeadRepoOwner: "user4",
|
||||
HeadRepoName: forkedRepoName,
|
||||
HeadBranch: "user4/update-readme",
|
||||
Title: "PR for user4 forked repo3",
|
||||
})
|
||||
prURL := test.RedirectURL(resp)
|
||||
|
||||
// user2 (admin of repo3) goes to the PR files page
|
||||
|
||||
@@ -60,26 +60,50 @@ func testPullCreate(t *testing.T, session *TestSession, user, repo string, toSel
|
||||
return resp
|
||||
}
|
||||
|
||||
func testPullCreateDirectly(t *testing.T, session *TestSession, baseRepoOwner, baseRepoName, baseBranch, headRepoOwner, headRepoName, headBranch, title string) *httptest.ResponseRecorder {
|
||||
headCompare := headBranch
|
||||
if headRepoOwner != "" {
|
||||
if headRepoName != "" {
|
||||
headCompare = fmt.Sprintf("%s/%s:%s", headRepoOwner, headRepoName, headBranch)
|
||||
type createPullRequestOptions struct {
|
||||
BaseRepoOwner string
|
||||
BaseRepoName string
|
||||
BaseBranch string
|
||||
HeadRepoOwner string
|
||||
HeadRepoName string
|
||||
HeadBranch string
|
||||
Title string
|
||||
ReviewerIDs string // comma-separated list of user IDs
|
||||
}
|
||||
|
||||
func (opts createPullRequestOptions) IsValid() bool {
|
||||
return opts.BaseRepoOwner != "" && opts.BaseRepoName != "" && opts.BaseBranch != "" &&
|
||||
opts.HeadBranch != "" && opts.Title != ""
|
||||
}
|
||||
|
||||
func testPullCreateDirectly(t *testing.T, session *TestSession, opts createPullRequestOptions) *httptest.ResponseRecorder {
|
||||
if !opts.IsValid() {
|
||||
t.Fatal("Invalid pull request options")
|
||||
}
|
||||
|
||||
headCompare := opts.HeadBranch
|
||||
if opts.HeadRepoOwner != "" {
|
||||
if opts.HeadRepoName != "" {
|
||||
headCompare = fmt.Sprintf("%s/%s:%s", opts.HeadRepoOwner, opts.HeadRepoName, opts.HeadBranch)
|
||||
} else {
|
||||
headCompare = fmt.Sprintf("%s:%s", headRepoOwner, headBranch)
|
||||
headCompare = fmt.Sprintf("%s:%s", opts.HeadRepoOwner, opts.HeadBranch)
|
||||
}
|
||||
}
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/%s...%s", baseRepoOwner, baseRepoName, baseBranch, headCompare))
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/%s...%s", opts.BaseRepoOwner, opts.BaseRepoName, opts.BaseBranch, headCompare))
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
// Submit the form for creating the pull
|
||||
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||
link, exists := htmlDoc.doc.Find("form.ui.form").Attr("action")
|
||||
assert.True(t, exists, "The template has changed")
|
||||
req = NewRequestWithValues(t, "POST", link, map[string]string{
|
||||
params := map[string]string{
|
||||
"_csrf": htmlDoc.GetCSRF(),
|
||||
"title": title,
|
||||
})
|
||||
"title": opts.Title,
|
||||
}
|
||||
if opts.ReviewerIDs != "" {
|
||||
params["reviewer_ids"] = opts.ReviewerIDs
|
||||
}
|
||||
req = NewRequestWithValues(t, "POST", link, params)
|
||||
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||
return resp
|
||||
}
|
||||
@@ -246,7 +270,15 @@ func TestPullCreatePrFromBaseToFork(t *testing.T) {
|
||||
testEditFile(t, sessionBase, "user2", "repo1", "master", "README.md", "Hello, World (Edited)\n")
|
||||
|
||||
// Create a PR
|
||||
resp := testPullCreateDirectly(t, sessionFork, "user1", "repo1", "master", "user2", "repo1", "master", "This is a pull title")
|
||||
resp := testPullCreateDirectly(t, sessionFork, createPullRequestOptions{
|
||||
BaseRepoOwner: "user1",
|
||||
BaseRepoName: "repo1",
|
||||
BaseBranch: "master",
|
||||
HeadRepoOwner: "user2",
|
||||
HeadRepoName: "repo1",
|
||||
HeadBranch: "master",
|
||||
Title: "This is a pull title",
|
||||
})
|
||||
// check the redirected URL
|
||||
url := test.RedirectURL(resp)
|
||||
assert.Regexp(t, "^/user1/repo1/pulls/[0-9]*$", url)
|
||||
|
||||
@@ -184,13 +184,29 @@ func TestPullView_CodeOwner(t *testing.T) {
|
||||
session := loginUser(t, "user5")
|
||||
|
||||
// create a pull request on the forked repository, code reviewers should not be mentioned
|
||||
testPullCreateDirectly(t, session, "user5", "test_codeowner", forkedRepo.DefaultBranch, "", "", "codeowner-basebranch-forked", "Test Pull Request on Forked Repository")
|
||||
testPullCreateDirectly(t, session, createPullRequestOptions{
|
||||
BaseRepoOwner: "user5",
|
||||
BaseRepoName: "test_codeowner",
|
||||
BaseBranch: forkedRepo.DefaultBranch,
|
||||
HeadRepoOwner: "",
|
||||
HeadRepoName: "",
|
||||
HeadBranch: "codeowner-basebranch-forked",
|
||||
Title: "Test Pull Request on Forked Repository",
|
||||
})
|
||||
|
||||
pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: forkedRepo.ID, HeadBranch: "codeowner-basebranch-forked"})
|
||||
unittest.AssertNotExistsBean(t, &issues_model.Review{IssueID: pr.IssueID, Type: issues_model.ReviewTypeRequest, ReviewerID: 8})
|
||||
|
||||
// create a pull request to base repository, code reviewers should be mentioned
|
||||
testPullCreateDirectly(t, session, repo.OwnerName, repo.Name, repo.DefaultBranch, forkedRepo.OwnerName, forkedRepo.Name, "codeowner-basebranch-forked", "Test Pull Request3")
|
||||
testPullCreateDirectly(t, session, createPullRequestOptions{
|
||||
BaseRepoOwner: repo.OwnerName,
|
||||
BaseRepoName: repo.Name,
|
||||
BaseBranch: repo.DefaultBranch,
|
||||
HeadRepoOwner: forkedRepo.OwnerName,
|
||||
HeadRepoName: forkedRepo.Name,
|
||||
HeadBranch: "codeowner-basebranch-forked",
|
||||
Title: "Test Pull Request3",
|
||||
})
|
||||
|
||||
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: repo.ID, HeadRepoID: forkedRepo.ID, HeadBranch: "codeowner-basebranch-forked"})
|
||||
unittest.AssertExistsAndLoadBean(t, &issues_model.Review{IssueID: pr.IssueID, Type: issues_model.ReviewTypeRequest, ReviewerID: 8})
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
"time"
|
||||
|
||||
auth_model "code.gitea.io/gitea/models/auth"
|
||||
"code.gitea.io/gitea/models/perm"
|
||||
"code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
@@ -529,15 +530,30 @@ func Test_WebhookPullRequest(t *testing.T) {
|
||||
}, http.StatusOK)
|
||||
defer provider.Close()
|
||||
|
||||
testCtx := NewAPITestContext(t, "user2", "repo1", auth_model.AccessTokenScopeAll)
|
||||
// add user4 as a collaborator so that they can be a reviewer
|
||||
doAPIAddCollaborator(testCtx, "user4", perm.AccessModeWrite)(t)
|
||||
|
||||
// 1. create a new webhook with special webhook for repo1
|
||||
session := loginUser(t, "user2")
|
||||
sessionUser2 := loginUser(t, "user2")
|
||||
sessionUser4 := loginUser(t, "user4")
|
||||
|
||||
testAPICreateWebhookForRepo(t, session, "user2", "repo1", provider.URL(), "pull_request")
|
||||
// ignore the possible review_requested event to keep the test deterministic
|
||||
testAPICreateWebhookForRepo(t, sessionUser2, "user2", "repo1", provider.URL(), "pull_request_only")
|
||||
|
||||
testAPICreateBranch(t, session, "user2", "repo1", "master", "master2", http.StatusCreated)
|
||||
testAPICreateBranch(t, sessionUser2, "user2", "repo1", "master", "master2", http.StatusCreated)
|
||||
// 2. trigger the webhook
|
||||
repo1 := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 1})
|
||||
testCreatePullToDefaultBranch(t, session, repo1, repo1, "master2", "first pull request")
|
||||
testPullCreateDirectly(t, sessionUser4, createPullRequestOptions{
|
||||
BaseRepoOwner: repo1.OwnerName,
|
||||
BaseRepoName: repo1.Name,
|
||||
BaseBranch: repo1.DefaultBranch,
|
||||
HeadRepoOwner: "",
|
||||
HeadRepoName: "",
|
||||
HeadBranch: "master2",
|
||||
Title: "first pull request",
|
||||
ReviewerIDs: "2", // add user2 as reviewer
|
||||
})
|
||||
|
||||
// 3. validate the webhook is triggered
|
||||
assert.Equal(t, "pull_request", triggeredEvent)
|
||||
@@ -549,6 +565,8 @@ func Test_WebhookPullRequest(t *testing.T) {
|
||||
assert.Equal(t, 0, *payloads[0].PullRequest.Additions)
|
||||
assert.Equal(t, 0, *payloads[0].PullRequest.ChangedFiles)
|
||||
assert.Equal(t, 0, *payloads[0].PullRequest.Deletions)
|
||||
assert.Len(t, payloads[0].PullRequest.RequestedReviewers, 1)
|
||||
assert.Equal(t, int64(2), payloads[0].PullRequest.RequestedReviewers[0].ID)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||