Use batch database operations instead of one-by-one queries to optimize the pulls API (#32680)
Resolves #31492. The response time of the Pull Requests API has improved significantly, dropping from over `2000ms` to about `350ms` on my local machine, roughly `6` times faster. A key area for further optimization is batch-fetching the data behind `apiPullRequest.ChangedFiles`, `apiPullRequest.Additions`, and `apiPullRequest.Deletions`. The test `TestAPIViewPulls` already exists, and new tests have been added.
- This PR also fixes some bugs in the `GetDiff` functions.
- This PR also fixes a data inconsistency in the test data: for a pull request, the head branch's reference should be equal to the reference in `pull/xxx/head`.
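The gist of the change is replacing N per-pull-request queries with one grouped query per kind of data. A minimal sketch of that pattern, using plain `database/sql` rather than Gitea's xorm engine (the helper name, the comment-type parameter, and the `*sql.DB` argument are illustrative, not part of this PR):

```go
package example

import (
	"database/sql"
	"fmt"
	"strings"
)

// loadReviewCommentCounts shows the batching idea behind this PR: one GROUP BY
// query returns the review-comment count for every issue ID at once, instead of
// running one COUNT query per pull request.
func loadReviewCommentCounts(db *sql.DB, issueIDs []int64, reviewCommentType int) (map[int64]int, error) {
	counts := make(map[int64]int, len(issueIDs))
	if len(issueIDs) == 0 {
		return counts, nil
	}
	placeholders := make([]string, len(issueIDs))
	args := make([]any, 0, len(issueIDs)+1)
	for i, id := range issueIDs {
		placeholders[i] = "?"
		args = append(args, id)
	}
	args = append(args, reviewCommentType)
	query := fmt.Sprintf(
		"SELECT issue_id, COUNT(*) FROM comment WHERE issue_id IN (%s) AND `type` = ? GROUP BY issue_id",
		strings.Join(placeholders, ","))
	rows, err := db.Query(query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	for rows.Next() {
		var issueID int64
		var n int
		if err := rows.Scan(&issueID, &n); err != nil {
			return nil, err
		}
		counts[issueID] = n
	}
	return counts, rows.Err()
}
```

The real implementation lives in `PullRequestList.LoadReviewCommentsCounts` and `PullRequestList.LoadReviews` further down in this diff, built on xorm's query builder.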
This commit is contained in: parent 2ac6f2b129, commit fbe6d9dc6b
@ -4,6 +4,7 @@
  reviewer_id: 1
  issue_id: 2
  content: "Demo Review"
  original_author_id: 0
  updated_unix: 946684810
  created_unix: 946684810
-
@ -12,6 +13,7 @@
  reviewer_id: 534543
  issue_id: 534543
  content: "Invalid Review #1"
  original_author_id: 0
  updated_unix: 946684810
  created_unix: 946684810
-
@ -20,6 +22,7 @@
  reviewer_id: 1
  issue_id: 343545
  content: "Invalid Review #2"
  original_author_id: 0
  updated_unix: 946684810
  created_unix: 946684810
-
@ -28,6 +31,7 @@
  reviewer_id: 1
  issue_id: 2
  content: "Pending Review"
  original_author_id: 0
  updated_unix: 946684810
  created_unix: 946684810
-
@ -36,6 +40,7 @@
  reviewer_id: 1
  issue_id: 3
  content: "New review 1"
  original_author_id: 0
  updated_unix: 946684810
  created_unix: 946684810
-
@ -61,8 +66,8 @@
  type: 1
  reviewer_id: 4
  issue_id: 3
  original_author_id: 0
  content: "New review 5"
  original_author_id: 0
  commit_id: 8091a55037cd59e47293aca02981b5a67076b364
  stale: true
  updated_unix: 946684813
@ -73,9 +78,9 @@
  reviewer_id: 2
  issue_id: 3
  content: "New review 3 rejected"
  original_author_id: 0
  updated_unix: 946684814
  created_unix: 946684814
  original_author_id: 0

-
  id: 10
@ -83,6 +88,7 @@
  reviewer_id: 100
  issue_id: 3
  content: "a deleted user's review"
  original_author_id: 0
  official: true
  updated_unix: 946684815
  created_unix: 946684815
@ -112,6 +118,7 @@
  reviewer_id: 5
  issue_id: 11
  content: "old review from user5"
  original_author_id: 0
  updated_unix: 946684820
  created_unix: 946684820

@ -121,6 +128,7 @@
  reviewer_id: 5
  issue_id: 11
  content: "duplicate review from user5 (latest)"
  original_author_id: 0
  updated_unix: 946684830
  created_unix: 946684830

@ -130,6 +138,7 @@
  reviewer_id: 6
  issue_id: 11
  content: "singular review from org6 and final review for this pr"
  original_author_id: 0
  updated_unix: 946684831
  created_unix: 946684831

@ -139,6 +148,7 @@
  reviewer_id: 20
  issue_id: 20
  content: "review request for user20"
  original_author_id: 0
  updated_unix: 946684832
  created_unix: 946684832

@ -148,6 +158,7 @@
  reviewer_id: 20
  issue_id: 20
  content: "review approved by user20"
  original_author_id: 0
  updated_unix: 946684833
  created_unix: 946684833

@ -158,6 +169,7 @@
  reviewer_team_id: 5
  issue_id: 20
  content: "review request for team5"
  original_author_id: 0
  updated_unix: 946684834
  created_unix: 946684834

@ -168,6 +180,7 @@
  reviewer_team_id: 0
  issue_id: 20
  content: "review request for user15"
  original_author_id: 0
  updated_unix: 946684835
  created_unix: 946684835

@ -177,6 +190,7 @@
  reviewer_id: 1
  issue_id: 2
  content: "Review Comment"
  original_author_id: 0
  updated_unix: 946684810
  created_unix: 946684810

@ -186,6 +200,7 @@
  reviewer_id: 5
  issue_id: 3
  content: "reviewed by user5"
  original_author_id: 0
  commit_id: 4a357436d925b5c974181ff12a994538ddc5a269
  updated_unix: 946684816
  created_unix: 946684816
@ -196,5 +211,6 @@
  reviewer_id: 5
  issue_id: 3
  content: "review request for user5"
  original_author_id: 0
  updated_unix: 946684817
  created_unix: 946684817
@ -16,6 +16,7 @@ import (
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/util"

    "xorm.io/builder"
    "xorm.io/xorm"
)

@ -240,6 +241,64 @@ func (prs PullRequestList) GetIssueIDs() []int64 {
    })
}

func (prs PullRequestList) LoadReviewCommentsCounts(ctx context.Context) (map[int64]int, error) {
    issueIDs := prs.GetIssueIDs()
    countsMap := make(map[int64]int, len(issueIDs))
    counts := make([]struct {
        IssueID int64
        Count   int
    }, 0, len(issueIDs))
    if err := db.GetEngine(ctx).Select("issue_id, count(*) as count").
        Table("comment").In("issue_id", issueIDs).And("type = ?", CommentTypeReview).
        GroupBy("issue_id").Find(&counts); err != nil {
        return nil, err
    }
    for _, c := range counts {
        countsMap[c.IssueID] = c.Count
    }
    return countsMap, nil
}

func (prs PullRequestList) LoadReviews(ctx context.Context) (ReviewList, error) {
    issueIDs := prs.GetIssueIDs()
    reviews := make([]*Review, 0, len(issueIDs))

    subQuery := builder.Select("max(id) as id").
        From("review").
        Where(builder.In("issue_id", issueIDs)).
        And(builder.In("`type`", ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest)).
        And(builder.Eq{
            "dismissed":          false,
            "original_author_id": 0,
            "reviewer_team_id":   0,
        }).
        GroupBy("issue_id, reviewer_id")
    // Get latest review of each reviewer, sorted in order they were made
    if err := db.GetEngine(ctx).In("id", subQuery).OrderBy("review.updated_unix ASC").Find(&reviews); err != nil {
        return nil, err
    }

    teamReviewRequests := make([]*Review, 0, 5)
    subQueryTeam := builder.Select("max(id) as id").
        From("review").
        Where(builder.In("issue_id", issueIDs)).
        And(builder.Eq{
            "original_author_id": 0,
        }).And(builder.Neq{
            "reviewer_team_id": 0,
        }).
        GroupBy("issue_id, reviewer_team_id")
    if err := db.GetEngine(ctx).In("id", subQueryTeam).OrderBy("review.updated_unix ASC").Find(&teamReviewRequests); err != nil {
        return nil, err
    }

    if len(teamReviewRequests) > 0 {
        reviews = append(reviews, teamReviewRequests...)
    }

    return reviews, nil
}

// HasMergedPullRequestInRepo returns whether the user(poster) has merged pull-request in the repo
func HasMergedPullRequestInRepo(ctx context.Context, repoID, posterID int64) (bool, error) {
    return db.GetEngine(ctx).
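For orientation, the call pattern the new helpers are designed for looks roughly like this; `loadPRReviewData` is a hypothetical wrapper, while the three methods it calls are the ones added or used by this PR:

```go
import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
)

// loadPRReviewData condenses how ToAPIPullRequests (later in this diff) consumes
// the batch loaders: every query runs once per list, never once per pull request.
func loadPRReviewData(ctx context.Context, prs issues_model.PullRequestList) (map[int64]int, issues_model.ReviewList, error) {
	counts, err := prs.LoadReviewCommentsCounts(ctx) // one grouped COUNT query
	if err != nil {
		return nil, nil, err
	}
	reviews, err := prs.LoadReviews(ctx) // latest review per reviewer, all PRs at once
	if err != nil {
		return nil, nil, err
	}
	if err := reviews.LoadReviewers(ctx); err != nil { // batch-load the reviewer users
		return nil, nil, err
	}
	return counts, reviews, nil
}
```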
models/issues/pull_list_test.go (new file, 64 lines)
@ -0,0 +1,64 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package issues_test

import (
    "testing"

    "code.gitea.io/gitea/models/db"
    issues_model "code.gitea.io/gitea/models/issues"
    "code.gitea.io/gitea/models/unittest"

    "github.com/stretchr/testify/assert"
)

func TestPullRequestList_LoadAttributes(t *testing.T) {
    assert.NoError(t, unittest.PrepareTestDatabase())

    prs := []*issues_model.PullRequest{
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
    }
    assert.NoError(t, issues_model.PullRequestList(prs).LoadAttributes(db.DefaultContext))
    for _, pr := range prs {
        assert.NotNil(t, pr.Issue)
        assert.Equal(t, pr.IssueID, pr.Issue.ID)
    }

    assert.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(db.DefaultContext))
}

func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) {
    assert.NoError(t, unittest.PrepareTestDatabase())

    prs := []*issues_model.PullRequest{
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
    }
    reviewComments, err := issues_model.PullRequestList(prs).LoadReviewCommentsCounts(db.DefaultContext)
    assert.NoError(t, err)
    assert.Len(t, reviewComments, 2)
    for _, pr := range prs {
        assert.EqualValues(t, reviewComments[pr.IssueID], 1)
    }
}

func TestPullRequestList_LoadReviews(t *testing.T) {
    assert.NoError(t, unittest.PrepareTestDatabase())

    prs := []*issues_model.PullRequest{
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
    }
    reviewList, err := issues_model.PullRequestList(prs).LoadReviews(db.DefaultContext)
    assert.NoError(t, err)
    // 1, 7, 8, 9, 10, 22
    assert.Len(t, reviewList, 6)
    assert.EqualValues(t, 1, reviewList[0].ID)
    assert.EqualValues(t, 7, reviewList[1].ID)
    assert.EqualValues(t, 8, reviewList[2].ID)
    assert.EqualValues(t, 9, reviewList[3].ID)
    assert.EqualValues(t, 10, reviewList[4].ID)
    assert.EqualValues(t, 22, reviewList[5].ID)
}
@ -79,7 +79,7 @@ func TestPullRequestsNewest(t *testing.T) {
func TestLoadRequestedReviewers(t *testing.T) {
    assert.NoError(t, unittest.PrepareTestDatabase())

    pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
    pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
    assert.NoError(t, pull.LoadIssue(db.DefaultContext))
    issue := pull.Issue
    assert.NoError(t, issue.LoadRepo(db.DefaultContext))
@ -93,7 +93,7 @@ func TestLoadRequestedReviewers(t *testing.T) {
    assert.NotNil(t, comment)

    assert.NoError(t, pull.LoadRequestedReviewers(db.DefaultContext))
    assert.Len(t, pull.RequestedReviewers, 1)
    assert.Len(t, pull.RequestedReviewers, 6)

    comment, err = issues_model.RemoveReviewRequest(db.DefaultContext, issue, user1, &user_model.User{})
    assert.NoError(t, err)
@ -234,22 +234,6 @@ func TestPullRequest_UpdateCols(t *testing.T) {
    unittest.CheckConsistencyFor(t, pr)
}

func TestPullRequestList_LoadAttributes(t *testing.T) {
    assert.NoError(t, unittest.PrepareTestDatabase())

    prs := []*issues_model.PullRequest{
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
        unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
    }
    assert.NoError(t, issues_model.PullRequestList(prs).LoadAttributes(db.DefaultContext))
    for _, pr := range prs {
        assert.NotNil(t, pr.Issue)
        assert.Equal(t, pr.IssueID, pr.Issue.ID)
    }

    assert.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(db.DefaultContext))
}

// TODO TestAddTestPullRequestTask

func TestPullRequest_IsWorkInProgress(t *testing.T) {
@ -47,14 +47,9 @@ func (reviews ReviewList) LoadReviewersTeams(ctx context.Context) error {
        }
    }

    teamsMap := make(map[int64]*organization_model.Team, 0)
    for _, teamID := range reviewersTeamsIDs {
        team, err := organization_model.GetTeamByID(ctx, teamID)
        if err != nil {
            return err
        }

        teamsMap[teamID] = team
    teamsMap, err := organization_model.GetTeamsByIDs(ctx, reviewersTeamsIDs)
    if err != nil {
        return err
    }

    for _, review := range reviews {
@ -126,42 +126,48 @@ func TestGetReviewersByIssueID(t *testing.T) {
    user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
    org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
    user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
    user5 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})

    expectedReviews := []*issues_model.Review{}
    expectedReviews = append(expectedReviews,
        &issues_model.Review{
            ID:          7,
            Reviewer:    org3,
            Type:        issues_model.ReviewTypeReject,
            UpdatedUnix: 946684812,
        },
        &issues_model.Review{
            ID:          8,
            Reviewer:    user4,
            Type:        issues_model.ReviewTypeApprove,
            UpdatedUnix: 946684813,
        },
        &issues_model.Review{
            ID:          9,
            Reviewer:    user2,
            Type:        issues_model.ReviewTypeReject,
            UpdatedUnix: 946684814,
        })
        },
        &issues_model.Review{
            ID:          10,
            Reviewer:    user_model.NewGhostUser(),
            Type:        issues_model.ReviewTypeReject,
            UpdatedUnix: 946684815,
        },
        &issues_model.Review{
            ID:          22,
            Reviewer:    user5,
            Type:        issues_model.ReviewTypeRequest,
            UpdatedUnix: 946684817,
        },
    )

    allReviews, err := issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID)
    assert.NoError(t, err)
    for _, review := range allReviews {
        assert.NoError(t, review.LoadReviewer(db.DefaultContext))
    }
    if assert.Len(t, allReviews, 3) {
        for i, review := range allReviews {
            assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
            assert.Equal(t, expectedReviews[i].Type, review.Type)
            assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
        }
    }

    allReviews, err = issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID)
    assert.NoError(t, err)
    assert.NoError(t, allReviews.LoadReviewers(db.DefaultContext))
    if assert.Len(t, allReviews, 3) {
    if assert.Len(t, allReviews, 5) {
        for i, review := range allReviews {
            assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
            assert.Equal(t, expectedReviews[i].Type, review.Type)
@ -131,3 +131,8 @@ func GetTeamsByOrgIDs(ctx context.Context, orgIDs []int64) (TeamList, error) {
    teams := make([]*Team, 0, 10)
    return teams, db.GetEngine(ctx).Where(builder.In("org_id", orgIDs)).Find(&teams)
}

func GetTeamsByIDs(ctx context.Context, teamIDs []int64) (map[int64]*Team, error) {
    teams := make(map[int64]*Team, len(teamIDs))
    return teams, db.GetEngine(ctx).Where(builder.In("`id`", teamIDs)).Find(&teams)
}
models/organization/team_list_test.go (new file, 25 lines)
@ -0,0 +1,25 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package organization_test

import (
    "testing"

    "code.gitea.io/gitea/models/db"
    org_model "code.gitea.io/gitea/models/organization"
    "code.gitea.io/gitea/models/unittest"

    "github.com/stretchr/testify/assert"
)

func Test_GetTeamsByIDs(t *testing.T) {
    assert.NoError(t, unittest.PrepareTestDatabase())

    // 1 owner team, 2 normal team
    teams, err := org_model.GetTeamsByIDs(db.DefaultContext, []int64{1, 2})
    assert.NoError(t, err)
    assert.Len(t, teams, 2)
    assert.Equal(t, "Owners", teams[1].Name)
    assert.Equal(t, "team1", teams[2].Name)
}
@ -246,18 +246,40 @@ func (repo *Repository) GetDiffOrPatch(base, head string, w io.Writer, patch, bi

// GetDiff generates and returns patch data between given revisions, optimized for human readability
func (repo *Repository) GetDiff(base, head string, w io.Writer) error {
    return NewCommand(repo.Ctx, "diff", "-p").AddDynamicArguments(base, head).Run(&RunOpts{
        Dir:    repo.Path,
        Stdout: w,
    })
    stderr := new(bytes.Buffer)
    err := NewCommand(repo.Ctx, "diff", "-p").AddDynamicArguments(base + "..." + head).
        Run(&RunOpts{
            Dir:    repo.Path,
            Stdout: w,
            Stderr: stderr,
        })
    if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) {
        return NewCommand(repo.Ctx, "diff", "-p").AddDynamicArguments(base, head).
            Run(&RunOpts{
                Dir:    repo.Path,
                Stdout: w,
            })
    }
    return err
}

// GetDiffBinary generates and returns patch data between given revisions, including binary diffs.
func (repo *Repository) GetDiffBinary(base, head string, w io.Writer) error {
    return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram").AddDynamicArguments(base, head).Run(&RunOpts{
        Dir:    repo.Path,
        Stdout: w,
    })
    stderr := new(bytes.Buffer)
    err := NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram").AddDynamicArguments(base + "..." + head).
        Run(&RunOpts{
            Dir:    repo.Path,
            Stdout: w,
            Stderr: stderr,
        })
    if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) {
        return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram").AddDynamicArguments(base, head).
            Run(&RunOpts{
                Dir:    repo.Path,
                Stdout: w,
            })
    }
    return err
}

// GetPatch generates and returns format-patch data between given revisions, able to be used with `git apply`
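The behavioural point of the `GetDiff`/`GetDiffBinary` change above: `git diff base...head` (three dots) diffs from the merge base of the two revisions, which is what a pull request diff should show, while the old `git diff base head` compared the two tips directly. When the revisions share no merge base, git prints "no merge base" on stderr, and the code falls back to the direct form. A standalone sketch of the same logic with plain `os/exec` (the `diffForPR` helper is illustrative, not a Gitea function):

```go
package example

import (
	"bytes"
	"io"
	"os/exec"
)

// diffForPR mirrors the fallback in GetDiff above: prefer the merge-base diff
// ("base...head"), fall back to a tip-to-tip diff when git reports no merge base.
func diffForPR(repoPath, base, head string, w io.Writer) error {
	stderr := new(bytes.Buffer)
	cmd := exec.Command("git", "diff", "-p", base+"..."+head)
	cmd.Dir, cmd.Stdout, cmd.Stderr = repoPath, w, stderr
	err := cmd.Run()
	if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) {
		fallback := exec.Command("git", "diff", "-p", base, head)
		fallback.Dir, fallback.Stdout = repoPath, w
		return fallback.Run()
	}
	return err
}
```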
@ -139,43 +139,12 @@ func ListPullRequests(ctx *context.APIContext) {
        return
    }

    apiPrs := make([]*api.PullRequest, len(prs))
    // NOTE: load repository first, so that issue.Repo will be filled with pr.BaseRepo
    if err := prs.LoadRepositories(ctx); err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadRepositories", err)
        return
    }
    issueList, err := prs.LoadIssues(ctx)
    apiPrs, err := convert.ToAPIPullRequests(ctx, ctx.Repo.Repository, prs, ctx.Doer)
    if err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadIssues", err)
        ctx.Error(http.StatusInternalServerError, "ToAPIPullRequests", err)
        return
    }

    if err := issueList.LoadLabels(ctx); err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadLabels", err)
        return
    }
    if err := issueList.LoadPosters(ctx); err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadPoster", err)
        return
    }
    if err := issueList.LoadAttachments(ctx); err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadAttachments", err)
        return
    }
    if err := issueList.LoadMilestones(ctx); err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadMilestones", err)
        return
    }
    if err := issueList.LoadAssignees(ctx); err != nil {
        ctx.Error(http.StatusInternalServerError, "LoadAssignees", err)
        return
    }

    for i := range prs {
        apiPrs[i] = convert.ToAPIPullRequest(ctx, prs[i], ctx.Doer)
    }

    ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
    ctx.SetTotalCountHeader(maxResults)
    ctx.JSON(http.StatusOK, &apiPrs)
@ -7,9 +7,11 @@ import (
    "context"
    "fmt"

    git_model "code.gitea.io/gitea/models/git"
    issues_model "code.gitea.io/gitea/models/issues"
    "code.gitea.io/gitea/models/perm"
    access_model "code.gitea.io/gitea/models/perm/access"
    repo_model "code.gitea.io/gitea/models/repo"
    user_model "code.gitea.io/gitea/models/user"
    "code.gitea.io/gitea/modules/cache"
    "code.gitea.io/gitea/modules/git"
@ -259,3 +261,252 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u

    return apiPullRequest
}

func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs issues_model.PullRequestList, doer *user_model.User) ([]*api.PullRequest, error) {
    for _, pr := range prs {
        pr.BaseRepo = baseRepo
        if pr.BaseRepoID == pr.HeadRepoID {
            pr.HeadRepo = baseRepo
        }
    }

    // NOTE: load head repositories
    if err := prs.LoadRepositories(ctx); err != nil {
        return nil, err
    }
    issueList, err := prs.LoadIssues(ctx)
    if err != nil {
        return nil, err
    }

    if err := issueList.LoadLabels(ctx); err != nil {
        return nil, err
    }
    if err := issueList.LoadPosters(ctx); err != nil {
        return nil, err
    }
    if err := issueList.LoadAttachments(ctx); err != nil {
        return nil, err
    }
    if err := issueList.LoadMilestones(ctx); err != nil {
        return nil, err
    }
    if err := issueList.LoadAssignees(ctx); err != nil {
        return nil, err
    }

    reviews, err := prs.LoadReviews(ctx)
    if err != nil {
        return nil, err
    }
    if err = reviews.LoadReviewers(ctx); err != nil {
        return nil, err
    }

    reviewersMap := make(map[int64][]*user_model.User)
    for _, review := range reviews {
        if review.Reviewer != nil {
            reviewersMap[review.IssueID] = append(reviewersMap[review.IssueID], review.Reviewer)
        }
    }

    reviewCounts, err := prs.LoadReviewCommentsCounts(ctx)
    if err != nil {
        return nil, err
    }

    gitRepo, err := gitrepo.OpenRepository(ctx, baseRepo)
    if err != nil {
        return nil, err
    }
    defer gitRepo.Close()

    baseRepoPerm, err := access_model.GetUserRepoPermission(ctx, baseRepo, doer)
    if err != nil {
        log.Error("GetUserRepoPermission[%d]: %v", baseRepo.ID, err)
        baseRepoPerm.AccessMode = perm.AccessModeNone
    }

    apiRepo := ToRepo(ctx, baseRepo, baseRepoPerm)
    baseBranchCache := make(map[string]*git_model.Branch)
    apiPullRequests := make([]*api.PullRequest, 0, len(prs))
    for _, pr := range prs {
        apiIssue := ToAPIIssue(ctx, doer, pr.Issue)

        apiPullRequest := &api.PullRequest{
            ID:             pr.ID,
            URL:            pr.Issue.HTMLURL(),
            Index:          pr.Index,
            Poster:         apiIssue.Poster,
            Title:          apiIssue.Title,
            Body:           apiIssue.Body,
            Labels:         apiIssue.Labels,
            Milestone:      apiIssue.Milestone,
            Assignee:       apiIssue.Assignee,
            Assignees:      apiIssue.Assignees,
            State:          apiIssue.State,
            Draft:          pr.IsWorkInProgress(ctx),
            IsLocked:       apiIssue.IsLocked,
            Comments:       apiIssue.Comments,
            ReviewComments: reviewCounts[pr.IssueID],
            HTMLURL:        pr.Issue.HTMLURL(),
            DiffURL:        pr.Issue.DiffURL(),
            PatchURL:       pr.Issue.PatchURL(),
            HasMerged:      pr.HasMerged,
            MergeBase:      pr.MergeBase,
            Mergeable:      pr.Mergeable(ctx),
            Deadline:       apiIssue.Deadline,
            Created:        pr.Issue.CreatedUnix.AsTimePtr(),
            Updated:        pr.Issue.UpdatedUnix.AsTimePtr(),
            PinOrder:       apiIssue.PinOrder,

            AllowMaintainerEdit: pr.AllowMaintainerEdit,

            Base: &api.PRBranchInfo{
                Name:       pr.BaseBranch,
                Ref:        pr.BaseBranch,
                RepoID:     pr.BaseRepoID,
                Repository: apiRepo,
            },
            Head: &api.PRBranchInfo{
                Name:   pr.HeadBranch,
                Ref:    fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index),
                RepoID: -1,
            },
        }

        pr.RequestedReviewers = reviewersMap[pr.IssueID]
        for _, reviewer := range pr.RequestedReviewers {
            apiPullRequest.RequestedReviewers = append(apiPullRequest.RequestedReviewers, ToUser(ctx, reviewer, nil))
        }

        for _, reviewerTeam := range pr.RequestedReviewersTeams {
            convertedTeam, err := ToTeam(ctx, reviewerTeam, true)
            if err != nil {
                log.Error("LoadRequestedReviewersTeams[%d]: %v", pr.ID, err)
                return nil, err
            }

            apiPullRequest.RequestedReviewersTeams = append(apiPullRequest.RequestedReviewersTeams, convertedTeam)
        }

        if pr.Issue.ClosedUnix != 0 {
            apiPullRequest.Closed = pr.Issue.ClosedUnix.AsTimePtr()
        }

        baseBranch, ok := baseBranchCache[pr.BaseBranch]
        if !ok {
            baseBranch, err = git_model.GetBranch(ctx, baseRepo.ID, pr.BaseBranch)
            if err == nil {
                baseBranchCache[pr.BaseBranch] = baseBranch
            } else if !git_model.IsErrBranchNotExist(err) {
                return nil, err
            }
        }

        if baseBranch != nil {
            apiPullRequest.Base.Sha = baseBranch.CommitID
        }

        if pr.Flow == issues_model.PullRequestFlowAGit {
            apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName())
            if err != nil {
                log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err)
                return nil, err
            }
            apiPullRequest.Head.RepoID = pr.BaseRepoID
            apiPullRequest.Head.Repository = apiPullRequest.Base.Repository
            apiPullRequest.Head.Name = ""
        }

        var headGitRepo *git.Repository
        if pr.HeadRepo != nil && pr.Flow == issues_model.PullRequestFlowGithub {
            if pr.HeadRepoID == pr.BaseRepoID {
                apiPullRequest.Head.RepoID = pr.HeadRepo.ID
                apiPullRequest.Head.Repository = apiRepo
                headGitRepo = gitRepo
            } else {
                p, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
                if err != nil {
                    log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err)
                    p.AccessMode = perm.AccessModeNone
                }

                apiPullRequest.Head.RepoID = pr.HeadRepo.ID
                apiPullRequest.Head.Repository = ToRepo(ctx, pr.HeadRepo, p)

                headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
                if err != nil {
                    log.Error("OpenRepository[%s]: %v", pr.HeadRepo.RepoPath(), err)
                    return nil, err
                }
                defer headGitRepo.Close()
            }

            headBranch, err := headGitRepo.GetBranch(pr.HeadBranch)
            if err != nil && !git.IsErrBranchNotExist(err) {
                log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
                return nil, err
            }

            // Outer scope variables to be used in diff calculation
            var (
                startCommitID string
                endCommitID   string
            )

            if git.IsErrBranchNotExist(err) {
                headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref)
                if err != nil && !git.IsErrNotExist(err) {
                    log.Error("GetCommit[%s]: %v", pr.HeadBranch, err)
                    return nil, err
                }
                if err == nil {
                    apiPullRequest.Head.Sha = headCommitID
                    endCommitID = headCommitID
                }
            } else {
                commit, err := headBranch.GetCommit()
                if err != nil && !git.IsErrNotExist(err) {
                    log.Error("GetCommit[%s]: %v", headBranch.Name, err)
                    return nil, err
                }
                if err == nil {
                    apiPullRequest.Head.Ref = pr.HeadBranch
                    apiPullRequest.Head.Sha = commit.ID.String()
                    endCommitID = commit.ID.String()
                }
            }

            // Calculate diff
            startCommitID = pr.MergeBase

            apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions, err = gitRepo.GetDiffShortStat(startCommitID, endCommitID)
            if err != nil {
                log.Error("GetDiffShortStat: %v", err)
            }
        }

        if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 {
            refs, err := gitRepo.GetRefsFiltered(apiPullRequest.Head.Ref)
            if err != nil {
                log.Error("GetRefsFiltered[%s]: %v", apiPullRequest.Head.Ref, err)
                return nil, err
            } else if len(refs) == 0 {
                log.Error("unable to resolve PR head ref")
            } else {
                apiPullRequest.Head.Sha = refs[0].Object.String()
            }
        }

        if pr.HasMerged {
            apiPullRequest.Merged = pr.MergedUnix.AsTimePtr()
            apiPullRequest.MergedCommitID = &pr.MergedCommitID
            apiPullRequest.MergedBy = ToUser(ctx, pr.Merger, nil)
        }

        apiPullRequests = append(apiPullRequests, apiPullRequest)
    }

    return apiPullRequests, nil
}
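One detail worth noting in `ToAPIPullRequests`: the head ref is initialised to the PR's tracking ref rather than the branch name, using `git.PullPrefix` (Gitea's `refs/pull/` prefix). This is also why the commit message says the fixture data had to be made consistent with `pull/xxx/head`. A small illustrative snippet (the local `pullPrefix` constant only mirrors `git.PullPrefix` to make the value concrete):

```go
package example

import "fmt"

// pullPrefix mirrors git.PullPrefix ("refs/pull/") for illustration only.
const pullPrefix = "refs/pull/"

// prHeadRef shows what Head.Ref above evaluates to for a given PR index.
func prHeadRef(index int64) string {
	return fmt.Sprintf("%s%d/head", pullPrefix, index) // e.g. "refs/pull/3/head"
}
```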
@ -1 +1 @@
5f22f7d0d95d614d25a5b68592adb345a4b5c7fd
985f0301dba5e7b34be866819cd15ad3d8f508ee
@ -33,8 +33,8 @@ func TestAPIPullCommits(t *testing.T) {
        return
    }

    assert.Equal(t, "5f22f7d0d95d614d25a5b68592adb345a4b5c7fd", commits[0].SHA)
    assert.Equal(t, "4a357436d925b5c974181ff12a994538ddc5a269", commits[1].SHA)
    assert.Equal(t, "985f0301dba5e7b34be866819cd15ad3d8f508ee", commits[0].SHA)
    assert.Equal(t, "5c050d3b6d2db231ab1f64e324f1b6b9a0b181c2", commits[1].SHA)

    assert.NotEmpty(t, commits[0].Files)
    assert.NotEmpty(t, commits[1].Files)
@ -4,6 +4,8 @@
package integration

import (
    "bytes"
    "context"
    "fmt"
    "io"
    "net/http"
@ -19,6 +21,7 @@ import (
    "code.gitea.io/gitea/modules/setting"
    api "code.gitea.io/gitea/modules/structs"
    "code.gitea.io/gitea/services/forms"
    "code.gitea.io/gitea/services/gitdiff"
    issue_service "code.gitea.io/gitea/services/issue"
    "code.gitea.io/gitea/tests"

@ -41,25 +44,99 @@ func TestAPIViewPulls(t *testing.T) {
    expectedLen := unittest.GetCount(t, &issues_model.Issue{RepoID: repo.ID}, unittest.Cond("is_pull = ?", true))
    assert.Len(t, pulls, expectedLen)

    assert.Len(t, pulls, 3)
    pull := pulls[0]
    assert.EqualValues(t, 1, pull.Poster.ID)
    assert.Len(t, pull.RequestedReviewers, 2)
    assert.Len(t, pull.RequestedReviewersTeams, 0)
    assert.EqualValues(t, 5, pull.RequestedReviewers[0].ID)
    assert.EqualValues(t, 6, pull.RequestedReviewers[1].ID)
    assert.EqualValues(t, 1, pull.ChangedFiles)

    if assert.EqualValues(t, 5, pull.ID) {
        resp = ctx.Session.MakeRequest(t, NewRequest(t, "GET", pull.DiffURL), http.StatusOK)
        _, err := io.ReadAll(resp.Body)
        bs, err := io.ReadAll(resp.Body)
        assert.NoError(t, err)
        // TODO: use diff to generate stats to test against
        patch, err := gitdiff.ParsePatch(context.Background(), 1000, 5000, 10, bytes.NewReader(bs), "")
        assert.NoError(t, err)
        if assert.Len(t, patch.Files, pull.ChangedFiles) {
            assert.Equal(t, "File-WoW", patch.Files[0].Name)
            // FIXME: The old name should be empty if it's a file add type
            assert.Equal(t, "File-WoW", patch.Files[0].OldName)
            assert.EqualValues(t, pull.Additions, patch.Files[0].Addition)
            assert.EqualValues(t, pull.Deletions, patch.Files[0].Deletion)
            assert.Equal(t, gitdiff.DiffFileAdd, patch.Files[0].Type)
        }

        t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
            doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
                if assert.Len(t, files, 1) {
                if assert.Len(t, files, pull.ChangedFiles) {
                    assert.Equal(t, "File-WoW", files[0].Filename)
                    assert.Empty(t, files[0].PreviousFilename)
                    assert.EqualValues(t, 1, files[0].Additions)
                    assert.EqualValues(t, 1, files[0].Changes)
                    assert.EqualValues(t, 0, files[0].Deletions)
                    assert.EqualValues(t, pull.Additions, files[0].Additions)
                    assert.EqualValues(t, pull.Deletions, files[0].Deletions)
                    assert.Equal(t, "added", files[0].Status)
                }
            }))
    }

    pull = pulls[1]
    assert.EqualValues(t, 1, pull.Poster.ID)
    assert.Len(t, pull.RequestedReviewers, 4)
    assert.Len(t, pull.RequestedReviewersTeams, 0)
    assert.EqualValues(t, 3, pull.RequestedReviewers[0].ID)
    assert.EqualValues(t, 4, pull.RequestedReviewers[1].ID)
    assert.EqualValues(t, 2, pull.RequestedReviewers[2].ID)
    assert.EqualValues(t, 5, pull.RequestedReviewers[3].ID)
    assert.EqualValues(t, 1, pull.ChangedFiles)

    if assert.EqualValues(t, 2, pull.ID) {
        resp = ctx.Session.MakeRequest(t, NewRequest(t, "GET", pull.DiffURL), http.StatusOK)
        bs, err := io.ReadAll(resp.Body)
        assert.NoError(t, err)
        patch, err := gitdiff.ParsePatch(context.Background(), 1000, 5000, 10, bytes.NewReader(bs), "")
        assert.NoError(t, err)
        if assert.Len(t, patch.Files, pull.ChangedFiles) {
            assert.Equal(t, "README.md", patch.Files[0].Name)
            assert.Equal(t, "README.md", patch.Files[0].OldName)
            assert.EqualValues(t, pull.Additions, patch.Files[0].Addition)
            assert.EqualValues(t, pull.Deletions, patch.Files[0].Deletion)
            assert.Equal(t, gitdiff.DiffFileChange, patch.Files[0].Type)
        }

        t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
            doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
                if assert.Len(t, files, pull.ChangedFiles) {
                    assert.Equal(t, "README.md", files[0].Filename)
                    // FIXME: The PreviousFilename name should be the same as Filename if it's a file change
                    assert.Equal(t, "", files[0].PreviousFilename)
                    assert.EqualValues(t, pull.Additions, files[0].Additions)
                    assert.EqualValues(t, pull.Deletions, files[0].Deletions)
                    assert.Equal(t, "changed", files[0].Status)
                }
            }))
    }

    pull = pulls[2]
    assert.EqualValues(t, 1, pull.Poster.ID)
    assert.Len(t, pull.RequestedReviewers, 1)
    assert.Len(t, pull.RequestedReviewersTeams, 0)
    assert.EqualValues(t, 1, pull.RequestedReviewers[0].ID)
    assert.EqualValues(t, 0, pull.ChangedFiles)

    if assert.EqualValues(t, 1, pull.ID) {
        resp = ctx.Session.MakeRequest(t, NewRequest(t, "GET", pull.DiffURL), http.StatusOK)
        bs, err := io.ReadAll(resp.Body)
        assert.NoError(t, err)
        patch, err := gitdiff.ParsePatch(context.Background(), 1000, 5000, 10, bytes.NewReader(bs), "")
        assert.NoError(t, err)
        assert.EqualValues(t, pull.ChangedFiles, patch.NumFiles)

        t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
            doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
                assert.Len(t, files, pull.ChangedFiles)
            }))
    }
}

func TestAPIViewPullsByBaseHead(t *testing.T) {
@ -26,8 +26,8 @@ func TestListPullCommits(t *testing.T) {
        DecodeJSON(t, resp, &pullCommitList)

        if assert.Len(t, pullCommitList.Commits, 2) {
            assert.Equal(t, "5f22f7d0d95d614d25a5b68592adb345a4b5c7fd", pullCommitList.Commits[0].ID)
            assert.Equal(t, "4a357436d925b5c974181ff12a994538ddc5a269", pullCommitList.Commits[1].ID)
            assert.Equal(t, "985f0301dba5e7b34be866819cd15ad3d8f508ee", pullCommitList.Commits[0].ID)
            assert.Equal(t, "5c050d3b6d2db231ab1f64e324f1b6b9a0b181c2", pullCommitList.Commits[1].ID)
        }
        assert.Equal(t, "4a357436d925b5c974181ff12a994538ddc5a269", pullCommitList.LastReviewCommitSha)
    })