author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-10-11 10:27:00 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-10-11 10:27:00 +0000
commit    65aa53fc52ff15efe54df4147564828d535837f8 (patch)
tree      31c51dad04fdcca80e6d3043c8bd49d2f1a51f83 /modules/repository
parent    Initial commit. (diff)
Adding upstream version 8.0.3. (HEAD, upstream/8.0.3, upstream, debian)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'modules/repository')
-rw-r--r--   modules/repository/branch.go              145
-rw-r--r--   modules/repository/branch_test.go           32
-rw-r--r--   modules/repository/collaborator.go          44
-rw-r--r--   modules/repository/collaborator_test.go    308
-rw-r--r--   modules/repository/commits.go              173
-rw-r--r--   modules/repository/commits_test.go         210
-rw-r--r--   modules/repository/create.go               297
-rw-r--r--   modules/repository/create_test.go           46
-rw-r--r--   modules/repository/delete.go                33
-rw-r--r--   modules/repository/env.go                   87
-rw-r--r--   modules/repository/fork.go                  32
-rw-r--r--   modules/repository/hooks.go                233
-rw-r--r--   modules/repository/init.go                 182
-rw-r--r--   modules/repository/init_test.go             30
-rw-r--r--   modules/repository/license.go              113
-rw-r--r--   modules/repository/license_test.go         181
-rw-r--r--   modules/repository/main_test.go             16
-rw-r--r--   modules/repository/push.go                  70
-rw-r--r--   modules/repository/repo.go                 383
-rw-r--r--   modules/repository/repo_test.go             76
-rw-r--r--   modules/repository/temp.go                  45
21 files changed, 2736 insertions(+), 0 deletions(-)
diff --git a/modules/repository/branch.go b/modules/repository/branch.go
new file mode 100644
index 00000000..2bf9930f
--- /dev/null
+++ b/modules/repository/branch.go
@@ -0,0 +1,145 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// SyncRepoBranches synchronizes the branch table with the repository's branches
+func SyncRepoBranches(ctx context.Context, repoID, doerID int64) (int64, error) {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ return 0, err
+ }
+
+ log.Debug("SyncRepoBranches: in Repo[%d:%s]", repo.ID, repo.FullName())
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ log.Error("OpenRepository[%s]: %w", repo.FullName(), err)
+ return 0, err
+ }
+ defer gitRepo.Close()
+
+ return SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doerID)
+}
+
+func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, doerID int64) (int64, error) {
+ objFmt, err := gitRepo.GetObjectFormat()
+ if err != nil {
+ return 0, fmt.Errorf("GetObjectFormat: %w", err)
+ }
+ _, err = db.GetEngine(ctx).ID(repo.ID).Update(&repo_model.Repository{ObjectFormatName: objFmt.Name()})
+ if err != nil {
+ return 0, fmt.Errorf("UpdateRepository: %w", err)
+ }
+ repo.ObjectFormatName = objFmt.Name() // keep consistent with db
+
+ allBranches := container.Set[string]{}
+ {
+ branches, _, err := gitRepo.GetBranchNames(0, 0)
+ if err != nil {
+ return 0, err
+ }
+ log.Trace("SyncRepoBranches[%s]: branches[%d]: %v", repo.FullName(), len(branches), branches)
+ for _, branch := range branches {
+ allBranches.Add(branch)
+ }
+ }
+
+ dbBranches := make(map[string]*git_model.Branch)
+ {
+ branches, err := db.Find[git_model.Branch](ctx, git_model.FindBranchOptions{
+ ListOptions: db.ListOptionsAll,
+ RepoID: repo.ID,
+ })
+ if err != nil {
+ return 0, err
+ }
+ for _, branch := range branches {
+ dbBranches[branch.Name] = branch
+ }
+ }
+
+ var toAdd []*git_model.Branch
+ var toUpdate []*git_model.Branch
+ var toRemove []int64
+ for branch := range allBranches {
+ dbb := dbBranches[branch]
+ commit, err := gitRepo.GetBranchCommit(branch)
+ if err != nil {
+ return 0, err
+ }
+ if dbb == nil {
+ toAdd = append(toAdd, &git_model.Branch{
+ RepoID: repo.ID,
+ Name: branch,
+ CommitID: commit.ID.String(),
+ CommitMessage: commit.Summary(),
+ PusherID: doerID,
+ CommitTime: timeutil.TimeStamp(commit.Committer.When.Unix()),
+ })
+ } else if commit.ID.String() != dbb.CommitID {
+ toUpdate = append(toUpdate, &git_model.Branch{
+ ID: dbb.ID,
+ RepoID: repo.ID,
+ Name: branch,
+ CommitID: commit.ID.String(),
+ CommitMessage: commit.Summary(),
+ PusherID: doerID,
+ CommitTime: timeutil.TimeStamp(commit.Committer.When.Unix()),
+ })
+ }
+ }
+
+ for _, dbBranch := range dbBranches {
+ if !allBranches.Contains(dbBranch.Name) && !dbBranch.IsDeleted {
+ toRemove = append(toRemove, dbBranch.ID)
+ }
+ }
+
+ log.Trace("SyncRepoBranches[%s]: toAdd: %v, toUpdate: %v, toRemove: %v", repo.FullName(), toAdd, toUpdate, toRemove)
+
+ if len(toAdd) == 0 && len(toRemove) == 0 && len(toUpdate) == 0 {
+ return int64(len(allBranches)), nil
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if len(toAdd) > 0 {
+ if err := git_model.AddBranches(ctx, toAdd); err != nil {
+ return err
+ }
+ }
+
+ for _, b := range toUpdate {
+ if _, err := db.GetEngine(ctx).ID(b.ID).
+ Cols("commit_id, commit_message, pusher_id, commit_time, is_deleted").
+ Update(b); err != nil {
+ return err
+ }
+ }
+
+ if len(toRemove) > 0 {
+ if err := git_model.DeleteBranches(ctx, repo.ID, doerID, toRemove); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }); err != nil {
+ return 0, err
+ }
+ return int64(len(allBranches)), nil
+}
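A minimal usage sketch for the sync above, written as if it lived in this package (it is not part of the upstream diff); the wrapper name and the Trace logging are illustrative assumptions:

func resyncBranches(ctx context.Context, repo *repo_model.Repository, doerID int64) error {
	// Re-read the branches from git and reconcile the branch table,
	// attributing newly discovered branches to doerID as the pusher.
	count, err := SyncRepoBranches(ctx, repo.ID, doerID)
	if err != nil {
		return fmt.Errorf("SyncRepoBranches: %w", err)
	}
	log.Trace("resyncBranches[%s]: %d branches", repo.FullName(), count)
	return nil
}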
diff --git a/modules/repository/branch_test.go b/modules/repository/branch_test.go
new file mode 100644
index 00000000..b98618a1
--- /dev/null
+++ b/modules/repository/branch_test.go
@@ -0,0 +1,32 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestSyncRepoBranches(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ _, err := db.GetEngine(db.DefaultContext).ID(1).Update(&repo_model.Repository{ObjectFormatName: "bad-fmt"})
+ require.NoError(t, db.TruncateBeans(db.DefaultContext, &git_model.Branch{}))
+ require.NoError(t, err)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ assert.Equal(t, "bad-fmt", repo.ObjectFormatName)
+ _, err = SyncRepoBranches(db.DefaultContext, 1, 0)
+ require.NoError(t, err)
+ repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ assert.Equal(t, "sha1", repo.ObjectFormatName)
+ branch, err := git_model.GetBranch(db.DefaultContext, 1, "master")
+ require.NoError(t, err)
+ assert.EqualValues(t, "master", branch.Name)
+}
diff --git a/modules/repository/collaborator.go b/modules/repository/collaborator.go
new file mode 100644
index 00000000..17915d34
--- /dev/null
+++ b/modules/repository/collaborator.go
@@ -0,0 +1,44 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "xorm.io/builder"
+)
+
+func AddCollaborator(ctx context.Context, repo *repo_model.Repository, u *user_model.User) error {
+ if user_model.IsBlocked(ctx, repo.OwnerID, u.ID) || user_model.IsBlocked(ctx, u.ID, repo.OwnerID) {
+ return user_model.ErrBlockedByUser
+ }
+
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ has, err := db.Exist[repo_model.Collaboration](ctx, builder.Eq{
+ "repo_id": repo.ID,
+ "user_id": u.ID,
+ })
+ if err != nil {
+ return err
+ } else if has {
+ return nil
+ }
+
+ if err = db.Insert(ctx, &repo_model.Collaboration{
+ RepoID: repo.ID,
+ UserID: u.ID,
+ Mode: perm.AccessModeWrite,
+ }); err != nil {
+ return err
+ }
+
+ return access_model.RecalculateUserAccess(ctx, repo, u.ID)
+ })
+}
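A hedged sketch of how a caller in this package might invoke AddCollaborator and surface the blocked-user case as a user-facing condition; the wrapper and its error text are assumptions, and errors/fmt would need to be added to the imports:

func addCollaboratorChecked(ctx context.Context, repo *repo_model.Repository, u *user_model.User) error {
	if err := AddCollaborator(ctx, repo, u); err != nil {
		if errors.Is(err, user_model.ErrBlockedByUser) {
			// one side has blocked the other; report it instead of failing hard
			return fmt.Errorf("cannot add %s as a collaborator of %s: %w", u.Name, repo.FullName(), err)
		}
		return err
	}
	// the collaborator now has the default write access (perm.AccessModeWrite)
	return nil
}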
diff --git a/modules/repository/collaborator_test.go b/modules/repository/collaborator_test.go
new file mode 100644
index 00000000..3844197b
--- /dev/null
+++ b/modules/repository/collaborator_test.go
@@ -0,0 +1,308 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ perm_model "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRepository_AddCollaborator(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testSuccess := func(repoID, userID int64) {
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ require.NoError(t, repo.LoadOwner(db.DefaultContext))
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
+ require.NoError(t, AddCollaborator(db.DefaultContext, repo, user))
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repoID}, &user_model.User{ID: userID})
+ }
+ testSuccess(1, 4)
+ testSuccess(1, 4)
+ testSuccess(3, 4)
+}
+
+func TestRepository_AddCollaborator_IsBlocked(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testSuccess := func(repoID, userID int64) {
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ require.NoError(t, repo.LoadOwner(db.DefaultContext))
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
+
+ // Owner blocked user.
+ unittest.AssertSuccessfulInsert(t, &user_model.BlockedUser{UserID: repo.OwnerID, BlockID: userID})
+ require.ErrorIs(t, AddCollaborator(db.DefaultContext, repo, user), user_model.ErrBlockedByUser)
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repoID}, &user_model.User{ID: userID})
+ _, err := db.DeleteByBean(db.DefaultContext, &user_model.BlockedUser{UserID: repo.OwnerID, BlockID: userID})
+ require.NoError(t, err)
+
+ // User has owner blocked.
+ unittest.AssertSuccessfulInsert(t, &user_model.BlockedUser{UserID: userID, BlockID: repo.OwnerID})
+ require.ErrorIs(t, AddCollaborator(db.DefaultContext, repo, user), user_model.ErrBlockedByUser)
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repoID}, &user_model.User{ID: userID})
+ }
+ // Ensure idempotency (public repository).
+ testSuccess(1, 4)
+ testSuccess(1, 4)
+ // Add collaborator to private repository.
+ testSuccess(3, 4)
+}
+
+func TestRepoPermissionPublicNonOrgRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // public non-organization repo
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 4})
+ require.NoError(t, repo.LoadUnits(db.DefaultContext))
+
+ // plain user
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ perm, err := access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // change to collaborator
+ require.NoError(t, AddCollaborator(db.DefaultContext, repo, user))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // collaborator
+ collaborator := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, collaborator)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // owner
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, owner)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // admin
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, admin)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+}
+
+func TestRepoPermissionPrivateNonOrgRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // private non-organization repo
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ require.NoError(t, repo.LoadUnits(db.DefaultContext))
+
+ // plain user
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+ perm, err := access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.False(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // change to collaborator with default write access
+ require.NoError(t, AddCollaborator(db.DefaultContext, repo, user))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ require.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, user.ID, perm_model.AccessModeRead))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // owner
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, owner)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // admin
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, admin)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+}
+
+func TestRepoPermissionPublicOrgRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // public organization repo
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 32})
+ require.NoError(t, repo.LoadUnits(db.DefaultContext))
+
+ // plain user
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ perm, err := access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // change to collaborator with default write access
+ require.NoError(t, AddCollaborator(db.DefaultContext, repo, user))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ require.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, user.ID, perm_model.AccessModeRead))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // org member team owner
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, owner)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // org member team tester
+ member := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, member)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ }
+ assert.True(t, perm.CanWrite(unit.TypeIssues))
+ assert.False(t, perm.CanWrite(unit.TypeCode))
+
+ // admin
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, admin)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+}
+
+func TestRepoPermissionPrivateOrgRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // private organization repo
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 24})
+ require.NoError(t, repo.LoadUnits(db.DefaultContext))
+
+ // plain user
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ perm, err := access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.False(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // change to collaborator with default write access
+ require.NoError(t, AddCollaborator(db.DefaultContext, repo, user))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ require.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, user.ID, perm_model.AccessModeRead))
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, user)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.False(t, perm.CanWrite(unit.Type))
+ }
+
+ // org member team owner
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, owner)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // update team information and then check permission
+ team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 5})
+ err = organization.UpdateTeamUnits(db.DefaultContext, team, nil)
+ require.NoError(t, err)
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, owner)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+
+ // org member team tester
+ tester := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, tester)
+ require.NoError(t, err)
+ assert.True(t, perm.CanWrite(unit.TypeIssues))
+ assert.False(t, perm.CanWrite(unit.TypeCode))
+ assert.False(t, perm.CanRead(unit.TypeCode))
+
+ // org member team reviewer
+ reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 20})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, reviewer)
+ require.NoError(t, err)
+ assert.False(t, perm.CanRead(unit.TypeIssues))
+ assert.False(t, perm.CanWrite(unit.TypeCode))
+ assert.True(t, perm.CanRead(unit.TypeCode))
+
+ // admin
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ perm, err = access_model.GetUserRepoPermission(db.DefaultContext, repo, admin)
+ require.NoError(t, err)
+ for _, unit := range repo.Units {
+ assert.True(t, perm.CanRead(unit.Type))
+ assert.True(t, perm.CanWrite(unit.Type))
+ }
+}
diff --git a/modules/repository/commits.go b/modules/repository/commits.go
new file mode 100644
index 00000000..ede60429
--- /dev/null
+++ b/modules/repository/commits.go
@@ -0,0 +1,173 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "time"
+
+ "code.gitea.io/gitea/models/avatars"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// PushCommit represents a commit in a push operation.
+type PushCommit struct {
+ Sha1 string
+ Message string
+ AuthorEmail string
+ AuthorName string
+ CommitterEmail string
+ CommitterName string
+ Timestamp time.Time
+}
+
+// PushCommits represents a list of commits in a push operation.
+type PushCommits struct {
+ Commits []*PushCommit
+ HeadCommit *PushCommit
+ CompareURL string
+ Len int
+}
+
+// NewPushCommits creates a new PushCommits object.
+func NewPushCommits() *PushCommits {
+ return &PushCommits{}
+}
+
+// toAPIPayloadCommit converts a single PushCommit to an api.PayloadCommit object.
+func (pc *PushCommits) toAPIPayloadCommit(ctx context.Context, emailUsers map[string]*user_model.User, repoPath, repoLink string, commit *PushCommit) (*api.PayloadCommit, error) {
+ var err error
+ authorUsername := ""
+ author, ok := emailUsers[commit.AuthorEmail]
+ if !ok {
+ author, err = user_model.GetUserByEmail(ctx, commit.AuthorEmail)
+ if err == nil {
+ authorUsername = author.Name
+ emailUsers[commit.AuthorEmail] = author
+ }
+ } else {
+ authorUsername = author.Name
+ }
+
+ committerUsername := ""
+ committer, ok := emailUsers[commit.CommitterEmail]
+ if !ok {
+ committer, err = user_model.GetUserByEmail(ctx, commit.CommitterEmail)
+ if err == nil {
+ // TODO: check errors other than email not found.
+ committerUsername = committer.Name
+ emailUsers[commit.CommitterEmail] = committer
+ }
+ } else {
+ committerUsername = committer.Name
+ }
+
+ fileStatus, err := git.GetCommitFileStatus(ctx, repoPath, commit.Sha1)
+ if err != nil {
+ return nil, fmt.Errorf("FileStatus [commit_sha1: %s]: %w", commit.Sha1, err)
+ }
+
+ return &api.PayloadCommit{
+ ID: commit.Sha1,
+ Message: commit.Message,
+ URL: fmt.Sprintf("%s/commit/%s", repoLink, url.PathEscape(commit.Sha1)),
+ Author: &api.PayloadUser{
+ Name: commit.AuthorName,
+ Email: commit.AuthorEmail,
+ UserName: authorUsername,
+ },
+ Committer: &api.PayloadUser{
+ Name: commit.CommitterName,
+ Email: commit.CommitterEmail,
+ UserName: committerUsername,
+ },
+ Added: fileStatus.Added,
+ Removed: fileStatus.Removed,
+ Modified: fileStatus.Modified,
+ Timestamp: commit.Timestamp,
+ }, nil
+}
+
+// ToAPIPayloadCommits converts a PushCommits object to api.PayloadCommit format.
+// It returns all converted commits, the converted head commit if one was provided, or an error.
+func (pc *PushCommits) ToAPIPayloadCommits(ctx context.Context, repoPath, repoLink string) ([]*api.PayloadCommit, *api.PayloadCommit, error) {
+ commits := make([]*api.PayloadCommit, len(pc.Commits))
+ var headCommit *api.PayloadCommit
+
+ emailUsers := make(map[string]*user_model.User)
+
+ for i, commit := range pc.Commits {
+ apiCommit, err := pc.toAPIPayloadCommit(ctx, emailUsers, repoPath, repoLink, commit)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ commits[i] = apiCommit
+ if pc.HeadCommit != nil && pc.HeadCommit.Sha1 == commits[i].ID {
+ headCommit = apiCommit
+ }
+ }
+ if pc.HeadCommit != nil && headCommit == nil {
+ var err error
+ headCommit, err = pc.toAPIPayloadCommit(ctx, emailUsers, repoPath, repoLink, pc.HeadCommit)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+ return commits, headCommit, nil
+}
+
+// AvatarLink tries to match a user in the database by e-mail
+// in order to show their custom avatar, and falls back to a generated avatar link.
+func (pc *PushCommits) AvatarLink(ctx context.Context, email string) string {
+ size := avatars.DefaultAvatarPixelSize * setting.Avatar.RenderedSizeFactor
+
+ v, _ := cache.GetWithContextCache(ctx, "push_commits", email, func() (string, error) {
+ u, err := user_model.GetUserByEmail(ctx, email)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByEmail: %v", err)
+ return "", err
+ }
+ return avatars.GenerateEmailAvatarFastLink(ctx, email, size), nil
+ }
+ return u.AvatarLinkWithSize(ctx, size), nil
+ })
+
+ return v
+}
+
+// CommitToPushCommit transforms a git.Commit to PushCommit type.
+func CommitToPushCommit(commit *git.Commit) *PushCommit {
+ return &PushCommit{
+ Sha1: commit.ID.String(),
+ Message: commit.Message(),
+ AuthorEmail: commit.Author.Email,
+ AuthorName: commit.Author.Name,
+ CommitterEmail: commit.Committer.Email,
+ CommitterName: commit.Committer.Name,
+ Timestamp: commit.Author.When,
+ }
+}
+
+// GitToPushCommits transforms a list of git.Commits to PushCommits type.
+func GitToPushCommits(gitCommits []*git.Commit) *PushCommits {
+ commits := make([]*PushCommit, 0, len(gitCommits))
+ for _, commit := range gitCommits {
+ commits = append(commits, CommitToPushCommit(commit))
+ }
+ return &PushCommits{
+ Commits: commits,
+ HeadCommit: nil,
+ CompareURL: "",
+ Len: len(commits),
+ }
+}
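A rough sketch, in the style of this package, of the conversion pipeline these helpers form (not part of the upstream diff); picking the last element of the slice as the head commit is an assumption made for illustration:

func pushCommitsPayload(ctx context.Context, repoPath, repoLink string, gitCommits []*git.Commit) ([]*api.PayloadCommit, error) {
	pc := GitToPushCommits(gitCommits)
	if len(gitCommits) > 0 {
		// treat the newest commit in the list as the head of the push
		pc.HeadCommit = CommitToPushCommit(gitCommits[len(gitCommits)-1])
	}
	commits, _, err := pc.ToAPIPayloadCommits(ctx, repoPath, repoLink)
	if err != nil {
		return nil, fmt.Errorf("ToAPIPayloadCommits: %w", err)
	}
	return commits, nil
}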
diff --git a/modules/repository/commits_test.go b/modules/repository/commits_test.go
new file mode 100644
index 00000000..82841b32
--- /dev/null
+++ b/modules/repository/commits_test.go
@@ -0,0 +1,210 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "crypto/md5"
+ "fmt"
+ "strconv"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPushCommits_ToAPIPayloadCommits(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pushCommits := NewPushCommits()
+ pushCommits.Commits = []*PushCommit{
+ {
+ Sha1: "69554a6",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User2",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User2",
+ Message: "not signed commit",
+ },
+ {
+ Sha1: "27566bd",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User2",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User2",
+ Message: "good signed commit (with not yet validated email)",
+ },
+ {
+ Sha1: "5099b81",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User2",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User2",
+ Message: "good signed commit",
+ },
+ }
+ pushCommits.HeadCommit = &PushCommit{Sha1: "69554a6"}
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 16})
+ payloadCommits, headCommit, err := pushCommits.ToAPIPayloadCommits(git.DefaultContext, repo.RepoPath(), "/user2/repo16")
+ require.NoError(t, err)
+ assert.Len(t, payloadCommits, 3)
+ assert.NotNil(t, headCommit)
+
+ assert.Equal(t, "69554a6", payloadCommits[0].ID)
+ assert.Equal(t, "not signed commit", payloadCommits[0].Message)
+ assert.Equal(t, "/user2/repo16/commit/69554a6", payloadCommits[0].URL)
+ assert.Equal(t, "User2", payloadCommits[0].Committer.Name)
+ assert.Equal(t, "user2", payloadCommits[0].Committer.UserName)
+ assert.Equal(t, "User2", payloadCommits[0].Author.Name)
+ assert.Equal(t, "user2", payloadCommits[0].Author.UserName)
+ assert.EqualValues(t, []string{}, payloadCommits[0].Added)
+ assert.EqualValues(t, []string{}, payloadCommits[0].Removed)
+ assert.EqualValues(t, []string{"readme.md"}, payloadCommits[0].Modified)
+
+ assert.Equal(t, "27566bd", payloadCommits[1].ID)
+ assert.Equal(t, "good signed commit (with not yet validated email)", payloadCommits[1].Message)
+ assert.Equal(t, "/user2/repo16/commit/27566bd", payloadCommits[1].URL)
+ assert.Equal(t, "User2", payloadCommits[1].Committer.Name)
+ assert.Equal(t, "user2", payloadCommits[1].Committer.UserName)
+ assert.Equal(t, "User2", payloadCommits[1].Author.Name)
+ assert.Equal(t, "user2", payloadCommits[1].Author.UserName)
+ assert.EqualValues(t, []string{}, payloadCommits[1].Added)
+ assert.EqualValues(t, []string{}, payloadCommits[1].Removed)
+ assert.EqualValues(t, []string{"readme.md"}, payloadCommits[1].Modified)
+
+ assert.Equal(t, "5099b81", payloadCommits[2].ID)
+ assert.Equal(t, "good signed commit", payloadCommits[2].Message)
+ assert.Equal(t, "/user2/repo16/commit/5099b81", payloadCommits[2].URL)
+ assert.Equal(t, "User2", payloadCommits[2].Committer.Name)
+ assert.Equal(t, "user2", payloadCommits[2].Committer.UserName)
+ assert.Equal(t, "User2", payloadCommits[2].Author.Name)
+ assert.Equal(t, "user2", payloadCommits[2].Author.UserName)
+ assert.EqualValues(t, []string{"readme.md"}, payloadCommits[2].Added)
+ assert.EqualValues(t, []string{}, payloadCommits[2].Removed)
+ assert.EqualValues(t, []string{}, payloadCommits[2].Modified)
+
+ assert.Equal(t, "69554a6", headCommit.ID)
+ assert.Equal(t, "not signed commit", headCommit.Message)
+ assert.Equal(t, "/user2/repo16/commit/69554a6", headCommit.URL)
+ assert.Equal(t, "User2", headCommit.Committer.Name)
+ assert.Equal(t, "user2", headCommit.Committer.UserName)
+ assert.Equal(t, "User2", headCommit.Author.Name)
+ assert.Equal(t, "user2", headCommit.Author.UserName)
+ assert.EqualValues(t, []string{}, headCommit.Added)
+ assert.EqualValues(t, []string{}, headCommit.Removed)
+ assert.EqualValues(t, []string{"readme.md"}, headCommit.Modified)
+}
+
+func TestPushCommits_AvatarLink(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pushCommits := NewPushCommits()
+ pushCommits.Commits = []*PushCommit{
+ {
+ Sha1: "abcdef1",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user4@example.com",
+ AuthorName: "User Four",
+ Message: "message1",
+ },
+ {
+ Sha1: "abcdef2",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "message2",
+ },
+ }
+
+ setting.GravatarSource = "https://secure.gravatar.com/avatar"
+ setting.OfflineMode = true
+
+ assert.Equal(t,
+ "/avatars/avatar2?size="+strconv.Itoa(28*setting.Avatar.RenderedSizeFactor),
+ pushCommits.AvatarLink(db.DefaultContext, "user2@example.com"))
+
+ assert.Equal(t,
+ fmt.Sprintf("https://secure.gravatar.com/avatar/%x?d=identicon&s=%d", md5.Sum([]byte("nonexistent@example.com")), 28*setting.Avatar.RenderedSizeFactor),
+ pushCommits.AvatarLink(db.DefaultContext, "nonexistent@example.com"))
+}
+
+func TestCommitToPushCommit(t *testing.T) {
+ now := time.Now()
+ sig := &git.Signature{
+ Email: "example@example.com",
+ Name: "John Doe",
+ When: now,
+ }
+ const hexString = "0123456789abcdef0123456789abcdef01234567"
+ sha1, err := git.NewIDFromString(hexString)
+ require.NoError(t, err)
+ pushCommit := CommitToPushCommit(&git.Commit{
+ ID: sha1,
+ Author: sig,
+ Committer: sig,
+ CommitMessage: "Commit Message",
+ })
+ assert.Equal(t, hexString, pushCommit.Sha1)
+ assert.Equal(t, "Commit Message", pushCommit.Message)
+ assert.Equal(t, "example@example.com", pushCommit.AuthorEmail)
+ assert.Equal(t, "John Doe", pushCommit.AuthorName)
+ assert.Equal(t, "example@example.com", pushCommit.CommitterEmail)
+ assert.Equal(t, "John Doe", pushCommit.CommitterName)
+ assert.Equal(t, now, pushCommit.Timestamp)
+}
+
+func TestListToPushCommits(t *testing.T) {
+ now := time.Now()
+ sig := &git.Signature{
+ Email: "example@example.com",
+ Name: "John Doe",
+ When: now,
+ }
+
+ const hexString1 = "0123456789abcdef0123456789abcdef01234567"
+ hash1, err := git.NewIDFromString(hexString1)
+ require.NoError(t, err)
+ const hexString2 = "fedcba9876543210fedcba9876543210fedcba98"
+ hash2, err := git.NewIDFromString(hexString2)
+ require.NoError(t, err)
+
+ l := []*git.Commit{
+ {
+ ID: hash1,
+ Author: sig,
+ Committer: sig,
+ CommitMessage: "Message1",
+ },
+ {
+ ID: hash2,
+ Author: sig,
+ Committer: sig,
+ CommitMessage: "Message2",
+ },
+ }
+
+ pushCommits := GitToPushCommits(l)
+ if assert.Len(t, pushCommits.Commits, 2) {
+ assert.Equal(t, "Message1", pushCommits.Commits[0].Message)
+ assert.Equal(t, hexString1, pushCommits.Commits[0].Sha1)
+ assert.Equal(t, "example@example.com", pushCommits.Commits[0].AuthorEmail)
+ assert.Equal(t, now, pushCommits.Commits[0].Timestamp)
+
+ assert.Equal(t, "Message2", pushCommits.Commits[1].Message)
+ assert.Equal(t, hexString2, pushCommits.Commits[1].Sha1)
+ assert.Equal(t, "example@example.com", pushCommits.Commits[1].AuthorEmail)
+ assert.Equal(t, now, pushCommits.Commits[1].Timestamp)
+ }
+}
+
+// TODO TestPushUpdate
diff --git a/modules/repository/create.go b/modules/repository/create.go
new file mode 100644
index 00000000..ca2150b9
--- /dev/null
+++ b/modules/repository/create.go
@@ -0,0 +1,297 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/models/webhook"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// CreateRepositoryByExample creates a repository for the user/organization.
+func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, overwriteOrAdopt, isFork bool) (err error) {
+ if err = repo_model.IsUsableRepoName(repo.Name); err != nil {
+ return err
+ }
+
+ has, err := repo_model.IsRepositoryModelExist(ctx, u, repo.Name)
+ if err != nil {
+ return fmt.Errorf("IsRepositoryExist: %w", err)
+ } else if has {
+ return repo_model.ErrRepoAlreadyExist{
+ Uname: u.Name,
+ Name: repo.Name,
+ }
+ }
+
+ repoPath := repo_model.RepoPath(u.Name, repo.Name)
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return err
+ }
+ if !overwriteOrAdopt && isExist {
+ log.Error("Files already exist in %s and we are not going to adopt or delete.", repoPath)
+ return repo_model.ErrRepoFilesAlreadyExist{
+ Uname: u.Name,
+ Name: repo.Name,
+ }
+ }
+
+ if err = db.Insert(ctx, repo); err != nil {
+ return err
+ }
+ if err = repo_model.DeleteRedirect(ctx, u.ID, repo.Name); err != nil {
+ return err
+ }
+
+ // insert units for repo
+ defaultUnits := unit.DefaultRepoUnits
+ if isFork {
+ defaultUnits = unit.DefaultForkRepoUnits
+ }
+ units := make([]repo_model.RepoUnit, 0, len(defaultUnits))
+ for _, tp := range defaultUnits {
+ if tp == unit.TypeIssues {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: tp,
+ Config: &repo_model.IssuesConfig{
+ EnableTimetracker: setting.Service.DefaultEnableTimetracking,
+ AllowOnlyContributorsToTrackTime: setting.Service.DefaultAllowOnlyContributorsToTrackTime,
+ EnableDependencies: setting.Service.DefaultEnableDependencies,
+ },
+ })
+ } else if tp == unit.TypePullRequests {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: tp,
+ Config: &repo_model.PullRequestsConfig{
+ AllowMerge: true, AllowRebase: true, AllowRebaseMerge: true, AllowSquash: true, AllowFastForwardOnly: true,
+ DefaultMergeStyle: repo_model.MergeStyle(setting.Repository.PullRequest.DefaultMergeStyle),
+ AllowRebaseUpdate: true,
+ },
+ })
+ } else {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: tp,
+ })
+ }
+ }
+
+ if err = db.Insert(ctx, units); err != nil {
+ return err
+ }
+
+ // Remember visibility preference.
+ u.LastRepoVisibility = repo.IsPrivate
+ if err = user_model.UpdateUserCols(ctx, u, "last_repo_visibility"); err != nil {
+ return fmt.Errorf("UpdateUserCols: %w", err)
+ }
+
+ if err = user_model.IncrUserRepoNum(ctx, u.ID); err != nil {
+ return fmt.Errorf("IncrUserRepoNum: %w", err)
+ }
+ u.NumRepos++
+
+ // Give access to all members in teams with access to all repositories.
+ if u.IsOrganization() {
+ teams, err := organization.FindOrgTeams(ctx, u.ID)
+ if err != nil {
+ return fmt.Errorf("FindOrgTeams: %w", err)
+ }
+ for _, t := range teams {
+ if t.IncludesAllRepositories {
+ if err := models.AddRepository(ctx, t, repo); err != nil {
+ return fmt.Errorf("AddRepository: %w", err)
+ }
+ }
+ }
+
+ if isAdmin, err := access_model.IsUserRepoAdmin(ctx, repo, doer); err != nil {
+ return fmt.Errorf("IsUserRepoAdmin: %w", err)
+ } else if !isAdmin {
+ // Make creator repo admin if it wasn't assigned automatically
+ if err = AddCollaborator(ctx, repo, doer); err != nil {
+ return fmt.Errorf("AddCollaborator: %w", err)
+ }
+ if err = repo_model.ChangeCollaborationAccessMode(ctx, repo, doer.ID, perm.AccessModeAdmin); err != nil {
+ return fmt.Errorf("ChangeCollaborationAccessModeCtx: %w", err)
+ }
+ }
+ } else if err = access_model.RecalculateAccesses(ctx, repo); err != nil {
+ // For organizations, this is already done by the AddRepository call above.
+ return fmt.Errorf("RecalculateAccesses: %w", err)
+ }
+
+ if setting.Service.AutoWatchNewRepos {
+ if err = repo_model.WatchRepo(ctx, doer.ID, repo.ID, true); err != nil {
+ return fmt.Errorf("WatchRepo: %w", err)
+ }
+ }
+
+ if err = webhook.CopyDefaultWebhooksToRepo(ctx, repo.ID); err != nil {
+ return fmt.Errorf("CopyDefaultWebhooksToRepo: %w", err)
+ }
+
+ return nil
+}
+
+const notRegularFileMode = os.ModeSymlink | os.ModeNamedPipe | os.ModeSocket | os.ModeDevice | os.ModeCharDevice | os.ModeIrregular
+
+// getDirectorySize returns the disk consumption for a given path
+func getDirectorySize(path string) (int64, error) {
+ var size int64
+ err := filepath.WalkDir(path, func(_ string, entry os.DirEntry, err error) error {
+ if os.IsNotExist(err) { // ignore the error because some files (like temp/lock file) may be deleted during traversing.
+ return nil
+ } else if err != nil {
+ return err
+ }
+ if entry.IsDir() {
+ return nil
+ }
+ info, err := entry.Info()
+ if os.IsNotExist(err) { // ignore the error as above
+ return nil
+ } else if err != nil {
+ return err
+ }
+ if (info.Mode() & notRegularFileMode) == 0 {
+ size += info.Size()
+ }
+ return nil
+ })
+ return size, err
+}
+
+// UpdateRepoSize updates the repository size, calculating it using getDirectorySize
+func UpdateRepoSize(ctx context.Context, repo *repo_model.Repository) error {
+ size, err := getDirectorySize(repo.RepoPath())
+ if err != nil {
+ return fmt.Errorf("updateSize: %w", err)
+ }
+
+ lfsSize, err := git_model.GetRepoLFSSize(ctx, repo.ID)
+ if err != nil {
+ return fmt.Errorf("updateSize: GetLFSMetaObjects: %w", err)
+ }
+
+ return repo_model.UpdateRepoSize(ctx, repo.ID, size, lfsSize)
+}
+
+// CheckDaemonExportOK creates/removes git-daemon-export-ok for git-daemon...
+func CheckDaemonExportOK(ctx context.Context, repo *repo_model.Repository) error {
+ if err := repo.LoadOwner(ctx); err != nil {
+ return err
+ }
+
+ // Create/Remove git-daemon-export-ok for git-daemon...
+ daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
+
+ isExist, err := util.IsExist(daemonExportFile)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
+ return err
+ }
+
+ isPublic := !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePublic
+ if !isPublic && isExist {
+ if err = util.Remove(daemonExportFile); err != nil {
+ log.Error("Failed to remove %s: %v", daemonExportFile, err)
+ }
+ } else if isPublic && !isExist {
+ if f, err := os.Create(daemonExportFile); err != nil {
+ log.Error("Failed to create %s: %v", daemonExportFile, err)
+ } else {
+ f.Close()
+ }
+ }
+
+ return nil
+}
+
+// UpdateRepository updates a repository with db context
+func UpdateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) {
+ repo.LowerName = strings.ToLower(repo.Name)
+
+ e := db.GetEngine(ctx)
+
+ if _, err = e.ID(repo.ID).AllCols().Update(repo); err != nil {
+ return fmt.Errorf("update: %w", err)
+ }
+
+ if err = UpdateRepoSize(ctx, repo); err != nil {
+ log.Error("Failed to update size for repository: %v", err)
+ }
+
+ if visibilityChanged {
+ if err = repo.LoadOwner(ctx); err != nil {
+ return fmt.Errorf("LoadOwner: %w", err)
+ }
+ if repo.Owner.IsOrganization() {
+ // Organization repository need to recalculate access table when visibility is changed.
+ if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil {
+ return fmt.Errorf("recalculateTeamAccesses: %w", err)
+ }
+ }
+
+ // If repo has become private, we need to set its actions to private.
+ if repo.IsPrivate {
+ _, err = e.Where("repo_id = ?", repo.ID).Cols("is_private").Update(&activities_model.Action{
+ IsPrivate: true,
+ })
+ if err != nil {
+ return err
+ }
+
+ if err = repo_model.ClearRepoStars(ctx, repo.ID); err != nil {
+ return err
+ }
+ }
+
+ // Create/Remove git-daemon-export-ok for git-daemon...
+ if err := CheckDaemonExportOK(ctx, repo); err != nil {
+ return err
+ }
+
+ forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID)
+ if err != nil {
+ return fmt.Errorf("getRepositoriesByForkID: %w", err)
+ }
+ for i := range forkRepos {
+ forkRepos[i].IsPrivate = repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate
+ if err = UpdateRepository(ctx, forkRepos[i], true); err != nil {
+ return fmt.Errorf("updateRepository[%d]: %w", forkRepos[i].ID, err)
+ }
+ }
+
+ // If visibility is changed, we need to update the issue indexer.
+ // Since the data in the issue indexer have field to indicate if the repo is public or not.
+ issue_indexer.UpdateRepoIndexer(ctx, repo.ID)
+ }
+
+ return nil
+}
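A minimal sketch of a visibility change driving the recursive update above (not part of the upstream diff); wrapping the call in db.WithTx for atomicity is an assumption about the caller, not a requirement of this file:

func makePrivate(ctx context.Context, repo *repo_model.Repository) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		repo.IsPrivate = true
		// visibilityChanged=true triggers the access recalculation, fork
		// updates and issue-indexer refresh shown in UpdateRepository above
		return UpdateRepository(ctx, repo, true)
	})
}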
diff --git a/modules/repository/create_test.go b/modules/repository/create_test.go
new file mode 100644
index 00000000..c743271c
--- /dev/null
+++ b/modules/repository/create_test.go
@@ -0,0 +1,46 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUpdateRepositoryVisibilityChanged(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Get sample repo and change visibility
+ repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 9)
+ require.NoError(t, err)
+ repo.IsPrivate = true
+
+ // Update it
+ err = UpdateRepository(db.DefaultContext, repo, true)
+ require.NoError(t, err)
+
+ // Check visibility of action has become private
+ act := activities_model.Action{}
+ _, err = db.GetEngine(db.DefaultContext).ID(3).Get(&act)
+
+ require.NoError(t, err)
+ assert.True(t, act.IsPrivate)
+}
+
+func TestGetDirectorySize(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ size, err := getDirectorySize(repo.RepoPath())
+ require.NoError(t, err)
+ assert.EqualValues(t, size, repo.Size)
+}
diff --git a/modules/repository/delete.go b/modules/repository/delete.go
new file mode 100644
index 00000000..04af98be
--- /dev/null
+++ b/modules/repository/delete.go
@@ -0,0 +1,33 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// CanUserDelete returns true if the user can delete the repository
+func CanUserDelete(ctx context.Context, repo *repo_model.Repository, user *user_model.User) (bool, error) {
+ if user.IsAdmin || user.ID == repo.OwnerID {
+ return true, nil
+ }
+
+ if err := repo.LoadOwner(ctx); err != nil {
+ return false, err
+ }
+
+ if repo.Owner.IsOrganization() {
+ isAdmin, err := organization.OrgFromUser(repo.Owner).IsOrgAdmin(ctx, user.ID)
+ if err != nil {
+ return false, err
+ }
+ return isAdmin, nil
+ }
+
+ return false, nil
+}
diff --git a/modules/repository/env.go b/modules/repository/env.go
new file mode 100644
index 00000000..e4f32092
--- /dev/null
+++ b/modules/repository/env.go
@@ -0,0 +1,87 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "fmt"
+ "os"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// env keys needed by git hooks
+const (
+ EnvRepoName = "GITEA_REPO_NAME"
+ EnvRepoUsername = "GITEA_REPO_USER_NAME"
+ EnvRepoID = "GITEA_REPO_ID"
+ EnvRepoIsWiki = "GITEA_REPO_IS_WIKI"
+ EnvPusherName = "GITEA_PUSHER_NAME"
+ EnvPusherEmail = "GITEA_PUSHER_EMAIL"
+ EnvPusherID = "GITEA_PUSHER_ID"
+ EnvKeyID = "GITEA_KEY_ID" // public key ID
+ EnvDeployKeyID = "GITEA_DEPLOY_KEY_ID"
+ EnvPRID = "GITEA_PR_ID"
+ EnvPushTrigger = "GITEA_PUSH_TRIGGER"
+ EnvIsInternal = "GITEA_INTERNAL_PUSH"
+ EnvAppURL = "GITEA_ROOT_URL"
+ EnvActionPerm = "GITEA_ACTION_PERM"
+)
+
+type PushTrigger string
+
+const (
+ PushTriggerPRMergeToBase PushTrigger = "pr-merge-to-base"
+ PushTriggerPRUpdateWithBase PushTrigger = "pr-update-with-base"
+)
+
+// InternalPushingEnvironment returns an os environment that switches off the hooks on push.
+// Avoid using this unless you are pushing within a transaction
+// or are absolutely sure that post-receive and pre-receive will do nothing.
+// The full pushing environment is still provided for other hook providers.
+func InternalPushingEnvironment(doer *user_model.User, repo *repo_model.Repository) []string {
+ return append(PushingEnvironment(doer, repo),
+ EnvIsInternal+"=true",
+ )
+}
+
+// PushingEnvironment returns an os environment to allow hooks to work on push
+func PushingEnvironment(doer *user_model.User, repo *repo_model.Repository) []string {
+ return FullPushingEnvironment(doer, doer, repo, repo.Name, 0)
+}
+
+// FullPushingEnvironment returns an os environment to allow hooks to work on push
+func FullPushingEnvironment(author, committer *user_model.User, repo *repo_model.Repository, repoName string, prID int64) []string {
+ isWiki := "false"
+ if strings.HasSuffix(repoName, ".wiki") {
+ isWiki = "true"
+ }
+
+ authorSig := author.NewGitSig()
+ committerSig := committer.NewGitSig()
+
+ environ := append(os.Environ(),
+ "GIT_AUTHOR_NAME="+authorSig.Name,
+ "GIT_AUTHOR_EMAIL="+authorSig.Email,
+ "GIT_COMMITTER_NAME="+committerSig.Name,
+ "GIT_COMMITTER_EMAIL="+committerSig.Email,
+ EnvRepoName+"="+repoName,
+ EnvRepoUsername+"="+repo.OwnerName,
+ EnvRepoIsWiki+"="+isWiki,
+ EnvPusherName+"="+committer.Name,
+ EnvPusherID+"="+fmt.Sprintf("%d", committer.ID),
+ EnvRepoID+"="+fmt.Sprintf("%d", repo.ID),
+ EnvPRID+"="+fmt.Sprintf("%d", prID),
+ EnvAppURL+"="+setting.AppURL,
+ "SSH_ORIGINAL_COMMAND=gitea-internal",
+ )
+
+ if !committer.KeepEmailPrivate {
+ environ = append(environ, EnvPusherEmail+"="+committer.Email)
+ }
+
+ return environ
+}
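A sketch of what these variables are for: pushing into a repository from a server-side working clone with FullPushingEnvironment set, so the delegate pre-/post-receive hooks can attribute the push. Driving git through os/exec, the temporary clone path, and the branch name are assumptions made for illustration (Gitea itself uses its own git command wrapper), and os/exec plus context would need to be imported:

func pushFromTempClone(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, tmpClonePath string) error {
	cmd := exec.CommandContext(ctx, "git", "push", "origin", "HEAD:refs/heads/main")
	cmd.Dir = tmpClonePath
	// for a local (filesystem) push, receive-pack inherits this environment,
	// so the hooks see GITEA_PUSHER_NAME, GITEA_REPO_ID, and the rest
	cmd.Env = FullPushingEnvironment(doer, doer, repo, repo.Name, 0)
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("push failed: %w (output: %s)", err, out)
	}
	return nil
}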
diff --git a/modules/repository/fork.go b/modules/repository/fork.go
new file mode 100644
index 00000000..fbf00087
--- /dev/null
+++ b/modules/repository/fork.go
@@ -0,0 +1,32 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// CanUserForkRepo returns true if the specified user can fork the repository.
+func CanUserForkRepo(ctx context.Context, user *user_model.User, repo *repo_model.Repository) (bool, error) {
+ if user == nil {
+ return false, nil
+ }
+ if repo.OwnerID != user.ID && !repo_model.HasForkedRepo(ctx, user.ID, repo.ID) {
+ return true, nil
+ }
+ ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, user.ID)
+ if err != nil {
+ return false, err
+ }
+ for _, org := range ownedOrgs {
+ if repo.OwnerID != org.ID && !repo_model.HasForkedRepo(ctx, org.ID, repo.ID) {
+ return true, nil
+ }
+ }
+ return false, nil
+}
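A small pre-flight sketch a handler might run before starting a fork (not part of the upstream diff); the wrapper and its error text are illustrative assumptions, and fmt would need to be imported:

func assertCanFork(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) error {
	canFork, err := CanUserForkRepo(ctx, doer, repo)
	if err != nil {
		return err
	}
	if !canFork {
		// either the user (and their orgs) already forked it, or they own it everywhere
		return fmt.Errorf("%s has no namespace left to fork %s into", doer.Name, repo.FullName())
	}
	return nil
}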
diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go
new file mode 100644
index 00000000..95849789
--- /dev/null
+++ b/modules/repository/hooks.go
@@ -0,0 +1,233 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) {
+ hookNames = []string{"pre-receive", "update", "post-receive"}
+ hookTpls = []string{
+ // for pre-receive
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+data=$(cat)
+exitcodes=""
+hookname=$(basename $0)
+GIT_DIR=${GIT_DIR:-$(dirname $0)/..}
+
+for hook in ${GIT_DIR}/hooks/${hookname}.d/*; do
+ test -x "${hook}" && test -f "${hook}" || continue
+ echo "${data}" | "${hook}"
+ exitcodes="${exitcodes} $?"
+done
+
+for i in ${exitcodes}; do
+ [ ${i} -eq 0 ] || exit ${i}
+done
+`, setting.ScriptType),
+
+ // for update
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+exitcodes=""
+hookname=$(basename $0)
+GIT_DIR=${GIT_DIR:-$(dirname $0)/..}
+
+for hook in ${GIT_DIR}/hooks/${hookname}.d/*; do
+ test -x "${hook}" && test -f "${hook}" || continue
+ "${hook}" $1 $2 $3
+ exitcodes="${exitcodes} $?"
+done
+
+for i in ${exitcodes}; do
+ [ ${i} -eq 0 ] || exit ${i}
+done
+`, setting.ScriptType),
+
+ // for post-receive
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+data=$(cat)
+exitcodes=""
+hookname=$(basename $0)
+GIT_DIR=${GIT_DIR:-$(dirname $0)/..}
+
+for hook in ${GIT_DIR}/hooks/${hookname}.d/*; do
+ test -x "${hook}" && test -f "${hook}" || continue
+ echo "${data}" | "${hook}"
+ exitcodes="${exitcodes} $?"
+done
+
+for i in ${exitcodes}; do
+ [ ${i} -eq 0 ] || exit ${i}
+done
+`, setting.ScriptType),
+ }
+
+ giteaHookTpls = []string{
+ // for pre-receive
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+%s hook --config=%s pre-receive
+`, setting.ScriptType, util.ShellEscape(setting.AppPath), util.ShellEscape(setting.CustomConf)),
+
+ // for update
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+%s hook --config=%s update $1 $2 $3
+`, setting.ScriptType, util.ShellEscape(setting.AppPath), util.ShellEscape(setting.CustomConf)),
+
+ // for post-receive
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+%s hook --config=%s post-receive
+`, setting.ScriptType, util.ShellEscape(setting.AppPath), util.ShellEscape(setting.CustomConf)),
+ }
+
+ // although only new git (>=2.29) supports proc-receive, it's still good to create its hook, in case the user upgrades git
+ hookNames = append(hookNames, "proc-receive")
+ hookTpls = append(hookTpls,
+ fmt.Sprintf(`#!/usr/bin/env %s
+# AUTO GENERATED BY GITEA, DO NOT MODIFY
+%s hook --config=%s proc-receive
+`, setting.ScriptType, util.ShellEscape(setting.AppPath), util.ShellEscape(setting.CustomConf)))
+ giteaHookTpls = append(giteaHookTpls, "")
+
+ return hookNames, hookTpls, giteaHookTpls
+}
+
+// CreateDelegateHooks creates all the hooks scripts for the repo
+func CreateDelegateHooks(repoPath string) (err error) {
+ hookNames, hookTpls, giteaHookTpls := getHookTemplates()
+ hookDir := filepath.Join(repoPath, "hooks")
+
+ for i, hookName := range hookNames {
+ oldHookPath := filepath.Join(hookDir, hookName)
+ newHookPath := filepath.Join(hookDir, hookName+".d", "gitea")
+
+ if err := os.MkdirAll(filepath.Join(hookDir, hookName+".d"), os.ModePerm); err != nil {
+ return fmt.Errorf("create hooks dir '%s': %w", filepath.Join(hookDir, hookName+".d"), err)
+ }
+
+ // WARNING: This will override all old server-side hooks
+ if err = util.Remove(oldHookPath); err != nil && !os.IsNotExist(err) {
+ return fmt.Errorf("unable to pre-remove old hook file '%s' prior to rewriting: %w ", oldHookPath, err)
+ }
+ if err = os.WriteFile(oldHookPath, []byte(hookTpls[i]), 0o777); err != nil {
+ return fmt.Errorf("write old hook file '%s': %w", oldHookPath, err)
+ }
+
+ if err = ensureExecutable(oldHookPath); err != nil {
+ return fmt.Errorf("Unable to set %s executable. Error %w", oldHookPath, err)
+ }
+
+ if err = util.Remove(newHookPath); err != nil && !os.IsNotExist(err) {
+ return fmt.Errorf("unable to pre-remove new hook file '%s' prior to rewriting: %w", newHookPath, err)
+ }
+ if err = os.WriteFile(newHookPath, []byte(giteaHookTpls[i]), 0o777); err != nil {
+ return fmt.Errorf("write new hook file '%s': %w", newHookPath, err)
+ }
+
+ if err = ensureExecutable(newHookPath); err != nil {
+ return fmt.Errorf("Unable to set %s executable. Error %w", oldHookPath, err)
+ }
+ }
+
+ return nil
+}
+
+func checkExecutable(filename string) bool {
+ // windows has no concept of an executable bit
+ if runtime.GOOS == "windows" {
+ return true
+ }
+ fileInfo, err := os.Stat(filename)
+ if err != nil {
+ return false
+ }
+ return (fileInfo.Mode() & 0o100) > 0
+}
+
+func ensureExecutable(filename string) error {
+ fileInfo, err := os.Stat(filename)
+ if err != nil {
+ return err
+ }
+ if (fileInfo.Mode() & 0o100) > 0 {
+ return nil
+ }
+ mode := fileInfo.Mode() | 0o100
+ return os.Chmod(filename, mode)
+}
+
+// CheckDelegateHooks checks the hooks scripts for the repo
+func CheckDelegateHooks(repoPath string) ([]string, error) {
+ hookNames, hookTpls, giteaHookTpls := getHookTemplates()
+
+ hookDir := filepath.Join(repoPath, "hooks")
+ results := make([]string, 0, 10)
+
+ for i, hookName := range hookNames {
+ oldHookPath := filepath.Join(hookDir, hookName)
+ newHookPath := filepath.Join(hookDir, hookName+".d", "gitea")
+
+ cont := false
+ isExist, err := util.IsExist(oldHookPath)
+ if err != nil {
+ results = append(results, fmt.Sprintf("unable to check if %s exists. Error: %v", oldHookPath, err))
+ }
+ if err == nil && !isExist {
+ results = append(results, fmt.Sprintf("old hook file %s does not exist", oldHookPath))
+ cont = true
+ }
+ isExist, err = util.IsExist(oldHookPath + ".d")
+ if err != nil {
+ results = append(results, fmt.Sprintf("unable to check if %s exists. Error: %v", oldHookPath+".d", err))
+ }
+ if err == nil && !isExist {
+ results = append(results, fmt.Sprintf("hooks directory %s does not exist", oldHookPath+".d"))
+ cont = true
+ }
+ isExist, err = util.IsExist(newHookPath)
+ if err != nil {
+ results = append(results, fmt.Sprintf("unable to check if %s exists. Error: %v", newHookPath, err))
+ }
+ if err == nil && !isExist {
+ results = append(results, fmt.Sprintf("new hook file %s does not exist", newHookPath))
+ cont = true
+ }
+ if cont {
+ continue
+ }
+ contents, err := os.ReadFile(oldHookPath)
+ if err != nil {
+ return results, err
+ }
+ if string(contents) != hookTpls[i] {
+ results = append(results, fmt.Sprintf("old hook file %s is out of date", oldHookPath))
+ }
+ if !checkExecutable(oldHookPath) {
+ results = append(results, fmt.Sprintf("old hook file %s is not executable", oldHookPath))
+ }
+ contents, err = os.ReadFile(newHookPath)
+ if err != nil {
+ return results, err
+ }
+ if string(contents) != giteaHookTpls[i] {
+ results = append(results, fmt.Sprintf("new hook file %s is out of date", newHookPath))
+ }
+ if !checkExecutable(newHookPath) {
+ results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath))
+ }
+ }
+ return results, nil
+}
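A hedged sketch of a maintenance helper tying the two functions together: regenerate the delegate hooks for one repository, then return whatever drift CheckDelegateHooks still reports. The wrapper itself is an assumption, not part of the upstream diff:

func regenerateHooks(repoPath string) ([]string, error) {
	if err := CreateDelegateHooks(repoPath); err != nil {
		return nil, fmt.Errorf("CreateDelegateHooks: %w", err)
	}
	// report anything still out of date (normally empty right after regeneration)
	return CheckDelegateHooks(repoPath)
}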
diff --git a/modules/repository/init.go b/modules/repository/init.go
new file mode 100644
index 00000000..5f500c52
--- /dev/null
+++ b/modules/repository/init.go
@@ -0,0 +1,182 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/label"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/options"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type OptionFile struct {
+ DisplayName string
+ Description string
+}
+
+var (
+ // Gitignores contains the gitignore files
+ Gitignores []string
+
+ // Licenses contains the license files
+ Licenses []string
+
+ // Readmes contains the readme files
+ Readmes []string
+
+ // LabelTemplateFiles contains the label template files, each item has its DisplayName and Description
+ LabelTemplateFiles []OptionFile
+ labelTemplateFileMap = map[string]string{} // DisplayName => FileName mapping
+)
+
+type optionFileList struct {
+ all []string // all files provided by bindata & custom-path. Sorted.
+ custom []string // custom files provided by custom-path. Non-sorted, internal use only.
+}
+
+// mergeCustomLabelFiles merges the custom label files. Always use the file's main name (DisplayName) as the key to de-duplicate.
+func mergeCustomLabelFiles(fl optionFileList) []string {
+ exts := map[string]int{"": 0, ".yml": 1, ".yaml": 2} // "yaml" file has the highest priority to be used.
+
+ m := map[string]string{}
+ merge := func(list []string) {
+ sort.Slice(list, func(i, j int) bool { return exts[filepath.Ext(list[i])] < exts[filepath.Ext(list[j])] })
+ for _, f := range list {
+ m[strings.TrimSuffix(f, filepath.Ext(f))] = f
+ }
+ }
+ merge(fl.all)
+ merge(fl.custom)
+
+ files := make([]string, 0, len(m))
+ for _, f := range m {
+ files = append(files, f)
+ }
+ sort.Strings(files)
+ return files
+}
+
+// LoadRepoConfig loads the repository config
+func LoadRepoConfig() error {
+ types := []string{"gitignore", "license", "readme", "label"} // option file directories
+ typeFiles := make([]optionFileList, len(types))
+ for i, t := range types {
+ var err error
+ if typeFiles[i].all, err = options.AssetFS().ListFiles(t, true); err != nil {
+ return fmt.Errorf("failed to list %s files: %w", t, err)
+ }
+ sort.Strings(typeFiles[i].all)
+ customPath := filepath.Join(setting.CustomPath, "options", t)
+ if isDir, err := util.IsDir(customPath); err != nil {
+ return fmt.Errorf("failed to check custom %s dir: %w", t, err)
+ } else if isDir {
+ if typeFiles[i].custom, err = util.StatDir(customPath); err != nil {
+ return fmt.Errorf("failed to list custom %s files: %w", t, err)
+ }
+ }
+ }
+
+ Gitignores = typeFiles[0].all
+ Licenses = typeFiles[1].all
+ Readmes = typeFiles[2].all
+
+ // Load label templates
+ LabelTemplateFiles = nil
+ labelTemplateFileMap = map[string]string{}
+ for _, file := range mergeCustomLabelFiles(typeFiles[3]) {
+ description, err := label.LoadTemplateDescription(file)
+ if err != nil {
+ return fmt.Errorf("failed to load labels: %w", err)
+ }
+ displayName := strings.TrimSuffix(file, filepath.Ext(file))
+ labelTemplateFileMap[displayName] = file
+ LabelTemplateFiles = append(LabelTemplateFiles, OptionFile{DisplayName: displayName, Description: description})
+ }
+
+ // Promote preferred licenses to the front, skipping any configured names that do not exist.
+ sortedLicenses := make([]string, 0, len(Licenses))
+ for _, name := range setting.Repository.PreferredLicenses {
+ if util.SliceContainsString(Licenses, name, true) {
+ sortedLicenses = append(sortedLicenses, name)
+ }
+ }
+ for _, name := range Licenses {
+ if !util.SliceContainsString(setting.Repository.PreferredLicenses, name, true) {
+ sortedLicenses = append(sortedLicenses, name)
+ }
+ }
+ Licenses = sortedLicenses
+ return nil
+}
+
+func CheckInitRepository(ctx context.Context, owner, name, objectFormatName string) (err error) {
+ // The repository directory may somehow already exist on disk.
+ repoPath := repo_model.RepoPath(owner, name)
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return err
+ }
+ if isExist {
+ return repo_model.ErrRepoFilesAlreadyExist{
+ Uname: owner,
+ Name: name,
+ }
+ }
+
+ // Init git bare new repository.
+ if err = git.InitRepository(ctx, repoPath, true, objectFormatName); err != nil {
+ return fmt.Errorf("git.InitRepository: %w", err)
+ } else if err = CreateDelegateHooks(repoPath); err != nil {
+ return fmt.Errorf("createDelegateHooks: %w", err)
+ }
+ return nil
+}
+
+// InitializeLabels adds a label set to a repository using a template
+func InitializeLabels(ctx context.Context, id int64, labelTemplate string, isOrg bool) error {
+ list, err := LoadTemplateLabelsByDisplayName(labelTemplate)
+ if err != nil {
+ return err
+ }
+
+ labels := make([]*issues_model.Label, len(list))
+ for i := 0; i < len(list); i++ {
+ labels[i] = &issues_model.Label{
+ Name: list[i].Name,
+ Exclusive: list[i].Exclusive,
+ Description: list[i].Description,
+ Color: list[i].Color,
+ }
+ if isOrg {
+ labels[i].OrgID = id
+ } else {
+ labels[i].RepoID = id
+ }
+ }
+ for _, label := range labels {
+ if err = issues_model.NewLabel(ctx, label); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// LoadTemplateLabelsByDisplayName loads a label template by its display name
+func LoadTemplateLabelsByDisplayName(displayName string) ([]*label.Label, error) {
+ if fileName, ok := labelTemplateFileMap[displayName]; ok {
+ return label.LoadTemplateFile(fileName)
+ }
+ return nil, label.ErrTemplateLoad{TemplateFile: displayName, OriginalError: fmt.Errorf("label template %q not found", displayName)}
+}
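
A minimal sketch of how the init helpers fit together, assuming a caller inside a configured Forgejo/Gitea instance: LoadRepoConfig discovers the option files once, after which a label template can be applied to a repository. The repository ID 42 and the template name "Default" are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	repo_module "code.gitea.io/gitea/modules/repository"
)

func seedLabels(ctx context.Context, repoID int64) error {
	// Load gitignore/license/readme/label option files (bindata plus custom path).
	if err := repo_module.LoadRepoConfig(); err != nil {
		return err
	}
	// Show which label templates were discovered.
	for _, tpl := range repo_module.LabelTemplateFiles {
		fmt.Printf("label template %q: %s\n", tpl.DisplayName, tpl.Description)
	}
	// Create the labels of one template on the repository (isOrg=false).
	return repo_module.InitializeLabels(ctx, repoID, "Default", false)
}

func main() {
	if err := seedLabels(context.Background(), 42); err != nil {
		log.Fatal(err)
	}
}
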
diff --git a/modules/repository/init_test.go b/modules/repository/init_test.go
new file mode 100644
index 00000000..227efdc1
--- /dev/null
+++ b/modules/repository/init_test.go
@@ -0,0 +1,30 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMergeCustomLabels(t *testing.T) {
+ files := mergeCustomLabelFiles(optionFileList{
+ all: []string{"a", "a.yaml", "a.yml"},
+ custom: nil,
+ })
+ assert.EqualValues(t, []string{"a.yaml"}, files, "yaml file should win")
+
+ files = mergeCustomLabelFiles(optionFileList{
+ all: []string{"a", "a.yaml"},
+ custom: []string{"a"},
+ })
+ assert.EqualValues(t, []string{"a"}, files, "custom file should win")
+
+ files = mergeCustomLabelFiles(optionFileList{
+ all: []string{"a", "a.yml", "a.yaml"},
+ custom: []string{"a", "a.yml"},
+ })
+ assert.EqualValues(t, []string{"a.yml"}, files, "custom yml file should win if no yaml")
+}
diff --git a/modules/repository/license.go b/modules/repository/license.go
new file mode 100644
index 00000000..6ac3547e
--- /dev/null
+++ b/modules/repository/license.go
@@ -0,0 +1,113 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "regexp"
+ "strings"
+
+ "code.gitea.io/gitea/modules/options"
+)
+
+type LicenseValues struct {
+ Owner string
+ Email string
+ Repo string
+ Year string
+}
+
+func GetLicense(name string, values *LicenseValues) ([]byte, error) {
+ data, err := options.License(name)
+ if err != nil {
+ return nil, fmt.Errorf("GetRepoInitFile[%s]: %w", name, err)
+ }
+ return fillLicensePlaceholder(name, values, data), nil
+}
+
+func fillLicensePlaceholder(name string, values *LicenseValues, origin []byte) []byte {
+ placeholder := getLicensePlaceholder(name)
+
+ scanner := bufio.NewScanner(bytes.NewReader(origin))
+ output := bytes.NewBuffer(nil)
+ for scanner.Scan() {
+ line := scanner.Text()
+ if placeholder.MatchLine == nil || placeholder.MatchLine.MatchString(line) {
+ for _, v := range placeholder.Owner {
+ line = strings.ReplaceAll(line, v, values.Owner)
+ }
+ for _, v := range placeholder.Email {
+ line = strings.ReplaceAll(line, v, values.Email)
+ }
+ for _, v := range placeholder.Repo {
+ line = strings.ReplaceAll(line, v, values.Repo)
+ }
+ for _, v := range placeholder.Year {
+ line = strings.ReplaceAll(line, v, values.Year)
+ }
+ }
+ output.WriteString(line + "\n")
+ }
+
+ return output.Bytes()
+}
+
+type licensePlaceholder struct {
+ Owner []string
+ Email []string
+ Repo []string
+ Year []string
+ MatchLine *regexp.Regexp
+}
+
+func getLicensePlaceholder(name string) *licensePlaceholder {
+ // Some universal placeholders.
+ // If you want to add a new one, make sure you have checked it with `grep -r 'NEW_WORD' options/license` and that every occurrence really is a placeholder.
+ ret := &licensePlaceholder{
+ Owner: []string{
+ "<name of author>",
+ "<owner>",
+ "[NAME]",
+ "[name of copyright owner]",
+ "[name of copyright holder]",
+ "<COPYRIGHT HOLDERS>",
+ "<copyright holders>",
+ "<AUTHOR>",
+ "<author's name or designee>",
+ "[one or more legally recognised persons or entities offering the Work under the terms and conditions of this Licence]",
+ },
+ Email: []string{
+ "[EMAIL]",
+ },
+ Repo: []string{
+ "<program>",
+ "<one line to give the program's name and a brief idea of what it does.>",
+ },
+ Year: []string{
+ "<year>",
+ "[YEAR]",
+ "{YEAR}",
+ "[yyyy]",
+ "[Year]",
+ "[year]",
+ },
+ }
+
+ // Some special placeholders for specific licenses.
+ // It's unsafe to apply them to all licenses.
+ switch name {
+ case "0BSD":
+ return &licensePlaceholder{
+ Owner: []string{"AUTHOR"},
+ Email: []string{"EMAIL"},
+ Year: []string{"YEAR"},
+ MatchLine: regexp.MustCompile(`Copyright \(C\) YEAR by AUTHOR EMAIL`), // there is another AUTHOR in the file, but it's not a placeholder
+ }
+
+ // Other special placeholders can be added here.
+ }
+ return ret
+}
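
A small sketch of GetLicense in use, assuming the license assets are available as in a normal installation; the owner, e-mail, repository name and year are example values.

package main

import (
	"fmt"
	"log"

	repo_module "code.gitea.io/gitea/modules/repository"
)

func main() {
	// Render the MIT template with repository-specific values filled in.
	content, err := repo_module.GetLicense("MIT", &repo_module.LicenseValues{
		Owner: "Example Org",
		Email: "dev@example.org",
		Repo:  "example",
		Year:  "2024",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(content))
}
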
diff --git a/modules/repository/license_test.go b/modules/repository/license_test.go
new file mode 100644
index 00000000..a7d77743
--- /dev/null
+++ b/modules/repository/license_test.go
@@ -0,0 +1,181 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getLicense(t *testing.T) {
+ type args struct {
+ name string
+ values *LicenseValues
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantErr require.ErrorAssertionFunc
+ }{
+ {
+ name: "regular",
+ args: args{
+ name: "MIT",
+ values: &LicenseValues{Owner: "Gitea", Year: "2023"},
+ },
+ want: `MIT License
+
+Copyright (c) 2023 Gitea
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+`,
+ wantErr: require.NoError,
+ },
+ {
+ name: "license not found",
+ args: args{
+ name: "notfound",
+ },
+ wantErr: require.Error,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := GetLicense(tt.args.name, tt.args.values)
+ tt.wantErr(t, err, fmt.Sprintf("GetLicense(%v, %v)", tt.args.name, tt.args.values))
+
+ assert.Equalf(t, tt.want, string(got), "GetLicense(%v, %v)", tt.args.name, tt.args.values)
+ })
+ }
+}
+
+func Test_fillLicensePlaceholder(t *testing.T) {
+ type args struct {
+ name string
+ values *LicenseValues
+ origin string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ }{
+ {
+ name: "owner",
+ args: args{
+ name: "regular",
+ values: &LicenseValues{Year: "2023", Owner: "Gitea", Email: "teabot@gitea.io", Repo: "gitea"},
+ origin: `
+<name of author>
+<owner>
+[NAME]
+[name of copyright owner]
+[name of copyright holder]
+<COPYRIGHT HOLDERS>
+<copyright holders>
+<AUTHOR>
+<author's name or designee>
+[one or more legally recognised persons or entities offering the Work under the terms and conditions of this Licence]
+`,
+ },
+ want: `
+Gitea
+Gitea
+Gitea
+Gitea
+Gitea
+Gitea
+Gitea
+Gitea
+Gitea
+Gitea
+`,
+ },
+ {
+ name: "email",
+ args: args{
+ name: "regular",
+ values: &LicenseValues{Year: "2023", Owner: "Gitea", Email: "teabot@gitea.io", Repo: "gitea"},
+ origin: `
+[EMAIL]
+`,
+ },
+ want: `
+teabot@gitea.io
+`,
+ },
+ {
+ name: "repo",
+ args: args{
+ name: "regular",
+ values: &LicenseValues{Year: "2023", Owner: "Gitea", Email: "teabot@gitea.io", Repo: "gitea"},
+ origin: `
+<program>
+<one line to give the program's name and a brief idea of what it does.>
+`,
+ },
+ want: `
+gitea
+gitea
+`,
+ },
+ {
+ name: "year",
+ args: args{
+ name: "regular",
+ values: &LicenseValues{Year: "2023", Owner: "Gitea", Email: "teabot@gitea.io", Repo: "gitea"},
+ origin: `
+<year>
+[YEAR]
+{YEAR}
+[yyyy]
+[Year]
+[year]
+`,
+ },
+ want: `
+2023
+2023
+2023
+2023
+2023
+2023
+`,
+ },
+ {
+ name: "0BSD",
+ args: args{
+ name: "0BSD",
+ values: &LicenseValues{Year: "2023", Owner: "Gitea", Email: "teabot@gitea.io", Repo: "gitea"},
+ origin: `
+Copyright (C) YEAR by AUTHOR EMAIL
+
+...
+
+... THE AUTHOR BE LIABLE FOR ...
+`,
+ },
+ want: `
+Copyright (C) 2023 by Gitea teabot@gitea.io
+
+...
+
+... THE AUTHOR BE LIABLE FOR ...
+`,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ assert.Equalf(t, tt.want, string(fillLicensePlaceholder(tt.args.name, tt.args.values, []byte(tt.args.origin))), "fillLicensePlaceholder(%v, %v, %v)", tt.args.name, tt.args.values, tt.args.origin)
+ })
+ }
+}
diff --git a/modules/repository/main_test.go b/modules/repository/main_test.go
new file mode 100644
index 00000000..f81dfcda
--- /dev/null
+++ b/modules/repository/main_test.go
@@ -0,0 +1,16 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/modules/repository/push.go b/modules/repository/push.go
new file mode 100644
index 00000000..66d0417c
--- /dev/null
+++ b/modules/repository/push.go
@@ -0,0 +1,70 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "code.gitea.io/gitea/modules/git"
+)
+
+// PushUpdateOptions defines the push update options
+type PushUpdateOptions struct {
+ PusherID int64
+ PusherName string
+ RepoUserName string
+ RepoName string
+ RefFullName git.RefName // branch, tag or other name to push
+ OldCommitID string
+ NewCommitID string
+ TimeNano int64
+}
+
+// IsNewRef returns true if it's the first-time push to a branch, tag, etc.
+func (opts *PushUpdateOptions) IsNewRef() bool {
+ return git.IsEmptyCommitID(opts.OldCommitID, nil)
+}
+
+// IsDelRef returns true if it's the deletion of a branch or tag
+func (opts *PushUpdateOptions) IsDelRef() bool {
+ return git.IsEmptyCommitID(opts.NewCommitID, nil)
+}
+
+// IsUpdateRef returns true if it's an update operation
+func (opts *PushUpdateOptions) IsUpdateRef() bool {
+ return !opts.IsNewRef() && !opts.IsDelRef()
+}
+
+// IsNewTag returns true if it's the creation of a tag
+func (opts *PushUpdateOptions) IsNewTag() bool {
+ return opts.RefFullName.IsTag() && opts.IsNewRef()
+}
+
+// IsDelTag returns true if it's the deletion of a tag
+func (opts *PushUpdateOptions) IsDelTag() bool {
+ return opts.RefFullName.IsTag() && opts.IsDelRef()
+}
+
+// IsNewBranch returns true if it's the first-time push to a branch
+func (opts *PushUpdateOptions) IsNewBranch() bool {
+ return opts.RefFullName.IsBranch() && opts.IsNewRef()
+}
+
+// IsUpdateBranch returns true if it's not the first push to a branch
+func (opts *PushUpdateOptions) IsUpdateBranch() bool {
+ return opts.RefFullName.IsBranch() && opts.IsUpdateRef()
+}
+
+// IsDelBranch returns true if it's the deletion of a branch
+func (opts *PushUpdateOptions) IsDelBranch() bool {
+ return opts.RefFullName.IsBranch() && opts.IsDelRef()
+}
+
+// RefName returns the short name of the ref
+func (opts *PushUpdateOptions) RefName() string {
+ return opts.RefFullName.ShortName()
+}
+
+// RepoFullName returns the full name of the repo
+func (opts *PushUpdateOptions) RepoFullName() string {
+ return opts.RepoUserName + "/" + opts.RepoName
+}
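
An illustrative sketch of the PushUpdateOptions predicates; the ref name and commit IDs are made up. A push whose old commit ID is the all-zero ID is treated as the creation of the ref.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"

	repo_module "code.gitea.io/gitea/modules/repository"
)

func main() {
	// A push that creates a new branch: the old commit ID is the zero ID.
	opts := repo_module.PushUpdateOptions{
		RefFullName: git.RefName("refs/heads/feature"),
		OldCommitID: "0000000000000000000000000000000000000000",
		NewCommitID: "bbdb7df30248e7d4a26a909c8d2598a152e13868",
	}
	fmt.Println(opts.IsNewBranch()) // true
	fmt.Println(opts.IsDelBranch()) // false
	fmt.Println(opts.RefName())     // feature
}
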
diff --git a/modules/repository/repo.go b/modules/repository/repo.go
new file mode 100644
index 00000000..a863bec9
--- /dev/null
+++ b/modules/repository/repo.go
@@ -0,0 +1,383 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+/*
+GitHub, GitLab, Gogs: *.wiki.git
+BitBucket: *.git/wiki
+*/
+var commonWikiURLSuffixes = []string{".wiki.git", ".git/wiki"}
+
+// WikiRemoteURL returns the accessible wiki repository URL if one exists.
+// Otherwise, it returns an empty string.
+func WikiRemoteURL(ctx context.Context, remote string) string {
+ remote = strings.TrimSuffix(remote, ".git")
+ for _, suffix := range commonWikiURLSuffixes {
+ wikiURL := remote + suffix
+ if git.IsRepoURLAccessible(ctx, wikiURL) {
+ return wikiURL
+ }
+ }
+ return ""
+}
+
+// SyncRepoTags synchronizes releases table with repository tags
+func SyncRepoTags(ctx context.Context, repoID int64) error {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ return err
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return err
+ }
+ defer gitRepo.Close()
+
+ return SyncReleasesWithTags(ctx, repo, gitRepo)
+}
+
+// SyncReleasesWithTags synchronizes release table with repository tags
+func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) error {
+ log.Debug("SyncReleasesWithTags: in Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name)
+
+ // optimized procedure for pull-mirrors which saves a lot of time (in
+ // particular for repos with many tags).
+ if repo.IsMirror {
+ return pullMirrorReleaseSync(ctx, repo, gitRepo)
+ }
+
+ existingRelTags := make(container.Set[string])
+ opts := repo_model.FindReleasesOptions{
+ IncludeDrafts: true,
+ IncludeTags: true,
+ ListOptions: db.ListOptions{PageSize: 50},
+ RepoID: repo.ID,
+ }
+ for page := 1; ; page++ {
+ opts.Page = page
+ rels, err := db.Find[repo_model.Release](gitRepo.Ctx, opts)
+ if err != nil {
+ return fmt.Errorf("unable to GetReleasesByRepoID in Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ if len(rels) == 0 {
+ break
+ }
+ for _, rel := range rels {
+ if rel.IsDraft {
+ continue
+ }
+ commitID, err := gitRepo.GetTagCommitID(rel.TagName)
+ if err != nil && !git.IsErrNotExist(err) {
+ return fmt.Errorf("unable to GetTagCommitID for %q in Repo[%d:%s/%s]: %w", rel.TagName, repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ if git.IsErrNotExist(err) || commitID != rel.Sha1 {
+ if err := repo_model.PushUpdateDeleteTag(ctx, repo, rel.TagName); err != nil {
+ return fmt.Errorf("unable to PushUpdateDeleteTag: %q in Repo[%d:%s/%s]: %w", rel.TagName, repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ } else {
+ existingRelTags.Add(strings.ToLower(rel.TagName))
+ }
+ }
+ }
+
+ _, err := gitRepo.WalkReferences(git.ObjectTag, 0, 0, func(sha1, refname string) error {
+ tagName := strings.TrimPrefix(refname, git.TagPrefix)
+ if existingRelTags.Contains(strings.ToLower(tagName)) {
+ return nil
+ }
+
+ if err := PushUpdateAddTag(ctx, repo, gitRepo, tagName, sha1, refname); err != nil {
+ // sometimes a tag can fail to sync, e.g. https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tag/?h=v2.6.11
+ // which points to a tree object rather than a tag object (it was created before git)
+ log.Error("unable to PushUpdateAddTag: %q to Repo[%d:%s/%s]: %v", tagName, repo.ID, repo.OwnerName, repo.Name, err)
+ }
+
+ return nil
+ })
+ return err
+}
+
+// PushUpdateAddTag must be called for any push action that adds a tag
+func PushUpdateAddTag(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, tagName, sha1, refname string) error {
+ tag, err := gitRepo.GetTagWithID(sha1, tagName)
+ if err != nil {
+ return fmt.Errorf("unable to GetTag: %w", err)
+ }
+ commit, err := tag.Commit(gitRepo)
+ if err != nil {
+ return fmt.Errorf("unable to get tag Commit: %w", err)
+ }
+
+ sig := tag.Tagger
+ if sig == nil {
+ sig = commit.Author
+ }
+ if sig == nil {
+ sig = commit.Committer
+ }
+
+ var author *user_model.User
+ createdAt := time.Unix(1, 0)
+
+ if sig != nil {
+ author, err = user_model.GetUserByEmail(ctx, sig.Email)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ return fmt.Errorf("unable to GetUserByEmail for %q: %w", sig.Email, err)
+ }
+ createdAt = sig.When
+ }
+
+ commitsCount, err := commit.CommitsCount()
+ if err != nil {
+ return fmt.Errorf("unable to get CommitsCount: %w", err)
+ }
+
+ rel := repo_model.Release{
+ RepoID: repo.ID,
+ TagName: tagName,
+ LowerTagName: strings.ToLower(tagName),
+ Sha1: commit.ID.String(),
+ NumCommits: commitsCount,
+ CreatedUnix: timeutil.TimeStamp(createdAt.Unix()),
+ IsTag: true,
+ }
+ if author != nil {
+ rel.PublisherID = author.ID
+ }
+
+ return repo_model.SaveOrUpdateTag(ctx, repo, &rel)
+}
+
+// StoreMissingLfsObjectsInRepository downloads missing LFS objects
+func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, lfsClient lfs.Client) error {
+ contentStore := lfs.NewContentStore()
+
+ pointerChan := make(chan lfs.PointerBlob)
+ errChan := make(chan error, 1)
+ go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan)
+
+ downloadObjects := func(pointers []lfs.Pointer) error {
+ err := lfsClient.Download(ctx, pointers, func(p lfs.Pointer, content io.ReadCloser, objectError error) error {
+ if objectError != nil {
+ return objectError
+ }
+
+ defer content.Close()
+
+ _, err := git_model.NewLFSMetaObject(ctx, repo.ID, p)
+ if err != nil {
+ log.Error("Repo[%-v]: Error creating LFS meta object %-v: %v", repo, p, err)
+ return err
+ }
+
+ if err := contentStore.Put(p, content); err != nil {
+ log.Error("Repo[%-v]: Error storing content for LFS meta object %-v: %v", repo, p, err)
+ if _, err2 := git_model.RemoveLFSMetaObjectByOid(ctx, repo.ID, p.Oid); err2 != nil {
+ log.Error("Repo[%-v]: Error removing LFS meta object %-v: %v", repo, p, err2)
+ }
+ return err
+ }
+ return nil
+ })
+ if err != nil {
+ select {
+ case <-ctx.Done():
+ return nil
+ default:
+ }
+ }
+ return err
+ }
+
+ var batch []lfs.Pointer
+ for pointerBlob := range pointerChan {
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, repo.ID, pointerBlob.Oid)
+ if err != nil && err != git_model.ErrLFSObjectNotExist {
+ log.Error("Repo[%-v]: Error querying LFS meta object %-v: %v", repo, pointerBlob.Pointer, err)
+ return err
+ }
+ if meta != nil {
+ log.Trace("Repo[%-v]: Skipping unknown LFS meta object %-v", repo, pointerBlob.Pointer)
+ continue
+ }
+
+ log.Trace("Repo[%-v]: LFS object %-v not present in repository", repo, pointerBlob.Pointer)
+
+ exist, err := contentStore.Exists(pointerBlob.Pointer)
+ if err != nil {
+ log.Error("Repo[%-v]: Error checking if LFS object %-v exists: %v", repo, pointerBlob.Pointer, err)
+ return err
+ }
+
+ if exist {
+ log.Trace("Repo[%-v]: LFS object %-v already present; creating meta object", repo, pointerBlob.Pointer)
+ _, err := git_model.NewLFSMetaObject(ctx, repo.ID, pointerBlob.Pointer)
+ if err != nil {
+ log.Error("Repo[%-v]: Error creating LFS meta object %-v: %v", repo, pointerBlob.Pointer, err)
+ return err
+ }
+ } else {
+ if setting.LFS.MaxFileSize > 0 && pointerBlob.Size > setting.LFS.MaxFileSize {
+ log.Info("Repo[%-v]: LFS object %-v download denied because of LFS_MAX_FILE_SIZE=%d < size %d", repo, pointerBlob.Pointer, setting.LFS.MaxFileSize, pointerBlob.Size)
+ continue
+ }
+
+ batch = append(batch, pointerBlob.Pointer)
+ if len(batch) >= lfsClient.BatchSize() {
+ if err := downloadObjects(batch); err != nil {
+ return err
+ }
+ batch = nil
+ }
+ }
+ }
+ if len(batch) > 0 {
+ if err := downloadObjects(batch); err != nil {
+ return err
+ }
+ }
+
+ err, has := <-errChan
+ if has {
+ log.Error("Repo[%-v]: Error enumerating LFS objects for repository: %v", repo, err)
+ return err
+ }
+
+ return nil
+}
+
+// shortRelease is a memory-saving substitute for repo_model.Release during this sync
+type shortRelease struct {
+ ID int64
+ TagName string
+ Sha1 string
+ IsTag bool
+}
+
+func (shortRelease) TableName() string {
+ return "release"
+}
+
+// pullMirrorReleaseSync is a pull-mirror specific tag<->release table
+// synchronization which overwrites all Releases from the repository tags. This
+// can be relied on since a pull-mirror is always identical to its
+// upstream. Hence, after each sync we want the pull-mirror release set to be
+// identical to the upstream tag set. This is much more efficient for
+// repositories like https://github.com/vim/vim (with over 13000 tags).
+func pullMirrorReleaseSync(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) error {
+ log.Trace("pullMirrorReleaseSync: rebuilding releases for pull-mirror Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name)
+ tags, numTags, err := gitRepo.GetTagInfos(0, 0)
+ if err != nil {
+ return fmt.Errorf("unable to GetTagInfos in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ err = db.WithTx(ctx, func(ctx context.Context) error {
+ dbReleases, err := db.Find[shortRelease](ctx, repo_model.FindReleasesOptions{
+ RepoID: repo.ID,
+ IncludeDrafts: true,
+ IncludeTags: true,
+ })
+ if err != nil {
+ return fmt.Errorf("unable to FindReleases in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ }
+
+ inserts, deletes, updates := calcSync(tags, dbReleases)
+ //
+ // make release set identical to upstream tags
+ //
+ for _, tag := range inserts {
+ release := repo_model.Release{
+ RepoID: repo.ID,
+ TagName: tag.Name,
+ LowerTagName: strings.ToLower(tag.Name),
+ Sha1: tag.Object.String(),
+ // NOTE: ignored, since NumCommits is unused
+ // for pull-mirrors (only relevant when
+ // displaying releases, IsTag: false)
+ NumCommits: -1,
+ CreatedUnix: timeutil.TimeStamp(tag.Tagger.When.Unix()),
+ IsTag: true,
+ }
+ if err := db.Insert(ctx, release); err != nil {
+ return fmt.Errorf("unable insert tag %s for pull-mirror Repo[%d:%s/%s]: %w", tag.Name, repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ }
+
+ // only delete releases that are bare tags
+ if len(deletes) > 0 {
+ if _, err := db.GetEngine(ctx).Where("repo_id=?", repo.ID).
+ In("id", deletes).
+ Delete(&repo_model.Release{}); err != nil {
+ return fmt.Errorf("unable to delete tags for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ }
+
+ for _, tag := range updates {
+ if _, err := db.GetEngine(ctx).Where("repo_id = ? AND lower_tag_name = ?", repo.ID, strings.ToLower(tag.Name)).
+ Cols("sha1").
+ Update(&repo_model.Release{
+ Sha1: tag.Object.String(),
+ }); err != nil {
+ return fmt.Errorf("unable to update tag %s for pull-mirror Repo[%d:%s/%s]: %w", tag.Name, repo.ID, repo.OwnerName, repo.Name, err)
+ }
+ }
+ return nil
+ })
+ if err != nil {
+ return fmt.Errorf("unable to rebuild release table for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ }
+
+ log.Trace("pullMirrorReleaseSync: done rebuilding %d releases", numTags)
+ return nil
+}
+
+func calcSync(destTags []*git.Tag, dbTags []*shortRelease) ([]*git.Tag, []int64, []*git.Tag) {
+ destTagMap := make(map[string]*git.Tag)
+ for _, tag := range destTags {
+ destTagMap[tag.Name] = tag
+ }
+ dbTagMap := make(map[string]*shortRelease)
+ for _, rel := range dbTags {
+ dbTagMap[rel.TagName] = rel
+ }
+
+ inserted := make([]*git.Tag, 0, 10)
+ updated := make([]*git.Tag, 0, 10)
+ for _, tag := range destTags {
+ rel := dbTagMap[tag.Name]
+ if rel == nil {
+ inserted = append(inserted, tag)
+ } else if rel.Sha1 != tag.Object.String() {
+ updated = append(updated, tag)
+ }
+ }
+ deleted := make([]int64, 0, 10)
+ for _, tag := range dbTags {
+ if destTagMap[tag.TagName] == nil && tag.IsTag {
+ deleted = append(deleted, tag.ID)
+ }
+ }
+ return inserted, deleted, updated
+}
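
A short sketch of the typical call site, assuming it runs inside an initialized instance (database and git storage available), e.g. right after a mirror fetch; the repository ID passed in is a placeholder.

package example

import (
	"context"
	"log"

	repo_module "code.gitea.io/gitea/modules/repository"
)

// syncTagsAfterFetch refreshes the release table from the repository's tags.
func syncTagsAfterFetch(ctx context.Context, repoID int64) {
	if err := repo_module.SyncRepoTags(ctx, repoID); err != nil {
		log.Printf("SyncRepoTags(%d): %v", repoID, err)
	}
}
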
diff --git a/modules/repository/repo_test.go b/modules/repository/repo_test.go
new file mode 100644
index 00000000..f3e7be6d
--- /dev/null
+++ b/modules/repository/repo_test.go
@@ -0,0 +1,76 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/git"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_calcSync(t *testing.T) {
+ gitTags := []*git.Tag{
+ /*{
+ Name: "v0.1.0-beta", //deleted tag
+ Object: git.MustIDFromString(""),
+ },
+ {
+ Name: "v0.1.1-beta", //deleted tag but release should not be deleted because it's a release
+ Object: git.MustIDFromString(""),
+ },
+ */
+ {
+ Name: "v1.0.0", // keep as before
+ Object: git.MustIDFromString("1006e6e13c73ad3d9e2d5682ad266b5016523485"),
+ },
+ {
+ Name: "v1.1.0", // retagged with new commit id
+ Object: git.MustIDFromString("bbdb7df30248e7d4a26a909c8d2598a152e13868"),
+ },
+ {
+ Name: "v1.2.0", // new tag
+ Object: git.MustIDFromString("a5147145e2f24d89fd6d2a87826384cc1d253267"),
+ },
+ }
+
+ dbReleases := []*shortRelease{
+ {
+ ID: 1,
+ TagName: "v0.1.0-beta",
+ Sha1: "244758d7da8dd1d9e0727e8cb7704ed4ba9a17c3",
+ IsTag: true,
+ },
+ {
+ ID: 2,
+ TagName: "v0.1.1-beta",
+ Sha1: "244758d7da8dd1d9e0727e8cb7704ed4ba9a17c3",
+ IsTag: false,
+ },
+ {
+ ID: 3,
+ TagName: "v1.0.0",
+ Sha1: "1006e6e13c73ad3d9e2d5682ad266b5016523485",
+ },
+ {
+ ID: 4,
+ TagName: "v1.1.0",
+ Sha1: "53ab18dcecf4152b58328d1f47429510eb414d50",
+ },
+ }
+
+ inserts, deletes, updates := calcSync(gitTags, dbReleases)
+ if assert.Len(t, inserts, 1, "inserts") {
+ assert.EqualValues(t, *gitTags[2], *inserts[0], "inserts equal")
+ }
+
+ if assert.Len(t, deletes, 1, "deletes") {
+ assert.EqualValues(t, 1, deletes[0], "deletes equal")
+ }
+
+ if assert.Len(t, updates, 1, "updates") {
+ assert.EqualValues(t, *gitTags[1], *updates[0], "updates equal")
+ }
+}
diff --git a/modules/repository/temp.go b/modules/repository/temp.go
new file mode 100644
index 00000000..04faa9db
--- /dev/null
+++ b/modules/repository/temp.go
@@ -0,0 +1,45 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// LocalCopyPath returns the local repository temporary copy path.
+func LocalCopyPath() string {
+ if filepath.IsAbs(setting.Repository.Local.LocalCopyPath) {
+ return setting.Repository.Local.LocalCopyPath
+ }
+ return path.Join(setting.AppDataPath, setting.Repository.Local.LocalCopyPath)
+}
+
+// CreateTemporaryPath creates a temporary path
+func CreateTemporaryPath(prefix string) (string, error) {
+ if err := os.MkdirAll(LocalCopyPath(), os.ModePerm); err != nil {
+ log.Error("Unable to create localcopypath directory: %s (%v)", LocalCopyPath(), err)
+ return "", fmt.Errorf("Failed to create localcopypath directory %s: %w", LocalCopyPath(), err)
+ }
+ basePath, err := os.MkdirTemp(LocalCopyPath(), prefix+".git")
+ if err != nil {
+ log.Error("Unable to create temporary directory: %s-*.git (%v)", prefix, err)
+ return "", fmt.Errorf("Failed to create dir %s-*.git: %w", prefix, err)
+ }
+ return basePath, nil
+}
+
+// RemoveTemporaryPath removes the temporary path
+func RemoveTemporaryPath(basePath string) error {
+ if _, err := os.Stat(basePath); !os.IsNotExist(err) {
+ return util.RemoveAll(basePath)
+ }
+ return nil
+}
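
A minimal usage sketch of the temporary-path helpers, assuming the settings are initialized as in a running instance: create a scratch directory under LocalCopyPath and make sure it is removed again. The prefix "merge" is just an example.

package main

import (
	"log"

	repo_module "code.gitea.io/gitea/modules/repository"
)

func main() {
	basePath, err := repo_module.CreateTemporaryPath("merge")
	if err != nil {
		log.Fatal(err)
	}
	defer func() {
		// Clean the scratch directory up even if the work below fails.
		if err := repo_module.RemoveTemporaryPath(basePath); err != nil {
			log.Printf("remove temporary path %s: %v", basePath, err)
		}
	}()

	// ... clone into basePath and do the actual work here ...
	log.Printf("working in %s", basePath)
}
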