feat: [CODE-3352]: Add git pre-receive preprocessor (#3535)

* Replace map[string]string with []CommitInfo in FindCommitterMismatchOutput
* Merge remote-tracking branch 'origin/main' into dd/pre-preceive-preprocessor
* Merge remote-tracking branch 'origin/main' into dd/pre-preceive-preprocessor
* Add total to FindCommitterMismatchOutput
* Reuse changedRefs for the deleted-branches-only check on push
* Merge remote-tracking branch 'origin/main' into dd/pre-preceive-preprocessor
* Fix typo commiter -> committer
* Rename listAllObjects to listGitObjDir and loop through dirs outside listGitObjDir
* Use line with committer prefix to find committer email
* Merge remote-tracking branch 'origin/main' into dd/pre-preceive-preprocessor
* Merge remote-tracking branch 'origin/main' into dd/pre-preceive-preprocessor
* Rename vars and add total to findOversizeFiles
* Use CatFileBatch instead of git show info to find committer emails and remove unused CatFileBatchCheck
* Use WithAlternateObjectDirs in findCommiterEmailsMismatch
* Merge remote-tracking branch 'origin/main' into dd/pre-preceive-preprocessor
Darko Draskovic 2025-03-18 20:21:28 +00:00 committed by Harness
parent 9882bf0ae4
commit 07224c37de
14 changed files with 442 additions and 154 deletions

View File

@@ -32,6 +32,7 @@ type RestrictedGIT interface {
GetBranch(ctx context.Context, params *git.GetBranchParams) (*git.GetBranchOutput, error)
Diff(ctx context.Context, in *git.DiffParams, files ...api.FileDiffRequest) (<-chan *git.FileDiff, <-chan error)
GetBlob(ctx context.Context, params *git.GetBlobParams) (*git.GetBlobOutput, error)
// TODO: remove. Kept for backwards compatibility.
FindOversizeFiles(
ctx context.Context,
params *git.FindOversizeFilesParams,

View File

@@ -85,10 +85,11 @@ func (c *Controller) PreReceive(
return output, nil
}
var principal *types.Principal
// For internal calls - through the application interface (API) - no need to verify protection rules.
if !in.Internal && repo.State == enum.RepoStateActive {
// TODO: use store.PrincipalInfoCache once we abstracted principals.
principal, err := c.principalStore.Find(ctx, in.PrincipalID)
principal, err = c.principalStore.Find(ctx, in.PrincipalID)
if err != nil {
return hook.Output{}, fmt.Errorf("failed to find inner principal with id %d: %w", in.PrincipalID, err)
}
@@ -120,12 +121,8 @@ func (c *Controller) PreReceive(
return hook.Output{}, fmt.Errorf("failed to extend pre-receive hook: %w", err)
}
err = c.checkFileSizeLimit(ctx, rgit, repo, in, &output)
if output.Error != nil {
return output, nil
}
if err != nil {
return hook.Output{}, err
if err = c.processObjects(ctx, repo, principal, refUpdates, in, &output); err != nil {
return hook.Output{}, fmt.Errorf("failed to process pre-receive objects: %w", err)
}
err = c.checkLFSObjects(ctx, rgit, repo, in, &output)
@@ -258,6 +255,13 @@ type changedRefs struct {
other changes
}
func (c *changedRefs) hasOnlyDeletedBranches() bool {
if len(c.branches.created) > 0 || len(c.branches.updated) > 0 || len(c.branches.forced) > 0 {
return false
}
return true
}
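For illustration only (not part of this change), a minimal test sketch of the gating above; it assumes the created/updated/forced fields hold []hook.ReferenceUpdate and that the test sits next to the controller code:

func TestHasOnlyDeletedBranches(t *testing.T) {
	var c changedRefs
	// A push that deletes branches only (or touches nothing) skips the new object checks.
	if !c.hasOnlyDeletedBranches() {
		t.Fatal("expected true when no branches are created, updated, or forced")
	}
	// As soon as a branch is created, updated, or force-pushed, the checks must run.
	c.branches.created = append(c.branches.created, hook.ReferenceUpdate{})
	if c.hasOnlyDeletedBranches() {
		t.Fatal("expected false once a branch is created")
	}
}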
func groupRefsByAction(refUpdates []hook.ReferenceUpdate, forced []bool) (c changedRefs) {
for i, refUpdate := range refUpdates {
switch {

View File

@@ -1,73 +0,0 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package githook
import (
"context"
"fmt"
"github.com/harness/gitness/app/services/settings"
"github.com/harness/gitness/git"
"github.com/harness/gitness/git/hook"
"github.com/harness/gitness/types"
"github.com/gotidy/ptr"
)
func (c *Controller) checkFileSizeLimit(
ctx context.Context,
rgit RestrictedGIT,
repo *types.RepositoryCore,
in types.GithookPreReceiveInput,
output *hook.Output,
) error {
// return if all new refs are nil refs
if isAllRefDeletions(in.RefUpdates) {
return nil
}
sizeLimit, err := settings.RepoGet(
ctx,
c.settings,
repo.ID,
settings.KeyFileSizeLimit,
settings.DefaultFileSizeLimit,
)
if err != nil {
return fmt.Errorf("failed to check settings for file size limit: %w", err)
}
if sizeLimit <= 0 {
return nil
}
res, err := rgit.FindOversizeFiles(
ctx,
&git.FindOversizeFilesParams{
RepoUID: repo.GitUID,
GitObjectDirs: in.Environment.AlternateObjectDirs,
SizeLimit: sizeLimit,
},
)
if err != nil {
return fmt.Errorf("failed to get file sizes: %w", err)
}
if len(res.FileInfos) > 0 {
output.Error = ptr.String("Changes blocked by files exceeding the file size limit")
printOversizeFiles(output, res.FileInfos, sizeLimit)
}
return nil
}

View File

@@ -0,0 +1,119 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package githook
import (
"context"
"fmt"
"github.com/harness/gitness/app/services/settings"
"github.com/harness/gitness/git"
"github.com/harness/gitness/git/hook"
"github.com/harness/gitness/types"
"github.com/gotidy/ptr"
)
func (c *Controller) processObjects(
ctx context.Context,
repo *types.RepositoryCore,
principal *types.Principal,
refUpdates changedRefs,
in types.GithookPreReceiveInput,
output *hook.Output,
) error {
if refUpdates.hasOnlyDeletedBranches() {
return nil
}
var sizeLimit int64
var err error
sizeLimit, err = settings.RepoGet(
ctx,
c.settings,
repo.ID,
settings.KeyFileSizeLimit,
settings.DefaultFileSizeLimit,
)
if err != nil {
return fmt.Errorf("failed to check settings for file size limit: %w", err)
}
principalCommitterMatch, err := settings.RepoGet(
ctx,
c.settings,
repo.ID,
settings.KeyPrincipalCommitterMatch,
settings.DefaultPrincipalCommitterMatch,
)
if err != nil {
return fmt.Errorf("failed to check settings for principal committer match: %w", err)
}
if sizeLimit == 0 && !principalCommitterMatch {
return nil
}
preReceiveObjsIn := git.ProcessPreReceiveObjectsParams{
ReadParams: git.ReadParams{
RepoUID: repo.GitUID,
AlternateObjectDirs: in.Environment.AlternateObjectDirs,
},
}
if sizeLimit > 0 {
preReceiveObjsIn.FindOversizeFilesParams = &git.FindOversizeFilesParams{
SizeLimit: sizeLimit,
}
}
if principalCommitterMatch && principal != nil {
preReceiveObjsIn.FindCommitterMismatchParams = &git.FindCommitterMismatchParams{
PrincipalEmail: principal.Email,
}
}
preReceiveObjsOut, err := c.git.ProcessPreReceiveObjects(
ctx,
preReceiveObjsIn,
)
if err != nil {
return fmt.Errorf("failed to process pre-receive objects: %w", err)
}
if preReceiveObjsOut.FindOversizeFilesOutput != nil &&
len(preReceiveObjsOut.FindOversizeFilesOutput.FileInfos) > 0 {
output.Error = ptr.String("Changes blocked by files exceeding the file size limit")
printOversizeFiles(
output,
preReceiveObjsOut.FindOversizeFilesOutput.FileInfos,
preReceiveObjsOut.FindOversizeFilesOutput.Total,
sizeLimit,
)
}
if preReceiveObjsOut.FindCommitterMismatchOutput != nil &&
len(preReceiveObjsOut.FindCommitterMismatchOutput.CommitInfos) > 0 {
output.Error = ptr.String("Committer verification failed: authenticated user and committer must match")
printCommitterMismatch(
output,
preReceiveObjsOut.FindCommitterMismatchOutput.CommitInfos,
preReceiveObjsIn.FindCommitterMismatchParams.PrincipalEmail,
preReceiveObjsOut.FindCommitterMismatchOutput.Total,
)
}
return nil
}

View File

@@ -107,6 +107,7 @@ func FMTDuration(d time.Duration) string {
func printOversizeFiles(
output *hook.Output,
oversizeFiles []git.FileInfo,
total int64,
sizeLimit int64,
) {
output.Messages = append(
@@ -126,7 +127,6 @@ func printOversizeFiles(
)
}
total := len(oversizeFiles)
output.Messages = append(
output.Messages,
colorScanSummary.Sprintf(
@@ -137,6 +137,39 @@ func printOversizeFiles(
)
}
func printCommitterMismatch(
output *hook.Output,
commitInfos []git.CommitInfo,
principalEmail string,
total int64,
) {
output.Messages = append(
output.Messages,
colorScanHeader.Sprintf(
"Push contains commits where committer is not the authenticated user (%s):",
principalEmail,
),
"", // add empty line for making it visually more consumable
)
for _, info := range commitInfos {
output.Messages = append(
output.Messages,
fmt.Sprintf(" %s Committer: %s", info.SHA, info.Committer),
"", // add empty line for making it visually more consumable
)
}
output.Messages = append(
output.Messages,
colorScanSummary.Sprintf(
"%d %s found not matching the authenticated user (%s)",
total, singularOrPlural("commit", total > 1), principalEmail,
),
"", "", // add two empty lines for making it visually more consumable
)
}
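For illustration only, a hedged sketch (hypothetical SHA and e-mail values, assumed to sit in the same package) of the messages assembled for a single mismatching commit:

func exampleCommitterMismatchMessages() ([]string, error) {
	var out hook.Output
	commitSHA, err := sha.New("1234567890abcdef1234567890abcdef12345678") // hypothetical SHA
	if err != nil {
		return nil, err
	}
	printCommitterMismatch(
		&out,
		[]git.CommitInfo{{SHA: commitSHA, Committer: "someone.else@example.com"}},
		"pusher@example.com",
		1,
	)
	// out.Messages now holds the colored header, one "<sha> Committer: <email>" line,
	// and the "1 commit found not matching the authenticated user (...)" summary.
	return out.Messages, nil
}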
func singularOrPlural(noun string, plural bool) string {
if plural {
return noun + "s"

View File

@@ -46,9 +46,9 @@ func (c *Controller) ListCommits(ctx context.Context,
gitRef = repo.DefaultBranch
}
commiterRegex, err := c.contributorsRegex(ctx, filter.Committer, filter.CommitterIDs)
committerRegex, err := c.contributorsRegex(ctx, filter.Committer, filter.CommitterIDs)
if err != nil {
return types.ListCommitResponse{}, fmt.Errorf("failed create commiter regex: %w", err)
return types.ListCommitResponse{}, fmt.Errorf("failed create committer regex: %w", err)
}
authorRegex, err := c.contributorsRegex(ctx, filter.Author, filter.AuthorIDs)
@@ -60,12 +60,12 @@ func (c *Controller) ListCommits(ctx context.Context,
ReadParams: git.CreateReadParams(repo),
GitREF: gitRef,
After: filter.After,
Page: int32(filter.Page),
Limit: int32(filter.Limit),
Page: int32(filter.Page), //nolint:gosec
Limit: int32(filter.Limit), //nolint:gosec
Path: filter.Path,
Since: filter.Since,
Until: filter.Until,
Committer: commiterRegex,
Committer: committerRegex,
Author: authorRegex,
IncludeStats: filter.IncludeStats,
Regex: true,

View File

@@ -22,25 +22,37 @@ import (
// SecuritySettings represents the security related part of repository settings as exposed externally.
type SecuritySettings struct {
SecretScanningEnabled *bool `json:"secret_scanning_enabled" yaml:"secret_scanning_enabled"`
SecretScanningEnabled *bool `json:"secret_scanning_enabled" yaml:"secret_scanning_enabled"`
PrincipalCommitterMatch *bool `json:"principal_committer_match" yaml:"principal_committer_match"`
}
func GetDefaultSecuritySettings() *SecuritySettings {
return &SecuritySettings{
SecretScanningEnabled: ptr.Bool(settings.DefaultSecretScanningEnabled),
SecretScanningEnabled: ptr.Bool(settings.DefaultSecretScanningEnabled),
PrincipalCommitterMatch: ptr.Bool(settings.DefaultPrincipalCommitterMatch),
}
}
func GetSecuritySettingsMappings(s *SecuritySettings) []settings.SettingHandler {
return []settings.SettingHandler{
settings.Mapping(settings.KeySecretScanningEnabled, s.SecretScanningEnabled),
settings.Mapping(settings.KeyPrincipalCommitterMatch, s.PrincipalCommitterMatch),
}
}
func GetSecuritySettingsAsKeyValues(s *SecuritySettings) []settings.KeyValue {
kvs := make([]settings.KeyValue, 0, 1)
kvs := make([]settings.KeyValue, 0, 2)
if s.SecretScanningEnabled != nil {
kvs = append(kvs, settings.KeyValue{Key: settings.KeySecretScanningEnabled, Value: *s.SecretScanningEnabled})
}
if s.PrincipalCommitterMatch != nil {
kvs = append(kvs, settings.KeyValue{
Key: settings.KeyPrincipalCommitterMatch,
Value: s.PrincipalCommitterMatch,
})
}
return kvs
}
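For illustration only (hypothetical values), a sketch of how the new flag travels through the external JSON representation, given the json tags above:

// Sketch only: requires "encoding/json"; ptr is the gotidy/ptr package already used above.
func exampleSecuritySettingsJSON() (string, error) {
	s := SecuritySettings{
		SecretScanningEnabled:   ptr.Bool(true),
		PrincipalCommitterMatch: ptr.Bool(true),
	}
	b, err := json.Marshal(s)
	if err != nil {
		return "", err
	}
	// {"secret_scanning_enabled":true,"principal_committer_match":true}
	return string(b), nil
}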

View File

@@ -195,7 +195,7 @@ func ParseCommitFilter(r *http.Request) (*types.CommitFilter, error) {
return nil, err
}
commiterIDs, err := QueryParamListAsPositiveInt64(r, QueryParamCommitterID)
committerIDs, err := QueryParamListAsPositiveInt64(r, QueryParamCommitterID)
if err != nil {
return nil, err
}
@@ -215,7 +215,7 @@ func ParseCommitFilter(r *http.Request) (*types.CommitFilter, error) {
Since: since,
Until: until,
Committer: QueryParamOrDefault(r, QueryParamCommitter, ""),
CommitterIDs: commiterIDs,
CommitterIDs: committerIDs,
Author: QueryParamOrDefault(r, QueryParamAuthor, ""),
AuthorIDs: authorIDs,
IncludeStats: includeStats,

View File

@@ -18,10 +18,12 @@ type Key string
var (
// KeySecretScanningEnabled [bool] enables secret scanning if set to true.
KeySecretScanningEnabled Key = "secret_scanning_enabled"
DefaultSecretScanningEnabled = false
KeyFileSizeLimit Key = "file_size_limit"
DefaultFileSizeLimit = int64(1e+8) // 100 MB
KeyInstallID Key = "install_id"
DefaultInstallID = string("")
KeySecretScanningEnabled Key = "secret_scanning_enabled"
DefaultSecretScanningEnabled = false
KeyFileSizeLimit Key = "file_size_limit"
DefaultFileSizeLimit = int64(1e+8) // 100 MB
KeyInstallID Key = "install_id"
DefaultInstallID = string("")
KeyPrincipalCommitterMatch Key = "principal_committer_match"
DefaultPrincipalCommitterMatch = false
)

View File

@@ -43,26 +43,6 @@ func CatFileBatch(
repoPath string,
alternateObjectDirs []string,
flags ...command.CmdOptionFunc,
) (WriteCloserError, *bufio.Reader, func()) {
flags = append(flags, command.WithFlag("--batch"))
return catFileBatch(ctx, repoPath, alternateObjectDirs, flags...)
}
func CatFileBatchCheck(
ctx context.Context,
repoPath string,
alternateObjectDirs []string,
flags ...command.CmdOptionFunc,
) (WriteCloserError, *bufio.Reader, func()) {
flags = append(flags, command.WithFlag("--batch-check"))
return catFileBatch(ctx, repoPath, alternateObjectDirs, flags...)
}
func catFileBatch(
ctx context.Context,
repoPath string,
alternateObjectDirs []string,
flags ...command.CmdOptionFunc,
) (WriteCloserError, *bufio.Reader, func()) {
const bufferSize = 32 * 1024
// We often want to feed the commits in order into cat-file --batch,
@@ -87,6 +67,7 @@ func catFileBatch(
go func() {
stderr := bytes.Buffer{}
cmd := command.New("cat-file",
command.WithFlag("--batch"),
command.WithAlternateObjectDirs(alternateObjectDirs...),
)
cmd.Add(flags...)

View File

@@ -96,22 +96,23 @@ func (s *Service) ListLFSPointers(
repoPath := getFullPathForRepo(s.reposRoot, params.RepoUID)
var lfsInfos []LFSInfo
var candidateObjects []parser.BatchCheckObject
// first get the sha of the objects that could be lfs pointers
var objects []parser.BatchCheckObject
for _, gitObjDir := range params.AlternateObjectDirs {
objects, err := catFileBatchCheckAllObjects(ctx, repoPath, gitObjDir)
objs, err := s.listGitObjDir(ctx, repoPath, gitObjDir)
if err != nil {
return nil, err
}
objects = append(objects, objs...)
}
for _, obj := range objects {
if obj.Type == string(TreeNodeTypeBlob) && obj.Size <= lfsPointerMaxSize {
candidateObjects = append(candidateObjects, obj)
}
var candidateObjects []parser.BatchCheckObject
for _, obj := range objects {
if obj.Type == string(TreeNodeTypeBlob) && obj.Size <= lfsPointerMaxSize {
candidateObjects = append(candidateObjects, obj)
}
}
var lfsInfos []LFSInfo
if len(candidateObjects) == 0 {
return &ListLFSPointersOutput{LFSInfos: lfsInfos}, nil
}

View File

@@ -270,21 +270,8 @@ func (s *Service) GetCommitDivergences(
}, nil
}
type FindOversizeFilesParams struct {
RepoUID string
GitObjectDirs []string
SizeLimit int64
}
type FindOversizeFilesOutput struct {
FileInfos []FileInfo
}
type FileInfo struct {
SHA sha.SHA
Size int64
}
// TODO: remove. Kept for backwards compatibility.
//
//nolint:gocognit
func (s *Service) FindOversizeFiles(
ctx context.Context,
@@ -295,22 +282,22 @@ func (s *Service) FindOversizeFiles(
}
repoPath := getFullPathForRepo(s.reposRoot, params.RepoUID)
var fileInfos []FileInfo
var objects []parser.BatchCheckObject
for _, gitObjDir := range params.GitObjectDirs {
objects, err := catFileBatchCheckAllObjects(ctx, repoPath, gitObjDir)
objs, err := s.listGitObjDir(ctx, repoPath, gitObjDir)
if err != nil {
return nil, err
}
objects = append(objects, objs...)
}
for _, obj := range objects {
if obj.Type == string(TreeNodeTypeBlob) {
if obj.Size > params.SizeLimit {
fileInfos = append(fileInfos, FileInfo{
SHA: obj.SHA,
Size: obj.Size,
})
}
}
var fileInfos []FileInfo
for _, obj := range objects {
if obj.Type == string(TreeNodeTypeBlob) && obj.Size > params.SizeLimit {
fileInfos = append(fileInfos, FileInfo{
SHA: obj.SHA,
Size: obj.Size,
})
}
}
@@ -319,7 +306,7 @@ func catFileBatchCheckAllObjects(
}, nil
}
func catFileBatchCheckAllObjects(
func (s *Service) listGitObjDir(
ctx context.Context,
repoPath string,
gitObjDir string,
@@ -330,7 +317,8 @@ func catFileBatchCheckAllObjects(
// --batch-all-objects reports objects in the current repository and in all alternate directories.
// We want to report objects in the current repository only.
if err := os.Rename(gitObjDir+oldFilename, gitObjDir+newFilename); err != nil && !errors.Is(err, fs.ErrNotExist) {
if err := os.Rename(gitObjDir+oldFilename, gitObjDir+newFilename); err != nil &&
!errors.Is(err, fs.ErrNotExist) {
return nil, fmt.Errorf("failed to rename %s to %s: %w", oldFilename, newFilename, err)
}
@@ -356,7 +344,8 @@ func catFileBatchCheckAllObjects(
return nil, fmt.Errorf("failed to parse output of cat-file batch check all objects: %w", err)
}
if err := os.Rename(gitObjDir+newFilename, gitObjDir+oldFilename); err != nil && !errors.Is(err, fs.ErrNotExist) {
if err := os.Rename(gitObjDir+newFilename, gitObjDir+oldFilename); err != nil &&
!errors.Is(err, fs.ErrNotExist) {
return nil, fmt.Errorf("failed to rename %s to %s: %w", newFilename, oldFilename, err)
}

View File

@@ -62,11 +62,22 @@ type Interface interface {
CommitFiles(ctx context.Context, params *CommitFilesParams) (CommitFilesResponse, error)
MergeBase(ctx context.Context, params MergeBaseParams) (MergeBaseOutput, error)
IsAncestor(ctx context.Context, params IsAncestorParams) (IsAncestorOutput, error)
// TODO: remove. Kept for backwards compatibility.
FindOversizeFiles(
ctx context.Context,
params *FindOversizeFilesParams,
) (*FindOversizeFilesOutput, error)
/*
* Pre-receive processor
*/
ProcessPreReceiveObjects(
ctx context.Context,
params ProcessPreReceiveObjectsParams,
) (ProcessPreReceiveObjectsOutput, error)
/*
* Git Cli Service
*/

View File

@@ -0,0 +1,208 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package git
import (
"context"
"fmt"
"io"
"strings"
"github.com/harness/gitness/git/api"
"github.com/harness/gitness/git/parser"
"github.com/harness/gitness/git/sha"
)
const (
maxOversizeFiles = 10
maxCommitterMismatches = 10
)
type FindOversizeFilesParams struct {
// TODO: remove. Kept for backward compatibility
RepoUID string
GitObjectDirs []string
SizeLimit int64
}
type FileInfo struct {
SHA sha.SHA
Size int64
}
type FindOversizeFilesOutput struct {
FileInfos []FileInfo
Total int64
}
type FindCommitterMismatchParams struct {
PrincipalEmail string
}
type CommitInfo struct {
SHA sha.SHA
Committer string
}
type FindCommitterMismatchOutput struct {
CommitInfos []CommitInfo
Total int64
}
type ProcessPreReceiveObjectsParams struct {
ReadParams
FindOversizeFilesParams *FindOversizeFilesParams
FindCommitterMismatchParams *FindCommitterMismatchParams
}
type ProcessPreReceiveObjectsOutput struct {
FindOversizeFilesOutput *FindOversizeFilesOutput
FindCommitterMismatchOutput *FindCommitterMismatchOutput
}
func (s *Service) ProcessPreReceiveObjects(
ctx context.Context,
params ProcessPreReceiveObjectsParams,
) (ProcessPreReceiveObjectsOutput, error) {
repoPath := getFullPathForRepo(s.reposRoot, params.RepoUID)
var objects []parser.BatchCheckObject
for _, gitObjDir := range params.AlternateObjectDirs {
objs, err := s.listGitObjDir(ctx, repoPath, gitObjDir)
if err != nil {
return ProcessPreReceiveObjectsOutput{}, err
}
objects = append(objects, objs...)
}
var output ProcessPreReceiveObjectsOutput
if params.FindOversizeFilesParams != nil {
output.FindOversizeFilesOutput = findOversizeFiles(
objects, params.FindOversizeFilesParams,
)
}
if params.FindCommitterMismatchParams != nil {
out, err := findCommitterMismatch(
ctx,
objects,
repoPath,
params.ReadParams.AlternateObjectDirs,
params.FindCommitterMismatchParams,
)
if err != nil {
return ProcessPreReceiveObjectsOutput{}, err
}
output.FindCommitterMismatchOutput = out
}
return output, nil
}
func findOversizeFiles(
objects []parser.BatchCheckObject,
findOversizeFilesParams *FindOversizeFilesParams,
) *FindOversizeFilesOutput {
var fileInfos []FileInfo
var total int64 // counts all oversize blobs; only the first maxOversizeFiles are returned
for _, obj := range objects {
if obj.Type == string(TreeNodeTypeBlob) && obj.Size > findOversizeFilesParams.SizeLimit {
if total < maxOversizeFiles {
fileInfos = append(fileInfos, FileInfo{
SHA: obj.SHA,
Size: obj.Size,
})
}
total++
}
}
return &FindOversizeFilesOutput{
FileInfos: fileInfos,
Total: total,
}
}
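For illustration only (synthetic objects and a hypothetical 1 KiB limit), a sketch of the cap behaviour above: FileInfos is truncated at maxOversizeFiles while Total keeps counting every offender:

func exampleOversizeCap() {
	objs := make([]parser.BatchCheckObject, 0, 12)
	for i := 0; i < 12; i++ {
		objs = append(objs, parser.BatchCheckObject{
			Type: string(TreeNodeTypeBlob),
			Size: 1 << 20, // 1 MiB blob, above the 1 KiB limit below
		})
	}
	out := findOversizeFiles(objs, &FindOversizeFilesParams{SizeLimit: 1 << 10})
	fmt.Println(len(out.FileInfos), out.Total) // prints: 10 12
}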
func findCommitterMismatch(
ctx context.Context,
objects []parser.BatchCheckObject,
repoPath string,
alternateObjectDirs []string,
findCommitterEmailsMismatchParams *FindCommitterMismatchParams,
) (*FindCommitterMismatchOutput, error) {
var commitSHAs []string
for _, obj := range objects {
if obj.Type == string(TreeNodeTypeCommit) {
commitSHAs = append(commitSHAs, obj.SHA.String())
}
}
writer, reader, cancel := api.CatFileBatch(ctx, repoPath, alternateObjectDirs)
defer cancel()
defer writer.Close()
var total int64
var commitInfos []CommitInfo
for _, commitSHA := range commitSHAs {
_, writeErr := writer.Write([]byte(commitSHA + "\n"))
if writeErr != nil {
return nil, fmt.Errorf("failed to write to cat-file batch: %w", writeErr)
}
output, err := api.ReadBatchHeaderLine(reader)
if err != nil {
return nil, fmt.Errorf("failed to read cat-file batch header: %w", err)
}
limitedReader := io.LimitReader(reader, output.Size+1) // plus eol
data, err := io.ReadAll(limitedReader)
if err != nil {
return nil, fmt.Errorf("failed to read: %w", err)
}
text := strings.Split(string(data), "\n")
for _, line := range text {
if !strings.HasPrefix(line, "committer ") {
continue
}
committerEmail := line[strings.Index(line, "<")+1 : strings.Index(line, ">")]
if !strings.EqualFold(committerEmail, findCommitterEmailsMismatchParams.PrincipalEmail) {
if total < maxCommitterMismatches {
sha, err := sha.New(commitSHA)
if err != nil {
return nil, fmt.Errorf("failed to create new sha: %w", err)
}
commitInfos = append(commitInfos, CommitInfo{
SHA: sha,
Committer: committerEmail,
})
}
total++
}
break
}
}
return &FindCommitterMismatchOutput{
CommitInfos: commitInfos,
Total: total,
}, nil
}
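The committer check relies on the raw commit header emitted by git cat-file --batch, which carries a line of the form "committer Name <email> <timestamp> <timezone>". For illustration only, a standalone sketch (hypothetical helper, not part of this change) of the e-mail extraction performed above, with a guard for malformed lines:

// committerEmailFromHeader is a hypothetical helper illustrating the parsing above.
func committerEmailFromHeader(line string) (string, bool) {
	if !strings.HasPrefix(line, "committer ") {
		return "", false
	}
	start := strings.Index(line, "<")
	end := strings.Index(line, ">")
	if start < 0 || end <= start {
		return "", false // malformed or missing e-mail
	}
	return line[start+1 : end], true
}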