Code Suggestions - Backend Support (#2006)

Johannes Batzill 2024-04-26 16:54:50 +00:00 committed by Harness
parent 86537b2c39
commit 9e7f80ebe0
38 changed files with 3286 additions and 263 deletions

View File

@ -0,0 +1,352 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pullreq
import (
"context"
"fmt"
"strings"
"time"
apiauth "github.com/harness/gitness/app/api/auth"
"github.com/harness/gitness/app/api/controller"
"github.com/harness/gitness/app/api/usererror"
"github.com/harness/gitness/app/auth"
"github.com/harness/gitness/app/bootstrap"
"github.com/harness/gitness/app/services/protection"
"github.com/harness/gitness/contextutil"
"github.com/harness/gitness/git"
"github.com/harness/gitness/git/sha"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/gotidy/ptr"
"github.com/rs/zerolog/log"
)
type SuggestionReference struct {
CommentID int64 `json:"comment_id"`
CheckSum string `json:"check_sum"`
}
func (e *SuggestionReference) sanitize() error {
if e.CommentID <= 0 {
return usererror.BadRequest("Comment ID has to be a positive number.")
}
e.CheckSum = strings.TrimSpace(e.CheckSum)
if e.CheckSum == "" {
return usererror.BadRequest("Check sum has to be provided.")
}
return nil
}
type CommentApplySuggestionsInput struct {
Suggestions []SuggestionReference `json:"suggestions"`
Title string `json:"title"`
Message string `json:"message"`
DryRunRules bool `json:"dry_run_rules"`
BypassRules bool `json:"bypass_rules"`
}
func (i *CommentApplySuggestionsInput) sanitize() error {
if len(i.Suggestions) == 0 {
return usererror.BadRequest("No suggestions provided.")
}
for _, suggestion := range i.Suggestions {
if err := suggestion.sanitize(); err != nil {
return err
}
}
// cleanup title / message (NOTE: git doesn't support white space only)
i.Title = strings.TrimSpace(i.Title)
i.Message = strings.TrimSpace(i.Message)
return nil
}
type CommentApplySuggestionsOutput struct {
CommitID string `json:"commit_id"`
DryRunRules bool `json:"dry_run_rules,omitempty"`
RuleViolations []types.RuleViolations `json:"rule_violations,omitempty"`
}
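For orientation, a request to the apply-suggestions endpoint (registered further below under /comments/apply-suggestions) decodes into the input type above. A minimal sketch with purely illustrative values:
// Sketch only - the comment ID and check sum are hypothetical placeholder values.
in := CommentApplySuggestionsInput{
	Suggestions: []SuggestionReference{
		{CommentID: 42, CheckSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb"},
	},
	Title:   "Apply code review suggestions",
	Message: "Applied suggestion from review.",
}
_ = in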
// CommentApplySuggestions applies suggestions for code comments.
//
//nolint:gocognit,gocyclo,cyclop
func (c *Controller) CommentApplySuggestions(
ctx context.Context,
session *auth.Session,
repoRef string,
prNum int64,
in *CommentApplySuggestionsInput,
) (CommentApplySuggestionsOutput, []types.RuleViolations, error) {
repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf("failed to acquire access to repo: %w", err)
}
pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf("failed to find pull request by number: %w", err)
}
if err := in.sanitize(); err != nil {
return CommentApplySuggestionsOutput{}, nil, err
}
// verify branch rules
isRepoOwner, err := apiauth.IsRepoOwner(ctx, c.authorizer, session, repo)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf("failed to determine if user is repo owner: %w", err)
}
protectionRules, err := c.protectionManager.ForRepository(ctx, repo.ID)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf(
"failed to fetch protection rules for the repository: %w", err)
}
violations, err := protectionRules.RefChangeVerify(ctx, protection.RefChangeVerifyInput{
Actor: &session.Principal,
AllowBypass: in.BypassRules,
IsRepoOwner: isRepoOwner,
Repo: repo,
RefAction: protection.RefActionUpdate,
RefType: protection.RefTypeBranch,
RefNames: []string{pr.SourceBranch},
})
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf("failed to verify protection rules: %w", err)
}
if in.DryRunRules {
return CommentApplySuggestionsOutput{
DryRunRules: true,
RuleViolations: violations,
}, nil, nil
}
if protection.IsCritical(violations) {
return CommentApplySuggestionsOutput{}, violations, nil
}
actions := []git.CommitFileAction{}
type activityUpdate struct {
act *types.PullReqActivity
resolve bool
checksum string
}
activityUpdates := map[int64]activityUpdate{}
// cache file SHAs to reduce the number of git calls (use the commit SHA as some code comments can be temporarily out of sync)
getFileSHAKey := func(commitID string, path string) string { return commitID + ":" + path }
fileSHACache := map[string]sha.SHA{}
for _, suggestionEntry := range in.Suggestions {
activity, err := c.getCommentForPR(ctx, pr, suggestionEntry.CommentID)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf(
"failed to find activity %d: %w", suggestionEntry.CommentID, err)
}
var ccActivity *types.PullReqActivity
if activity.IsValidCodeComment() {
ccActivity = activity
} else if activity.ParentID != nil {
parentActivity, err := c.activityStore.Find(ctx, *activity.ParentID)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf(
"failed to find parent activity %d: %w", *activity.ParentID, err)
}
if parentActivity.IsValidCodeComment() {
ccActivity = parentActivity
}
}
if ccActivity == nil {
return CommentApplySuggestionsOutput{}, nil, usererror.BadRequest(
"Only code comments or replies on code comments support applying suggestions.")
}
// code comment can't be part of multiple suggestions being applied
if _, ok := activityUpdates[ccActivity.ID]; ok {
return CommentApplySuggestionsOutput{}, nil, usererror.BadRequestf(
"Code comment %d is part of multiple suggestions being applied.",
ccActivity.ID,
)
}
// retrieve and verify code comment data
cc := ccActivity.AsCodeComment()
if cc.Outdated {
return CommentApplySuggestionsOutput{}, nil, usererror.BadRequest(
"Suggestions by outdated code comments cannot be applied.")
}
// retrieve and verify code comment payload
payload, err := ccActivity.GetPayload()
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf(
"failed to get payload of related code comment activity %d: %w", ccActivity.ID, err)
}
ccPayload, ok := payload.(*types.PullRequestActivityPayloadCodeComment)
if !ok {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf(
"provided code comment activity %d has payload of wrong type %T", ccActivity.ID, payload)
}
if !ccPayload.LineStartNew || !ccPayload.LineEndNew {
return CommentApplySuggestionsOutput{}, nil, usererror.BadRequest(
"Only suggestions on the PR source branch can be applied.")
}
suggestions := parseSuggestions(activity.Text)
var suggestionToApply *suggestion
for i := range suggestions {
if strings.EqualFold(suggestions[i].checkSum, suggestionEntry.CheckSum) {
suggestionToApply = &suggestions[i]
break
}
}
if suggestionToApply == nil {
return CommentApplySuggestionsOutput{}, nil, usererror.NotFoundf(
"No suggestion found for activity %d that matches check sum %q.",
suggestionEntry.CommentID,
suggestionEntry.CheckSum,
)
}
// use the file SHA for optimistic locking on the file to avoid race conditions.
fileSHAKey := getFileSHAKey(cc.SourceSHA, cc.Path)
fileSHA, ok := fileSHACache[fileSHAKey]
if !ok {
node, err := c.git.GetTreeNode(ctx, &git.GetTreeNodeParams{
ReadParams: git.CreateReadParams(repo),
GitREF: cc.SourceSHA,
Path: cc.Path,
IncludeLatestCommit: false,
})
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf(
"failed to read tree node for commit %q path %q: %w",
cc.SourceSHA,
cc.Path,
err,
)
}
// TODO: git api should return sha.SHA type
fileSHA = sha.Must(node.Node.SHA)
fileSHACache[fileSHAKey] = fileSHA
}
// add suggestion to actions
actions = append(actions,
git.CommitFileAction{
Action: git.PatchTextAction,
Path: cc.Path,
SHA: fileSHA,
Payload: []byte(fmt.Sprintf(
"%d:%d\u0000%s",
cc.LineNew,
cc.LineNew+cc.SpanNew,
suggestionToApply.code,
)),
})
activityUpdates[activity.ID] = activityUpdate{
act: activity,
checksum: suggestionToApply.checkSum,
resolve: ccActivity == activity,
}
if ccActivity != activity {
activityUpdates[ccActivity.ID] = activityUpdate{
act: ccActivity,
resolve: true,
}
}
}
// we want to complete the operation independent of the request being canceled - start with a new, time-restricted context.
// TODO: This is a small change to reduce the likelihood of dirty state (e.g. git work done but db update canceled).
// We still require a proper solution to handle an application crash or very slow execution times.
const timeout = 1 * time.Minute
ctx, cancel := context.WithTimeout(
contextutil.WithNewValues(context.Background(), ctx),
timeout,
)
defer cancel()
// Create internal write params. Note: This will skip the pre-commit protection rules check.
writeParams, err := controller.CreateRPCInternalWriteParams(ctx, c.urlProvider, session, repo)
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf("failed to create RPC write params: %w", err)
}
// backfill title if not provided (keeping it basic for now, user can provide more detailed title)
if in.Title == "" {
in.Title = "Apply code review suggestions"
}
now := time.Now()
commitOut, err := c.git.CommitFiles(ctx, &git.CommitFilesParams{
WriteParams: writeParams,
Title: in.Title,
Message: in.Message,
Branch: pr.SourceBranch,
Committer: identityFromPrincipalInfo(*bootstrap.NewSystemServiceSession().Principal.ToPrincipalInfo()),
CommitterDate: &now,
Author: identityFromPrincipalInfo(*session.Principal.ToPrincipalInfo()),
AuthorDate: &now,
Actions: actions,
})
if err != nil {
return CommentApplySuggestionsOutput{}, nil, fmt.Errorf("failed to commit changes: %w", err)
}
// update activities (use UpdateOptLock as it can race with comment migration)
resolved := ptr.Of(now.UnixMilli())
resolvedBy := &session.Principal.ID
for _, update := range activityUpdates {
_, err = c.activityStore.UpdateOptLock(ctx, update.act, func(act *types.PullReqActivity) error {
if update.resolve {
act.Resolved = resolved
act.ResolvedBy = resolvedBy
}
if update.checksum != "" {
act.UpdateMetadata(types.WithPullReqActivitySuggestionsMetadataUpdate(
func(s *types.PullReqActivitySuggestionsMetadata) {
s.AppliedCheckSum = update.checksum
s.AppliedCommitSHA = commitOut.CommitID.String()
}))
}
return nil
})
if err != nil {
// best effort - commit already happened
log.Ctx(ctx).Warn().Err(err).Msgf("failed to update activity %d after applying suggestions", update.act.ID)
}
}
return CommentApplySuggestionsOutput{
CommitID: commitOut.CommitID.String(),
RuleViolations: violations,
}, nil, nil
}
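For reference, the payload assembled above for each PatchTextAction is the new-side line range followed by a NUL byte and the suggested code. A standalone sketch with hypothetical values (a comment anchored at new-side line 10 with a span of 3, replaced by a one-line suggestion):
// Mirrors the fmt.Sprintf("%d:%d\u0000%s", ...) call above; all values are illustrative.
lineNew, spanNew := 10, 3
suggestedCode := "return fmt.Errorf(\"failed: %w\", err)"
payload := []byte(fmt.Sprintf("%d:%d\u0000%s", lineNew, lineNew+spanNew, suggestedCode))
// payload encodes "10:13<NUL><code>"; together with Path and the file SHA it tells
// the git layer which line range of which blob to patch.
_ = payload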

View File

@ -107,17 +107,32 @@ func (c *Controller) CommentCreate(
return nil, fmt.Errorf("failed to find pull request by number: %w", err)
}
var parentAct *types.PullReqActivity
if in.IsReply() {
parentAct, err = c.checkIsReplyable(ctx, pr, in.ParentID)
if err != nil {
return nil, fmt.Errorf("failed to verify reply: %w", err)
}
}
// fetch code snippet from git for code comments
var cut git.DiffCutOutput
if in.IsCodeComment() {
// fetch code snippet from git for code comments
cut, err = c.fetchDiffCut(ctx, repo, in)
if err != nil {
return nil, err
}
}
var act *types.PullReqActivity
// generate all metadata updates
var metadataUpdates []types.PullReqActivityMetadataUpdate
// suggestion metadata in case of code comments or code comment replies (don't restrict to either side for now).
if in.IsCodeComment() || (in.IsReply() && parentAct.IsValidCodeComment()) {
metadataUpdates = appendMetadataUpdateForSuggestions(metadataUpdates, in.Text)
}
var act *types.PullReqActivity
err = controller.TxOptLock(ctx, c.tx, func(ctx context.Context) error {
var err error
@ -129,7 +144,7 @@ func (c *Controller) CommentCreate(
}
}
act = getCommentActivity(session, pr, in)
act = getCommentActivity(session, pr, in, metadataUpdates)
// In the switch the pull request activity (the code comment)
// is written to the DB (as a code comment, a reply, or an ordinary comment).
@ -146,13 +161,6 @@ func (c *Controller) CommentCreate(
err = c.writeActivity(ctx, pr, act)
case in.IsReply():
var parentAct *types.PullReqActivity
parentAct, err = c.checkIsReplyable(ctx, pr, in.ParentID)
if err != nil {
return err
}
act.ParentID = &parentAct.ID
act.Kind = parentAct.Kind
_ = act.SetPayload(types.PullRequestActivityPayloadComment{})
@ -272,7 +280,12 @@ func (c *Controller) writeReplyActivity(ctx context.Context, parent, act *types.
return nil
}
func getCommentActivity(session *auth.Session, pr *types.PullReq, in *CommentCreateInput) *types.PullReqActivity {
func getCommentActivity(
session *auth.Session,
pr *types.PullReq,
in *CommentCreateInput,
metadataUpdates []types.PullReqActivityMetadataUpdate,
) *types.PullReqActivity {
now := time.Now().UnixMilli()
act := &types.PullReqActivity{
ID: 0, // Will be populated in the data layer
@ -297,6 +310,8 @@ func getCommentActivity(session *auth.Session, pr *types.PullReq, in *CommentCre
Author: *session.Principal.ToPrincipalInfo(),
}
act.UpdateMetadata(metadataUpdates...)
return act
}
@ -321,6 +336,10 @@ func (c *Controller) fetchDiffCut(
repo *types.Repository,
in *CommentCreateInput,
) (git.DiffCutOutput, error) {
// maxDiffLineCount restricts the total length of a code comment diff to 1000 lines.
// TODO: This can still lead to wrong code comments in cases like a large file being replaced by one line.
const maxDiffLineCount = 1000
cut, err := c.git.DiffCut(ctx, &git.DiffCutParams{
ReadParams: git.ReadParams{RepoUID: repo.GitUID},
SourceCommitSHA: in.SourceCommitSHA,
@ -330,6 +349,7 @@ func (c *Controller) fetchDiffCut(
LineStartNew: in.LineStartNew,
LineEnd: in.LineEnd,
LineEndNew: in.LineEndNew,
LineLimit: maxDiffLineCount,
})
if errors.AsStatus(err) == errors.StatusNotFound {
return git.DiffCutOutput{}, usererror.BadRequest(errors.Message(err))
@ -391,3 +411,24 @@ func (c *Controller) reportCommentCreated(
IsReply: isReply,
})
}
func appendMetadataUpdateForSuggestions(
updates []types.PullReqActivityMetadataUpdate,
comment string,
) []types.PullReqActivityMetadataUpdate {
suggestions := parseSuggestions(comment)
if len(suggestions) == 0 {
return updates
}
return append(
updates,
types.WithPullReqActivitySuggestionsMetadataUpdate(
func(m *types.PullReqActivitySuggestionsMetadata) {
m.CheckSums = make([]string, len(suggestions))
for i := range suggestions {
m.CheckSums[i] = suggestions[i].checkSum
}
}),
)
}

View File

@ -71,10 +71,28 @@ func (c *Controller) CommentUpdate(
return act, nil
}
// fetch parent activity
var parentAct *types.PullReqActivity
if act.IsReply() {
parentAct, err = c.activityStore.Find(ctx, *act.ParentID)
if err != nil {
return nil, fmt.Errorf("failed to find parent pull request activity: %w", err)
}
}
// generate all metadata updates
var metadataUpdates []types.PullReqActivityMetadataUpdate
// suggestion metadata in case of code comments or code comment replies (don't restrict to either side for now).
if act.IsValidCodeComment() || (act.IsReply() && parentAct.IsValidCodeComment()) {
metadataUpdates = appendMetadataUpdateForSuggestions(metadataUpdates, in.Text)
}
act, err = c.activityStore.UpdateOptLock(ctx, act, func(act *types.PullReqActivity) error {
now := time.Now().UnixMilli()
act.Edited = now
act.Text = in.Text
act.UpdateMetadata(metadataUpdates...)
return nil
})
if err != nil {

View File

@ -160,8 +160,10 @@ func (c *Controller) getRepoCheckAccess(ctx context.Context,
return repo, nil
}
func (c *Controller) getCommentCheckModifyAccess(ctx context.Context,
pr *types.PullReq, commentID int64,
func (c *Controller) getCommentForPR(
ctx context.Context,
pr *types.PullReq,
commentID int64,
) (*types.PullReqActivity, error) {
if commentID <= 0 {
return nil, usererror.BadRequest("A valid comment ID must be provided.")
@ -194,7 +196,7 @@ func (c *Controller) getCommentCheckModifyAccess(ctx context.Context,
func (c *Controller) getCommentCheckEditAccess(ctx context.Context,
session *auth.Session, pr *types.PullReq, commentID int64,
) (*types.PullReqActivity, error) {
comment, err := c.getCommentCheckModifyAccess(ctx, pr, commentID)
comment, err := c.getCommentForPR(ctx, pr, commentID)
if err != nil {
return nil, err
}
@ -209,7 +211,7 @@ func (c *Controller) getCommentCheckEditAccess(ctx context.Context,
func (c *Controller) getCommentCheckChangeStatusAccess(ctx context.Context,
pr *types.PullReq, commentID int64,
) (*types.PullReqActivity, error) {
comment, err := c.getCommentCheckModifyAccess(ctx, pr, commentID)
comment, err := c.getCommentForPR(ctx, pr, commentID)
if err != nil {
return nil, err
}

View File

@ -0,0 +1,201 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pullreq
import (
"crypto/sha256"
"fmt"
"strings"
)
type suggestion struct {
checkSum string
code string
}
// parseSuggestions parses the provided string for any markdown code blocks that are suggestions.
func parseSuggestions(s string) []suggestion {
const languageSuggestion = "suggestion"
out := []suggestion{}
for len(s) > 0 {
code, language, remaining, found := findNextMarkdownCodeBlock(s)
// always update s to the remainder
s = remaining
if !found {
break
}
if !strings.EqualFold(language, languageSuggestion) {
continue
}
out = append(out,
suggestion{
checkSum: hashCodeBlock(code),
code: code,
},
)
}
return out
}
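A quick usage sketch of the parser above, with a hypothetical review comment containing one suggestion block:
comment := "Consider simplifying this:\n``` suggestion\nreturn nil\n```"
for _, s := range parseSuggestions(comment) {
	// s.code holds the normalized block content ("return nil") and s.checkSum its
	// SHA-256 hex digest, which clients later pass to the apply-suggestions endpoint.
	fmt.Println(s.checkSum, s.code)
}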
func hashCodeBlock(s string) string {
return fmt.Sprintf("%x", sha256.Sum256([]byte(s)))
}
// findNextMarkdownCodeBlock finds a code block in markdown.
// NOTE: In the future we might want to use a proper markdown parser.
func findNextMarkdownCodeBlock(s string) (code string, language string, remaining string, found bool) {
// find fenced code block header
var startSequence string
s = foreachLine(s, func(line string) bool {
line, ok := trimMarkdownWhitespace(line)
if !ok {
return true
}
// try to find start sequence of a fenced code block (```+ or ~~~+)
startSequence, line = cutLongestPrefix(line, '~')
if len(startSequence) < 3 {
startSequence, line = cutLongestPrefix(line, '`')
if len(startSequence) < 3 {
// no code block prefix found in this line
return true
}
if strings.Contains(line, "`") {
// any single backtick in the same line breaks a ``` code block opening
return true
}
}
language = strings.TrimSpace(line)
return false
})
if len(startSequence) == 0 {
return "", "", "", false
}
// parse the code block
codeBuilder := strings.Builder{}
linesAdded := 0
addLineToCode := func(line string) {
// To normalize we:
// - always use LF line ending
// - strip any line ending from last line
//
// e.g. "```suggestion\n```" is the same as "```suggestion\n" is the same as "```suggestion"
//
// This ensures similar result with and without end markers for fenced code blocks,
// and gives the user control on adding new lines at the end of the file.
if linesAdded > 0 {
codeBuilder.WriteByte('\n')
}
linesAdded++
codeBuilder.WriteString(line)
}
s = foreachLine(s, func(line string) bool {
// keep original line for appending it to code block if required
originalLine := line
line, ok := trimMarkdownWhitespace(line)
if !ok {
addLineToCode(originalLine)
return true
}
if !strings.HasPrefix(line, startSequence) {
addLineToCode(originalLine)
return true
}
_, line = cutLongestPrefix(line, rune(startSequence[0])) // any number of marker chars equal to or greater than the start sequence works
line = strings.TrimSpace(line) // spaces are fine
if len(line) > 0 {
// end of fenced code block can't contain anything else but spaces
addLineToCode(originalLine)
return true
}
return false
})
return codeBuilder.String(), language, s, true
}
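To illustrate the contract of findNextMarkdownCodeBlock (hypothetical input):
code, lang, rest, found := findNextMarkdownCodeBlock("intro\n``` suggestion\nx := 1\n```\ntail")
// found == true, lang == "suggestion", code == "x := 1", rest == "tail";
// calling it again on rest finds no further block.
_, _, _, _ = code, lang, rest, found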
// trimMarkdownWhitespace returns the provided line with any leading whitespace removed.
// If the whitespace makes it an indented code block line, false is returned.
func trimMarkdownWhitespace(line string) (string, bool) {
// remove any leading spaces
prefix, updatedLine := cutLongestPrefix(line, ' ')
if len(prefix) >= 4 {
// line is considered a code line by indentation
return line, false
}
// check for leading tabs (doesn't matter how many)
if strings.HasPrefix(updatedLine, "\t") {
// line is considered a code line by indentation
return line, false
}
return updatedLine, true
}
// foreachLine iterates over the provided string and calls the process function for each line.
// If process returns false, or the scan reaches the end of the lines, the scanning stops.
// The function returns the remaining text of s.
func foreachLine(s string, process func(line string) bool) string {
for len(s) > 0 {
line, remaining, _ := strings.Cut(s, "\n")
// always update s to the remaining string
s = remaining
// handle CRLF
if lineLen := len(line); lineLen > 0 && line[lineLen-1] == '\r' {
line = line[:lineLen-1]
}
if !process(line) {
return s
}
}
return s
}
// cutLongestPrefix returns the longest prefix of repeating 'c' together with the remainder of the string.
func cutLongestPrefix(s string, c rune) (string, string) {
if len(s) == 0 {
return "", ""
}
i := strings.IndexFunc(s, func(r rune) bool { return r != c })
if i < 0 {
// no character found that's different from the provided rune!
return s, ""
}
return s[:i], s[i:]
}
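The two low-level helpers behave as follows (illustrative inputs):
prefix, rest := cutLongestPrefix("~~~~ suggestion", '~') // prefix == "~~~~", rest == " suggestion"
line, ok := trimMarkdownWhitespace("    indented code")  // ok == false: four leading spaces mark an indented code block
_, _, _, _ = prefix, rest, line, ok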

View File

@ -0,0 +1,417 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pullreq
import (
"reflect"
"testing"
)
func Test_parseSuggestions(t *testing.T) {
tests := []struct {
name string
arg string
want []suggestion
}{
{
name: "test empty",
arg: "",
want: []suggestion{},
},
{
name: "test no code block",
arg: "a\nb",
want: []suggestion{},
},
{
name: "test indented code block",
arg: " a\nb",
want: []suggestion{},
},
{
name: "test not enough fence markers (`)",
arg: "`` suggestion\nb",
want: []suggestion{},
},
{
name: "test not enough fence markers (~)",
arg: "~~ suggestion\nb",
want: []suggestion{},
},
{
name: "test indented fences start marker (` with space)",
arg: " ``` suggestion\nb",
want: []suggestion{},
},
{
name: "test indented fence start marker (~ with tab)",
arg: "\t~~~ suggestion\nb",
want: []suggestion{},
},
{
name: "test indented fence end marker (` with space)",
arg: "``` suggestion\na\n ```\n",
want: []suggestion{
{
checkSum: "6e0f2a7504f8e96c862c0f963faea994e527bd32a1c5c2c79acbf6baf57854e7",
code: "a\n ```",
},
},
},
{
name: "test indented fence end marker (~ with tab)",
arg: "~~~ suggestion\na\n\t~~~\n",
want: []suggestion{
{
checkSum: "f5b959e235539ff7c9d2a687a1a5d05fa0c15e325dc50c83947c9d27c9d4fddf",
code: "a\n\t~~~",
},
},
},
{
name: "test fence marker with invalid char (` with `)",
arg: "``` suggestion `\nb",
want: []suggestion{},
},
{
name: "test wrong language (`)",
arg: "``` abc\nb",
want: []suggestion{},
},
{
name: "test wrong language (~)",
arg: "~~~ abc\nb",
want: []suggestion{},
},
{
name: "test language prefix (`)",
arg: "``` suggestions\nb",
want: []suggestion{},
},
{
name: "test suggestion empty without code or endmarker (`)",
arg: "``` suggestion",
want: []suggestion{
{
checkSum: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
code: "",
},
},
},
{
name: "test suggestion empty without endmarker (`)",
arg: "``` suggestion\n",
want: []suggestion{
{
checkSum: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
code: "",
},
},
},
{
name: "test suggestion empty with endmarker (~)",
arg: "~~~ suggestion\n~~~",
want: []suggestion{
{
checkSum: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
code: "",
},
},
},
{
name: "test suggestion newline only without endmarker (`)",
arg: "``` suggestion\n\n",
want: []suggestion{
{
checkSum: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
code: "", // first \n is for end of header, second \n is for beginning of trailer
},
},
},
{
name: "test suggestion newline only without endmarker (`)",
arg: "``` suggestion\n\n\n",
want: []suggestion{
{
checkSum: "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
code: "\n", // first \n is for end of header, second \n is for beginning of trailer
},
},
},
{
name: "test suggestion without end and line without newline (`)",
arg: "``` suggestion\na",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion without end and line with newline (~)",
arg: "~~~ suggestion\na\n",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion with wrong end (`)",
arg: "``` suggestion\na\n~~~",
want: []suggestion{
{
checkSum: "862bb949147c31270cce026205482a2fcd797047a5fdc01a0baa1bdfaf136386",
code: "a\n~~~",
},
},
},
{
name: "test suggestion with wrong end (~)",
arg: "~~~ suggestion\na\n```\n",
want: []suggestion{
{
checkSum: "83380dbcc26319cbdbb70c1ad11480c464cf731560a5d645afb727da33930611",
code: "a\n```",
},
},
},
{
name: "test suggestion with not enough endmarker (`)",
arg: "``` suggestion\na\n``\n",
want: []suggestion{
{
checkSum: "5b2b3107a7cceac969684464f39d300c8d1480b5fa300bc1b222c8e21db6c757",
code: "a\n``",
},
},
},
{
name: "test suggestion with not enough endmarker (~, more than 3)",
arg: "~~~~ suggestion\na\n~~~",
want: []suggestion{
{
checkSum: "862bb949147c31270cce026205482a2fcd797047a5fdc01a0baa1bdfaf136386",
code: "a\n~~~",
},
},
},
{
name: "test suggestion with trailing invalid chars on endmarker (`)",
arg: "``` suggestion\na\n```a\n",
want: []suggestion{
{
checkSum: "bd87ec5a4beda93c6912f0b786556f3a7c30772222dc65c104e2e60770492339",
code: "a\n```a",
},
},
},
{
name: "test suggestion with trailing invalid chars on endmarker (~)",
arg: "~~~ suggestion\na\n~~~a",
want: []suggestion{
{
checkSum: "07333da4c8a7348e4acd1a06566bb82d1a5f2a963e126158842308ec8b1d68f0",
code: "a\n~~~a",
},
},
},
{
name: "test basic suggestion with text around(`)",
arg: "adb\n``` suggestion\na\n```\nawef\n2r3",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test basic suggestion with text around(~)",
arg: "adb\n~~~ suggestion\na\n~~~\nawef\n2r3",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion with spaces in markers (~)",
arg: " ~~~ \t\tsuggestion \t\na\n ~~~ \t ",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion with spaces in markers (`, more than 3)",
arg: " ```` \t suggestion \t\t \na\n ```` \t ",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion with too many end marker chars (`)",
arg: "``` suggestion\na\n`````````",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion with too many end marker chars (`)",
arg: "~~~~~ suggestion\na\n~~~~~~~~~~~~",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
},
},
{
name: "test suggestion that contains opposite marker (`)",
arg: "``` suggestion\n~~~ suggestion\na\n~~~\n```",
want: []suggestion{
{
checkSum: "ed25a1606bf819448bf7e76fce9dbd2897fa5a379b67be74b5819ee521455783",
code: "~~~ suggestion\na\n~~~",
},
},
},
{
name: "test suggestion that contains opposite marker (~)",
arg: "~~~ suggestion\n``` suggestion\na\n```\n~~~",
want: []suggestion{
{
checkSum: "2463ad212ec8179e1f4d2a9ac35349b02e46bcba2173f6b05c9f73dfb4ca7ed9",
code: "``` suggestion\na\n```",
},
},
},
{
name: "test suggestion that contains shorter marker (`)",
arg: "```` suggestion\n``` suggestion\na\n```\n````",
want: []suggestion{
{
checkSum: "2463ad212ec8179e1f4d2a9ac35349b02e46bcba2173f6b05c9f73dfb4ca7ed9",
code: "``` suggestion\na\n```",
},
},
},
{
name: "test suggestion that contains shorter marker (~)",
arg: "~~~~ suggestion\n~~~ suggestion\na\n~~~\n~~~~",
want: []suggestion{
{
checkSum: "ed25a1606bf819448bf7e76fce9dbd2897fa5a379b67be74b5819ee521455783",
code: "~~~ suggestion\na\n~~~",
},
},
},
{
name: "test multiple suggestions same marker (`)",
arg: "``` suggestion\na\n```\nsomething``\n``` suggestion\nb\n```",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
{
checkSum: "3e23e8160039594a33894f6564e1b1348bbd7a0088d42c4acb73eeaed59c009d",
code: "b",
},
},
},
{
name: "test multiple suggestions same marker (~)",
arg: "~~~ suggestion\na\n~~~\nsomething~~\n~~~ suggestion\nb\n~~~",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
{
checkSum: "3e23e8160039594a33894f6564e1b1348bbd7a0088d42c4acb73eeaed59c009d",
code: "b",
},
},
},
{
name: "test multiple suggestions different markder (`,~)",
arg: "``` suggestion\na\n```\nsomething~~\n~~~ suggestion\nb\n~~~",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
{
checkSum: "3e23e8160039594a33894f6564e1b1348bbd7a0088d42c4acb73eeaed59c009d",
code: "b",
},
},
},
{
name: "test multiple suggestions last not ending (`,~)",
arg: "``` suggestion\na\n```\nsomething~~\n~~~ suggestion\nb\n~~~\n\n``` suggestion\nc",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
{
checkSum: "3e23e8160039594a33894f6564e1b1348bbd7a0088d42c4acb73eeaed59c009d",
code: "b",
},
{
checkSum: "2e7d2c03a9507ae265ecf5b5356885a53393a2029d241394997265a1a25aefc6",
code: "c",
},
},
},
{
name: "test with crlf and multiple (`,~)",
arg: "abc\n``` suggestion\r\na\n```\r\n~~~ suggestion\nb\r\n~~~",
want: []suggestion{
{
checkSum: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
code: "a",
},
{
checkSum: "3e23e8160039594a33894f6564e1b1348bbd7a0088d42c4acb73eeaed59c009d",
code: "b",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := parseSuggestions(tt.arg)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("parseSuggestions() = %s, want %s", got, tt.want)
}
})
}
}

View File

@ -62,12 +62,7 @@ func (c *Controller) CommitFiles(ctx context.Context,
repoRef string,
in *CommitFilesOptions,
) (types.CommitFilesResponse, []types.RuleViolations, error) {
requiredPermission := enum.PermissionRepoPush
if in.DryRunRules {
requiredPermission = enum.PermissionRepoView
}
repo, err := c.getRepoCheckAccess(ctx, session, repoRef, requiredPermission, false)
repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush, false)
if err != nil {
return types.CommitFilesResponse{}, nil, err
}

View File

@ -0,0 +1,62 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pullreq
import (
"encoding/json"
"net/http"
"github.com/harness/gitness/app/api/controller/pullreq"
"github.com/harness/gitness/app/api/render"
"github.com/harness/gitness/app/api/request"
)
func HandleCommentApplySuggestions(pullreqCtrl *pullreq.Controller) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
session, _ := request.AuthSessionFrom(ctx)
repoRef, err := request.GetRepoRefFromPath(r)
if err != nil {
render.TranslatedUserError(ctx, w, err)
return
}
pullreqNumber, err := request.GetPullReqNumberFromPath(r)
if err != nil {
render.TranslatedUserError(ctx, w, err)
return
}
in := new(pullreq.CommentApplySuggestionsInput)
err = json.NewDecoder(r.Body).Decode(in)
if err != nil {
render.BadRequestf(ctx, w, "Invalid Request Body: %s.", err)
return
}
out, violations, err := pullreqCtrl.CommentApplySuggestions(ctx, session, repoRef, pullreqNumber, in)
if err != nil {
render.TranslatedUserError(ctx, w, err)
return
}
if violations != nil {
render.Violations(w, violations)
return
}
render.JSON(w, http.StatusCreated, out)
}
}

View File

@ -71,6 +71,11 @@ type commentCreatePullReqRequest struct {
pullreq.CommentCreateInput
}
type commentApplySuggestionsRequest struct {
pullReqRequest
pullreq.CommentApplySuggestionsInput
}
type pullReqCommentRequest struct {
pullReqRequest
ID int64 `path:"pullreq_comment_id"`
@ -436,6 +441,19 @@ func pullReqOperations(reflector *openapi3.Reflector) {
_ = reflector.Spec.AddOperation(http.MethodPut,
"/repos/{repo_ref}/pullreq/{pullreq_number}/comments/{pullreq_comment_id}/status", commentStatusPullReq)
commentApplySuggestions := openapi3.Operation{}
commentApplySuggestions.WithTags("pullreq")
commentApplySuggestions.WithMapOfAnything(map[string]interface{}{"operationId": "commentApplySuggestions"})
_ = reflector.SetRequest(&commentApplySuggestions, new(commentApplySuggestionstRequest), http.MethodPost)
_ = reflector.SetJSONResponse(&commentApplySuggestions, new(pullreq.CommentApplySuggestionsOutput), http.StatusOK)
_ = reflector.SetJSONResponse(&commentApplySuggestions, new(usererror.Error), http.StatusBadRequest)
_ = reflector.SetJSONResponse(&commentApplySuggestions, new(usererror.Error), http.StatusInternalServerError)
_ = reflector.SetJSONResponse(&commentApplySuggestions, new(usererror.Error), http.StatusUnauthorized)
_ = reflector.SetJSONResponse(&commentApplySuggestions, new(usererror.Error), http.StatusForbidden)
_ = reflector.SetJSONResponse(&commentApplySuggestions, new(types.RulesViolations), http.StatusUnprocessableEntity)
_ = reflector.Spec.AddOperation(http.MethodPost,
"/repos/{repo_ref}/pullreq/{pullreq_number}/comments/apply-suggestions", commentApplySuggestions)
reviewerAdd := openapi3.Operation{}
reviewerAdd.WithTags("pullreq")
reviewerAdd.WithMapOfAnything(map[string]interface{}{"operationId": "reviewerAddPullReq"})

View File

@ -165,6 +165,11 @@ func NotFound(message string) *Error {
return New(http.StatusNotFound, message)
}
// NotFoundf returns a new user facing not found error.
func NotFoundf(format string, args ...any) *Error {
return Newf(http.StatusNotFound, format, args...)
}
// ConflictWithPayload returns a new user facing conflict error with payload.
func ConflictWithPayload(message string, values ...map[string]any) *Error {
return NewWithPayload(http.StatusConflict, message, values...)

View File

@ -515,6 +515,7 @@ func SetupPullReq(r chi.Router, pullreqCtrl *pullreq.Controller) {
r.Get("/activities", handlerpullreq.HandleListActivities(pullreqCtrl))
r.Route("/comments", func(r chi.Router) {
r.Post("/", handlerpullreq.HandleCommentCreate(pullreqCtrl))
r.Post("/apply-suggestions", handlerpullreq.HandleCommentApplySuggestions(pullreqCtrl))
r.Route(fmt.Sprintf("/{%s}", request.PathParamPullReqCommentID), func(r chi.Router) {
r.Patch("/", handlerpullreq.HandleCommentUpdate(pullreqCtrl))
r.Delete("/", handlerpullreq.HandleCommentDelete(pullreqCtrl))

View File

@ -141,7 +141,12 @@ func (s *PullReqActivityStore) Find(ctx context.Context, id int64) (*types.PullR
return nil, database.ProcessSQLErrorf(ctx, err, "Failed to find pull request activity")
}
return s.mapPullReqActivity(ctx, dst), nil
act, err := s.mapPullReqActivity(ctx, dst)
if err != nil {
return nil, fmt.Errorf("failed to map pull request activity: %w", err)
}
return act, nil
}
// Create creates a new pull request.
@ -207,7 +212,12 @@ func (s *PullReqActivityStore) Create(ctx context.Context, act *types.PullReqAct
db := dbtx.GetAccessor(ctx, s.db)
query, arg, err := db.BindNamed(sqlQuery, mapInternalPullReqActivity(act))
dbAct, err := mapInternalPullReqActivity(act)
if err != nil {
return fmt.Errorf("failed to map pull request activity: %w", err)
}
query, arg, err := db.BindNamed(sqlQuery, dbAct)
if err != nil {
return database.ProcessSQLErrorf(ctx, err, "Failed to bind pull request activity object")
}
@ -278,7 +288,10 @@ func (s *PullReqActivityStore) Update(ctx context.Context, act *types.PullReqAct
updatedAt := time.Now()
dbAct := mapInternalPullReqActivity(act)
dbAct, err := mapInternalPullReqActivity(act)
if err != nil {
return fmt.Errorf("failed to map pull request activity: %w", err)
}
dbAct.Version++
dbAct.Updated = updatedAt.UnixMilli()
@ -301,7 +314,11 @@ func (s *PullReqActivityStore) Update(ctx context.Context, act *types.PullReqAct
return gitness_store.ErrVersionConflict
}
*act = *s.mapPullReqActivity(ctx, dbAct)
updatedAct, err := s.mapPullReqActivity(ctx, dbAct)
if err != nil {
return fmt.Errorf("failed to map db pull request activity: %w", err)
}
*act = *updatedAct
return nil
}
@ -465,7 +482,13 @@ func (s *PullReqActivityStore) CountUnresolved(ctx context.Context, prID int64)
return count, nil
}
func mapPullReqActivity(act *pullReqActivity) *types.PullReqActivity {
func mapPullReqActivity(act *pullReqActivity) (*types.PullReqActivity, error) {
metadata := &types.PullReqActivityMetadata{}
err := json.Unmarshal(act.Metadata, &metadata)
if err != nil {
return nil, fmt.Errorf("failed to deserialize metadata: %w", err)
}
m := &types.PullReqActivity{
ID: act.ID,
Version: act.Version,
@ -484,7 +507,7 @@ func mapPullReqActivity(act *pullReqActivity) *types.PullReqActivity {
Kind: act.Kind,
Text: act.Text,
PayloadRaw: act.Payload,
Metadata: make(map[string]interface{}),
Metadata: metadata,
ResolvedBy: act.ResolvedBy.Ptr(),
Resolved: act.Resolved.Ptr(),
Author: types.PrincipalInfo{},
@ -503,12 +526,10 @@ func mapPullReqActivity(act *pullReqActivity) *types.PullReqActivity {
}
}
_ = json.Unmarshal(act.Metadata, &m.Metadata)
return m
return m, nil
}
func mapInternalPullReqActivity(act *types.PullReqActivity) *pullReqActivity {
func mapInternalPullReqActivity(act *types.PullReqActivity) (*pullReqActivity, error) {
m := &pullReqActivity{
ID: act.ID,
Version: act.Version,
@ -542,16 +563,25 @@ func mapInternalPullReqActivity(act *types.PullReqActivity) *pullReqActivity {
m.CodeCommentSpanOld = null.IntFrom(int64(act.CodeComment.SpanOld))
}
m.Metadata, _ = json.Marshal(act.Metadata)
var err error
m.Metadata, err = json.Marshal(act.Metadata)
if err != nil {
return nil, fmt.Errorf("failed to serialize metadata: %w", err)
}
return m
return m, nil
}
func (s *PullReqActivityStore) mapPullReqActivity(ctx context.Context, act *pullReqActivity) *types.PullReqActivity {
m := mapPullReqActivity(act)
func (s *PullReqActivityStore) mapPullReqActivity(
ctx context.Context,
act *pullReqActivity,
) (*types.PullReqActivity, error) {
m, err := mapPullReqActivity(act)
if err != nil {
return nil, err
}
var author, resolver *types.PrincipalInfo
var err error
author, err = s.pCache.Get(ctx, act.CreatedBy)
if err != nil {
@ -569,7 +599,7 @@ func (s *PullReqActivityStore) mapPullReqActivity(ctx context.Context, act *pull
m.Resolver = resolver
}
return m
return m, nil
}
func (s *PullReqActivityStore) mapSlicePullReqActivity(
@ -594,7 +624,10 @@ func (s *PullReqActivityStore) mapSlicePullReqActivity(
// attach the principal infos back to the slice items
m := make([]*types.PullReqActivity, len(activities))
for i, act := range activities {
m[i] = mapPullReqActivity(act)
m[i], err = mapPullReqActivity(act)
if err != nil {
return nil, fmt.Errorf("failed to map pull request activity %d: %w", act.ID, err)
}
if author, ok := infoMap[act.CreatedBy]; ok {
m[i].Author = *author
}

View File

@ -70,12 +70,12 @@ func testParseSignatureFromCatFileLineFor(t *testing.T, name string, email strin
func TestParseTagDataFromCatFile(t *testing.T) {
when, _ := time.Parse(defaultGitTimeLayout, "Fri Sep 23 10:57:49 2022 -0700")
testParseTagDataFromCatFileFor(t, sha.EmptyTree, GitObjectTypeTag, "name1",
testParseTagDataFromCatFileFor(t, sha.EmptyTree.String(), GitObjectTypeTag, "name1",
Signature{Identity: Identity{Name: "max", Email: "max@mail.com"}, When: when},
"some message", "some message")
// test with signature
testParseTagDataFromCatFileFor(t, sha.EmptyTree, GitObjectTypeCommit, "name2",
testParseTagDataFromCatFileFor(t, sha.EmptyTree.String(), GitObjectTypeCommit, "name2",
Signature{Identity: Identity{Name: "max", Email: "max@mail.com"}, When: when},
"gpgsig -----BEGIN PGP SIGNATURE-----\n\nw...B\n-----END PGP SIGNATURE-----\n\nsome message",
"some message")

View File

@ -34,7 +34,7 @@ type BlobReader struct {
}
// GetBlob returns the blob for the given object sha.
func (g *Git) GetBlob(
func GetBlob(
ctx context.Context,
repoPath string,
alternateObjectDirs []string,

View File

@ -303,7 +303,7 @@ func (g *Git) DiffShortStat(
shortstatArgs := []string{baseRef + separator + headRef}
if len(baseRef) == 0 || baseRef == types.NilSHA {
shortstatArgs = []string{sha.EmptyTree, headRef}
shortstatArgs = []string{sha.EmptyTree.String(), headRef}
}
stat, err := GetDiffShortStat(ctx, repoPath, shortstatArgs...)
if err != nil {
@ -338,6 +338,7 @@ func (g *Git) GetDiffHunkHeaders(
cmd := command.New("diff",
command.WithFlag("--patch"),
command.WithFlag("--full-index"),
command.WithFlag("--no-color"),
command.WithFlag("--unified=0"),
command.WithArg(sourceRef),
@ -510,6 +511,7 @@ func (g *Git) diffCutFromHunk(
cmd := command.New("diff",
command.WithFlag("--patch"),
command.WithFlag("--full-index"),
command.WithFlag("--no-color"),
command.WithFlag("--unified=100000000"),
command.WithArg(oldSHA),

View File

@ -50,7 +50,7 @@ func (g *Git) GetSubmodule(
ref, commit)
}
reader, err := g.GetBlob(ctx, repoPath, nil, node.SHA, 0)
reader, err := GetBlob(ctx, repoPath, nil, node.SHA, 0)
if err != nil {
return nil, processGitErrorf(err, "error reading commit for ref '%s'", ref)
}

View File

@ -18,6 +18,7 @@ import (
"context"
"io"
"github.com/harness/gitness/git/api"
"github.com/harness/gitness/git/sha"
)
@ -45,7 +46,7 @@ func (s *Service) GetBlob(ctx context.Context, params *GetBlobParams) (*GetBlobO
repoPath := getFullPathForRepo(s.reposRoot, params.RepoUID)
// TODO: do we need to validate request for nil?
reader, err := s.git.GetBlob(
reader, err := api.GetBlob(
ctx,
repoPath,
params.AlternateObjectDirs,

View File

@ -252,6 +252,7 @@ type DiffCutParams struct {
LineStartNew bool
LineEnd int
LineEndNew bool
LineLimit int
}
// DiffCut extracts diff snippet from a git diff hunk.
@ -265,7 +266,7 @@ func (s *Service) DiffCut(ctx context.Context, params *DiffCutParams) (DiffCutOu
mergeBaseSHA, _, err := s.git.GetMergeBase(ctx, repoPath, "", params.TargetCommitSHA, params.SourceCommitSHA)
if err != nil {
return DiffCutOutput{}, fmt.Errorf("DiffCut: failed to find merge base: %w", err)
return DiffCutOutput{}, fmt.Errorf("failed to find merge base: %w", err)
}
header, linesHunk, err := s.git.DiffCut(ctx,
@ -280,7 +281,7 @@ func (s *Service) DiffCut(ctx context.Context, params *DiffCutParams) (DiffCutOu
LineEndNew: params.LineEndNew,
BeforeLines: 2,
AfterLines: 2,
LineLimit: 40,
LineLimit: params.LineLimit,
})
if err != nil {
return DiffCutOutput{}, fmt.Errorf("DiffCut: failed to get diff hunk: %w", err)

View File

@ -34,14 +34,15 @@ const (
type FileAction string
const (
CreateAction FileAction = "CREATE"
UpdateAction FileAction = "UPDATE"
DeleteAction = "DELETE"
MoveAction = "MOVE"
CreateAction FileAction = "CREATE"
UpdateAction FileAction = "UPDATE"
DeleteAction FileAction = "DELETE"
MoveAction FileAction = "MOVE"
PatchTextAction FileAction = "PATCH_TEXT"
)
func (FileAction) Enum() []interface{} {
return []interface{}{CreateAction, UpdateAction, DeleteAction, MoveAction}
return []interface{}{CreateAction, UpdateAction, DeleteAction, MoveAction, PatchTextAction}
}
// CommitFileAction holds file operation data.
@ -83,7 +84,7 @@ type CommitFilesResponse struct {
CommitID sha.SHA
}
//nolint:gocognit
//nolint:gocognit,nestif
func (s *Service) CommitFiles(ctx context.Context, params *CommitFilesParams) (CommitFilesResponse, error) {
if err := params.Validate(); err != nil {
return CommitFilesResponse{}, err
@ -124,23 +125,22 @@ func (s *Service) CommitFiles(ctx context.Context, params *CommitFilesParams) (C
// ensure input data is valid
// the commit will be nil for empty repositories
commit, err := s.validateAndPrepareHeader(ctx, repoPath, isEmpty, params)
commit, err := s.validateAndPrepareCommitFilesHeader(ctx, repoPath, isEmpty, params)
if err != nil {
return CommitFilesResponse{}, err
}
// ref updater
var oldCommitSHA sha.SHA
var newCommitSHA sha.SHA
var refOldSHA sha.SHA
var refNewSHA sha.SHA
branchRef := api.GetReferenceFromBranchName(params.Branch)
if params.Branch != params.NewBranch {
// we are creating a new branch, rather than updating the existing one
oldCommitSHA = sha.Nil
refOldSHA = sha.Nil
branchRef = api.GetReferenceFromBranchName(params.NewBranch)
} else if commit != nil {
oldCommitSHA = commit.SHA
refOldSHA = commit.SHA
}
refUpdater, err := hook.CreateRefUpdater(s.hookClientFactory, params.EnvVars, repoPath, branchRef)
@ -149,28 +149,44 @@ func (s *Service) CommitFiles(ctx context.Context, params *CommitFilesParams) (C
err = sharedrepo.Run(ctx, refUpdater, s.tmpDir, repoPath, func(r *sharedrepo.SharedRepo) error {
var parentCommits []sha.SHA
var oldTreeSHA sha.SHA
if isEmpty {
oldTreeSHA = sha.EmptyTree
err = s.prepareTreeEmptyRepo(ctx, r, params.Actions)
if err != nil {
return fmt.Errorf("failed to prepare empty tree: %w", err)
}
} else {
parentCommits = append(parentCommits, commit.SHA)
// get tree sha
rootNode, err := s.git.GetTreeNode(ctx, repoPath, commit.SHA.String(), "")
if err != nil {
return fmt.Errorf("CommitFiles: failed to get original node: %w", err)
}
oldTreeSHA = rootNode.SHA
err = r.SetIndex(ctx, commit.SHA)
if err != nil {
return fmt.Errorf("failed to set index in shared repository: %w", err)
}
err = s.prepareTree(ctx, r, commit.SHA, params.Actions)
}
if err != nil {
return fmt.Errorf("failed to prepare tree: %w", err)
if err != nil {
return fmt.Errorf("failed to prepare tree: %w", err)
}
}
treeHash, err := r.WriteTree(ctx)
treeSHA, err := r.WriteTree(ctx)
if err != nil {
return fmt.Errorf("failed to write tree object: %w", err)
}
if oldTreeSHA.Equal(treeSHA) {
return errors.InvalidArgument("No effective changes.")
}
message := strings.TrimSpace(params.Title)
if len(params.Message) > 0 {
message += "\n\n" + strings.TrimSpace(params.Message)
@ -192,15 +208,15 @@ func (s *Service) CommitFiles(ctx context.Context, params *CommitFilesParams) (C
When: committerDate,
}
commitSHA, err := r.CommitTree(ctx, authorSig, committerSig, treeHash, message, false, parentCommits...)
commitSHA, err := r.CommitTree(ctx, authorSig, committerSig, treeSHA, message, false, parentCommits...)
if err != nil {
return fmt.Errorf("failed to commit the tree: %w", err)
}
newCommitSHA = commitSHA
refNewSHA = commitSHA
if err := refUpdater.Init(ctx, oldCommitSHA, newCommitSHA); err != nil {
return fmt.Errorf("failed to init ref updater old=%s new=%s: %w", oldCommitSHA, newCommitSHA, err)
if err := refUpdater.Init(ctx, refOldSHA, refNewSHA); err != nil {
return fmt.Errorf("failed to init ref updater old=%s new=%s: %w", refOldSHA, refNewSHA, err)
}
return nil
@ -211,10 +227,10 @@ func (s *Service) CommitFiles(ctx context.Context, params *CommitFilesParams) (C
// get commit
commit, err = s.git.GetCommit(ctx, repoPath, newCommitSHA.String())
commit, err = s.git.GetCommit(ctx, repoPath, refNewSHA.String())
if err != nil {
return CommitFilesResponse{}, fmt.Errorf("failed to get commit for SHA %s: %w",
newCommitSHA.String(), err)
refNewSHA.String(), err)
}
return CommitFilesResponse{
@ -225,14 +241,65 @@ func (s *Service) CommitFiles(ctx context.Context, params *CommitFilesParams) (C
func (s *Service) prepareTree(
ctx context.Context,
r *sharedrepo.SharedRepo,
treeish sha.SHA,
treeishSHA sha.SHA,
actions []CommitFileAction,
) error {
// execute all actions
// patch file actions are executed in batch for a single file
patchMap := map[string][]*CommitFileAction{}
// keep track of what paths have been written to detect conflicting actions
modifiedPaths := map[string]bool{}
for i := range actions {
if err := s.processAction(ctx, r, treeish, &actions[i]); err != nil {
return err
act := &actions[i]
// patch text actions are executed in per-file batches.
if act.Action == PatchTextAction {
patchMap[act.Path] = append(patchMap[act.Path], act)
continue
}
// anything else is executed as is
modifiedPath, err := s.processAction(ctx, r, treeishSHA, act)
if err != nil {
return fmt.Errorf("failed to process action %s on %q: %w", act.Action, act.Path, err)
}
if modifiedPaths[modifiedPath] {
return errors.InvalidArgument("More than one conflicting actions are modifying file %q.", modifiedPath)
}
modifiedPaths[modifiedPath] = true
}
for filePath, patchActions := range patchMap {
// combine input across actions
var fileSHA sha.SHA
var payloads [][]byte
for _, act := range patchActions {
payloads = append(payloads, act.Payload)
if fileSHA.IsEmpty() {
fileSHA = act.SHA
continue
}
// there can only be one file sha for a given path and commit.
if !act.SHA.IsEmpty() && !fileSHA.Equal(act.SHA) {
return errors.InvalidArgument(
"patch text actions for %q contain different SHAs %q and %q",
filePath,
act.SHA,
fileSHA,
)
}
}
if err := r.PatchTextFile(ctx, treeishSHA, filePath, fileSHA, payloads); err != nil {
return fmt.Errorf("failed to process action %s on %q: %w", PatchTextAction, filePath, err)
}
if modifiedPaths[filePath] {
return errors.InvalidArgument("More than one conflicting action are modifying file %q.", filePath)
}
modifiedPaths[filePath] = true
}
return nil
@ -261,7 +328,7 @@ func (s *Service) prepareTreeEmptyRepo(
return nil
}
func (s *Service) validateAndPrepareHeader(
func (s *Service) validateAndPrepareCommitFilesHeader(
ctx context.Context,
repoPath string,
isEmpty bool,
@ -313,24 +380,28 @@ func (s *Service) processAction(
r *sharedrepo.SharedRepo,
treeishSHA sha.SHA,
action *CommitFileAction,
) (err error) {
) (modifiedPath string, err error) {
filePath := api.CleanUploadFileName(action.Path)
if filePath == "" {
return errors.InvalidArgument("path cannot be empty")
return "", errors.InvalidArgument("path cannot be empty")
}
modifiedPath = filePath
switch action.Action {
case CreateAction:
err = r.CreateFile(ctx, treeishSHA, filePath, filePermissionDefault, action.Payload)
case UpdateAction:
err = r.UpdateFile(ctx, treeishSHA, filePath, action.SHA, filePermissionDefault, action.Payload)
case MoveAction:
err = r.MoveFile(ctx, treeishSHA, filePath, action.SHA, filePermissionDefault, action.Payload)
modifiedPath, err = r.MoveFile(ctx, treeishSHA, filePath, action.SHA, filePermissionDefault, action.Payload)
case DeleteAction:
err = r.DeleteFile(ctx, filePath)
case PatchTextAction:
return "", fmt.Errorf("action %s not supported by this method", action.Action)
default:
err = fmt.Errorf("unknown file action %q", action.Action)
}
return err
return modifiedPath, err
}
/*

View File

@ -104,7 +104,7 @@ func DiffCut(r io.Reader, params DiffCutParams) (HunkHeader, Hunk, error) {
}
diffCut = append(diffCut, line)
if len(diffCut) > params.LineLimit {
if params.LineLimit > 0 && len(diffCut) >= params.LineLimit {
break // safety break
}
}
@ -290,7 +290,7 @@ func BlobCut(r io.Reader, params DiffCutParams) (CutHeader, Cut, error) {
linesNeeded--
}
if len(cutLines) >= params.LineLimit {
if params.LineLimit > 0 && len(cutLines) >= params.LineLimit {
break
}
}

View File

@ -106,7 +106,7 @@ func TestDiffCut(t *testing.T) {
LineStart: 7, LineStartNew: false,
LineEnd: 7, LineEndNew: true,
BeforeLines: 0, AfterLines: 0,
LineLimit: 1000,
LineLimit: 0,
},
expCutHeader: "@@ -7,2 +7 @@",
expCut: []string{"-7", "-8", "+6,7,8"},
@ -298,6 +298,12 @@ func TestBlobCut(t *testing.T) {
expCutHeader: CutHeader{Line: 3, Span: 2},
expCut: Cut{CutHeader: CutHeader{Line: 2, Span: 3}, Lines: []string{"2", "3", "4"}},
},
{
name: "unlimited",
params: DiffCutParams{LineStart: 1, LineEnd: 6, BeforeLines: 1, AfterLines: 1, LineLimit: 0},
expCutHeader: CutHeader{Line: 1, Span: 6},
expCut: Cut{CutHeader: CutHeader{Line: 1, Span: 6}, Lines: []string{"1", "2", "3", "4", "5", "6"}},
},
}
for _, test := range tests {

View File

@ -18,8 +18,6 @@ import (
"bufio"
"io"
"regexp"
"github.com/harness/gitness/git/enum"
)
type DiffFileHunkHeaders struct {
@ -42,30 +40,6 @@ func ParseDiffFileHeader(line string) (DiffFileHeader, bool) {
}, true
}
var regExpDiffExtHeader = regexp.MustCompile(
"^(" +
enum.DiffExtHeaderOldMode + "|" +
enum.DiffExtHeaderNewMode + "|" +
enum.DiffExtHeaderDeletedFileMode + "|" +
enum.DiffExtHeaderNewFileMode + "|" +
enum.DiffExtHeaderCopyFrom + "|" +
enum.DiffExtHeaderCopyTo + "|" +
enum.DiffExtHeaderRenameFrom + "|" +
enum.DiffExtHeaderRenameTo + "|" +
enum.DiffExtHeaderSimilarity + "|" +
enum.DiffExtHeaderDissimilarity + "|" +
enum.DiffExtHeaderIndex +
") (.+)$")
func ParseDiffFileExtendedHeader(line string) (string, string) {
groups := regExpDiffExtHeader.FindStringSubmatch(line)
if groups == nil {
return "", ""
}
return groups[1], groups[2]
}
// GetHunkHeaders parses git diff output and returns all diff headers for all files.
// See for documentation: https://git-scm.com/docs/git-diff#generate_patch_text_with_p
func GetHunkHeaders(r io.Reader) ([]*DiffFileHunkHeaders, error) {

View File

@ -0,0 +1,64 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"regexp"
"github.com/harness/gitness/git/enum"
)
var regExpDiffExtHeader = regexp.MustCompile(
"^(" +
enum.DiffExtHeaderOldMode + "|" +
enum.DiffExtHeaderNewMode + "|" +
enum.DiffExtHeaderDeletedFileMode + "|" +
enum.DiffExtHeaderNewFileMode + "|" +
enum.DiffExtHeaderCopyFrom + "|" +
enum.DiffExtHeaderCopyTo + "|" +
enum.DiffExtHeaderRenameFrom + "|" +
enum.DiffExtHeaderRenameTo + "|" +
enum.DiffExtHeaderSimilarity + "|" +
enum.DiffExtHeaderDissimilarity + "|" +
enum.DiffExtHeaderIndex +
") (.+)$")
// ParseDiffFileExtendedHeader parses a generic extended header line.
func ParseDiffFileExtendedHeader(line string) (string, string) {
groups := regExpDiffExtHeader.FindStringSubmatch(line)
if groups == nil {
return "", ""
}
return groups[1], groups[2]
}
// regExpDiffFileIndexHeader parses the `index` extended header line with a format like:
//
// index f994c2cf569523ba736473bbfbac3700fa1db28d..0000000000000000000000000000000000000000
// index 68233d6cd204b0df84e91a1ce8c8b75e13529973..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
//
// NOTE: it's NEW_SHA..OLD_SHA.
var regExpDiffFileIndexHeader = regexp.MustCompile(`^index ([0-9a-f]{4,64})\.\.([0-9a-f]{4,64})( [0-9]+)?$`)
// DiffExtHeaderParseIndex parses the `index` extended diff header line.
func DiffExtHeaderParseIndex(line string) (newSHA string, oldSHA string, ok bool) {
groups := regExpDiffFileIndexHeader.FindStringSubmatch(line)
if groups == nil {
return "", "", false
}
return groups[1], groups[2], true
}
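A small usage sketch with one of the header lines quoted in the comment above:
newSHA, oldSHA, ok := DiffExtHeaderParseIndex(
	"index 68233d6cd204b0df84e91a1ce8c8b75e13529973..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644")
// ok == true; the two hex object IDs are returned in the order the doc comment above
// describes, and the optional trailing mode ("100644") is ignored.
_, _, _ = newSHA, oldSHA, ok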

View File

@ -17,7 +17,8 @@ package parser
import "github.com/harness/gitness/errors"
var (
ErrSHADoesNotMatch = errors.InvalidArgument("sha does not match")
ErrHunkNotFound = errors.NotFound("hunk not found")
ErrBinaryFile = errors.InvalidArgument("can't handle a binary file")
ErrSHADoesNotMatch = errors.InvalidArgument("sha does not match")
ErrHunkNotFound = errors.NotFound("hunk not found")
ErrBinaryFile = errors.InvalidArgument("can't handle a binary file")
ErrPeekedMoreThanOnce = errors.PreconditionFailed("peeking more than once in a row is not supported")
)

View File

@ -15,9 +15,18 @@
package parser
import (
"bufio"
"bytes"
"io"
)
type Scanner interface {
Scan() bool
Err() error
Bytes() []byte
Text() string
}
func ScanZeroSeparated(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil // Return nothing if at end of file and no data passed
@ -30,3 +39,87 @@ func ScanZeroSeparated(data []byte, atEOF bool) (advance int, token []byte, err
}
return
}
// ScanLinesWithEOF is a variation of bufio's ScanLines split function that keeps the line endings on the returned tokens.
// https://cs.opensource.google/go/go/+/master:src/bufio/scan.go;l=355;drc=bc2124dab14fa292e18df2937037d782f7868635
func ScanLinesWithEOF(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\n'); i >= 0 {
// We have a full newline-terminated line.
return i + 1, data[:i+1], nil
}
// If we're at EOF, we have a final, non-terminated line. Return it.
if atEOF {
return len(data), data, nil
}
// Request more data.
return 0, nil, nil
}
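As a minimal sketch (a hypothetical external test file): in contrast to bufio.ScanLines, the terminator stays on the token, which is what later lets ReadTextFile detect and preserve the original line endings:
package parser_test

import (
	"bufio"
	"fmt"
	"strings"

	"github.com/harness/gitness/git/parser"
)

func ExampleScanLinesWithEOF() {
	scanner := bufio.NewScanner(strings.NewReader("l1\nl2"))
	scanner.Split(parser.ScanLinesWithEOF)
	for scanner.Scan() {
		// the trailing "\n" is kept; the final, non-terminated line is returned as-is.
		fmt.Printf("%q\n", scanner.Text())
	}
	// Output:
	// "l1\n"
	// "l2"
}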
func NewScannerWithPeek(r io.Reader, split bufio.SplitFunc) *ScannerWithPeek {
scanner := bufio.NewScanner(r)
scanner.Split(split)
return &ScannerWithPeek{
scanner: scanner,
}
}
type ScannerWithPeek struct {
peeked bool
peekedScanOut bool
nextLine []byte
nextErr error
scanner *bufio.Scanner
}
func (s *ScannerWithPeek) scan() bool {
scanOut := s.scanner.Scan()
s.nextErr = s.scanner.Err()
s.nextLine = s.scanner.Bytes()
return scanOut
}
func (s *ScannerWithPeek) Peek() bool {
if s.peeked {
s.nextLine = nil
s.nextErr = ErrPeekedMoreThanOnce
return false
}
// load next line
scanOut := s.scan()
// set peeked data
s.peeked = true
s.peekedScanOut = scanOut
return scanOut
}
func (s *ScannerWithPeek) Scan() bool {
if s.peeked {
s.peeked = false
return s.peekedScanOut
}
return s.scan()
}
func (s *ScannerWithPeek) Err() error {
return s.nextErr
}
func (s *ScannerWithPeek) Bytes() []byte {
return s.nextLine
}
func (s *ScannerWithPeek) Text() string {
return string(s.nextLine)
}

View File

@ -0,0 +1,67 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"bufio"
"bytes"
"testing"
"github.com/stretchr/testify/require"
)
func Test_scannerWithPeekSmoke(t *testing.T) {
scanner := NewScannerWithPeek(
bytes.NewReader([]byte("l1\nl2")),
bufio.ScanLines,
)
out := scanner.Peek()
require.True(t, out)
require.NoError(t, scanner.Err())
require.Equal(t, "l1", string(scanner.Bytes()))
out = scanner.Scan()
require.True(t, out)
require.NoError(t, scanner.Err())
require.Equal(t, "l1", string(scanner.Bytes()))
out = scanner.Scan()
require.True(t, out)
require.NoError(t, scanner.Err())
require.Equal(t, "l2", scanner.Text())
out = scanner.Scan()
require.False(t, out)
require.NoError(t, scanner.Err())
require.Nil(t, scanner.Bytes())
}
func Test_scannerWithPeekDualPeek(t *testing.T) {
scanner := NewScannerWithPeek(
bytes.NewReader([]byte("l1\nl2")),
bufio.ScanLines,
)
out := scanner.Peek()
require.True(t, out)
require.NoError(t, scanner.Err())
require.Equal(t, "l1", string(scanner.Bytes()))
out = scanner.Peek()
require.False(t, out)
require.ErrorIs(t, scanner.Err(), ErrPeekedMoreThanOnce)
require.Nil(t, scanner.Bytes())
}

git/parser/text.go
View File

@ -0,0 +1,143 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"bufio"
"errors"
"fmt"
"io"
"unicode/utf8"
)
var (
ErrLineTooLong = errors.New("line too long")
)
func newUTF8Scanner(inner Scanner, modifier func([]byte) []byte) *utf8Scanner {
return &utf8Scanner{
scanner: inner,
modifier: modifier,
}
}
// utf8Scanner wraps the provided scanner with UTF-8 validation and an optional modifier function.
type utf8Scanner struct {
nextLine []byte
nextErr error
modifier func([]byte) []byte
scanner Scanner
}
func (s *utf8Scanner) Scan() bool {
scanOut := s.scanner.Scan()
if !scanOut {
s.nextLine = nil
s.nextErr = s.scanner.Err()
// to stay consistent with diff parser, treat bufio.ErrTooLong as binary file
if errors.Is(s.nextErr, bufio.ErrTooLong) {
s.nextErr = ErrBinaryFile
}
return false
}
// finalize next bytes
original := s.scanner.Bytes()
// Git uses the first 8000 chars, but for now we stay consistent with the diff parser
// https://git.kernel.org/pub/scm/git/git.git/tree/xdiff-interface.c?h=v2.30.0#n187
if !utf8.Valid(original) {
s.nextLine = nil
s.nextErr = ErrBinaryFile
return false
}
// copy the bytes so the modifier can't mutate the scanner's internal buffer
cpy := make([]byte, len(original))
copy(cpy, original)
if s.modifier != nil {
cpy = s.modifier(cpy)
}
s.nextLine = cpy
s.nextErr = nil
return true
}
func (s *utf8Scanner) Err() error {
return s.nextErr
}
func (s *utf8Scanner) Bytes() []byte {
return s.nextLine
}
func (s *utf8Scanner) Text() string {
return string(s.nextLine)
}
// ReadTextFile returns a Scanner that reads the provided text file line by line.
//
// The returned Scanner fulfills the following:
// - If any line is larger than 64KiB, the scanning fails with ErrBinaryFile
// - If the reader returns invalid UTF-8, the scanning fails with ErrBinaryFile
// - Line endings are returned as-is, unless overwriteLE is provided
func ReadTextFile(r io.Reader, overwriteLE *string) (Scanner, string, error) {
scanner := NewScannerWithPeek(r, ScanLinesWithEOF)
peekOut := scanner.Peek()
if !peekOut && scanner.Err() != nil {
return nil, "", fmt.Errorf("unknown error while peeking first line: %w", scanner.Err())
}
// get raw bytes as we don't modify the slice
firstLine := scanner.Bytes()
// Heuristic: derive the file's line ending from the first line; default to LF if the file contains no line endings.
lineEnding := "\n"
if HasLineEndingCRLF(firstLine) {
lineEnding = "\r\n"
}
return newUTF8Scanner(scanner, func(line []byte) []byte {
// overwrite line ending if requested (unless there's no line ending - e.g. last line)
if overwriteLE != nil {
if HasLineEndingCRLF(line) {
return append(line[:len(line)-2], []byte(*overwriteLE)...)
} else if HasLineEndingLF(line) {
return append(line[:len(line)-1], []byte(*overwriteLE)...)
}
}
return line
}), lineEnding, nil
}
func HasLineEnding(line []byte) bool {
// HasLineEndingLF is a superset of HasLineEndingCRLF.
return HasLineEndingLF(line)
}
func HasLineEndingLF(line []byte) bool {
return len(line) >= 1 && line[len(line)-1] == '\n'
}
func HasLineEndingCRLF(line []byte) bool {
return len(line) >= 2 && line[len(line)-2] == '\r' && line[len(line)-1] == '\n'
}
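As a usage sketch (a hypothetical external test file, mirroring Test_readTextFileLineEndingReplacement below): ReadTextFile reports the line ending detected from the first line and, when overwriteLE is set, rewrites the endings of all terminated lines:
package parser_test

import (
	"fmt"
	"strings"

	"github.com/gotidy/ptr"

	"github.com/harness/gitness/git/parser"
)

func ExampleReadTextFile() {
	scanner, lineEnding, err := parser.ReadTextFile(strings.NewReader("a\r\nb"), ptr.Of("\n"))
	if err != nil {
		panic(err)
	}
	fmt.Printf("detected: %q\n", lineEnding)
	for scanner.Scan() {
		fmt.Printf("%q\n", scanner.Text())
	}
	// Output:
	// detected: "\r\n"
	// "a\n"
	// "b"
}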

git/parser/text_test.go
View File

@ -0,0 +1,146 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"bytes"
"testing"
"github.com/gotidy/ptr"
"github.com/stretchr/testify/assert"
)
func Test_readTextFileEmpty(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader(nil), nil)
assert.NoError(t, err)
assert.Equal(t, "\n", le)
ok := scanner.Scan()
assert.False(t, ok)
assert.NoError(t, scanner.Err())
}
func Test_readTextFileFirstLineNotUTF8(t *testing.T) {
scanner, _, err := ReadTextFile(bytes.NewReader([]byte{0xFF, 0xFF}), nil)
// the method itself doesn't return an error; only the scanning fails due to invalid UTF-8.
assert.NotNil(t, scanner)
assert.NoError(t, err)
ok := scanner.Scan()
assert.False(t, ok)
assert.ErrorIs(t, scanner.Err(), ErrBinaryFile)
}
func Test_readTextFileNoLineEnding(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader([]byte("abc")), nil)
assert.NoError(t, err)
assert.Equal(t, "\n", le)
ok := scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "abc", scanner.Text())
ok = scanner.Scan()
assert.False(t, ok)
assert.NoError(t, scanner.Err())
assert.Nil(t, scanner.Bytes())
}
func Test_readTextFileLineEndingLF(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader([]byte("abc\n")), nil)
assert.NoError(t, err)
assert.Equal(t, "\n", le)
ok := scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "abc\n", scanner.Text())
ok = scanner.Scan()
assert.False(t, ok)
assert.NoError(t, scanner.Err())
assert.Nil(t, scanner.Bytes())
}
func Test_readTextFileLineEndingCRLF(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader([]byte("abc\r\n")), nil)
assert.NoError(t, err)
assert.Equal(t, "\r\n", le)
ok := scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "abc\r\n", scanner.Text())
ok = scanner.Scan()
assert.False(t, ok)
assert.NoError(t, scanner.Err())
assert.Nil(t, scanner.Bytes())
}
func Test_readTextFileLineEndingMultiple(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader([]byte("abc\r\nd\n")), nil)
assert.NoError(t, err)
assert.Equal(t, "\r\n", le)
ok := scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "abc\r\n", scanner.Text())
ok = scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "d\n", scanner.Text())
ok = scanner.Scan()
assert.False(t, ok)
assert.NoError(t, scanner.Err())
assert.Nil(t, scanner.Bytes())
}
func Test_readTextFileLineEndingReplacementEmpty(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader([]byte("abc\r\n")), ptr.Of(""))
assert.NoError(t, err)
assert.Equal(t, "\r\n", le)
ok := scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "abc", scanner.Text())
}
func Test_readTextFileLineEndingReplacement(t *testing.T) {
scanner, le, err := ReadTextFile(bytes.NewReader([]byte("abc\r\nd")), ptr.Of("\n"))
assert.NoError(t, err)
assert.Equal(t, "\r\n", le)
ok := scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "abc\n", scanner.Text())
ok = scanner.Scan()
assert.True(t, ok)
assert.NoError(t, scanner.Err())
assert.Equal(t, "d", scanner.Text())
ok = scanner.Scan()
assert.False(t, ok)
assert.NoError(t, scanner.Err())
assert.Nil(t, scanner.Bytes())
}

View File

@ -27,9 +27,6 @@ import (
"github.com/swaggest/jsonschema-go"
)
// EmptyTree is the SHA of an empty tree.
const EmptyTree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
var (
Nil = Must("0000000000000000000000000000000000000000")
None = SHA{}
@ -38,6 +35,9 @@ var (
// which is 64 chars - keep this forward-compatible.
regex = regexp.MustCompile("^[0-9a-f]{4,64}$")
nilRegex = regexp.MustCompile("^0{4,64}$")
// EmptyTree is the SHA of an empty tree.
EmptyTree = Must("4b825dc642cb6eb9a060e54bf8d69288fbee4904")
)
// SHA represents a git sha.

View File

@ -27,11 +27,9 @@ func TestSHA_MarshalJSON(t *testing.T) {
wantErr bool
}{
{
name: "happy path",
input: SHA{
str: EmptyTree,
},
want: []byte("\"" + EmptyTree + "\""),
name: "happy path",
input: EmptyTree,
want: []byte("\"" + EmptyTree.String() + "\""),
},
{
name: "happy path - quotes",
@ -68,12 +66,10 @@ func TestSHA_UnmarshalJSON(t *testing.T) {
wantErr bool
}{
{
name: "happy path",
input: []byte("\"" + EmptyTree + "\""),
expected: SHA{
str: EmptyTree,
},
wantErr: false,
name: "happy path",
input: []byte("\"" + EmptyTree.String() + "\""),
expected: EmptyTree,
wantErr: false,
},
{
name: "empty content returns empty",

View File

@ -0,0 +1,62 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sharedrepo
import (
"fmt"
"math"
"strconv"
"strings"
)
// lineNumberEOF indicates a line number pointing at the end of the file.
// Setting it to max Int64 ensures that any valid lineNumber is smaller than an EOF line number.
const lineNumberEOF = lineNumber(math.MaxInt64)
type lineNumber int64
func (n lineNumber) IsEOF() bool {
return n == lineNumberEOF
}
func (n lineNumber) String() string {
if n == lineNumberEOF {
return "eof"
}
return fmt.Sprint(int64(n))
}
func parseLineNumber(raw []byte) (lineNumber, error) {
if len(raw) == 0 {
return 0, fmt.Errorf("line number can't be empty")
}
if strings.EqualFold(string(raw), "eof") {
return lineNumberEOF, nil
}
val, err := strconv.ParseInt(string(raw), 10, 64)
if err != nil {
return 0, fmt.Errorf("unable to parse %q as line number", string(raw))
}
if val < 1 {
return 0, fmt.Errorf("line numbering starts at 1")
}
if val == int64(lineNumberEOF) {
return 0, fmt.Errorf("line numbering ends at %d", lineNumberEOF-1)
}
return lineNumber(val), err
}

View File

@ -0,0 +1,94 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sharedrepo
import (
"math"
"testing"
"github.com/stretchr/testify/assert"
)
func Test_lineNumberIsEOF(t *testing.T) {
assert.True(t, lineNumberEOF.IsEOF(), "lineNumberEOF should be EOF")
assert.True(t, lineNumber(math.MaxInt64).IsEOF(), "lineNumberEOF should be EOF")
assert.False(t, lineNumber(1).IsEOF(), "1 should not be EOF")
}
func Test_lineNumberString(t *testing.T) {
assert.Equal(t, "eof", lineNumberEOF.String(), "lineNumberEOF should be 'eof'")
assert.Equal(t, "eof", lineNumber(math.MaxInt64).String(), "math.MaxInt64 should be 'eof'")
assert.Equal(t, "1", lineNumber(1).String(), "1 should be '1'")
}
func Test_parseLineNumber(t *testing.T) {
tests := []struct {
name string
arg []byte
wantErr string
want lineNumber
}{
{
name: "test empty",
arg: nil,
wantErr: "line number can't be empty",
},
{
name: "test not a number",
arg: []byte("test"),
wantErr: "unable to parse",
},
{
name: "test maxInt64+1 fails",
arg: []byte("9223372036854775808"),
wantErr: "unable to parse",
},
{
name: "test smaller than 1",
arg: []byte("0"),
wantErr: "line numbering starts at 1",
},
{
name: "test maxInt64 not allowed",
arg: []byte("9223372036854775807"),
wantErr: "line numbering ends at 9223372036854775806",
},
{
name: "test smallest valid number (1)",
arg: []byte("1"),
want: 1,
},
{
name: "test biggest valid number (maxInt64-1)",
arg: []byte("9223372036854775806"),
want: 9223372036854775806,
},
{
name: "test eof",
arg: []byte("eof"),
want: lineNumberEOF,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := parseLineNumber(tt.arg)
if tt.wantErr != "" {
assert.ErrorContains(t, err, tt.wantErr, "error doesn't match expected.")
} else {
assert.Equal(t, tt.want, got, "parsed value doesn't match expected")
}
})
}
}

View File

@ -33,6 +33,7 @@ import (
"github.com/harness/gitness/errors"
"github.com/harness/gitness/git/api"
"github.com/harness/gitness/git/command"
"github.com/harness/gitness/git/parser"
"github.com/harness/gitness/git/sha"
"github.com/harness/gitness/git/tempdir"
@ -528,21 +529,21 @@ func (r *SharedRepo) MoveFile(
objectSHA sha.SHA,
mode string,
payload []byte,
) error {
) (string, error) {
newPath, newContent, err := parseMovePayload(payload)
if err != nil {
return err
return "", err
}
// ensure file exists and matches SHA
entry, err := r.getFileEntry(ctx, treeishSHA, objectSHA, filePath)
if err != nil {
return err
return "", err
}
// ensure new path is available
if err = r.checkPathAvailability(ctx, treeishSHA, newPath, false); err != nil {
return err
return "", err
}
var fileHash sha.SHA
@ -550,7 +551,7 @@ func (r *SharedRepo) MoveFile(
if newContent != nil {
hash, err := r.WriteGitObject(ctx, bytes.NewReader(newContent))
if err != nil {
return fmt.Errorf("moveFile: error hashing object: %w", err)
return "", fmt.Errorf("moveFile: error hashing object: %w", err)
}
fileHash = hash
@ -565,14 +566,14 @@ func (r *SharedRepo) MoveFile(
}
if err = r.AddObjectToIndex(ctx, fileMode, fileHash, newPath); err != nil {
return fmt.Errorf("moveFile: add object error: %w", err)
return "", fmt.Errorf("moveFile: add object error: %w", err)
}
if err = r.RemoveFilesFromIndex(ctx, filePath); err != nil {
return fmt.Errorf("moveFile: remove object error: %w", err)
return "", fmt.Errorf("moveFile: remove object error: %w", err)
}
return nil
return newPath, nil
}
func (r *SharedRepo) DeleteFile(ctx context.Context, filePath string) error {
@ -590,6 +591,224 @@ func (r *SharedRepo) DeleteFile(ctx context.Context, filePath string) error {
return nil
}
func (r *SharedRepo) PatchTextFile(
ctx context.Context,
treeishSHA sha.SHA,
filePath string,
objectSHA sha.SHA,
payloadsRaw [][]byte,
) error {
payloads, err := parsePatchTextFilePayloads(payloadsRaw)
if err != nil {
return err
}
entry, err := r.getFileEntry(ctx, treeishSHA, objectSHA, filePath)
if err != nil {
return err
}
blob, err := api.GetBlob(ctx, r.repoPath, nil, entry.SHA, 0)
if err != nil {
return fmt.Errorf("error reading blob: %w", err)
}
scanner, lineEnding, err := parser.ReadTextFile(blob.Content, nil)
if err != nil {
return fmt.Errorf("error reading blob as text file: %w", err)
}
pipeReader, pipeWriter := io.Pipe()
go func() {
err := patchTextFileWritePatchedFile(scanner, payloads, lineEnding, pipeWriter)
pipErr := pipeWriter.CloseWithError(err)
if pipErr != nil {
log.Ctx(ctx).Warn().Err(pipErr).Msgf("failed to close pipe writer with error: %s", err)
}
}()
objectSHA, err = r.WriteGitObject(ctx, pipeReader)
if err != nil {
return fmt.Errorf("error writing patched file to git store: %w", err)
}
if err = r.AddObjectToIndex(ctx, entry.Mode.String(), objectSHA, filePath); err != nil {
return fmt.Errorf("error updating object: %w", err)
}
return nil
}
//nolint:gocognit,gocyclo,cyclop
func patchTextFileWritePatchedFile(
fileScanner parser.Scanner,
replacements []patchTextFileReplacement,
lineEnding string,
writer io.Writer,
) error {
// sort replacements by `start ASC end ASC` to ensure proper processing (DO NOT CHANGE!)
// Use stable sort to ensure ordering of `[1,1)[1,1)` is maintained
sort.SliceStable(replacements, func(i, j int) bool {
if replacements[i].OmitFrom == replacements[j].OmitFrom {
return replacements[i].ContinueFrom < replacements[j].ContinueFrom
}
return replacements[i].OmitFrom < replacements[j].OmitFrom
})
// ensure replacements aren't overlapping
for i := 1; i < len(replacements); i++ {
// Stop prevalidation at EOF as we don't know the line count of the file (NOTE: start=EOF => end=EOF).
// Remaining overlaps are handled once the file's EOF is reached and the actual line count is known.
if replacements[i-1].ContinueFrom.IsEOF() {
break
}
if replacements[i].OmitFrom < replacements[i-1].ContinueFrom {
return errors.InvalidArgument(
"Patch actions have conflicting ranges [%s,%s)x[%s,%s)",
replacements[i-1].OmitFrom, replacements[i-1].ContinueFrom,
replacements[i].OmitFrom, replacements[i].ContinueFrom,
)
}
}
// helper function to write output (helps to ensure that we always have line endings between lines)
previousWriteHadLineEndings := true
write := func(line []byte) error {
// skip lines without data - should never happen as an empty line still has line endings.
if len(line) == 0 {
return nil
}
// if the previous line didn't have line endings and there's another line coming, inject a line ending.
// NOTE: this can for example happen when a suggestion doesn't have line endings
if !previousWriteHadLineEndings {
_, err := writer.Write([]byte(lineEnding))
if err != nil {
return fmt.Errorf("failed to write forced injected line ending: %w", err)
}
}
_, err := writer.Write(line)
if err != nil {
return fmt.Errorf("failed to write line: %w", err)
}
previousWriteHadLineEndings = parser.HasLineEnding(line)
return nil
}
ri := 0 // replacement index
var processReplacements func(ln lineNumber) (skipLine bool, err error)
processReplacements = func(ln lineNumber) (bool, error) {
// no replacements left
if ri >= len(replacements) {
return false, nil
}
// Assumption: replacements are sorted `start ASC end ASC`
if ln < replacements[ri].OmitFrom {
return false, nil
}
// write replacement immediately once we hit its range to ensure we maintain proper order.
if ln == replacements[ri].OmitFrom {
rScanner, _, err := parser.ReadTextFile(bytes.NewReader(replacements[ri].Content), &lineEnding)
if err != nil {
return false, fmt.Errorf("failed to start reading replacement as text file: %w", err)
}
for rScanner.Scan() {
if err := write(rScanner.Bytes()); err != nil {
return false, fmt.Errorf("failed to inject replacement line: %w", err)
}
}
if err := rScanner.Err(); err != nil {
return false, fmt.Errorf("failed to read replacement line: %w", err)
}
}
// if we reached the end of the replacement - move to the next one and retrigger (to handle things like [1,2)+[2,2)+...)
if ln >= replacements[ri].ContinueFrom {
ri++
return processReplacements(ln)
}
// otherwise we are in the middle of the replacement - skip line
return true, nil
}
var ln lineNumber
for fileScanner.Scan() {
ln++
skipLine, err := processReplacements(ln)
if err != nil {
return fmt.Errorf("failed processing replacements for line %d: %w", ln, err)
}
if skipLine {
continue
}
line := fileScanner.Bytes()
if err := write(line); err != nil {
return fmt.Errorf("failed to copy line %d from original file: %w", ln, err)
}
}
// move ln to the end of the file (i.e. after the last line)
ln++
// backfill EOF line numbers and finish overlap validation for remaining entries.
// If any replacement entries are left, we know the current one has ContinueFrom >= ln or is EOF.
for i := ri; i < len(replacements); i++ {
// copy original input for error messages
originalOmitFrom := replacements[i].OmitFrom
originalContinueFrom := replacements[i].ContinueFrom
// backfill EOF line numbers
if replacements[i].OmitFrom.IsEOF() {
replacements[i].OmitFrom = ln
}
if replacements[i].ContinueFrom.IsEOF() {
replacements[i].ContinueFrom = ln
}
// ensure replacement range isn't out of bounds
if replacements[i].OmitFrom > ln || replacements[i].ContinueFrom > ln {
return errors.InvalidArgument(
"Patch action for [%s,%s) is exceeding end of file with %d line(s).",
originalOmitFrom, originalContinueFrom, ln-1,
)
}
// ensure no overlap with next element
if i+1 < len(replacements) &&
replacements[i+1].OmitFrom < replacements[i].ContinueFrom {
return errors.InvalidArgument(
"Patch actions have conflicting ranges [%s,%s)x[%s,%s) for file with %d line(s).",
originalOmitFrom, originalContinueFrom,
replacements[i+1].OmitFrom, replacements[i+1].ContinueFrom,
ln-1,
)
}
}
skipLine, err := processReplacements(ln)
if err != nil {
return fmt.Errorf("failed processing replacements for EOF: %w", err)
}
// this should never happen (after full validation no remaining start/end is greater than the line number at EOF).
if skipLine || ri < len(replacements) {
return fmt.Errorf(
"unexpected status reached at end of file ri=%d (cnt=%d) and skipLine=%t",
ri, len(replacements), skipLine,
)
}
return nil
}
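The replacement ranges are half-open: lines [OmitFrom, ContinueFrom) are omitted and the content is written in their place, so OmitFrom == ContinueFrom is a pure insert and the lineNumberEOF sentinel appends after the last line. A minimal in-package sketch of the append case (a hypothetical test, mirroring the table-driven tests further below):
package sharedrepo

import (
	"bytes"
	"testing"

	"github.com/harness/gitness/git/parser"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func Test_patchTextFileAppendAtEOFSketch(t *testing.T) {
	scanner, lineEnding, err := parser.ReadTextFile(bytes.NewReader([]byte("l1\nl2")), nil)
	require.NoError(t, err)

	out := bytes.Buffer{}
	err = patchTextFileWritePatchedFile(
		scanner,
		[]patchTextFileReplacement{
			// [eof,eof) omits nothing - the suggestion is appended after the last line.
			{OmitFrom: lineNumberEOF, ContinueFrom: lineNumberEOF, Content: []byte("suggested")},
		},
		lineEnding,
		&out,
	)
	require.NoError(t, err)
	assert.Equal(t, "l1\nl2\nsuggested", out.String())
}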
func (r *SharedRepo) getFileEntry(
ctx context.Context,
treeishSHA sha.SHA,
@ -808,3 +1027,63 @@ func parseMovePayload(payload []byte) (string, []byte, error) {
return newPath, newContent, nil
}
type patchTextFileReplacement struct {
OmitFrom lineNumber
ContinueFrom lineNumber
Content []byte
}
func parsePatchTextFilePayloads(payloadsRaw [][]byte) ([]patchTextFileReplacement, error) {
replacements := []patchTextFileReplacement{}
for i := range payloadsRaw {
replacement, err := parsePatchTextFilePayload(payloadsRaw[i])
if err != nil {
return nil, err
}
replacements = append(replacements, replacement)
}
return replacements, nil
}
// parsePatchTextFilePayload parses the payload for a PATCH_TEXT_FILE action:
//
// <First Line to omit>:<First line to include again>\0<Replacement>
//
// Examples:
//
// `1:2\0some new line`
// `1:eof\0some new line\n`
// `1:1\0some new line\nsome other line`
func parsePatchTextFilePayload(payloadRaw []byte) (patchTextFileReplacement, error) {
lineInfo, replacement, ok := bytes.Cut(payloadRaw, []byte{0})
if !ok {
return patchTextFileReplacement{}, errors.InvalidArgument("Payload format is missing the content separator.")
}
startBytes, endBytes, ok := bytes.Cut(lineInfo, []byte{':'})
if !ok {
return patchTextFileReplacement{}, errors.InvalidArgument(
"Payload is missing the line number separator.")
}
start, err := parseLineNumber(startBytes)
if err != nil {
return patchTextFileReplacement{}, errors.InvalidArgument("Payload start line number is invalid: %s", err)
}
end, err := parseLineNumber(endBytes)
if err != nil {
return patchTextFileReplacement{}, errors.InvalidArgument("Payload end line number is invalid: %s", err)
}
if end < start {
return patchTextFileReplacement{}, errors.InvalidArgument("Payload end line has to be at least as big as start line.")
}
return patchTextFileReplacement{
OmitFrom: start,
ContinueFrom: end,
Content: replacement,
}, nil
}
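Putting the pieces together, a hedged in-package sketch (a hypothetical test) of the path from a raw PATCH_TEXT_FILE payload to the patched content; the `1:2` payload follows the format documented above, and the file content is made up for illustration:
package sharedrepo

import (
	"bytes"
	"testing"

	"github.com/harness/gitness/git/parser"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func Test_patchTextFilePayloadRoundtripSketch(t *testing.T) {
	// "1:2\0some new line" - omit line 1, continue with line 2, insert the replacement content.
	replacements, err := parsePatchTextFilePayloads([][]byte{[]byte("1:2\u0000some new line")})
	require.NoError(t, err)

	scanner, lineEnding, err := parser.ReadTextFile(bytes.NewReader([]byte("old line\nkept line")), nil)
	require.NoError(t, err)

	out := bytes.Buffer{}
	err = patchTextFileWritePatchedFile(scanner, replacements, lineEnding, &out)
	require.NoError(t, err)
	assert.Equal(t, "some new line\nkept line", out.String())
}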

View File

@ -0,0 +1,776 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sharedrepo
import (
"bytes"
"reflect"
"testing"
"github.com/harness/gitness/git/parser"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func Test_parsePatchTextFilePayloads(t *testing.T) {
tests := []struct {
name string
arg [][]byte
wantErr string
want []patchTextFileReplacement
}{
{
name: "test no payloads",
arg: nil,
want: []patchTextFileReplacement{},
},
{
name: "test no zero byte splitter",
arg: [][]byte{
[]byte("0:1"),
},
wantErr: "Payload format is missing the content separator",
},
{
name: "test line range wrong format",
arg: [][]byte{
[]byte("0\u0000"),
},
wantErr: "Payload is missing the line number separator",
},
{
name: "test start line error returned",
arg: [][]byte{
[]byte("0:1\u0000"),
},
wantErr: "Payload start line number is invalid",
},
{
name: "test end line error returned",
arg: [][]byte{
[]byte("1:a\u0000"),
},
wantErr: "Payload end line number is invalid",
},
{
name: "test end smaller than start",
arg: [][]byte{
[]byte("2:1\u0000"),
},
wantErr: "Payload end line has to be at least as big as start line",
},
{
name: "payload empty",
arg: [][]byte{
[]byte("1:2\u0000"),
},
want: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 2,
Content: []byte{},
},
},
},
{
name: "payload non-empty with zero byte and line endings",
arg: [][]byte{
[]byte("1:eof\u0000a\nb\r\nc\u0000d"),
},
want: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: lineNumberEOF,
Content: []byte("a\nb\r\nc\u0000d"),
},
},
},
{
name: "multiple payloads",
arg: [][]byte{
[]byte("1:3\u0000a"),
[]byte("2:eof\u0000b"),
},
want: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 3,
Content: []byte("a"),
},
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte("b"),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := parsePatchTextFilePayloads(tt.arg)
if tt.wantErr != "" {
assert.ErrorContains(t, err, tt.wantErr, "error doesn't match expected.")
} else if !reflect.DeepEqual(got, tt.want) {
t.Errorf("parsePatchTextFilePayloads() = %s, want %s", got, tt.want)
}
})
}
}
func Test_patchTextFileWritePatchedFile(t *testing.T) {
type arg struct {
file []byte
replacements []patchTextFileReplacement
}
tests := []struct {
name string
arg arg
wantErr string
want []byte
wantLE string
}{
{
name: "test no replacements (empty file)",
arg: arg{
file: []byte(""),
replacements: nil,
},
wantLE: "\n",
want: nil,
},
{
name: "test no replacements (single line no line ending)",
arg: arg{
file: []byte("l1"),
replacements: nil,
},
wantLE: "\n",
want: []byte("l1"),
},
{
name: "test no replacements keeps final line ending (LF)",
arg: arg{
file: []byte("l1\n"),
replacements: nil,
},
wantLE: "\n",
want: []byte("l1\n"),
},
{
name: "test no replacements keeps final line ending (CRLF)",
arg: arg{
file: []byte("l1\r\n"),
replacements: nil,
},
wantLE: "\r\n",
want: []byte("l1\r\n"),
},
{
name: "test no replacements multiple line endings",
arg: arg{
file: []byte("l1\r\nl2\nl3"),
replacements: nil,
},
wantLE: "\r\n",
want: []byte("l1\r\nl2\nl3"),
},
{
name: "test line ending correction with replacements (LF)",
arg: arg{
file: []byte("l1\nl2\r\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("rl1\nrl2\r\nrl3"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1\nrl2\nrl3\nl2\r\nl3"),
},
{
name: "test line ending correction with replacements (CRLF)",
arg: arg{
file: []byte("l1\r\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("rl1\nrl2\r\nrl3"),
},
},
},
wantLE: "\r\n",
want: []byte("l1\r\nrl1\r\nrl2\r\nrl3\r\nl2\nl3"),
},
{
name: "test line ending with replacements at eof (file none, replacement none)",
arg: arg{
file: []byte("l1\nl2"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1"),
},
{
name: "test line ending with replacements at eof (file none, replacement yes)",
arg: arg{
file: []byte("l1\nl2"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1\r\n"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1\n"),
},
{
name: "test line ending with replacements at eof (file yes, replacement none)",
arg: arg{
file: []byte("l1\nl2\r\n"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1"),
},
{
name: "test line ending with replacements at eof (file yes, replacement yes)",
arg: arg{
file: []byte("l1\nl2\r\n"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1\r\n"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1\n"),
},
{
name: "test final line ending doesn't increase line count",
arg: arg{
file: []byte("l1\n"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 3,
ContinueFrom: 3,
Content: []byte("rl1\r\n"),
},
},
},
wantErr: "Patch action for [3,3) is exceeding end of file with 1 line(s)",
},
{
name: "test replacement out of bounds (start)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 3,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1\r\n"),
},
},
},
wantErr: "Patch action for [3,eof) is exceeding end of file with 1 line(s)",
},
{
name: "test replacement out of bounds (end)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 3,
Content: []byte("rl1\r\n"),
},
},
},
wantErr: "Patch action for [1,3) is exceeding end of file with 1 line(s)",
},
{
name: "test replacement out of bounds (after eof)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1\r\n"),
},
{
OmitFrom: 2,
ContinueFrom: 3,
Content: []byte("rl1\r\n"),
},
},
},
wantErr: "Patch action for [2,3) is exceeding end of file with 1 line(s)",
},
{
name: "test replacement out of bounds (after last line)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 2,
Content: []byte("rl1\r\n"),
},
{
OmitFrom: 3,
ContinueFrom: 4,
Content: []byte("rl1\r\n"),
},
},
},
wantErr: "Patch action for [3,4) is exceeding end of file with 1 line(s)",
},
{
name: "test overlap before eof (with empty)",
arg: arg{
file: []byte(""),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 3,
Content: []byte(""),
},
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte(""),
},
},
},
wantErr: "Patch actions have conflicting ranges [1,3)x[2,2)",
},
{
name: "test overlap before eof (non-empty + unordered)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 3,
Content: []byte(""),
},
{
OmitFrom: 1,
ContinueFrom: 3,
Content: []byte(""),
},
},
},
wantErr: "Patch actions have conflicting ranges [1,3)x[2,3)",
},
{
name: "test overlap before eof (non-empty eof end)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 3,
Content: []byte(""),
},
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte(""),
},
},
},
wantErr: "Patch actions have conflicting ranges [1,3)x[2,eof)",
},
{
name: "test overlap after eof (empty)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: lineNumberEOF,
Content: []byte(""),
},
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte(""),
},
},
},
wantErr: "Patch actions have conflicting ranges [1,eof)x[2,2) for file with 3 line(s)",
},
{
name: "test overlap after eof (non-empty + unordered)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 3,
Content: []byte(""),
},
{
OmitFrom: 1,
ContinueFrom: lineNumberEOF,
Content: []byte(""),
},
},
},
wantErr: "Patch actions have conflicting ranges [1,eof)x[2,3) for file with 3 line(s)",
},
{
name: "test overlap after eof (non-empty eof end)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: lineNumberEOF,
Content: []byte(""),
},
{
OmitFrom: 1,
ContinueFrom: lineNumberEOF,
Content: []byte(""),
},
},
},
wantErr: "Patch actions have conflicting ranges [1,eof)x[2,eof) for file with 3 line(s)",
},
{
name: "test insert (empty)",
arg: arg{
file: nil,
replacements: []patchTextFileReplacement{
{
OmitFrom: lineNumberEOF,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1\r\nrl2"),
},
},
},
wantLE: "\n",
want: []byte("rl1\nrl2"),
},
{
name: "test insert (start)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 1,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("rl1\nl1"),
},
{
name: "test insert (middle)",
arg: arg{
file: []byte("l1\nl2"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1\nl2"),
},
{
name: "test insert (end)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1"),
},
{
name: "test insert (eof)",
arg: arg{
file: []byte("l1"),
replacements: []patchTextFileReplacement{
{
OmitFrom: lineNumberEOF,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1"),
},
{
name: "test inserts (multiple at start+middle+end(normal+eof))",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 1,
Content: []byte("r1l1\nr1l2"),
},
{
OmitFrom: 1,
ContinueFrom: 1,
Content: []byte("r2l1\nr2l2"),
},
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("r3l1\nr3l2"),
},
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("r4l1\nr4l2"),
},
{
OmitFrom: 4,
ContinueFrom: 4,
Content: []byte("r5l1\nr5l2"),
},
{
OmitFrom: 4,
ContinueFrom: 4,
Content: []byte("r6l1\nr6l2"),
},
{
OmitFrom: lineNumberEOF,
ContinueFrom: lineNumberEOF,
Content: []byte("r7l1\nr7l2"),
},
{
OmitFrom: lineNumberEOF,
ContinueFrom: lineNumberEOF,
Content: []byte("r8l1\nr8l2"),
},
},
},
wantLE: "\n",
want: []byte(
"r1l1\nr1l2\nr2l1\nr2l2\nl1\nr3l1\nr3l2\nr4l1\nr4l2\nl2\nl3\nr5l1\nr5l2\nr6l1\nr6l2\nr7l1\nr7l2\nr8l1\nr8l2"),
},
{
name: "test replace (head)",
arg: arg{
file: []byte("l1\nl2"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 2,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("rl1\nl2"),
},
{
name: "test replace (middle)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 2,
ContinueFrom: 3,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nrl1\nl3"),
},
{
name: "test replace (end)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 3,
ContinueFrom: 4,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nl2\nrl1"),
},
{
name: "test replace (eof)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 3,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("l1\nl2\nrl1"),
},
{
name: "test replace (1-end)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: 4,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("rl1"),
},
{
name: "test replace (1-eof)",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 1,
ContinueFrom: lineNumberEOF,
Content: []byte("rl1"),
},
},
},
wantLE: "\n",
want: []byte("rl1"),
},
{
name: "test sorting",
arg: arg{
file: []byte("l1\nl2\nl3"),
replacements: []patchTextFileReplacement{
{
OmitFrom: 4,
ContinueFrom: 4,
Content: []byte("r5l1\nr5l2\r\n"),
},
{
OmitFrom: 4,
ContinueFrom: lineNumberEOF,
Content: []byte("r7l1\nr7l2\r\n"),
},
{
OmitFrom: 1,
ContinueFrom: 1,
Content: []byte("r0l1\nr0l2\r\n"),
},
{
OmitFrom: 2,
ContinueFrom: 4,
Content: []byte("r4l1\nr4l2\r\n"),
},
{
OmitFrom: 4,
ContinueFrom: 4,
Content: []byte("r6l1\nr6l2\r\n"),
},
{
OmitFrom: 1,
ContinueFrom: 2,
Content: []byte("r2l1\nr2l2\r\n"),
},
{
OmitFrom: 1,
ContinueFrom: 1,
Content: []byte("r1l1\nr1l2\r\n"),
},
{
OmitFrom: lineNumberEOF,
ContinueFrom: lineNumberEOF,
Content: []byte("r9l1\nr9l2\r\n"),
},
{
OmitFrom: 4,
ContinueFrom: lineNumberEOF,
Content: []byte("r8l1\nr8l2\r\n"),
},
{
OmitFrom: 2,
ContinueFrom: 2,
Content: []byte("r3l1\nr3l2\r\n"),
},
{
OmitFrom: lineNumberEOF,
ContinueFrom: lineNumberEOF,
Content: []byte("r10l1\nr10l2\r\n"),
},
},
},
wantLE: "\n",
want: []byte("r0l1\nr0l2\nr1l1\nr1l2\nr2l1\nr2l2\nr3l1\nr3l2\nr4l1\nr4l2\nr5l1\nr5l2\nr6l1\nr6l2\nr7l1\nr7l2" +
"\nr8l1\nr8l2\nr9l1\nr9l2\nr10l1\nr10l2\n"),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scanner, le, err := parser.ReadTextFile(bytes.NewReader(tt.arg.file), nil)
require.NoError(t, err, "failed to read input file")
writer := bytes.Buffer{}
err = patchTextFileWritePatchedFile(scanner, tt.arg.replacements, le, &writer)
got := writer.Bytes()
if tt.wantErr != "" {
assert.ErrorContains(t, err, tt.wantErr, "error doesn't match expected.")
} else {
assert.Equal(t, tt.wantLE, le, "line ending doesn't match")
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("patchTextFileWritePatchedFile() = %q, want %q", string(got), string(tt.want))
}
}
})
}
}

View File

@ -44,7 +44,7 @@ const (
type TreeNode struct {
Type TreeNodeType
Mode TreeNodeMode
SHA string
SHA string // TODO: make sha.SHA
Name string
Path string
}

View File

@ -17,20 +17,11 @@ package types
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"github.com/harness/gitness/types/enum"
)
var (
// jsonRawMessageNullBytes represents the byte array that's equivalent to a nil json.RawMessage.
jsonRawMessageNullBytes = []byte("null")
// ErrNoPayload is returned in case the activity doesn't have any payload set.
ErrNoPayload = errors.New("activity has no payload")
)
// PullReqActivity represents a pull request activity.
type PullReqActivity struct {
ID int64 `json:"id"`
@ -53,9 +44,9 @@ type PullReqActivity struct {
Type enum.PullReqActivityType `json:"type"`
Kind enum.PullReqActivityKind `json:"kind"`
Text string `json:"text"`
PayloadRaw json.RawMessage `json:"payload"`
Metadata map[string]interface{} `json:"metadata"`
Text string `json:"text"`
PayloadRaw json.RawMessage `json:"payload"`
Metadata *PullReqActivityMetadata `json:"metadata"`
ResolvedBy *int64 `json:"-"` // not returned, because the resolver info is in the Resolver field
Resolved *int64 `json:"resolved,omitempty"`
@ -149,6 +140,21 @@ func (a *PullReqActivity) GetPayload() (PullReqActivityPayload, error) {
return payload, nil
}
// UpdateMetadata updates the metadata with the provided options.
func (a *PullReqActivity) UpdateMetadata(updates ...PullReqActivityMetadataUpdate) {
if a.Metadata == nil {
a.Metadata = &PullReqActivityMetadata{}
}
for _, update := range updates {
update.apply(a.Metadata)
}
if a.Metadata.IsEmpty() {
a.Metadata = nil
}
}
// PullReqActivityFilter stores pull request activity query parameters.
type PullReqActivityFilter struct {
After int64 `json:"after"`
@ -158,121 +164,3 @@ type PullReqActivityFilter struct {
Types []enum.PullReqActivityType `json:"type"`
Kinds []enum.PullReqActivityKind `json:"kind"`
}
// PullReqActivityPayload is an interface used to identify PR activity payload types.
// The approach is inspired by what protobuf is doing for oneof.
type PullReqActivityPayload interface {
// ActivityType returns the pr activity type the payload is meant for.
// NOTE: this allows us to do easy payload type verification without any kind of reflection.
ActivityType() enum.PullReqActivityType
}
// activityPayloadFactoryMethod is an alias for a function that creates a new PullReqActivityPayload.
// NOTE: this is used to create new instances for activities on the fly (to avoid reflection)
// NOTE: we could add new() to PullReqActivityPayload interface, but it shouldn't be the payloads' responsibility.
type activityPayloadFactoryMethod func() PullReqActivityPayload
// allPullReqActivityPayloads is a map that contains the payload factory methods for all activity types with payload.
var allPullReqActivityPayloads = func(
factoryMethods []activityPayloadFactoryMethod,
) map[enum.PullReqActivityType]activityPayloadFactoryMethod {
payloadMap := make(map[enum.PullReqActivityType]activityPayloadFactoryMethod)
for _, factoryMethod := range factoryMethods {
payloadMap[factoryMethod().ActivityType()] = factoryMethod
}
return payloadMap
}([]activityPayloadFactoryMethod{
func() PullReqActivityPayload { return PullRequestActivityPayloadComment{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadCodeComment{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadMerge{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadStateChange{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadTitleChange{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadReviewSubmit{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadBranchUpdate{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadBranchDelete{} },
})
// newPayloadForActivity returns a new payload instance for the requested activity type.
func newPayloadForActivity(t enum.PullReqActivityType) (PullReqActivityPayload, error) {
payloadFactoryMethod, ok := allPullReqActivityPayloads[t]
if !ok {
return nil, fmt.Errorf("pr activity type '%s' doesn't have a payload", t)
}
return payloadFactoryMethod(), nil
}
type PullRequestActivityPayloadComment struct{}
func (a PullRequestActivityPayloadComment) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeComment
}
type PullRequestActivityPayloadCodeComment struct {
Title string `json:"title"`
Lines []string `json:"lines"`
LineStartNew bool `json:"line_start_new"`
LineEndNew bool `json:"line_end_new"`
}
func (a *PullRequestActivityPayloadCodeComment) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeCodeComment
}
type PullRequestActivityPayloadMerge struct {
MergeMethod enum.MergeMethod `json:"merge_method"`
MergeSHA string `json:"merge_sha"`
TargetSHA string `json:"target_sha"`
SourceSHA string `json:"source_sha"`
RulesBypassed bool `json:"rules_bypassed,omitempty"`
}
func (a *PullRequestActivityPayloadMerge) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeMerge
}
type PullRequestActivityPayloadStateChange struct {
Old enum.PullReqState `json:"old"`
New enum.PullReqState `json:"new"`
OldDraft bool `json:"old_draft"`
NewDraft bool `json:"new_draft"`
}
func (a *PullRequestActivityPayloadStateChange) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeStateChange
}
type PullRequestActivityPayloadTitleChange struct {
Old string `json:"old"`
New string `json:"new"`
}
func (a *PullRequestActivityPayloadTitleChange) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeTitleChange
}
type PullRequestActivityPayloadReviewSubmit struct {
CommitSHA string `json:"commit_sha"`
Decision enum.PullReqReviewDecision `json:"decision"`
}
func (a *PullRequestActivityPayloadReviewSubmit) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeReviewSubmit
}
type PullRequestActivityPayloadBranchUpdate struct {
Old string `json:"old"`
New string `json:"new"`
}
func (a *PullRequestActivityPayloadBranchUpdate) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeBranchUpdate
}
type PullRequestActivityPayloadBranchDelete struct {
SHA string `json:"sha"`
}
func (a *PullRequestActivityPayloadBranchDelete) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeBranchDelete
}

View File

@ -0,0 +1,66 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
// PullReqActivityMetadata contains metadata related to pull request activity.
type PullReqActivityMetadata struct {
Suggestions *PullReqActivitySuggestionsMetadata `json:"suggestions,omitempty"`
}
func (m *PullReqActivityMetadata) IsEmpty() bool {
// WARNING: This only works as long as there are no non-comparable fields in the struct.
return m == nil || *m == PullReqActivityMetadata{}
}
type PullReqActivityMetadataUpdate interface {
apply(m *PullReqActivityMetadata)
}
type pullReqActivityMetadataUpdateFunc func(m *PullReqActivityMetadata)
func (f pullReqActivityMetadataUpdateFunc) apply(m *PullReqActivityMetadata) {
f(m)
}
func WithPullReqActivityMetadataUpdate(f func(m *PullReqActivityMetadata)) PullReqActivityMetadataUpdate {
return pullReqActivityMetadataUpdateFunc(f)
}
// PullReqActivitySuggestionsMetadata contains metadata for code comment suggestions.
type PullReqActivitySuggestionsMetadata struct {
CheckSums []string `json:"check_sums,omitempty"`
AppliedCheckSum string `json:"applied_check_sum,omitempty"`
AppliedCommitSHA string `json:"applied_commit_sha,omitempty"`
}
func (m *PullReqActivitySuggestionsMetadata) IsEmpty() bool {
return len(m.CheckSums) == 0 && m.AppliedCheckSum == "" && m.AppliedCommitSHA == ""
}
func WithPullReqActivitySuggestionsMetadataUpdate(
f func(m *PullReqActivitySuggestionsMetadata),
) PullReqActivityMetadataUpdate {
return pullReqActivityMetadataUpdateFunc(func(m *PullReqActivityMetadata) {
if m.Suggestions == nil {
m.Suggestions = &PullReqActivitySuggestionsMetadata{}
}
f(m.Suggestions)
if m.Suggestions.IsEmpty() {
m.Suggestions = nil
}
})
}
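A hedged usage sketch of the update pattern above (a hypothetical external test file; the check sum and commit SHA values are made up): recording an applied suggestion on an activity, and showing that clearing all fields collapses the metadata back to nil:
package types_test

import (
	"fmt"

	"github.com/harness/gitness/types"
)

func ExamplePullReqActivity_UpdateMetadata() {
	activity := types.PullReqActivity{}

	activity.UpdateMetadata(types.WithPullReqActivitySuggestionsMetadataUpdate(
		func(m *types.PullReqActivitySuggestionsMetadata) {
			m.AppliedCheckSum = "abc123"        // hypothetical suggestion check sum
			m.AppliedCommitSHA = "1234567890ab" // hypothetical commit SHA
		},
	))
	fmt.Println(activity.Metadata.Suggestions.AppliedCheckSum)

	// clearing all fields collapses the metadata back to nil.
	activity.UpdateMetadata(types.WithPullReqActivitySuggestionsMetadataUpdate(
		func(m *types.PullReqActivitySuggestionsMetadata) {
			*m = types.PullReqActivitySuggestionsMetadata{}
		},
	))
	fmt.Println(activity.Metadata == nil)
	// Output:
	// abc123
	// true
}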

View File

@ -0,0 +1,148 @@
// Copyright 2023 Harness, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import (
"errors"
"fmt"
"github.com/harness/gitness/types/enum"
)
var (
// jsonRawMessageNullBytes represents the byte array that's equivalent to a nil json.RawMessage.
jsonRawMessageNullBytes = []byte("null")
// ErrNoPayload is returned in case the activity doesn't have any payload set.
ErrNoPayload = errors.New("activity has no payload")
)
// PullReqActivityPayload is an interface used to identify PR activity payload types.
// The approach is inspired by what protobuf is doing for oneof.
type PullReqActivityPayload interface {
// ActivityType returns the pr activity type the payload is meant for.
// NOTE: this allows us to do easy payload type verification without any kind of reflection.
ActivityType() enum.PullReqActivityType
}
// activityPayloadFactoryMethod is an alias for a function that creates a new PullReqActivityPayload.
// NOTE: this is used to create new instances for activities on the fly (to avoid reflection)
// NOTE: we could add new() to PullReqActivityPayload interface, but it shouldn't be the payloads' responsibility.
type activityPayloadFactoryMethod func() PullReqActivityPayload
// allPullReqActivityPayloads is a map that contains the payload factory methods for all activity types with payload.
var allPullReqActivityPayloads = func(
factoryMethods []activityPayloadFactoryMethod,
) map[enum.PullReqActivityType]activityPayloadFactoryMethod {
payloadMap := make(map[enum.PullReqActivityType]activityPayloadFactoryMethod)
for _, factoryMethod := range factoryMethods {
payloadMap[factoryMethod().ActivityType()] = factoryMethod
}
return payloadMap
}([]activityPayloadFactoryMethod{
func() PullReqActivityPayload { return PullRequestActivityPayloadComment{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadCodeComment{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadMerge{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadStateChange{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadTitleChange{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadReviewSubmit{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadBranchUpdate{} },
func() PullReqActivityPayload { return &PullRequestActivityPayloadBranchDelete{} },
})
// newPayloadForActivity returns a new payload instance for the requested activity type.
func newPayloadForActivity(t enum.PullReqActivityType) (PullReqActivityPayload, error) {
payloadFactoryMethod, ok := allPullReqActivityPayloads[t]
if !ok {
return nil, fmt.Errorf("pr activity type '%s' doesn't have a payload", t)
}
return payloadFactoryMethod(), nil
}
type PullRequestActivityPayloadComment struct{}
func (a PullRequestActivityPayloadComment) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeComment
}
type PullRequestActivityPayloadCodeComment struct {
Title string `json:"title"`
Lines []string `json:"lines"`
LineStartNew bool `json:"line_start_new"`
LineEndNew bool `json:"line_end_new"`
}
func (a *PullRequestActivityPayloadCodeComment) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeCodeComment
}
type PullRequestActivityPayloadMerge struct {
MergeMethod enum.MergeMethod `json:"merge_method"`
MergeSHA string `json:"merge_sha"`
TargetSHA string `json:"target_sha"`
SourceSHA string `json:"source_sha"`
RulesBypassed bool `json:"rules_bypassed,omitempty"`
}
func (a *PullRequestActivityPayloadMerge) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeMerge
}
type PullRequestActivityPayloadStateChange struct {
Old enum.PullReqState `json:"old"`
New enum.PullReqState `json:"new"`
OldDraft bool `json:"old_draft"`
NewDraft bool `json:"new_draft"`
}
func (a *PullRequestActivityPayloadStateChange) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeStateChange
}
type PullRequestActivityPayloadTitleChange struct {
Old string `json:"old"`
New string `json:"new"`
}
func (a *PullRequestActivityPayloadTitleChange) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeTitleChange
}
type PullRequestActivityPayloadReviewSubmit struct {
CommitSHA string `json:"commit_sha"`
Decision enum.PullReqReviewDecision `json:"decision"`
}
func (a *PullRequestActivityPayloadReviewSubmit) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeReviewSubmit
}
type PullRequestActivityPayloadBranchUpdate struct {
Old string `json:"old"`
New string `json:"new"`
}
func (a *PullRequestActivityPayloadBranchUpdate) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeBranchUpdate
}
type PullRequestActivityPayloadBranchDelete struct {
SHA string `json:"sha"`
}
func (a *PullRequestActivityPayloadBranchDelete) ActivityType() enum.PullReqActivityType {
return enum.PullReqActivityTypeBranchDelete
}