// Copyright 2019 The Gitea Authors.
// All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
	"bufio"
	"context"
	"errors"
	"io"
	"strconv"
	"sync"

	"code.gitea.io/gitea/models/db"
	git_model "code.gitea.io/gitea/models/git"
	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/modules/git/pipeline"
	"code.gitea.io/gitea/modules/lfs"
	"code.gitea.io/gitea/modules/log"
)
// LFSPush pushes lfs objects referred to in new commits in the head repository from the base repository
func LFSPush(ctx context.Context, tmpBasePath, mergeHeadSHA, mergeBaseSHA string, pr *issues_model.PullRequest) error {
	// Now we have to implement git lfs push
	// git rev-list --objects --filter=blob:limit=1k HEAD --not base
	// pass blob shas in to git cat-file --batch-check (possibly unnecessary)
	// ensure only blobs and <=1k size then pass in to git cat-file --batch
	// to read each sha and check each as a pointer
	// Then if they are lfs -> add them to the baseRepo
	//
	// The stages below form a pipeline: each pipe's writer is fed by one stage
	// and drained by the next; a failing stage closes its pipe(s) with the
	// error so the failure propagates through the whole chain.
	revListReader, revListWriter := io.Pipe()
	shasToCheckReader, shasToCheckWriter := io.Pipe()
	catFileCheckReader, catFileCheckWriter := io.Pipe()
	shasToBatchReader, shasToBatchWriter := io.Pipe()
	catFileBatchReader, catFileBatchWriter := io.Pipe()
	errChan := make(chan error, 1)
	wg := sync.WaitGroup{}
	// One Add per pipeline stage started below; each stage calls wg.Done().
	wg.Add(6)
	// Create the go-routines in reverse order.

	// 6. Take the output of cat-file --batch and check if each file in turn
	// to see if they're pointers to files in the LFS store associated with
	// the head repo and add them to the base repo if so
	// NOTE(review): this stage receives db.DefaultContext rather than the
	// caller's ctx, so it will not observe cancellation of ctx — confirm
	// whether this is intentional or should be switched to ctx.
	go createLFSMetaObjectsFromCatFileBatch(db.DefaultContext, catFileBatchReader, &wg, pr)

	// 5. Take the shas of the blobs and batch read them
	go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, tmpBasePath)

	// 4. From the provided objects restrict to blobs <=1k
	go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg)

	// 3. Run batch-check on the objects retrieved from rev-list
	go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, tmpBasePath)

	// 2. Check each object retrieved rejecting those without names as they will be commits or trees
	go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg)

	// 1. Run rev-list objects from mergeHead to mergeBase
	go pipeline.RevListObjects(ctx, revListWriter, &wg, tmpBasePath, mergeHeadSHA, mergeBaseSHA, errChan)

	wg.Wait()
	// Only the rev-list stage reports via errChan (buffered, capacity 1); the
	// other stages signal failure by closing their pipes, so a non-blocking
	// receive here is sufficient.
	select {
	case err, has := <-errChan:
		if has {
			return err
		}
	default:
	}
	return nil
}
2023-10-14 11:37:24 +03:00
func createLFSMetaObjectsFromCatFileBatch ( ctx context . Context , catFileBatchReader * io . PipeReader , wg * sync . WaitGroup , pr * issues_model . PullRequest ) {
2019-06-22 20:35:34 +03:00
defer wg . Done ( )
defer catFileBatchReader . Close ( )
2021-04-09 01:25:57 +03:00
contentStore := lfs . NewContentStore ( )
2019-06-22 20:35:34 +03:00
bufferedReader := bufio . NewReader ( catFileBatchReader )
buf := make ( [ ] byte , 1025 )
for {
// File descriptor line: sha
_ , err := bufferedReader . ReadString ( ' ' )
if err != nil {
_ = catFileBatchReader . CloseWithError ( err )
break
}
// Throw away the blob
if _ , err := bufferedReader . ReadString ( ' ' ) ; err != nil {
_ = catFileBatchReader . CloseWithError ( err )
break
}
sizeStr , err := bufferedReader . ReadString ( '\n' )
if err != nil {
_ = catFileBatchReader . CloseWithError ( err )
break
}
size , err := strconv . Atoi ( sizeStr [ : len ( sizeStr ) - 1 ] )
if err != nil {
_ = catFileBatchReader . CloseWithError ( err )
break
}
pointerBuf := buf [ : size + 1 ]
if _ , err := io . ReadFull ( bufferedReader , pointerBuf ) ; err != nil {
_ = catFileBatchReader . CloseWithError ( err )
break
}
pointerBuf = pointerBuf [ : size ]
// Now we need to check if the pointerBuf is an LFS pointer
2021-04-09 01:25:57 +03:00
pointer , _ := lfs . ReadPointerFromBuffer ( pointerBuf )
if ! pointer . IsValid ( ) {
2019-06-22 20:35:34 +03:00
continue
}
2021-04-09 01:25:57 +03:00
exist , _ := contentStore . Exists ( pointer )
if ! exist {
continue
}
2019-06-22 20:35:34 +03:00
// Then we need to check that this pointer is in the db
2023-10-14 11:37:24 +03:00
if _ , err := git_model . GetLFSMetaObjectByOid ( ctx , pr . HeadRepoID , pointer . Oid ) ; err != nil {
2022-06-12 18:51:54 +03:00
if err == git_model . ErrLFSObjectNotExist {
2019-06-22 20:35:34 +03:00
log . Warn ( "During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v" , pr . Index , pr . BaseRepo , pointer . Oid , pr . HeadRepo )
continue
}
_ = catFileBatchReader . CloseWithError ( err )
break
}
// OK we have a pointer that is associated with the head repo
// and is actually a file in the LFS
// Therefore it should be associated with the base repo
2023-12-07 10:27:36 +03:00
if _ , err := git_model . NewLFSMetaObject ( ctx , pr . BaseRepoID , pointer ) ; err != nil {
2019-06-22 20:35:34 +03:00
_ = catFileBatchReader . CloseWithError ( err )
break
}
}
}