Add basic file upload retry with a hardcoded limit

This commit is contained in:
Gabe Kangas 2020-06-17 22:01:53 -07:00
parent 728b5fe3d7
commit e63d0656ba
4 changed files with 10 additions and 6 deletions

View file

@ -2,6 +2,6 @@ package main
// ChunkStorage is the interface a storage backend must implement to
// persist HLS video segments and rewrite playlists to point at the
// remotely stored copies.
type ChunkStorage interface {
	// Setup initializes the backend from the application config.
	Setup(config Config)
	// Save uploads the file at filePath and returns its remote location.
	// retryCount is the number of attempts already made; callers start
	// at 0 and implementations may re-invoke Save with retryCount+1 up
	// to a hardcoded limit.
	Save(filePath string, retryCount int) string
	// GenerateRemotePlaylist rewrites the given playlist for the variant
	// so segment URLs reference remote storage.
	GenerateRemotePlaylist(playlist string, variant Variant) string
}

View file

@ -52,7 +52,7 @@ func (s *IPFSStorage) Setup(config Config) {
s.createIPFSDirectory("./hls")
}
func (s *IPFSStorage) Save(filePath string) string {
func (s *IPFSStorage) Save(filePath string, retryCount int) string {
someFile, err := getUnixfsNode(filePath)
defer someFile.Close()

View file

@ -108,7 +108,7 @@ func monitorVideoContent(pathToMonitor string, configuration Config, storage Chu
newObjectPathChannel := make(chan string, 1)
go func() {
newObjectPath := storage.Save(path.Join(configuration.PrivateHLSPath, segment.RelativeUploadPath))
newObjectPath := storage.Save(path.Join(configuration.PrivateHLSPath, segment.RelativeUploadPath), 0)
newObjectPathChannel <- newObjectPath
}()
newObjectPath := <-newObjectPathChannel

View file

@ -36,14 +36,14 @@ func (s *S3Storage) Setup(configuration Config) {
s.sess = s.connectAWS()
}
func (s *S3Storage) Save(filePath string) string {
func (s *S3Storage) Save(filePath string, retryCount int) string {
// fmt.Println("Saving", filePath)
file, err := os.Open(filePath)
defer file.Close()
if err != nil {
log.Fatal(err)
log.Errorln(err)
}
uploader := s3manager.NewUploader(s.sess)
@ -55,7 +55,11 @@ func (s *S3Storage) Save(filePath string) string {
})
if err != nil {
panic(err)
log.Errorln(err)
if retryCount < 4 {
log.Println("Retrying...")
s.Save(filePath, retryCount+1)
}
}
// fmt.Println("Uploaded", filePath, "to", response.Location)