
Refactor UploadHandler and add ResumeUpload #166


Closed · wants to merge 21 commits

Commits (21)
0853905
feat(upload):refactor code for file upload/update/delete, and add res…
cnlangzi May 27, 2021
f8ed134
fix(gomod):removed go mod replace
cnlangzi May 27, 2021
dba705b
fix(merkle): renamed StreamingMerklHasher with StreamMerklHasher
cnlangzi May 28, 2021
280928b
fix(upload): updated fields on ResumeFileChange
cnlangzi Jun 3, 2021
8705b0f
Merge branch 'master' into feature/continuous_upload
cnlangzi Jun 3, 2021
b497a94
fix(devserver):#145 added devserver feature on blobber and validtor; …
cnlangzi Jun 4, 2021
afb6db2
fix(devserver):wrapped transaction.MakeSCRestAPICall with middleware
cnlangzi Jun 4, 2021
d6d85b9
fix(upload): fixed reload state of allocationChange from db
cnlangzi Jun 8, 2021
c66b448
fix(upload):added to improve build performance;enabled FullSaveAssoc…
cnlangzi Jun 9, 2021
aea90b5
fix(devserver):#145 added ./dev.local/cli.sh to run blobber & validat…
cnlangzi Jun 11, 2021
4721c8e
fix(devserver):#145 fixed typo
cnlangzi Jun 11, 2021
f8d70ff
fix(devserver):#145 added guideline link on README.md
cnlangzi Jun 11, 2021
4c861be
fix(devserver):#145 added build tags was missing
cnlangzi Jun 11, 2021
306b6dd
fix(devserver):#145 fixed bug on log_dir, files_dir and db_dir
cnlangzi Jun 11, 2021
c3161ef
fix(devserver):#145 added install debuggers
cnlangzi Jun 11, 2021
bfa366c
fix(devserver):#145 fixed typo
cnlangzi Jun 11, 2021
86a8ee4
fix(devserver):#145 fixed bugs on validator
cnlangzi Jun 11, 2021
2246567
Merge branch 'master' into feature/continuous_upload
cnlangzi Jun 11, 2021
9951887
fix(upload):0chain/blobber#111 disable go replace for gosdk
cnlangzi Jun 11, 2021
a1774d9
fix(upload):#111 drop devserver part
cnlangzi Jun 11, 2021
849936e
fix(upload):#111 drop devserver on cli.sh
cnlangzi Jun 11, 2021
25 changes: 2 additions & 23 deletions .gitignore
@@ -1,6 +1,7 @@
.DS_Store
.idea/
.vscode/
__debug_bin
**/client
!**/client/main.go
**/docker.local/blobber*
@@ -9,26 +10,4 @@
**/blobber/blobber/files
**/blobber/blobber/data
**/pkg/
docker.local/blobber1/data/badgerdb/blobberstate/000000.vlog
docker.local/blobber1/data/badgerdb/blobberstate/LOCK
docker.local/blobber1/files/7a7/a9b/206/177a1b97ac960fa1ed4f5d5bc925fd1821cf69d387165f402275bdf/refs/123/6e9/a3b/6c8bbc889679c68af4372b7d68e2f2343c03ae7aaa4b25fbb8257c3
docker.local/blobber1/files/bdf/b41/6eb/0cf7bd794d8c27fd840519ceb1d82a2207270b70f3f7c70b288c517/refs/123/6e9/a3b/6c8bbc889679c68af4372b7d68e2f2343c03ae7aaa4b25fbb8257c3
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/03d/67d/23a/b7ecc9af4f5bbfef03892ca2ec83138
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/164/731/b66/0a01c9212d23b19c8d6d9b5ad04d95d
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/262/451/a50/c1940645c1428309a4eb50349843e33
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/29a/f44/4b3/acad044ba91d6c6b2bff2f5156c6d36
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/484/7a7/d69/b0d4e90ed6c3ae8ea9e07afbda4dfa9
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/530/aaa/fb4/749ba793413bfd63bfa681d1ef76843
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/740/6ce/091/6703b9620afdd7a568d42fc2980f02d
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/873/b24/50a/26cf8f5c6e7fc0435351272603df08f
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/967/e7d/1f1/f472f222c60e31ce5a95ca92749d7d7
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/a66/228/e80/db0cbfd58778557fb6592da86837e3e
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/abc/954/6e7/687b12ef616ded07546fea91ba73042
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/bdd/772/f67/d87ee8bf3560d724ea030bf49790156
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/c19/186/b15/8a7f8c38530fec679cbdad1e43b1a3c
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/c2b/e1e/7c1/baec0d6299391c115b39187b249c15a
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/dd5/f91/f9f/2fb2b6c93e400f3cdc1e29654bb397d
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/objects/f4c/044/59a/1c3f87be1184b5b495592408f6708fe
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/refs/123/6e9/a3b/6c8bbc889679c68af4372b7d68e2f2343c03ae7aaa4b25fbb8257c3
docker.local/blobber1/files/e55/003/7d3/f56519dff06156f16a19a40389414f94cfd4c3a548f46c218cb484e/refs/388/12d/bd3/b084ff297706d7bd307996c3363c2df53acc49c97b83201f8dc7bd9
docker.local/blobber1/data/badgerdb/blobberstate/MANIFEST
dev.local/data
2 changes: 2 additions & 0 deletions README.md
@@ -21,6 +21,8 @@

- [Run blobber on ec2 / vm / bare metal over https](https://github.com/0chain/blobber/blob/master/https/README.md)

- [Blobber local development guideline](dev.local/README.md)

## Initial Setup


2 changes: 2 additions & 0 deletions code/go/0chain.net/blobber/.gitignore
@@ -0,0 +1,2 @@
blobber
Comment from @Sriep (Contributor), Jun 12, 2021:
Please remove.
Any scripts for building blobber should not use Go source directories for executables; please put them somewhere else.
If it's just your own private build for debugging, then you should keep it private and not push it to the main repository.

/config
7 changes: 6 additions & 1 deletion code/go/0chain.net/blobber/main.go
@@ -51,6 +51,7 @@ func initHandlers(r *mux.Router) {
}

func SetupWorkerConfig() {

config.Configuration.ContentRefWorkerFreq = viper.GetInt64("contentref_cleaner.frequency")
config.Configuration.ContentRefWorkerTolerance = viper.GetInt64("contentref_cleaner.tolerance")

@@ -223,11 +224,13 @@ func main() {
portString := flag.String("port", "", "port")
grpcPortString := flag.String("grpc_port", "", "grpc_port")
hostname := flag.String("hostname", "", "hostname")
configDir := flag.String("config_dir", "./config", "config_dir")

flag.Parse()

config.SetupDefaultConfig()
config.SetupConfig()

config.SetupConfig(*configDir)

config.Configuration.DeploymentMode = byte(*deploymentMode)

@@ -490,9 +493,11 @@ func SetupBlobberOnBC(logDir string) error {
zcncore.SetLogFile(logName, false)
zcncore.SetLogLevel(3)
if err := zcncore.InitZCNSDK(serverChain.BlockWorker, config.Configuration.SignatureScheme); err != nil {
log.Println("InitZCNSDK:", err)
return err
}
if err := zcncore.SetWalletInfo(node.Self.GetWalletString(), false); err != nil {
log.Println("SetWalletInfo:", err)
return err
}
go RegisterBlobber()
18 changes: 13 additions & 5 deletions code/go/0chain.net/blobbercore/allocation/allocationchange.go
@@ -20,6 +20,9 @@ const (
RENAME_OPERATION = "rename"
COPY_OPERATION = "copy"
UPDATE_ATTRS_OPERATION = "update_attrs"

// RESUME_OPERATION upload operation for INIT/APPEND/FINALIZE
RESUME_OPERATION = "resume"
)

const (
@@ -31,6 +34,7 @@

var OperationNotApplicable = common.NewError("operation_not_valid", "Not an applicable operation")

// AllocationChangeProcessor represents a request transaction for a file operation. It is persisted in postgres and can be rebuilt for the next http request (e.g. CommitHandler)
Reviewer comment (Contributor):
155 characters is quite long, maybe you could split the line.

type AllocationChangeProcessor interface {
CommitToFileStore(ctx context.Context) error
DeleteTempFile() error
@@ -67,6 +71,9 @@ func (AllocationChange) TableName() string {
return "allocation_changes"
}

// GetAllocationChanges reloads the connection's changes in an allocation from postgres.
// 1. sets the connection's status to NewConnection if connection_id is not found in postgres
// 2. marks the connection as NewConnection again if it is marked as DeleteConnection
func GetAllocationChanges(ctx context.Context, connectionID string, allocationID string, clientID string) (*AllocationChangeCollector, error) {
cc := &AllocationChangeCollector{}
db := datastore.GetStore().GetTransaction(ctx)
@@ -104,14 +111,13 @@ func (cc *AllocationChangeCollector) Save(ctx context.Context) error {
db := datastore.GetStore().GetTransaction(ctx)
if cc.Status == NewConnection {
cc.Status = InProgressConnection
err := db.Create(cc).Error
return err
} else {
err := db.Save(cc).Error
return err
return db.Create(cc).Error
}

return db.Session(&gorm.Session{FullSaveAssociations: true}).Updates(cc).Error
}
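For context on the FullSaveAssociations switch above: a plain Updates call only touches the collector row, while a session with FullSaveAssociations also writes the associated change rows back. Below is a minimal, hedged sketch of that GORM pattern using hypothetical Parent/Child types and an in-memory SQLite database, not the blobber's real models:

```go
package main

import (
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Parent and Child are hypothetical stand-ins for
// AllocationChangeCollector and AllocationChange.
type Parent struct {
	ID       uint
	Status   int
	Children []Child
}

type Child struct {
	ID       uint
	ParentID uint
	Input    string
}

func save(db *gorm.DB, p *Parent) error {
	// db.Updates(p) alone would only update the parent row;
	// FullSaveAssociations also upserts the associated Children.
	return db.Session(&gorm.Session{FullSaveAssociations: true}).Updates(p).Error
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	_ = db.AutoMigrate(&Parent{}, &Child{})

	p := &Parent{Status: 1, Children: []Child{{Input: "{}"}}}
	_ = db.Create(p).Error

	p.Status = 2
	p.Children[0].Input = `{"op":"resume"}`
	_ = save(db, p)
}
```

Under this pattern the NewConnection branch can keep calling Create, since Create inserts associations by default.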

// ComputeProperties unmarshals all change processors from postgres
func (cc *AllocationChangeCollector) ComputeProperties() {
cc.AllocationChanges = make([]AllocationChangeProcessor, 0, len(cc.Changes))
for _, change := range cc.Changes {
@@ -129,6 +135,8 @@ func (cc *AllocationChangeCollector) ComputeProperties() {
acp = new(CopyFileChange)
case UPDATE_ATTRS_OPERATION:
acp = new(AttributesChange)
case RESUME_OPERATION:
acp = new(ResumeFileChange)
}

if acp == nil {
62 changes: 35 additions & 27 deletions code/go/0chain.net/blobbercore/allocation/newfilechange.go
@@ -3,6 +3,7 @@ package allocation
import (
"context"
"encoding/json"
"fmt"
"path/filepath"
"strings"

@@ -14,33 +15,39 @@
)

type NewFileChange struct {
ConnectionID string `json:"connection_id" validation:"required"`
AllocationID string `json:"allocation_id"`
Filename string `json:"filename" validation:"required"`
ThumbnailFilename string `json:"thumbnail_filename"`
Path string `json:"filepath" validation:"required"`
Size int64 `json:"size"`
Hash string `json:"content_hash,omitempty"`
ThumbnailSize int64 `json:"thumbnail_size"`
ThumbnailHash string `json:"thumbnail_content_hash,omitempty"`
MerkleRoot string `json:"merkle_root,omitempty"`
ActualHash string `json:"actual_hash,omitempty" validation:"required"`
ActualSize int64 `json:"actual_size,omitempty" validation:"required"`
ActualThumbnailSize int64 `json:"actual_thumb_size"`
ActualThumbnailHash string `json:"actual_thumb_hash"`
MimeType string `json:"mimetype,omitempty"`
EncryptedKey string `json:"encrypted_key,omitempty"`
CustomMeta string `json:"custom_meta,omitempty"`
Attributes reference.Attributes `json:"attributes,omitempty"`

// IsResumable the request is resumable upload
IsResumable bool `json:"is_resumable,omitempty"`
// UploadLength indicates the size of the entire upload in bytes. The value MUST be a non-negative integer.
UploadLength int64 `json:"upload_length,omitempty"`
// Upload-Offset indicates a byte offset within a resource. The value MUST be a non-negative integer.
UploadOffset int64 `json:"upload_offset,omitempty"`
// IsFinal the request is final chunk
IsFinal bool `json:"is_final,omitempty"`
//client side: unmarshal them from 'updateMeta'/'uploadMeta'
ConnectionID string `json:"connection_id" validation:"required"`
//client side:
Reviewer comment (Contributor):
This style is confusing to read. I suggest grouping the members under one //client side comment (see the sketch after this struct).

Filename string `json:"filename" validation:"required"`
//client side:
Path string `json:"filepath" validation:"required"`
//client side:
ActualHash string `json:"actual_hash,omitempty" validation:"required"`
//client side:
ActualSize int64 `json:"actual_size,omitempty" validation:"required"`
//client side:
ActualThumbnailSize int64 `json:"actual_thumb_size"`
//client side:
ActualThumbnailHash string `json:"actual_thumb_hash"`
//client side:
MimeType string `json:"mimetype,omitempty"`
//client side:
Attributes reference.Attributes `json:"attributes,omitempty"`
//client side:
MerkleRoot string `json:"merkle_root,omitempty"`

//server side: update them by ChangeProcessor
AllocationID string `json:"allocation_id"`
//client side:
Hash string `json:"content_hash,omitempty"`
Size int64 `json:"size"`
//server side:
ThumbnailHash string `json:"thumbnail_content_hash,omitempty"`
ThumbnailSize int64 `json:"thumbnail_size"`
ThumbnailFilename string `json:"thumbnail_filename"`

EncryptedKey string `json:"encrypted_key,omitempty"`
CustomMeta string `json:"custom_meta,omitempty"`
}
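For what the reviewer's grouping suggestion could look like in practice, here is a rough, abbreviated sketch (field set trimmed; this is not the PR's actual layout):

```go
package allocation

// Abbreviated sketch of the grouping the review suggests: one block
// comment per origin instead of repeating //client side: on every field.
type NewFileChange struct {
	// client side: unmarshalled from 'uploadMeta'/'updateMeta'
	ConnectionID string `json:"connection_id" validation:"required"`
	Filename     string `json:"filename" validation:"required"`
	Path         string `json:"filepath" validation:"required"`
	ActualHash   string `json:"actual_hash,omitempty" validation:"required"`
	ActualSize   int64  `json:"actual_size,omitempty" validation:"required"`
	MimeType     string `json:"mimetype,omitempty"`

	// server side: filled in by the change processor
	AllocationID string `json:"allocation_id"`
	Hash         string `json:"content_hash,omitempty"`
	Size         int64  `json:"size"`

	EncryptedKey string `json:"encrypted_key,omitempty"`
	CustomMeta   string `json:"custom_meta,omitempty"`
}
```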

func (nf *NewFileChange) ProcessChange(ctx context.Context,
@@ -101,6 +108,7 @@ func (nf *NewFileChange) ProcessChange(ctx context.Context,
newFile.LookupHash = reference.GetReferenceLookup(dirRef.AllocationID, nf.Path)
newFile.Size = nf.Size
newFile.MimeType = nf.MimeType
fmt.Println(allocationRoot)
newFile.WriteMarker = allocationRoot
newFile.ThumbnailHash = nf.ThumbnailHash
newFile.ThumbnailSize = nf.ThumbnailSize
166 changes: 166 additions & 0 deletions code/go/0chain.net/blobbercore/allocation/resumefilechange.go
@@ -0,0 +1,166 @@
package allocation

import (
"context"
"encoding/json"
"fmt"
"path/filepath"
"strings"

"0chain.net/blobbercore/filestore"
"0chain.net/blobbercore/reference"
"0chain.net/blobbercore/stats"
"0chain.net/blobbercore/util"

"0chain.net/core/common"
gosdk "github.com/0chain/gosdk/core/util"
)

// ResumeFileChange file change processor for continuous upload in INIT/APPEND/FINALIZE
type ResumeFileChange struct {
NewFileChange

ShardHash string `json:"shard_hash,omitempty"`
MerkleHasher gosdk.StreamMerkleHasher `json:"hasher,omitempty"` // streaming merkle hasher to save current state of tree
IsFinal bool `json:"is_final,omitempty"` // whether the current chunk is the last one
ChunkIndex int `json:"chunk_index,omitempty"` // the sequence number of the current chunk; all chunks MUST be uploaded one by one because of the streaming merkle hash
UploadOffset int64 `json:"upload_offset,omitempty"` // the next position at which a new incoming chunk should be appended
}
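The ChunkIndex comment above says chunks MUST be uploaded one by one because the merkle hasher is streaming. As a hedged illustration of why order matters, the sketch below uses a hypothetical sequential hasher, not the real gosdk StreamMerkleHasher API: feeding the same chunks in a different order yields a different root.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// seqHasher is a hypothetical stand-in for a streaming merkle hasher:
// it only keeps a running digest, so leaves must arrive in order.
type seqHasher struct {
	state [32]byte
	count int
}

// Push folds the next chunk's hash into the running state. Feeding
// chunk i+1 before chunk i produces a different (wrong) root, which is
// why the blobber appends chunks strictly one by one.
func (h *seqHasher) Push(chunk []byte) {
	leaf := sha256.Sum256(chunk)
	h.state = sha256.Sum256(append(h.state[:], leaf[:]...))
	h.count++
}

func (h *seqHasher) Root() string { return hex.EncodeToString(h.state[:]) }

func main() {
	chunks := [][]byte{[]byte("chunk-0"), []byte("chunk-1"), []byte("chunk-2")}

	inOrder := &seqHasher{}
	for _, c := range chunks {
		inOrder.Push(c)
	}

	outOfOrder := &seqHasher{}
	outOfOrder.Push(chunks[1])
	outOfOrder.Push(chunks[0])
	outOfOrder.Push(chunks[2])

	fmt.Println("in order:    ", inOrder.Root())
	fmt.Println("out of order:", outOfOrder.Root()) // differs: order matters
}
```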

// ProcessChange updates references and creates a new FileRef
func (nf *ResumeFileChange) ProcessChange(ctx context.Context,
change *AllocationChange, allocationRoot string) (*reference.Ref, error) {

path, _ := filepath.Split(nf.Path)
path = filepath.Clean(path)
tSubDirs := reference.GetSubDirsFromPath(path)

rootRef, err := reference.GetReferencePath(ctx, nf.AllocationID, nf.Path)
if err != nil {
return nil, err
}

dirRef := rootRef
treelevel := 0
for {
found := false
for _, child := range dirRef.Children {
if child.Type == reference.DIRECTORY && treelevel < len(tSubDirs) {
if child.Name == tSubDirs[treelevel] {
dirRef = child
found = true
break
}
}
}
if found {
treelevel++
continue
}
if len(tSubDirs) > treelevel {
newRef := reference.NewDirectoryRef()
newRef.AllocationID = dirRef.AllocationID
newRef.Path = "/" + strings.Join(tSubDirs[:treelevel+1], "/")
newRef.ParentPath = "/" + strings.Join(tSubDirs[:treelevel], "/")
newRef.Name = tSubDirs[treelevel]
newRef.LookupHash = reference.GetReferenceLookup(dirRef.AllocationID, newRef.Path)
dirRef.AddChild(newRef)
dirRef = newRef
treelevel++
continue
} else {
break
}
}

var newFile = reference.NewFileRef()
newFile.ActualFileHash = nf.ActualHash
newFile.ActualFileSize = nf.ActualSize
newFile.AllocationID = dirRef.AllocationID
newFile.ContentHash = nf.Hash
newFile.CustomMeta = nf.CustomMeta
newFile.MerkleRoot = nf.MerkleRoot
newFile.Name = nf.Filename
newFile.ParentPath = dirRef.Path
newFile.Path = nf.Path
newFile.LookupHash = reference.GetReferenceLookup(dirRef.AllocationID, nf.Path)
newFile.Size = nf.Size
newFile.MimeType = nf.MimeType
fmt.Println(allocationRoot)
newFile.WriteMarker = allocationRoot
newFile.ThumbnailHash = nf.ThumbnailHash
newFile.ThumbnailSize = nf.ThumbnailSize
newFile.ActualThumbnailHash = nf.ActualThumbnailHash
newFile.ActualThumbnailSize = nf.ActualThumbnailSize
newFile.EncryptedKey = nf.EncryptedKey

if err = newFile.SetAttributes(&nf.Attributes); err != nil {
return nil, common.NewErrorf("process_new_file_change",
"setting file attributes: %v", err)
}

dirRef.AddChild(newFile)
if _, err := rootRef.CalculateHash(ctx, true); err != nil {
return nil, err
}
stats.NewFileCreated(ctx, newFile.ID)
return rootRef, nil
}

// Marshal marshals the change so it can be persisted to postgres
func (nf *ResumeFileChange) Marshal() (string, error) {
ret, err := json.Marshal(nf)
if err != nil {
return "", err
}
return string(ret), nil
}

// Unmarshal reloads and unmarshals the change from allocation_changes.input in postgres
func (nf *ResumeFileChange) Unmarshal(input string) error {
if err := json.Unmarshal([]byte(input), nf); err != nil {
return err
}

return util.UnmarshalValidation(nf)
}

// DeleteTempFile deletes temp files from the allocation's temp dir
func (nf *ResumeFileChange) DeleteTempFile() error {
fileInputData := &filestore.FileInputData{}
fileInputData.Name = nf.Filename
fileInputData.Path = nf.Path
fileInputData.Hash = nf.Hash
err := filestore.GetFileStore().DeleteTempFile(nf.AllocationID, fileInputData, nf.ConnectionID)
if nf.ThumbnailSize > 0 {
fileInputData := &filestore.FileInputData{}
fileInputData.Name = nf.ThumbnailFilename
fileInputData.Path = nf.Path
fileInputData.Hash = nf.ThumbnailHash
err = filestore.GetFileStore().DeleteTempFile(nf.AllocationID, fileInputData, nf.ConnectionID)
}
return err
}

// CommitToFileStore moves files from the temp dir to the object dir
func (nf *ResumeFileChange) CommitToFileStore(ctx context.Context) error {
fileInputData := &filestore.FileInputData{}
fileInputData.Name = nf.Filename
fileInputData.Path = nf.Path
fileInputData.Hash = nf.Hash
_, err := filestore.GetFileStore().CommitWrite(nf.AllocationID, fileInputData, nf.ConnectionID)
if err != nil {
return common.NewError("file_store_error", "Error committing to file store. "+err.Error())
}
if nf.ThumbnailSize > 0 {
fileInputData := &filestore.FileInputData{}
fileInputData.Name = nf.ThumbnailFilename
fileInputData.Path = nf.Path
fileInputData.Hash = nf.ThumbnailHash
_, err := filestore.GetFileStore().CommitWrite(nf.AllocationID, fileInputData, nf.ConnectionID)
if err != nil {
return common.NewError("file_store_error", "Error committing thumbnail to file store. "+err.Error())
}
}
return nil
}
@@ -6,8 +6,8 @@ import (
"path/filepath"

"0chain.net/blobbercore/filestore"
"0chain.net/blobbercore/stats"
"0chain.net/blobbercore/reference"
"0chain.net/blobbercore/stats"
"0chain.net/blobbercore/util"
"0chain.net/core/common"
. "0chain.net/core/logging"