The OSS native large-file copy takes too long and transmits no data while it runs, which eventually causes the client to time out. Change maxCopySize to 128MB so that only small and medium-sized files use the OSS native copy, avoiding "connection reset by peer" errors. Also fix the Move function to use CopyLargeFileInParallel, which parallelizes the server-side copy of large objects (sketched below).

Signed-off-by: yaoyao.xyy <yaoyao.xyy@alibaba-inc.com>
yaoyao.xyy 2016-11-08 12:14:13 +08:00
parent f394e82d2b
commit a4a227e351
3 changed files with 12 additions and 10 deletions
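
In effect, the change splits copies by object size as in the sketch below. This is an illustration only, assuming a hypothetical copyObject helper; the real threshold check lives inside the vendored aliyungo oss package, and only CopyLargeFile, CopyLargeFileInParallel, oss.ACL and oss.Options are taken from the actual diff.

// Illustrative sketch, not the vendored implementation: how the lowered
// 128MB threshold splits traffic between the two copy paths.
package osscopy

import "github.com/denverdino/aliyungo/oss"

// maxCopySize mirrors the vendored constant, lowered from 1GB to 128MB.
const maxCopySize = int64(128 * 1024 * 1024)

// copyObject is a hypothetical helper: objects at or below the threshold use
// the single-request native copy; larger objects are copied in 128MB parts
// with up to maxConcurrency part-copy requests in flight.
func copyObject(b *oss.Bucket, src, dst, contentType string, perm oss.ACL, size int64, maxConcurrency int) error {
	if size <= maxCopySize {
		// A single server-side copy finishes quickly for small/medium
		// objects, before the idle client connection can be reset.
		return b.CopyLargeFile(src, dst, contentType, perm, oss.Options{})
	}
	// Large objects: parallel part copies keep data moving so the client
	// does not time out waiting on one long request.
	return b.CopyLargeFileInParallel(src, dst, contentType, perm, oss.Options{}, maxConcurrency)
}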

Godeps/Godeps.json generated

@@ -167,15 +167,15 @@
 	},
 	{
 		"ImportPath": "github.com/denverdino/aliyungo/common",
-		"Rev": "ce70ed03a598bb3ba258ff9c90a83a257959067c"
+		"Rev": "afedced274aa9a7fcdd47ac97018f0f8db4e5de2"
 	},
 	{
 		"ImportPath": "github.com/denverdino/aliyungo/oss",
-		"Rev": "ce70ed03a598bb3ba258ff9c90a83a257959067c"
+		"Rev": "afedced274aa9a7fcdd47ac97018f0f8db4e5de2"
 	},
 	{
 		"ImportPath": "github.com/denverdino/aliyungo/util",
-		"Rev": "ce70ed03a598bb3ba258ff9c90a83a257959067c"
+		"Rev": "afedced274aa9a7fcdd47ac97018f0f8db4e5de2"
 	},
 	{
 		"ImportPath": "github.com/docker/goamz/aws",


@@ -389,15 +389,17 @@ func (d *driver) List(ctx context.Context, opath string) ([]string, error) {
 	return append(files, directories...), nil
 }
 
+const maxConcurrency = 10
+
 // Move moves an object stored at sourcePath to destPath, removing the original
 // object.
 func (d *driver) Move(ctx context.Context, sourcePath string, destPath string) error {
 	logrus.Infof("Move from %s to %s", d.ossPath(sourcePath), d.ossPath(destPath))
-	err := d.Bucket.CopyLargeFile(d.ossPath(sourcePath), d.ossPath(destPath),
+	err := d.Bucket.CopyLargeFileInParallel(d.ossPath(sourcePath), d.ossPath(destPath),
 		d.getContentType(),
 		getPermissions(),
-		oss.Options{})
+		oss.Options{},
+		maxConcurrency)
 	if err != nil {
 		logrus.Errorf("Failed for move from %s to %s: %v", d.ossPath(sourcePath), d.ossPath(destPath), err)
 		return parseError(sourcePath, err)


@@ -1313,7 +1313,7 @@ func (b *Bucket) CopyLargeFile(sourcePath string, destPath string, contentType string, perm ACL, options Options) error {
 }
 
 const defaultChunkSize = int64(128 * 1024 * 1024) //128MB
-const maxCopytSize = int64(1024 * 1024 * 1024)    //1G
+const maxCopytSize = int64(128 * 1024 * 1024)     //128MB
 
 // Copy large file in the same bucket
 func (b *Bucket) CopyLargeFileInParallel(sourcePath string, destPath string, contentType string, perm ACL, options Options, maxConcurrency int) error {
@@ -1322,10 +1322,10 @@ func (b *Bucket) CopyLargeFileInParallel(sourcePath string, destPath string, contentType string, perm ACL, options Options, maxConcurrency int) error {
 		maxConcurrency = 1
 	}
 
-	log.Printf("Copy large file from %s to %s\n", sourcePath, destPath)
 	currentLength, err := b.GetContentLength(sourcePath)
+	log.Printf("Parallel Copy large file[size: %d] from %s to %s\n", currentLength, sourcePath, destPath)
 	if err != nil {
 		return err
 	}
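
The hunk above only shows the size constants and the log line; the concurrency bound that maxConcurrency introduces works roughly like the sketch below. This is an assumed pattern (semaphore-bounded goroutines), not the body of the vendored CopyLargeFileInParallel; copyPartsInParallel and copyPart are hypothetical names.

// Assumed pattern, not the vendored code: bound the number of in-flight
// part-copy requests with a buffered-channel semaphore and surface the
// first error after all parts finish.
package osscopy

import "sync"

func copyPartsInParallel(partCount, maxConcurrency int, copyPart func(part int) error) error {
	if maxConcurrency < 1 {
		maxConcurrency = 1
	}
	sem := make(chan struct{}, maxConcurrency) // at most maxConcurrency copies at once
	errs := make(chan error, partCount)
	var wg sync.WaitGroup
	for part := 0; part < partCount; part++ {
		wg.Add(1)
		sem <- struct{}{} // acquire a slot before starting this part
		go func(part int) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot when the part is done
			errs <- copyPart(part)
		}(part)
	}
	wg.Wait()
	close(errs)
	for err := range errs {
		if err != nil {
			return err
		}
	}
	return nil
}

With defaultChunkSize at 128MB, copying an N-byte object issues roughly N/128MB part copies, and the driver's maxConcurrency of 10 caps how many of them run at the same time.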