optimization for reading whole chunk with gzip encoding

commit 1bfb96f34d
parent 29f1673d97
Author: Chris Lu
Date: 2018-12-07 01:57:55 -08:00

5 changed files with 54 additions and 16 deletions


@@ -2,6 +2,7 @@ package util
 import (
     "bytes"
+    "compress/gzip"
     "encoding/json"
     "errors"
     "fmt"
@@ -184,24 +185,38 @@ func NormalizeUrl(url string) string {
     return "http://" + url
 }
-func ReadUrl(fileUrl string, offset int64, size int, buf []byte) (n int64, e error) {
+func ReadUrl(fileUrl string, offset int64, size int, buf []byte, isReadRange bool) (n int64, e error) {
     req, _ := http.NewRequest("GET", fileUrl, nil)
-    req.Header.Add("Range", fmt.Sprintf("bytes=%d-%d", offset, offset+int64(size)))
+    if isReadRange {
+        req.Header.Add("Range", fmt.Sprintf("bytes=%d-%d", offset, offset+int64(size)))
+    } else {
+        req.Header.Set("Accept-Encoding", "gzip")
+    }
     r, err := client.Do(req)
     if err != nil {
         return 0, err
     }
     defer r.Body.Close()
     if r.StatusCode >= 400 {
         return 0, fmt.Errorf("%s: %s", fileUrl, r.Status)
     }
+    var reader io.ReadCloser
+    switch r.Header.Get("Content-Encoding") {
+    case "gzip":
+        reader, err = gzip.NewReader(r.Body)
+        defer reader.Close()
+    default:
+        reader = r.Body
+    }
     var i, m int
     for {
-        m, err = r.Body.Read(buf[i:])
+        m, err = reader.Read(buf[i:])
         if m == 0 {
             return
         }
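For readers outside the diff context, here is a minimal standalone sketch of the technique this commit applies: when reading a whole object rather than a byte range, request it with an explicit Accept-Encoding: gzip header and decompress by hand when the server answers with Content-Encoding: gzip. The readAll helper name and the example URL are illustrative assumptions, not part of the commit, and the error handling is tightened so that a failed gzip.NewReader aborts the read.

package main

import (
    "compress/gzip"
    "fmt"
    "io"
    "net/http"
)

// readAll fetches fileUrl in a single request, asking the server for a
// gzip-compressed body and decompressing it on the fly if the server
// actually responded with Content-Encoding: gzip. Illustrative helper,
// not part of the commit.
func readAll(fileUrl string) ([]byte, error) {
    req, err := http.NewRequest("GET", fileUrl, nil)
    if err != nil {
        return nil, err
    }
    // Setting the header explicitly disables net/http's transparent
    // decompression, so Content-Encoding must be checked by hand below.
    req.Header.Set("Accept-Encoding", "gzip")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode >= 400 {
        return nil, fmt.Errorf("%s: %s", fileUrl, resp.Status)
    }

    var reader io.Reader = resp.Body
    if resp.Header.Get("Content-Encoding") == "gzip" {
        gz, err := gzip.NewReader(resp.Body)
        if err != nil {
            return nil, err // abort instead of reading from a nil reader
        }
        defer gz.Close()
        reader = gz
    }

    return io.ReadAll(reader)
}

func main() {
    // Hypothetical volume-server file URL, for illustration only.
    data, err := readAll("http://localhost:8080/3,01637037d6")
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("read %d bytes\n", len(data))
}

Setting Accept-Encoding manually matters here: Go's http.Transport only performs transparent gzip decompression when it added that header itself, so a caller that sets it must inspect Content-Encoding and wrap the body in gzip.NewReader on its own, which is exactly what the new switch in ReadUrl does.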