Skip to content

Commit

Permalink
bugfix download
Browse files Browse the repository at this point in the history
  • Loading branch information
sjqzhang committed Feb 27, 2019
1 parent acd0a89 commit 2f992fa
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 11 deletions.
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,7 @@ func main() {
# 更新说明
## 从低版本升级到高版本,可能存在配置项变动的情况,一定要注意使用新的版本时的配置项。如何获得新版本的配置项及说明?先备份旧的配置项(文件名不能为cfg.json),再运行新的版本,配置项就自动生成。然后再修改相应的配置项。

- v1.1.9 增加文件自动迁移功能,增加文件可重复问题。


# Q&A
Expand All @@ -191,6 +192,7 @@ func main() {
一、如果是海量存储,不要开启文件token认证功能,减少性能开销。
二、尽量用标准上传,上传后业务保存path,在业务用的时候再拼接上域名(方便迁移扩展等)。
三、如果使用断点续传,上传后一定要用文件id置换成path存储(如何置换看QA/API文档),为后面访问减少性能开销。
四、尽量使用物理服务器部署,因为主要压力或性能来自于IO
总结:业务保存文件的path,减少后期访问路径转换带来的开销,文件访问权限由业务来完成,这样性能最好,通用性强(可直接用于其它web服务器)。

重要提醒:如果开启小文件合并功能,后期是无法删除小文件的。
Expand All @@ -214,9 +216,8 @@ go-fastdfs的文件定位与其它分布式系统不同,它的寻址是直接
答案是可以的,你担心的问题是路径改变,go fastdfs为你考虑了这一点
步骤:
一、下载最新版的go-fastdfs
二、将原来的fastdfs目录复制到go-fastdfs的 files目录下
二、将原来的fastdfs文件目录复制到go-fastdfs的 files目录下
三、将配置enable_migrate设为true
四、调用 http://10.1.5.9:8080/repair_fileinfo
注意:迁移过程中会扫描整个files目录下的所有文件,
速度较慢,迁移完成后请将enable_migrate设为false

Expand Down
29 changes: 22 additions & 7 deletions fileserver.go
Original file line number Diff line number Diff line change
Expand Up @@ -913,8 +913,17 @@ func (this *Server) RepairFileInfoFromFile() {
if fi.IsDir() || fi.Size() == 0 {
continue
}
file_path = strings.Replace(file_path, "\\", "/", -1)
if DOCKER_DIR != "" {
file_path = strings.Replace(file_path, DOCKER_DIR, "", 1)
}
if strings.HasPrefix(file_path, STORE_DIR_NAME+"/"+LARGE_DIR_NAME) {
log.Info(fmt.Sprintf("ignore small file file %s", file_path+"/"+fi.Name()))
continue
}
pathMd5 = this.util.MD5(file_path + "/" + fi.Name())
if finfo, _ := this.GetFileInfoFromLevelDB(pathMd5); finfo != nil && finfo.Md5 != "" {
log.Info(fmt.Sprintf("exist ignore file %s", file_path+"/"+fi.Name()))
continue
}
sum, err = this.util.GetFileSumByName(file_path+"/"+fi.Name(), Config().FileSumArithmetic)
Expand All @@ -925,13 +934,14 @@ func (this *Server) RepairFileInfoFromFile() {
fileInfo = FileInfo{
Size: fi.Size(),
Name: fi.Name(),
Path: strings.Replace(file_path, "\\", "/", -1),
Path: file_path,
Md5: sum,
TimeStamp: fi.ModTime().Unix(),
Peers: []string{this.host},
OffSet: -1,
}
log.Info(fileInfo)
//log.Info(fileInfo)
log.Info(file_path, fi.Name())
this.postFileToPeer(&fileInfo)
this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
}
Expand Down Expand Up @@ -1087,7 +1097,7 @@ func (this *Server) DownloadFromPeer(peer string, fileInfo *FileInfo) {
req := httplib.Get(peer + "/" + Config().Group + "/" + p + "/" + filename)
fpath = DOCKER_DIR + fileInfo.Path + "/" + filename
timeout := fileInfo.Size/1024/1024/8 + 30
req.SetTimeout(time.Second*5, time.Second*time.Duration(timeout))
req.SetTimeout(time.Second*30, time.Second*time.Duration(timeout))
if fileInfo.OffSet != -1 { //small file download
data, err = req.Bytes()
if err != nil {
Expand Down Expand Up @@ -1269,11 +1279,11 @@ func (this *Server) Download(w http.ResponseWriter, r *http.Request) {
}
NotFound:
if info, err = os.Stat(fullpath); err != nil || info.Size() == 0 || notFound {
log.Error(err)
log.Error(err, fullpath, smallPath)
if isSmallFile && notFound {
pathMd5 = this.util.MD5(smallPath)
} else {
if err == nil && Config().ShowDir {
if err == nil && Config().ShowDir && info.IsDir() {
goto SHOW_DIR
}
pathMd5 = this.util.MD5(fullpath)
Expand Down Expand Up @@ -2023,7 +2033,7 @@ func (this *Server) SaveUploadFile(file multipart.File, header *multipart.FileHe
if Config().RenameFile {
outPath = fmt.Sprintf(folder+"/%s", fileInfo.ReName)
}
if this.util.FileExists(outPath) {
if this.util.FileExists(outPath) && Config().EnableDistinctFile {
for i := 0; i < 10000; i++ {
outPath = fmt.Sprintf(folder+"/%d_%s", i, header.Filename)
fileInfo.Name = fmt.Sprintf("%d_%s", i, header.Filename)
Expand Down Expand Up @@ -2973,7 +2983,7 @@ func (this *Server) Index(w http.ResponseWriter, r *http.Request) {
<head>
<meta charset="utf-8" />
<title>Uploader</title>
<title>go-fastdfs</title>
<style>form { bargin } .form-line { display:block;height: 30px;margin:8px; } #stdUpload {background: #fafafa;border-radius: 10px;width: 745px; }</style>
<link href="https://transloadit.edgly.net/releases/uppy/v0.30.0/dist/uppy.min.css" rel="stylesheet"></head>
Expand Down Expand Up @@ -3017,6 +3027,8 @@ func (this *Server) Index(w http.ResponseWriter, r *http.Request) {
} else {
uppy = string(data)
}
} else {
this.util.WriteFile(uppyFileName,uppy)
}
fmt.Fprintf(w,
fmt.Sprintf(uppy, uploadUrl, Config().DefaultScene, uploadBigUrl))
Expand Down Expand Up @@ -3384,6 +3396,9 @@ func (this *Server) Main() {
go this.Consumer()
go this.ConsumerLog()
go this.ConsumerDownLoad()
if Config().EnableMigrate {
go this.RepairFileInfoFromFile()
}
if Config().AutoRepair {
go func() {
for {
Expand Down
2 changes: 1 addition & 1 deletion static/report.html
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>ECharts</title>
<title>go-fastdfs report</title>
<!-- 引入 echarts.js -->
<script type="text/javascript" src="https://cdn.bootcss.com/echarts/3.7.0/echarts.min.js"></script>
<!-- 引入jquery.js -->
Expand Down
3 changes: 2 additions & 1 deletion static/uppy.html
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

<head>
<meta charset="utf-8" />
<title>Uploader</title>
<title>go-fastdfs</title>
<style>form { bargin } .form-line { display:block;height: 30px;margin:8px; } #stdUpload {background: #fafafa;border-radius: 10px;width: 745px; }</style>
<link href="https://transloadit.edgly.net/releases/uppy/v0.30.0/dist/uppy.min.css" rel="stylesheet"></head>

Expand All @@ -23,6 +23,7 @@
</div>
<div>断点续传(如果文件很大时可以考虑)</div>
<div>


<div id="drag-drop-area"></div>
<script src="https://transloadit.edgly.net/releases/uppy/v0.30.0/dist/uppy.min.js"></script>
Expand Down

0 comments on commit 2f992fa

Please sign in to comment.