Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor #4

Merged
merged 2 commits into from
Mar 26, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
dl
*.part*
127 changes: 47 additions & 80 deletions dl.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,9 @@ import (
"flag"
"fmt"
"io"
"log"
"mime"
"net/http"
"os"
"path"
"strconv"
"strings"
"sync"
Expand All @@ -17,17 +15,12 @@ import (
)

type download struct {
uri string
filesize uint64
filename string
}

type downloadPart struct {
index int
uri string
dir string
startByte uint64
endByte uint64
uri string
filesize uint64
filename string
workingDir string
boost int
parts []downloadPart
}

func main() {
Expand All @@ -39,13 +32,14 @@ func main() {

file_uris := flag.Args()

var dl download
var workingDir string
var err error

for _, uri := range file_uris {
var dl download
dl.uri = uri
dl.filesize, dl.filename, err = fetchMetadata(dl.uri)
dl.boost = *boostPtr

err = dl.FetchMetadata()
if err != nil {
panic(err)
}
Expand All @@ -56,103 +50,80 @@ func main() {
}

if *workingDirPtr != "" {
workingDir = *workingDirPtr
dl.workingDir = *workingDirPtr
} else {
workingDir, err = os.Getwd()
dl.workingDir, err = os.Getwd()
if err != nil {
log.Println(err)
panic(err)
}
}

fmt.Println(dl.filename)

fetch(&dl, workingDir, *boostPtr)
concatFiles(dl.filename, dl.filesize, *boostPtr, workingDir)
dl.Fetch()
dl.ConcatFiles()
}
}

func fetchMetadata(uri string) (filesize uint64, filename string, err error) {
resp, err := http.Head(uri)
func (dl *download) FetchMetadata() error {
resp, err := http.Head(dl.uri)
if err != nil {
return
return err
}
defer resp.Body.Close()

contentLength := resp.Header.Get("Content-Length")
filesize, err = strconv.ParseUint(contentLength, 0, 64)
dl.filesize, err = strconv.ParseUint(contentLength, 0, 64)
if err != nil {
return
return err
}

contentDisposition := resp.Header.Get("Content-Disposition")
_, params, err := mime.ParseMediaType(contentDisposition)
if err != nil {
filename = filenameFromURI(uri)
return filesize, filename, nil
dl.filename = dl.filenameFromURI()
return err
} else {
dl.filename = params["filename"]
}
filename = params["filename"]

// No filename specified in the header; use the pathname
if filename == "" {
filename = filenameFromURI(uri)
if dl.filename == "" {
dl.filename = dl.filenameFromURI()
}

return
return nil
}

func fetch(dl *download, dir string, boost int) {
func (dl *download) Fetch() error {
var wg sync.WaitGroup

bar := progressbar.DefaultBytes(
int64(dl.filesize),
"Downloading",
)

for i := 0; i < boost; i++ {
start, end := calculatePartBoundary(dl.filesize, boost, i)
for i := 0; i < dl.boost; i++ {
start, end := dl.calculatePartBoundary(i)
wg.Add(1)
dlPart := downloadPart{
index: i,
uri: dl.uri,
dir: dir,
dir: dl.workingDir,
startByte: start,
endByte: end,
}
go fetchPart(&wg, dlPart, bar)
dlPart.filename = dlPart.downloadPartFilename()
dl.parts = append(dl.parts, dlPart)
go dlPart.fetchPart(&wg, bar)
}

wg.Wait()
return nil
}

// fetchPart downloads one byte range of the file described by part and
// writes it to that part's file on disk, advancing bar as bytes arrive.
// Intended to run as a goroutine; wg is signalled on every return path.
// Errors are deliberately silent (best-effort): a failed part leaves no,
// or a short, part file behind.
func fetchPart(wg *sync.WaitGroup, part downloadPart, bar *progressbar.ProgressBar) {
	defer wg.Done()

	// Request only this part's slice of the file; both bounds are inclusive
	// per HTTP byte-range semantics.
	byteRange := fmt.Sprintf("bytes=%d-%d", part.startByte, part.endByte)
	// NOTE(review): the NewRequest error is discarded; a malformed URI would
	// leave req nil and panic on req.Header below.
	req, _ := http.NewRequest("GET", part.uri, nil)
	req.Header.Set("Range", byteRange)
	req.Header.Set("User-Agent", "dl/1.0")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return
	}
	defer resp.Body.Close()

	// Create the file
	filename := downloadPartFilename(part.index, part.dir)
	out, err := os.Create(filename)
	if err != nil {
		return
	}
	defer out.Close()

	// Write the body to file
	_, _ = io.Copy(io.MultiWriter(out, bar), resp.Body)
}

func calculatePartBoundary(filesize uint64, totalParts int, part int) (startByte uint64, endByte uint64) {
chunkSize := filesize / uint64(totalParts)
func (dl *download) calculatePartBoundary(part int) (startByte uint64, endByte uint64) {
chunkSize := dl.filesize / uint64(dl.boost)
var previousEndByte uint64

if part == 0 {
Expand All @@ -164,45 +135,41 @@ func calculatePartBoundary(filesize uint64, totalParts int, part int) (startByte
}

// For the last part, pick up all remaining bytes
if part == (totalParts - 1) {
endByte = filesize - 1
if part == (dl.boost - 1) {
endByte = dl.filesize - 1
} else {
endByte = startByte + chunkSize - 1
}

return
}

// downloadPartFilename returns the on-disk path for part number index,
// rooted at dir (e.g. dir/download.part0).
func downloadPartFilename(index int, dir string) string {
	name := fmt.Sprintf("download.part%d", index)
	return path.Join(dir, name)
}

func filenameFromURI(uri string) string {
splitURI := strings.Split(uri, "/")
func (dl *download) filenameFromURI() string {
splitURI := strings.Split(dl.uri, "/")
return splitURI[len(splitURI)-1]
}

func concatFiles(filename string, filesize uint64, parts int, dir string) {
func (dl *download) ConcatFiles() {
var readers []io.Reader

bar := progressbar.DefaultBytes(
int64(filesize),
int64(dl.filesize),
"Combining ",
)

for i := 0; i < parts; i++ {
downloadPart, err := os.Open(downloadPartFilename(i, dir))
for _, part := range dl.parts {
downloadPart, err := os.Open(part.downloadPartFilename())
if err != nil {
panic(err)
}
defer os.Remove(downloadPartFilename(i, dir))
defer os.Remove(part.downloadPartFilename())
defer downloadPart.Close()
readers = append(readers, downloadPart)
}

inputFiles := io.MultiReader(readers...)

outFile, err := os.Create(filename)
outFile, err := os.Create(dl.filename)
if err != nil {
panic(err)
}
Expand Down
52 changes: 52 additions & 0 deletions dl_part.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
package main

import (
"fmt"
"io"
"net/http"
"os"
"path"
"sync"

"github.com/schollz/progressbar/v3"
)

// downloadPart describes one contiguous byte range of a larger download
// and where its bytes are staged on disk before concatenation.
type downloadPart struct {
	index     int    // zero-based position of this part within the download
	uri       string // source URL the byte range is fetched from
	dir       string // directory the part file is written to
	filename  string // cached part path; NOTE(review): set by callers but not read by the methods here
	startByte uint64 // first byte of this part's range (inclusive)
	endByte   uint64 // last byte of this part's range (inclusive)
}

// downloadPartFilename returns the on-disk path for this part,
// e.g. <dir>/download.part3.
func (p *downloadPart) downloadPartFilename() string {
	base := fmt.Sprintf("download.part%d", p.index)
	return path.Join(p.dir, base)
}

// fetchPart downloads this part's byte range from p.uri and streams it to
// the part file in p.dir, advancing bar as bytes arrive. Intended to run
// as a goroutine; wg is signalled on every return path. Failures remain
// silent (best-effort): a failed part leaves no, or a short, part file,
// which surfaces later when the parts are concatenated.
func (p *downloadPart) fetchPart(wg *sync.WaitGroup, bar *progressbar.ProgressBar) {
	defer wg.Done()

	req, err := http.NewRequest("GET", p.uri, nil)
	if err != nil {
		// Fix: this error was previously discarded (req, _ := ...); a
		// malformed URI left req nil and panicked on req.Header below.
		return
	}
	// Request only this part's slice of the file; both bounds are inclusive
	// per HTTP byte-range semantics.
	req.Header.Set("Range", fmt.Sprintf("bytes=%d-%d", p.startByte, p.endByte))
	req.Header.Set("User-Agent", "dl/1.0")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return
	}
	defer resp.Body.Close()

	// Create the destination part file (e.g. <dir>/download.partN).
	out, err := os.Create(p.downloadPartFilename())
	if err != nil {
		return
	}
	defer out.Close()

	// Stream the body to disk, teeing through bar for progress display.
	// The copy error is deliberately ignored; see the contract above.
	_, _ = io.Copy(io.MultiWriter(out, bar), resp.Body)
}
5 changes: 4 additions & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,7 @@ module github.com/mgomes/dl

go 1.15

require github.com/schollz/progressbar/v3 v3.7.2
require (
github.com/schollz/progressbar/v3 v3.7.2
golang.org/x/sys v0.0.0-20220325203850-36772127a21f // indirect
)
2 changes: 2 additions & 0 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,6 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201113135734-0a15ea8d9b02 h1:5Ftd3YbC/kANXWCBjvppvUmv1BMakgFcBKA7MpYYp4M=
golang.org/x/sys v0.0.0-20201113135734-0a15ea8d9b02/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220325203850-36772127a21f h1:TrmogKRsSOxRMJbLYGrB4SBbW+LJcEllYBLME5Zk5pU=
golang.org/x/sys v0.0.0-20220325203850-36772127a21f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=