From 2a916bc4330d5c1af49a973cd0d7dec654aaa8b7 Mon Sep 17 00:00:00 2001
From: Mattias Runge-Broberg
Date: Fri, 15 Jan 2021 18:51:31 +0100
Subject: [PATCH] Updates to journal functionality to make it safer

Lots of concurrency fixes and some README updates: the journal package
is split into a coordinator (in-memory state and event routing) and a
journal (on-disk persistence), the shared entry and runner lists are now
guarded by mutexes, each operation gets its own journal directory with
operation.json, status.json and output.log, and JSON files are written
via a temporary file so they are never left half-written.
---
 README.md | 1 +
 cmd/barf/run/cli.go | 13 +-
 cmd/barf/run/daemon.go | 2 +-
 docs/svg/copy-monitor-many.svg | 2 +-
 docs/svg/copy-monitor.svg | 2 +-
 docs/svg/copy-normal.svg | 2 +-
 docs/svg/copy-remote.svg | 2 +-
 docs/svg/daemon-journal.svg | 2 +-
 internal/cmd/cmd.go | 5 +-
 internal/{journal => coordinator}/actions.go | 49 +++---
 internal/coordinator/entries.go | 78 ++++++++++
 internal/{journal => coordinator}/event.go | 2 +-
 internal/coordinator/init.go | 46 ++++++
 internal/coordinator/log.go | 27 ++++
 internal/{journal => coordinator}/update.go | 20 +--
 internal/journal/entries.go | 152 -------------------
 internal/journal/entry.go | 132 ++++++++++++++++
 internal/journal/init.go | 27 ++--
 internal/journal/log.go | 34 +++++
 internal/journal/operation.go | 24 +++
 internal/journal/status.go | 24 +++
 internal/op/operation.go | 3 +-
 internal/op/status.go | 11 +-
 internal/proc/logfile/logfile.go | 2 +-
 internal/rsync/exec.go | 51 ++++---
 internal/runner/init.go | 14 +-
 internal/runner/runners.go | 22 ++-
 internal/runner/runners/copy.go | 22 ++-
 internal/runner/runners/dummy.go | 2 +-
 internal/ui/progress.go | 40 +----
 internal/utils/fs/fs.go | 81 ++++++++++
 internal/utils/median.go | 13 +-
 scripts/scenarios.sh | 6 +-
 scripts/scenarios/copy-monitor-many.sh | 8 +-
 scripts/scenarios/copy-normal.sh | 3 +
 scripts/scenarios/daemon-journal.sh | 3 +-
 scripts/scenarios/lib/helpers.sh | 7 +
 37 files changed, 606 insertions(+), 328 deletions(-)
 rename internal/{journal => coordinator}/actions.go (57%)
 create mode 100644 internal/coordinator/entries.go
 rename internal/{journal => coordinator}/event.go (96%)
 create mode 100644 internal/coordinator/init.go
 create mode 100644 internal/coordinator/log.go
 rename internal/{journal => coordinator}/update.go (57%)
 delete mode 100644 internal/journal/entries.go
 create mode 100644 internal/journal/entry.go
 create mode 100644 internal/journal/log.go
 create mode 100644 internal/journal/operation.go
 create mode 100644 internal/journal/status.go
 create mode 100644 internal/utils/fs/fs.go

diff --git a/README.md b/README.md
index 7523697..ee4f7d2 100644
--- a/README.md
+++ b/README.md
@@ -106,4 +106,5 @@ Every project needs a reason for being.
- Uses a domain socket for communication (CLI -> background process) - Stores state and logs under ```~/.config/barf``` - Uses the installed version of [rsync](https://rsync.samba.org/), make sure there is one +- Well defined socket protocol, allowing for other types of clients - Only tested on Linux, but might work on other systems diff --git a/cmd/barf/run/cli.go b/cmd/barf/run/cli.go index 08fc53a..d4f9bd1 100644 --- a/cmd/barf/run/cli.go +++ b/cmd/barf/run/cli.go @@ -2,12 +2,11 @@ package run import ( "fmt" + "os" "barf/internal/com/socket" "barf/internal/proc/daemon" "barf/internal/ui" - - cli "github.com/jawher/mow.cli" ) // StartCLI starts the CLI process @@ -17,7 +16,7 @@ func StartCLI(width int, action func() error) { if err != nil { fmt.Println(err) - cli.Exit(255) + os.Exit(255) return } @@ -25,14 +24,14 @@ func StartCLI(width int, action func() error) { if err != nil { fmt.Println(err) - cli.Exit(255) + os.Exit(255) return } socket.OnClose(func() { if !normalClose { fmt.Println("Lost connection to backend") - cli.Exit(1) + os.Exit(1) } }) @@ -42,7 +41,7 @@ func StartCLI(width int, action func() error) { if err != nil { fmt.Println(err) - cli.Exit(255) + os.Exit(255) return } @@ -52,5 +51,5 @@ func StartCLI(width int, action func() error) { socket.Close() socket.WaitOnClose() - cli.Exit(exitCode) + os.Exit(exitCode) } diff --git a/cmd/barf/run/daemon.go b/cmd/barf/run/daemon.go index 3644fbc..bfea931 100644 --- a/cmd/barf/run/daemon.go +++ b/cmd/barf/run/daemon.go @@ -57,7 +57,7 @@ func startDaemon() error { fmt.Println("Listening for connections") - err = runner.StartRunner() + err = runner.Start() if err != nil { return err diff --git a/docs/svg/copy-monitor-many.svg b/docs/svg/copy-monitor-many.svg index 7a7bd4f..edd0c1a 100644 --- a/docs/svg/copy-monitor-many.svg +++ b/docs/svg/copy-monitor-many.svg @@ -1 +1 @@ -barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote1/1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing big...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.CopySyncing big...8%|228.2MB/s|228.2MB/2.9GB|1/4|ETA:11s^Cbarf:/home/barf$barfcopy~/local/*~/remote2/2.Copy0%|0B/s|0B/0B|0/0|ETA:0s2.CopySyncing big...0%|0B/s|0B/2.9GB|1/4|ETA:0s2.CopySyncing big...8%|226.8MB/s|226.8MB/2.9GB|1/4|ETA:12sbarf:/home/barf$barfcopy~/local/*~/remote3/3.Copy0%|0B/s|0B/0B|0/0|ETA:0s3.CopySyncing...0%|0B/s|0B/2.9GB|0/4|ETA:0s3.CopySyncing big...0%|0B/s|0B/2.9GB|1/4|ETA:0s3.CopySyncing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s3.CopySyncing big...4%|99.2MB/s|99.3MB/2.9GB|1/4|ETA:28sbarf:/home/barf$barfmonitor2.CopySyncing huge...37%|188.2MB/s|1.1GB/2.9GB|2/2.CopySyncing huge...37%|188.2MB/s|1.1GB/2.9GB|2/4|ETA:9s3.CopySyncing big...17%|109.1MB/s|492.3MB/2.9GB|1/4|ETA:22s1.CopySyncing huge...65%|248.9MB/s|1.9GB/2.9GB|2/4|ETA:4s2.CopySyncing huge...41%|188.2MB/s|1.2GB/2.9GB|2/4|ETA:9s3.CopySyncing big...21%|115.9MB/s|620.8MB/2.9GB|1/4|ETA:20s1.CopySyncing huge...70%|248.9MB/s|2.0GB/2.9GB|2/4|ETA:3s2.CopySyncing huge...46%|188.2MB/s|1.3GB/2.9GB|2/4|ETA:8s3.CopySyncing big...26%|123.2MB/s|753.8MB/2.9GB|1/4|ETA:17s2.CopySyncing huge...46%|188.2MB/s|1.3GB/2.9GB|2/3.CopySyncing big...33%|130.5MB/s|953.7MB/2.9GB|1/4|ETA:15s1.CopySyncing huge...74%|248.9MB/s|2.1GB/2.9GB|2/4|ETA:3s3.CopySyncing huge...33%|130.5MB/s|953.7MB/2.9GB|2/4|ETA:15s2.CopySyncing huge...51%|188.2MB/s|1.5GB/2.9GB|2/4|ETA:7s2.CopySyncing huge...51%|188.2MB/s|1.5GB/2.9GB|2/1.CopySyncing huge...78%|248.9MB/s|2.2GB/2.9GB|2/4|ETA:2s1.CopySyncing medium...81%|248.9MB/s|2.3GB/2.9GB|3/4|ETA:2s2.CopySyncing 
huge...56%|188.2MB/s|1.6GB/2.9GB|2/4|ETA:6s3.CopySyncing huge...35%|130.5MB/s|1.0GB/2.9GB|2/4|ETA:14s1.CopySyncing medium...82%|248.9MB/s|2.4GB/2.9GB|3/4|ETA:2s2.CopySyncing huge...60%|188.2MB/s|1.7GB/2.9GB|2/4|ETA:6s2.CopySyncing huge...60%|188.23.CopySyncing huge...39%|130.5MB/s|1.1GB/2.9GB|2/4|ETA:13s1.CopySyncing medium...86%|248.9MB/s|2.5GB/2.9GB|3/4|ETA:1s2.CopySyncing huge...62%|188.2MB/s|1.8GB/2.9GB|2/4|ETA:5s2.CopySyncing huge...62%|188.23.CopySyncing huge...41%|130.5MB/s|1.2GB/2.9GB|2/4|ETA:13s1.CopySyncing medium...89%|248.9MB/s|2.6GB/2.9GB|3/4|ETA:1s2.CopySyncing huge...66%|188.22.CopySyncing huge...66%|188.2MB/s|1.9GB/2.9GB|2/4|ETA:5s3.CopySyncing huge...46%|130.5MB/s|1.3GB/2.9GB|2/4|ETA:12s1.CopySyncing medium...93%|248.9MB/s|2.7GB/2.9GB|3/4|ETA:1s2.CopySyncing huge...70%|188.2MB/s|2.0GB/2.9GB|2/4|ETA:4s1.CopySyncing medium...96%|248.9MB/s|2.8GB/2.9GB|3/4|ETA:1s1.CopySyncing medium...97%|248.9MB/s|2.8GB/2.9GB|3/4|ETA:1s1.CopySyncing small...97%|248.9MB/s|2.8GB/2.9GB|4/4|ETA:1s3.CopySyncing huge...50%|130.5MB/s|1.4GB/2.9GB|2/4|ETA:11s1.CopyCompleted successfully!100%|248.9MB/s|2.9GB/2.9GB|4/4|20s2.CopySyncing huge...75%|188.2MB/s|2.1GB/2.9GB|2/4|ETA:4s3.CopySyncing huge...53%|130.5MB/s|1.5GB/2.9GB|2/4|ETA:10s2.CopySyncing medium...81%|188.2MB/s|2.3GB/2.9GB|3/4|ETA:3s3.CopySyncing huge...58%|130.5MB/s|1.7GB/2.9GB|2/4|ETA:9s2.CopySyncing medium...84%|188.2MB/s|2.4GB/2.9GB|3/4|ETA:2s3.CopySyncing huge...63%|130.5MB/s|1.8GB/2.9GB|2/4|ETA:8s2.CopySyncing medium...89%|188.2MB/s|2.5GB/2.9GB|3/4|ETA:1s3.CopySyncing huge...68%|135.8MB/s|1.9GB/2.9GB|2/4|ETA:7s2.CopySyncing medium...97%|188.2MB/s|2.8GB/2.9GB|3/4|ETA:1s2.CopySyncing small...97%|188.2MB/s|2.8GB/2.9GB|4/4|ETA:1s2.CopyCompleted successfully!100%|188.2MB/s|2.9GB/2.9GB|4/4|21s3.CopySyncing medium...81%|137.0MB/s|2.3GB/2.9GB|3/4|ETA:4s3.CopySyncing medium...82%|137.0MB/s|2.4GB/2.9GB|3/4|ETA:3s3.CopySyncing medium...89%|137.0MB/s|2.6GB/2.9GB|3/4|ETA:2s3.CopySyncing medium...97%|137.0MB/s|2.8GB/2.9GB|3/4|ETA:1s3.CopySyncing small...97%|137.0MB/s|2.8GB/2.9GB|4/4|ETA:1s3.CopySyncing small...100%|137.0MB/s|2.9GB/2.9GB|4/4|ETA:1s3.CopyCompleted successfully!100%|137.0MB/s|2.9GB/2.9GB|4/4|22s1.CopySyncing...0%|0B/s|0B/2.9GB|0/4|ETA:0s1.CopySyncing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s2.CopySyncing...0%|0B/s|0B/2.9GB|0/4|ETA:0s2.CopySyncing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s2.CopySyncing huge...41%|188.2MB/s|1.2GB/2.9GB|2/2.CopySyncing huge...46%|188.23.CopySyncing big...31%|130.5MB/s|895.1MB/2.9GB|1/4|ETA:15s1.CopySyncing huge...81%|248.9MB/s|2.3GB/2.9GB|2/4|ETA:2s2.CopySyncing huge...60%|188.2MB/s|1.7GB/2.9GB|2/2.CopySyncing huge...66%|188.2MB/s|1.9GB/2.9GB|2/2.CopySyncing huge...70%|188.21.CopySyncing small...100%|248.9MB/s|2.9GB/2.9GB|4/4|ETA:1s2.CopySyncing huge...2.CopySyncing huge...79%|188.2MB/s|2.3GB/2.9GB|2/4|ETA:3s2.CopySyncing huge...2.CopySyncing huge...81%|188.2MB/s|2.3GB/2.9GB|2/4|ETA:3s2.CopySyncing medium...84%|188.2MB/s|2.CopySyncing medium...89%|188.2MB/s|2.CopySyncing medium...2.CopySyncing medium...95%|188.2MB/s|2.7GB/2.9GB|3/4|ETA:1s3.CopySyncing huge...75%|137.0MB/s|2.1GB/2.9GB|2/4|ETA:5s3.CopySyncing huge...81%|137.0MB/s|2.3GB/2.9GB|2/4|ETA:4s2.CopyCompleted successfully!3.CopySyncing medium...94%|137.0MB/s|2.7GB/2.9GB|3/4|ETA:1s2.CopyCompleted successfully! 
\ No newline at end of file +barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote1/1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.Copybig...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s^Cbarf:/home/barf$barfcopy~/local/*~/remote2/2.Copy0%|0B/s|0B/0B|0/0|ETA:0s2.Copybig...0%|0B/s|0B/2.9GB|1/4|ETA:0s2.Copybig...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0sbarf:/home/barf$barfcopy~/local/*~/remote3/3.Copy0%|0B/s|0B/0B|0/0|ETA:0s3.Copybig...0%|0B/s|0B/2.9GB|1/4|ETA:0s3.Copybig...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0sbarf:/home/barf$barfmonitor3.CopyProcessingbig...14%|151.1MB/s|404.2MB/2.9GB|1/4|ETA:16s1.CopyProcessing huge...52%|126.6MB/s|1.5GB/2.9GB|2/4|ETA:11s2.CopyProcessing huge...35%|128.4MB/s|1.0GB/2.9GB|2/4|ETA:14s3.CopyProcessing big...17%|142.9MB/s|496.8MB/2.9GB|1/4|ETA:17s1.CopyProcessing huge...57%|126.6MB/s|1.6GB/2.9GB|2/4|ETA:10s2.CopyProcessing huge...41%|99.2MB/s|1.2GB/2.9GB|2/4|ETA:17s3.CopyProcessing big...23%|129.5MB/s|650.8MB/2.9GB|1/4|ETA:17s1.CopyProcessing huge...61%|122.5MB/s|1.7GB/2.9GB|2/4|ETA:9s2.CopyProcessing huge...45%|99.2MB/s|1.3GB/2.9GB|2/4|ETA:16s3.CopyProcessing huge...33%|126.9MB/s|953.7MB/2.9GB|2/4|ETA:15s1.CopyProcessing huge...66%|118.7MB/s|1.9GB/2.9GB|2/4|ETA:8s2.CopyProcessing huge...50%|122.2MB/s|1.4GB/2.9GB|2/4|ETA:12s2.CopyProcessing huge...50%|122.2MB/s|1.4GB/2.9GB|2/43.CopyProcessing huge...33%|126.0MB/s|953.7MB/2.9GB|2/4|ETA:15s1.CopyProcessing huge...70%|118.7MB/s|2.0GB/2.9GB|2/4|ETA:7s2.CopyProcessing huge...55%|127.3MB/s|1.6GB/2.9GB|2/4|ETA:10s3.CopyProcessing huge...37%|129.3MB/s|1.1GB/2.9GB|2/4|ETA:14s1.CopyProcessing huge...75%|125.7MB/s|2.1GB/2.9GB|2/4|ETA:6s2.CopyProcessing huge...59%|133.7MB/s|1.7GB/2.9GB|2/4|ETA:9s3.CopyProcessing huge...42%|130.4MB/s|1.2GB/2.9GB|2/4|ETA:13s1.CopyProcessing huge...79%|131.4MB/s|2.3GB/2.9GB|2/4|ETA:4s1.CopyProcessing huge...81%|131.4MB/s|2.3GB/2.9GB|2/4|ETA:4s1.CopyProcessing medium...81%|131.4MB/s|2.3GB/2.9GB|3/4|ETA:4s1.CopyProcessing medium...81%|126.7MB/s|2.3GB/2.9GB|3/4|ETA:4s2.CopyProcessing huge...64%|136.8MB/s|1.8GB/2.9GB|2/4|ETA:7s3.CopyProcessing huge...46%|129.2MB/s|1.3GB/2.9GB|2/4|ETA:12s1.CopyProcessing medium...82%|81.5MB/s|2.4GB/2.9GB|3/4|ETA:6s2.CopyProcessing huge...68%|136.8MB/s|1.9GB/2.9GB|2/2.CopyProcessing huge...68%|136.8MB/s|1.9GB/2.9GB|2/4|ETA:7s3.CopyProcessing huge...51%|131.3MB/s|1.4GB/2.9GB|2/4|ETA:11s1.CopyProcessing medium...87%|62.8MB/s|2.5GB/2.9GB|3/4|ETA:6s2.CopyProcessing huge...73%|133.02.CopyProcessing huge...73%|133.0MB/s|2.1GB/2.9GB|2/4|ETA:6s3.CopyProcessing huge...56%|134.0MB/s|1.6GB/2.9GB|2/4|ETA:9s1.CopyProcessing medium...92%|62.8MB/s|2.6GB/2.9GB|3/4|ETA:4s2.CopyProcessing huge...81%|129.7MB/s|2.3GB/2.9GB|2/4|ETA:4s2.CopyProcessing medium...81%|129.7MB/s|2.3GB/2.9GB|3/4|ETA:4s1.CopyProcessing medium...96%|94.9MB/s|2.7GB/2.9GB|3/4|ETA:1s2.CopyProcessing medium...81%|129.7MB/s|2.3GB/2.9GB|3/3.CopyProcessing huge...60%|134.0MB/s|1.7GB/2.9GB|2/4|ETA:8s1.CopyProcessing small...97%|105.8MB/s|2.8GB/2.9GB|4/4|ETA:1s1.CopyProcessing small...100%|93.0MB/s|2.9GB/2.9GB|4/4|ETA:1s1.CopyProcessing small...100%|81.7MB/s|2.9GB/2.9GB|4/4|ETA:1s1.CopyFinished successfully!100%|81.7MB/s|2.9GB/2.9GB|4/4|19s2.CopyProcessing medium...81%|126.4MB/s|2.3GB/2.9GB|3/4|ETA:4s3.CopyProcessing huge...64%|134.0MB/s|1.8GB/2.9GB|2/4|ETA:8s2.CopyProcessing medium...85%|119.4MB/s|2.CopyProcessing medium...85%|119.4MB/s|2.4GB/2.9GB|3/4|ETA:3s3.CopyProcessing huge...69%|135.5MB/s|2.0GB/2.9GB|2/4|ETA:6s2.CopyProcessing medium...91%|121.8MB/s|2.6GB/2.9GB|3/4|ETA:2s3.CopyProcessing huge...74%|135.5MB/s|2.1GB/2.9GB|2/4|ETA:5s2.CopyProcessing 
medium...96%|128.2MB/s|2.7GB/2.9GB|3/4|ETA:1s2.CopyProcessing small...97%|139.6MB/s|2.8GB/2.9GB|4/4|ETA:1s3.CopyProcessing medium...81%|139.5MB/s|2.3GB/2.9GB|3/4|ETA:4s2.CopyFinished successfully!100%|89.7MB/s|2.9GB/2.9GB|4/4|20s3.CopyProcessing medium...86%|139.4MB/s|2.5GB/2.9GB|3/4|ETA:3s3.CopyProcessing medium...95%|139.4MB/s|2.7GB/2.9GB|3/4|ETA:1s3.CopyProcessing small...97%|170.1MB/s|2.8GB/2.9GB|4/4|ETA:1s3.CopyProcessing small...100%|179.7MB/s|2.9GB/2.9GB|4/4|ETA:1s3.CopyFinished successfully!100%|179.7MB/s|2.9GB/2.9GB|4/4|20s1.Copy0%|0B/s|0B/2.9GB|0/4|ETA:0s1.Copybig...0%|0B/s|0B/2.9GB|1/4|ETA:0s2.Copy0%|0B/s|0B/2.9GB|0/4|ETA:0s3.Copy0%|0B/s|0B/2.9GB|0/4|ETA:0s2.CopyProcessing huge...45%|99.2M2.CopyProcessing huge...45%|99.2MB/s|1.3GB/2.9GB|2/43.CopyProcessing big...27%|123.6MB/s|786.9MB/2.9GB|1/4|ETA:17s3.CopyProcessing big...32%|123.6MB/s|926.9MB/2.9GB|1/4|ETA:16s3.CopyProcessing big...33%|126.9MB/s|953.7MB/2.9GB|1/4|ETA:15s2.CopyProcessing huge...50%|122.2M2.CopyProcessing huge...55%|127.3MB/s|1.6GB/2.9GB|2/42.CopyProcessing huge...59%|133.72.CopyProcessing huge...64%|136.8MB/s|1.8GB/2.9GB|2/2.CopyProcessing huge...77%|131.12.CopyProcessing huge...77%|131.1MB/s|2.2GB/2.9GB|2/4|ETA:5s2.CopyProcessing huge...81%|130.12.CopyProcessing huge...81%|130.1MB/s|2.3GB/2.9GB|2/4|ETA:4s1.CopyProcessing medium...97%|105.8MB/s|2.8GB/2.9GB|3/4|ETA:1s1.CopyProcessing small...2.CopyProcessing medium...81%|126.4MB/s|2.CopyProcessing medium...3.CopyProcessing huge...80%|137.4MB/s|2.3GB/3.CopyProcessing huge...80%|137.4MB/s|2.3GB/2.9GB|2/4|ETA:4s2.CopyProcessing small...97%|139.6MB/s|1.CopyFinished successfully!2.CopyProcessing small...2.CopyProcessing small...100%|113.3MB/s|2.9GB/2.9GB|4/4|ETA:1s2.CopyProcessing small...100%|89.7MB/s|3.CopyProcessing medium...97%|170.1MB/s|2.8GB/2.9GB|3/4|ETA:1s \ No newline at end of file diff --git a/docs/svg/copy-monitor.svg b/docs/svg/copy-monitor.svg index b9d1ae7..3817061 100644 --- a/docs/svg/copy-monitor.svg +++ b/docs/svg/copy-monitor.svg @@ -1 +1 @@ -barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote/1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing big...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.CopySyncing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s1.CopySyncing big...30%|291.3MB/s|885.9MB/2.9GB|1/4|ETA:7s1.CopySyncing big...33%|293.4MB/s|953.7MB/2.9GB|1/4|ETA:6s1.CopySyncing huge...33%|293.4MB/s|953.7MB/2.9GB|2/4|ETA:6s^Cbarf:/home/barf$barfmonitor1.CopySyncing huge...49%|291.3MB/s|1.4GB/2.9GB|2/4|ETA:5s1.CopySyncing medium...81%|291.3MB/s|2.3GB/2.9GB|3/4|ETA:1s1.CopySyncing medium...97%|291.3MB/s|2.8GB/2.9GB|3/4|ETA:1s1.CopySyncing small...97%|291.3MB/s|2.8GB/2.9GB|4/4|ETA:1s1.CopyCompleted successfully!100%|291.3MB/s|2.9GB/2.9GB|4/4|10s1.CopySyncing...0%|0B/s|0B/2.9GB|0/4|ETA:0s1.CopySyncingbig...10%|289.1MB/s|289.1MB/2.9GB|1/4|ETA:9s1.CopySyncing big...20%|290.2MB/s|582.3MB/2.9GB|1/4|ETA:8s1.CopySyncing huge...58%|291.3MB/s|1.7GB/2.9GB|2/4|ETA:4s1.CopySyncing huge...67%|291.3MB/s|1.9GB/2.9GB|2/4|ETA:3s1.CopySyncing huge...77%|291.3MB/s|2.2GB/2.9GB|2/4|ETA:2s1.CopySyncing huge...81%|291.3MB/s|2.3GB/2.9GB|2/4|ETA:1s1.CopySyncing medium...86%|291.3MB/s|2.5GB/2.9GB|3/4|ETA:1s \ No newline at end of file +barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote/1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.Copybig...13%|198.5MB/s|370.6MB/2.9GB|1/4|ETA:13s^Cbarf:/home/barf$barfmonitor1.CopyProcessing huge...38%|168.2MB/s|1.1GB/2.9GB|2/4|ETA:10s1.CopyProcessing medium...81%|256.9MB/s|2.3GB/2.9GB|3/4|ETA:2s1.CopyProcessing medium...86%|205.9MB/s|2.5GB/2.9GB|3/4|ETA:2s1.CopyProcessing 
small...97%|177.5MB/s|2.8GB/2.9GB|4/4|ETA:2s1.CopyProcessing small...100%|193.2MB/s|2.9GB/2.9GB|4/4|ETA:2s1.CopyFinished successfully!100%|193.2MB/s|2.9GB/2.9GB|4/4|13s1.Copy0%|0B/s|0B/2.9GB|0/4|ETA:0s1.Copybig...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.Copybig...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s1.Copybig...8%|211.7MB/s|211.7MB/2.9GB|1/4|ETA:12s1.CopyProcessing huge...33%|183.1MB/s|953.7MB/2.9GB|2/4|ETA:10s1.CopyProcessing huge...33%|175.8MB/s|953.7MB/2.9GB|2/4|ETA:11s1.CopyProcessing huge...47%|172.5MB/s|1.3GB/2.9GB|2/4|ETA:9s1.CopyProcessing huge...57%|188.7MB/s|1.6GB/2.9GB|2/4|ETA:6s1.CopyProcessing huge...67%|227.2MB/s|1.9GB/2.9GB|2/4|ETA:4s1.CopyProcessing huge...77%|247.3MB/s|2.2GB/2.9GB|2/4|ETA:2s1.CopyProcessing huge...81%|256.9MB/s|2.3GB/2.9GB|2/4|ETA:2s1.CopyProcessing medium...95%|177.5MB/s|2.7GB/2.9GB|3/4|ETA:2s \ No newline at end of file diff --git a/docs/svg/copy-normal.svg b/docs/svg/copy-normal.svg index 71958ea..0158d1c 100644 --- a/docs/svg/copy-normal.svg +++ b/docs/svg/copy-normal.svg @@ -1 +1 @@ -barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote/1.CopyPreparing...0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing...0%|0B/s|0B/2.9GB|0/4|ETA:0s1.CopySyncing big...20%|293.2MB/s|583.6MB/2.9GB|1/4|ETA:8s1.CopySyncing big...30%|291.9MB/s|874.8MB/2.9GB|1/4|ETA:7s1.CopySyncing huge...33%|291.9MB/s|953.7MB/2.9GB|2/4|ETA:6s1.CopySyncing huge...70%|291.9MB/s|2.0GB/2.9GB|2/4|ETA:3s1.CopySyncing huge...80%|292.4MB/s|2.3GB/2.9GB|2/4|ETA:2s1.CopySyncing medium...81%|292.4MB/s|2.3GB/2.9GB|3/4|ETA:1s1.CopySyncing small...97%|292.4MB/s|2.8GB/2.9GB|4/4|ETA:1s1.CopyCompleted successfully!100%|292.4MB/s|2.9GB/2.9GB|4/4|10sbarf:/home/barf$du-ach*~/local954Mbig1,4Ghuge477Mmedium96Msmall2,9Gtotalbarf:/home/barf$du-ach*~/remote1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing big...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.CopySyncing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s1.CopySyncingbig...10%|294.4MB/s|294.4MB/2.9GB|1/4|ETA:9s1.CopySyncing huge...33%|292.2MB/s|953.7MB/2.9GB|2/4|ETA:6s1.CopySyncing huge...40%|291.9MB/s|1.1GB/2.9GB|2/4|ETA:6s1.CopySyncing huge...50%|291.9MB/s|1.4GB/2.9GB|2/4|ETA:5s1.CopySyncing huge...60%|291.9MB/s|1.7GB/2.9GB|2/4|ETA:4s1.CopySyncing huge...81%|292.4MB/s|2.3GB/2.9GB|2/4|ETA:1s1.CopySyncing medium...97%|292.4MB/s|2.8GB/2.9GB|3/4|ETA:1s \ No newline at end of file +barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote/1.CopyPreparing...0%|0B/s|0B/0B|0/0|ETA:0s1.CopyProcessing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s1.CopyProcessing big...9%|240.2MB/s|240.2MB/2.9GB|1/4|ETA:11s1.CopyProcessing big...27%|256.6MB/s|784.8MB/2.9GB|1/4|ETA:8s1.CopyProcessing big...33%|259.2MB/s|953.7MB/2.9GB|1/4|ETA:7s1.CopyProcessing huge...33%|259.2MB/s|953.7MB/2.9GB|2/4|ETA:7s1.CopyProcessing huge...50%|130.2MB/s|1.4GB/2.9GB|2/4|ETA:11s1.CopyProcessing huge...58%|168.6MB/s|1.7GB/2.9GB|2/4|ETA:7s1.CopyProcessing huge...66%|178.8MB/s|1.9GB/2.9GB|2/4|ETA:5s1.CopyProcessing huge...75%|205.0MB/s|2.2GB/2.9GB|2/4|ETA:3s1.CopyProcessing huge...81%|220.3MB/s|2.3GB/2.9GB|2/4|ETA:2s1.CopyProcessing medium...81%|220.3MB/s|2.3GB/2.9GB|3/4|ETA:2s1.CopyProcessing medium...85%|160.1MB/s|2.4GB/2.9GB|3/4|ETA:2s1.CopyProcessing small...97%|139.0MB/s|2.8GB/2.9GB|4/4|ETA:1s1.CopyFinished successfully!100%|146.5MB/s|2.9GB/2.9GB|4/4|12sbarf:/home/barf$du-ach*~/local954Mbig1,4Ghuge477Mmedium96Msmall2,9Gtotalbarf:/home/barf$du-ach*~/remote1.CopyNo work needed!100%|0B/s|0B/0B(2.9GB)|0/0(4)|0s1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.CopyProcessing...0%|0B/s|0B/2.9GB|0/4|ETA:0s1.CopyProcessing big...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.CopyProcessing 
big...18%|248.4MB/s|513.1MB/2.9GB|1/4|ETA:9s1.CopyProcessing huge...36%|178.3MB/s|1.0GB/2.9GB|2/4|ETA:10s1.CopyProcessing huge...44%|130.2MB/s|1.3GB/2.9GB|2/4|ETA:12s1.CopyProcessing medium...93%|139.0MB/s|2.7GB/2.9GB|3/4|ETA:1s1.CopyProcessing medium...97%|139.0MB/s|2.8GB/2.9GB|3/4|ETA:1s1.CopyProcessing small...100%|146.5MB/s|2.9GB/2.9GB|4/4|ETA:1s \ No newline at end of file diff --git a/docs/svg/copy-remote.svg b/docs/svg/copy-remote.svg index ee38542..02b5015 100644 --- a/docs/svg/copy-remote.svg +++ b/docs/svg/copy-remote.svg @@ -1 +1 @@ -barf:/home/barf$barf:/home/barf$barfcopy~/local/*barf@barf:~/remote1.CopyPreparing...0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing medium...37%|111.0MB/s|211.5MB/572.2MB|1/2|ETA:3s1.CopySyncing medium...74%|106.4MB/s|422.4MB/572.2MB|1/2|ETA:1s1.CopySyncing medium...84%|107.2MB/s|476.8MB/572.2MB|1/2|ETA:1s1.CopySyncing small...96%|107.2MB/s|546.5MB/572.2MB|2/2|ETA:1s1.CopySyncing small...100%|107.2MB/s|572.2MB/572.2MB|2/2|ETA:1s1.CopyCompleted successfully!100%|107.2MB/s|572.2MB/572.2MB|2/2|5sbarf:/home/barf$du-ach*~/local477Mmedium96Msmall573Mtotalbarf:/home/barf$du-ach*~/remote1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing...0%|0B/s|0B/572.2MB|0/2|ETA:0s1.CopySyncing medium...0%|0B/s|0B/572.2MB|1/2|ETA:0s1.CopySyncing medium...1%|0B/s|32.0KB/572.2MB|1/2|ETA:0s1.CopySyncing medium...21%|116.4MB/s|116.4MB/572.2MB|1/2|ETA:3s1.CopySyncing medium...57%|107.2MB/s|321.8MB/572.2MB|1/2|ETA:2s1.CopySyncing small...84%|107.2MB/s|476.8MB/572.2MB|2/2|ETA:1s1.CopySyncing small...84%|107.2MB/s|476.9MB/572.2MB|2/2|ETA:1s \ No newline at end of file +barf:/home/barf$barf:/home/barf$barfcopy~/local/*barf@barf:~/remote1.CopyProcessing medium...22%|124.4MB/s|124.4MB/572.2MB|1/2|ETA:3s1.CopyFinished successfully!100%|48.6MB/s|572.2MB/572.2MB|2/2|5sbarf:/home/barf$du-ach*~/local477Mmedium96Msmall573Mtotalbarf:/home/barf$du-ach*~/remote1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.CopyProcessing...0%|0B/s|0B/572.2MB|0/2|ETA:0s1.CopyProcessing medium...0%|0B/s|0B/572.2MB|1/2|ETA:0s1.CopyProcessing medium...1%|0B/s|32.0KB/572.2MB|1/2|ETA:0s1.CopyProcessing medium...45%|126.5MB/s|257.2MB/572.2MB|1/2|ETA:2s1.CopyProcessing medium...69%|128.7MB/s|394.3MB/572.2MB|1/2|ETA:1s1.CopyProcessing medium...84%|127.9MB/s|476.8MB/572.2MB|1/2|ETA:1s1.CopyProcessing small...84%|127.9MB/s|476.8MB/572.2MB|2/2|ETA:1s1.CopyProcessing small...84%|127.9MB/s|476.9MB/572.2MB|2/2|ETA:1s1.CopyProcessing small...89%|79.1MB/s|508.0MB/572.2MB|2/2|ETA:1s1.CopyProcessing small...100%|48.6MB/s|572.2MB/572.2MB|2/2|ETA:1s \ No newline at end of file diff --git a/docs/svg/daemon-journal.svg b/docs/svg/daemon-journal.svg index 78dbe2c..96a1625 100644 --- a/docs/svg/daemon-journal.svg +++ b/docs/svg/daemon-journal.svg @@ -1 +1 @@ -barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote/1.CopySyncing big...22%|319.7MB/s|639.3MB/2.9GB|1/4|ETA:7s1.CopySyncing huge...33%|311.0MB/s|953.7MB/2.9GB|2/4|ETA:6s^Cbarf:/home/barf$cat~/.config/barf/journal/active/*.json|jq{"operation":{"id":"c00abn9qnsvp637bva7g","index":1,"title":"Copy","type":"copy","args":{"from":["~/local/big","~/local/huge","~/local/medium","~/local/small"],"to":"~/remote/"}},"status":{"bytesDiffTotal":3100000000,"bytesTotal":3100000000,"bytesDone":1000032768,"filesDiffTotal":4,"filesTotal":4,"filesDone":2,"progress":32.26,"speed":317152296,"startedTimestamp":1610655197,"finishedTimestamp":0,"secondsLeft":6,"fileName":"huge","step":"Syncing","finished":false,"exitCode":-1}}1.CopyPreparing...0%|0B/s|0B/0B|0/0|ETA:0s1.CopySyncing...0%|0B/s|0B/2.9GB|0/4|ETA:0s1.CopySyncing 
big...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.CopySyncing big...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s1.CopySyncingbig...11%|319.5MB/s|319.5MB/2.9GB|1/4|ETA:8s1.CopySyncing big...31%|319.5MB/s|906.8MB/2.9GB|1/4|ETA:6s1.CopySyncing big...33%|311.0MB/s|953.7MB/2.9GB|1/4|ETA:6s \ No newline at end of file +barf:/home/barf$barf:/home/barf$barfcopy~/local/*~/remote/1.Copy0%|0B/s|0B/2.9GB|0/4|ETA:0s1.Copybig...11%|304.1MB/s|304.1MB/2.9GB|1/4|ETA:8s1.Copyhuge...33%|299.0MB/s|953.7MB/2.9GB|2/4|ETA:6s^Cbarf:/home/barf$find/home/mattias/.config/barf/journal/active../c00q7epqnsvrpddi9on0./c00q7epqnsvrpddi9on0/status.json./c00q7epqnsvrpddi9on0/output.log./c00q7epqnsvrpddi9on0/operation.json1.Copy0%|0B/s|0B/0B|0/0|ETA:0s1.Copybig...0%|0B/s|0B/2.9GB|1/4|ETA:0s1.Copybig...0%|0B/s|32.0KB/2.9GB|1/4|ETA:0s1.Copybig...21%|303.3MB/s|604.6MB/2.9GB|1/4|ETA:7s1.Copybig...30%|302.4MB/s|884.9MB/2.9GB|1/4|ETA:6s1.Copybig...33%|299.0MB/s|953.7MB/2.9GB|1/4|ETA:6s \ No newline at end of file diff --git a/internal/cmd/cmd.go b/internal/cmd/cmd.go index da0de03..4929d00 100644 --- a/internal/cmd/cmd.go +++ b/internal/cmd/cmd.go @@ -40,7 +40,6 @@ func (cmd *Cmd) Start(args []string) (int, error) { } cmd.cmd = exec.Command(args[0], args[1:]...) - var wg sync.WaitGroup stdout, _ := cmd.cmd.StdoutPipe() stderr, _ := cmd.cmd.StderrPipe() @@ -48,6 +47,8 @@ func (cmd *Cmd) Start(args []string) (int, error) { go cmd.handleLog(stdout, cmd.stdoutHandler) go cmd.handleLog(stderr, cmd.stderrHandler) + cmd.stdoutHandler("Executing: " + strings.Join(args, " ")) + err := cmd.cmd.Start() if err != nil { @@ -56,7 +57,7 @@ func (cmd *Cmd) Start(args []string) (int, error) { return 255, err } - wg.Wait() + cmd.wg.Wait() err = cmd.cmd.Wait() diff --git a/internal/journal/actions.go b/internal/coordinator/actions.go similarity index 57% rename from internal/journal/actions.go rename to internal/coordinator/actions.go index f345251..84215dc 100644 --- a/internal/journal/actions.go +++ b/internal/coordinator/actions.go @@ -1,40 +1,27 @@ -package journal +package coordinator import ( "errors" "barf/internal/com/server" + "barf/internal/journal" "barf/internal/op" ) func create(opType op.OperationType, args op.OperationArgs) (*op.Operation, error) { - operation := op.NewOperation(opType, args) + index, err := getNextIndex() - if registerdStartHandler == nil { - return operation, errors.New("No start handler registered") + if err != nil { + return nil, err } - e := entry{ - Operation: operation, - Status: op.NewStatus(), - } + operation := op.NewOperation(opType, args, index) - err := addEntry(&e) + e, err := journal.NewJournalEntry(operation) - if err != nil { - return operation, err - } + addEntry(e) - err = server.OperationCreated(operation) - - if err != nil { - UpdateOperationStatus(operation.ID, &op.OperationStatus{ - Finished: true, - ExitCode: 255, - Error: err.Error(), - }) - return operation, err - } + _ = server.OperationCreated(operation) err = registerdStartHandler(operation) @@ -42,9 +29,10 @@ func create(opType op.OperationType, args op.OperationArgs) (*op.Operation, erro UpdateOperationStatus(operation.ID, &op.OperationStatus{ Finished: true, ExitCode: 255, - Error: err.Error(), + Message: err.Error(), }) - return operation, err + + return nil, err } return operation, nil @@ -61,8 +49,9 @@ func abort(operationID op.OperationID) error { UpdateOperationStatus(operationID, &op.OperationStatus{ Finished: true, ExitCode: 254, - Error: err.Error(), + Message: err.Error(), }) + return err } @@ -72,7 +61,7 @@ func abort(operationID op.OperationID) error { 
func list() ([]*op.Operation, error) { var operations []*op.Operation - for _, e := range entries { + for _, e := range getEntries() { operations = append(operations, e.Operation) } @@ -80,11 +69,11 @@ func list() ([]*op.Operation, error) { } func status(operationID op.OperationID) (*op.OperationStatus, error) { - entry, err := getEntry(operationID) + entry := getEntry(operationID) - if err != nil { - return nil, err + if entry == nil { + return nil, errors.New("No entry for operation with id " + string(operationID) + " found!") } - return entry.Status, errors.New("No such operation found") + return entry.Status, nil } diff --git a/internal/coordinator/entries.go b/internal/coordinator/entries.go new file mode 100644 index 0000000..681f5ef --- /dev/null +++ b/internal/coordinator/entries.go @@ -0,0 +1,78 @@ +package coordinator + +import ( + "errors" + "sync" + + "barf/internal/journal" + "barf/internal/op" +) + +var entries []*journal.JournalEntry +var entriesMu sync.Mutex + +func addEntry(e *journal.JournalEntry) { + entriesMu.Lock() + defer entriesMu.Unlock() + + entries = append(entries, e) +} + +func removeEntry(e *journal.JournalEntry) { + entriesMu.Lock() + defer entriesMu.Unlock() + + for i, s := range entries { + if s == e { + copy(entries[i:], entries[i+1:]) + entries = entries[:len(entries)-1] + + return + } + } +} + +func getEntry(operationID op.OperationID) *journal.JournalEntry { + entriesMu.Lock() + defer entriesMu.Unlock() + + for _, e := range entries { + if e.Operation.ID == operationID { + return e + } + } + + return nil +} + +func getEntries() []*journal.JournalEntry { + entriesMu.Lock() + defer entriesMu.Unlock() + + result := make([]*journal.JournalEntry, len(entries)) + copy(result, entries) + + return result +} + +func getNextIndex() (op.OperationIndex, error) { + var index op.OperationIndex = 1 + + indexFree := func(index op.OperationIndex) bool { + for _, e := range entries { + if e.Operation.Index == index { + return false + } + } + + return true + } + + for ; !indexFree(index); index++ { + if index == 0 { + return 0, errors.New("Could not get new index, overflow") + } + } + + return index, nil +} diff --git a/internal/journal/event.go b/internal/coordinator/event.go similarity index 96% rename from internal/journal/event.go rename to internal/coordinator/event.go index 8e3e2ed..c917639 100644 --- a/internal/journal/event.go +++ b/internal/coordinator/event.go @@ -1,4 +1,4 @@ -package journal +package coordinator import "barf/internal/op" diff --git a/internal/coordinator/init.go b/internal/coordinator/init.go new file mode 100644 index 0000000..7aa3ced --- /dev/null +++ b/internal/coordinator/init.go @@ -0,0 +1,46 @@ +package coordinator + +import ( + "errors" + + "barf/internal/com/server" + "barf/internal/journal" + "barf/internal/op" +) + +// Initialize reads active entries from disk +func Initialize() error { + if registerdStartHandler == nil { + return errors.New("No start handler registered") + } + + entryList, err := journal.Initialize() + + if err != nil { + return err + } + + entriesMu.Lock() + entries = entryList + entriesMu.Unlock() + + server.OnOperationCreate(create) + server.OnOperationAbort(abort) + server.OnOperationStatus(status) + server.OnListOperations(list) + + for _, e := range getEntries() { + err = registerdStartHandler(e.Operation) + + if err != nil { + UpdateOperationStatus(e.Operation.ID, &op.OperationStatus{ + Finished: true, + ExitCode: 255, + Message: err.Error(), + }) + return err + } + } + + return nil +} diff --git 
a/internal/coordinator/log.go b/internal/coordinator/log.go new file mode 100644 index 0000000..2d74fb3 --- /dev/null +++ b/internal/coordinator/log.go @@ -0,0 +1,27 @@ +package coordinator + +import ( + "barf/internal/op" +) + +// WriteOperationStdout writes to the operations stdout log +func WriteOperationStdout(operationID op.OperationID, line string) { + e := getEntry(operationID) + + if e == nil { + return // Just drop + } + + e.WriteStdout(line) +} + +// WriteOperationStderr writes to the operations stderr log +func WriteOperationStderr(operationID op.OperationID, line string) { + e := getEntry(operationID) + + if e == nil { + return // Just drop + } + + e.WriteStderr(line) +} diff --git a/internal/journal/update.go b/internal/coordinator/update.go similarity index 57% rename from internal/journal/update.go rename to internal/coordinator/update.go index bf7796d..2492fcc 100644 --- a/internal/journal/update.go +++ b/internal/coordinator/update.go @@ -1,32 +1,28 @@ -package journal +package coordinator import ( + "errors" + "barf/internal/com/server" "barf/internal/op" ) // UpdateOperationStatus sets the operation status func UpdateOperationStatus(operationID op.OperationID, status *op.OperationStatus) error { - e, err := getEntry(operationID) + e := getEntry(operationID) - if err != nil { - return err + if e == nil { + return errors.New("Could not find any entry for operation with id " + string(operationID) + " to report status on!") } - op.UpdateStatus(e.Status, status) - - err = writeEntry(e) + err := e.UpdateStatus(status) if err != nil { return err } if e.Status.Finished { - err = removeEntry(e) - - if err != nil { - return err - } + removeEntry(e) } return server.OperationStatus(operationID, e.Status) diff --git a/internal/journal/entries.go b/internal/journal/entries.go deleted file mode 100644 index 87b7591..0000000 --- a/internal/journal/entries.go +++ /dev/null @@ -1,152 +0,0 @@ -package journal - -import ( - "encoding/json" - "errors" - "io/ioutil" - "os" - "path" - "path/filepath" - "strings" - - "barf/internal/op" -) - -type entry struct { - Operation *op.Operation `json:"operation"` - Status *op.OperationStatus `json:"status"` -} - -var entries []*entry - -func addEntry(e *entry) error { - index, err := getNextIndex() - - if err != nil { - return err - } - - e.Operation.Index = index - entries = append(entries, e) - - return writeEntry(e) -} - -func removeEntry(e *entry) error { - for i, s := range entries { - if s == e { - copy(entries[i:], entries[i+1:]) - entries = entries[:len(entries)-1] - - return finishEntry(e) - } - } - - return nil -} - -func getEntry(operationID op.OperationID) (*entry, error) { - for _, e := range entries { - if e.Operation.ID == operationID { - return e, nil - } - } - - return nil, errors.New("No such operation found") -} - -func writeEntry(e *entry) error { - bytes, err := json.Marshal(e) - - if err != nil { - return err - } - - file := path.Join(activeDir, string(e.Operation.ID)+".json") - - return ioutil.WriteFile(file, bytes, 0600) -} - -func readEntry(operationID op.OperationID) (*entry, error) { - var e entry - file := path.Join(activeDir, string(operationID)+".json") - data, err := ioutil.ReadFile(file) - - if err != nil { - return &e, err - } - - err = json.Unmarshal(data, &e) - - return &e, err -} - -func finishEntry(e *entry) error { - from := path.Join(activeDir, string(e.Operation.ID)+".json") - to := path.Join(historyDir, string(e.Operation.ID)+".json") - - return os.Rename(from, to) -} - -func getNextIndex() (op.OperationIndex, 
error) { - var index op.OperationIndex = 1 - - indexFree := func(index op.OperationIndex) bool { - for _, e := range entries { - if e.Operation.Index == index { - return false - } - } - - return true - } - - for ; !indexFree(index); index++ { - if index == 0 { - return 0, errors.New("Could not get new index, overflow") - } - } - - return index, nil -} - -// ReadFromDisk reads active entries from disk -func ReadFromDisk() error { - if registerdStartHandler == nil { - return errors.New("No start handler registered") - } - - entries = nil - fileInfo, err := ioutil.ReadDir(activeDir) - - if err != nil { - return err - } - - for _, file := range fileInfo { - operationID := strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())) - - e, err := readEntry(op.OperationID(operationID)) - - if err != nil { - return err - } - - entries = append(entries, e) - } - - for _, e := range entries { - err = registerdStartHandler(e.Operation) - - if err != nil { - UpdateOperationStatus(e.Operation.ID, &op.OperationStatus{ - Finished: true, - ExitCode: 255, - Error: err.Error(), - }) - return err - } - } - - return nil -} diff --git a/internal/journal/entry.go b/internal/journal/entry.go new file mode 100644 index 0000000..5d39138 --- /dev/null +++ b/internal/journal/entry.go @@ -0,0 +1,132 @@ +package journal + +import ( + "fmt" + "io/ioutil" + "os" + "path" + "sync" + + "barf/internal/op" +) + +// JournalEntry represents an operation and its status in the journal +type JournalEntry struct { + Operation *op.Operation + Status *op.OperationStatus + log *operationLog + mutex sync.Mutex +} + +// NewJournalEntry creates a new journal entry based on the supplied operation +func NewJournalEntry(operation *op.Operation) (*JournalEntry, error) { + err := writeOperation(activeDir, operation) + + if err != nil { + return nil, err + } + + log, err := openLog(activeDir, operation.ID) + + if err != nil { + return nil, err + } + + e := JournalEntry{ + Operation: operation, + Status: op.NewStatus(), + log: log, + } + + return &e, nil +} + +func readJournalEntry(operationID op.OperationID) (*JournalEntry, error) { + operation, err := readOperation(activeDir, operationID) + + if err != nil { + return nil, err + } + + status := readStatus(activeDir, operationID) + + log, err := openLog(activeDir, operation.ID) + + if err != nil { + return nil, err + } + + e := JournalEntry{ + Operation: operation, + Status: status, + log: log, + } + + return &e, nil +} + +func loadActiveJournalEntries() ([]*JournalEntry, error) { + var entries []*JournalEntry + fileInfo, err := ioutil.ReadDir(activeDir) + + if err != nil { + return nil, err + } + + for _, file := range fileInfo { + e, err := readJournalEntry(op.OperationID(file.Name())) + + if err != nil { + fmt.Printf("Could not read active journal entry for %s, will skip it..\n", file.Name()) + fmt.Println(err) + continue + } + + entries = append(entries, e) + } + + return entries, nil +} + +// UpdateStatus updates the status of the operation +func (e *JournalEntry) UpdateStatus(status *op.OperationStatus) error { + e.mutex.Lock() + defer e.mutex.Unlock() + + op.UpdateStatus(e.Status, status) + + err := writeStatus(activeDir, e.Operation.ID, e.Status) + + if err != nil { + return err + } + + if e.Status.Finished { + return e.archive() + } + + return nil +} + +// WriteStdout writes to the log of the operation +func (e *JournalEntry) WriteStdout(line string) { + e.log.stdout(line) +} + +// WriteStderr writes to the log of the operation +func (e *JournalEntry) WriteStderr(line string) { + 
e.log.stderr(line) +} + +func (e *JournalEntry) archive() error { + err := e.log.close() + + if err != nil { + return err + } + + from := path.Join(activeDir, string(e.Operation.ID)) + to := path.Join(historyDir, string(e.Operation.ID)) + + return os.Rename(from, to) +} diff --git a/internal/journal/init.go b/internal/journal/init.go index 104554d..5b02de4 100644 --- a/internal/journal/init.go +++ b/internal/journal/init.go @@ -4,27 +4,28 @@ import ( "os" "path" - "barf/internal/com/server" "barf/internal/config" - - "github.com/asaskevich/EventBus" ) -var bus EventBus.Bus var activeDir = "" var historyDir = "" -func init() { - bus = EventBus.New() - +// Initialize initializes the journal, ensures directories and loads active entries +func Initialize() ([]*JournalEntry, error) { activeDir = path.Join(config.JournalDir, "active") historyDir = path.Join(config.JournalDir, "history") - os.Mkdir(activeDir, 0700) - os.Mkdir(historyDir, 0700) + err := os.MkdirAll(activeDir, 0700) + + if err != nil { + return nil, err + } + + err = os.MkdirAll(historyDir, 0700) + + if err != nil { + return nil, err + } - server.OnOperationCreate(create) - server.OnOperationAbort(abort) - server.OnOperationStatus(status) - server.OnListOperations(list) + return loadActiveJournalEntries() } diff --git a/internal/journal/log.go b/internal/journal/log.go new file mode 100644 index 0000000..7e29f69 --- /dev/null +++ b/internal/journal/log.go @@ -0,0 +1,34 @@ +package journal + +import ( + "log" + "os" + "path" + + "barf/internal/op" +) + +type operationLog struct { + close func() error + stdout func(...interface{}) + stderr func(...interface{}) +} + +func openLog(dir string, operationID op.OperationID) (*operationLog, error) { + filename := path.Join(dir, string(operationID), "output.log") + + f, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0600) + + if err != nil { + return nil, err + } + + stdout := log.New(f, " OUT ", log.LstdFlags|log.LUTC|log.Lmsgprefix) + stderr := log.New(f, " ERR ", log.LstdFlags|log.LUTC|log.Lmsgprefix) + + return &operationLog{ + close: f.Close, + stdout: stdout.Println, + stderr: stderr.Println, + }, nil +} diff --git a/internal/journal/operation.go b/internal/journal/operation.go new file mode 100644 index 0000000..44f6f9d --- /dev/null +++ b/internal/journal/operation.go @@ -0,0 +1,24 @@ +package journal + +import ( + "path" + + "barf/internal/op" + "barf/internal/utils/fs" +) + +func writeOperation(dir string, operation *op.Operation) error { + filename := path.Join(dir, string(operation.ID), "operation.json") + + return fs.WriteJSONFile(filename, operation) +} + +func readOperation(dir string, operationID op.OperationID) (*op.Operation, error) { + filename := path.Join(dir, string(operationID), "operation.json") + + var operation op.Operation + + err := fs.ReadJSONFile(filename, &operation) + + return &operation, err +} diff --git a/internal/journal/status.go b/internal/journal/status.go new file mode 100644 index 0000000..1f6d75a --- /dev/null +++ b/internal/journal/status.go @@ -0,0 +1,24 @@ +package journal + +import ( + "path" + + "barf/internal/op" + "barf/internal/utils/fs" +) + +func writeStatus(dir string, operationID op.OperationID, status *op.OperationStatus) error { + filename := path.Join(dir, string(operationID), "status.json") + + return fs.WriteJSONFile(filename, status) +} + +func readStatus(dir string, operationID op.OperationID) *op.OperationStatus { + filename := path.Join(dir, string(operationID), "status.json") + + status := op.NewStatus() + + _ 
= fs.ReadJSONFile(filename, status) + + return status +} diff --git a/internal/op/operation.go b/internal/op/operation.go index 3639dc8..79471ea 100644 --- a/internal/op/operation.go +++ b/internal/op/operation.go @@ -42,11 +42,12 @@ type Operation struct { } // NewOperation creates an operation object -func NewOperation(opType OperationType, args OperationArgs) *Operation { +func NewOperation(opType OperationType, args OperationArgs, index OperationIndex) *Operation { id := xid.New() return &Operation{ ID: OperationID(id.String()), + Index: index, Title: OperationTitle(strings.Title(string(opType))), Type: opType, Args: args, diff --git a/internal/op/status.go b/internal/op/status.go index fb25f39..992d7df 100644 --- a/internal/op/status.go +++ b/internal/op/status.go @@ -20,10 +20,9 @@ type OperationStatus struct { SecondsLeft int64 `json:"secondsLeft"` FileName string `json:"fileName"` - Step string `json:"step"` Finished bool `json:"finished"` + Message string `json:"message"` ExitCode int `json:"exitCode"` - Error string `json:"error,omitempty"` } // NewStatus creates a operation status object with default values @@ -79,8 +78,8 @@ func UpdateStatus(a *OperationStatus, b *OperationStatus) { a.FileName = b.FileName } - if len(b.Step) > 0 { - a.Step = b.Step + if len(b.Message) > 0 { + a.Message = b.Message } if b.Finished { @@ -96,8 +95,4 @@ func UpdateStatus(a *OperationStatus, b *OperationStatus) { if b.ExitCode > a.ExitCode { a.ExitCode = b.ExitCode } - - if len(b.Error) > 0 { - a.Error = b.Error - } } diff --git a/internal/proc/logfile/logfile.go b/internal/proc/logfile/logfile.go index e44e006..6516224 100644 --- a/internal/proc/logfile/logfile.go +++ b/internal/proc/logfile/logfile.go @@ -9,7 +9,7 @@ import ( // StartLogging redirectes stdout and stderr to a log file func StartLogging() { - f, err := os.OpenFile(config.LogFile, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666) + f, err := os.OpenFile(config.LogFile, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0600) if err != nil { log.Fatalf("Error opening log file: %v", err) diff --git a/internal/rsync/exec.go b/internal/rsync/exec.go index d1e29f7..d43bf27 100644 --- a/internal/rsync/exec.go +++ b/internal/rsync/exec.go @@ -4,10 +4,11 @@ import ( "barf/internal/cmd" "barf/internal/utils" "fmt" + "sync" ) type RsyncStatus struct { - Step string + Message string BytesDiffTotal int64 BytesTotal int64 @@ -26,25 +27,24 @@ type RsyncStatus struct { Finished bool ExitCode int - Error string } type StatusHandler func(status *RsyncStatus) type Rsync struct { cmd *cmd.Cmd - args []string stdoutHandler cmd.LogHandler stderrHandler cmd.LogHandler statusHandler StatusHandler status RsyncStatus speed []float64 + mu sync.Mutex } -func NewRsync(args []string) *Rsync { +// NewRsync creates a new rsync object +func NewRsync() *Rsync { r := &Rsync{ - args: args, status: RsyncStatus{ Finished: false, ExitCode: -1, @@ -54,7 +54,7 @@ func NewRsync(args []string) *Rsync { return r } -func (r *Rsync) getArgs(stepArgs []string) []string { +func (r *Rsync) getArgs(stepArgs []string, operationArgs []string) []string { args := []string{ "rsync", // "--checksum", // Very slow! 
@@ -70,7 +70,7 @@ func (r *Rsync) getArgs(stepArgs []string) []string { args = append(args, arg) } - for _, arg := range r.args { + for _, arg := range operationArgs { args = append(args, arg) } @@ -84,7 +84,7 @@ func (r *Rsync) getMedianSpeed(newSpeed float64) float64 { r.speed = append(r.speed, newSpeed) - if len(r.speed) > 5 { + if len(r.speed) > 4 { r.speed = r.speed[1:] } @@ -92,6 +92,9 @@ func (r *Rsync) getMedianSpeed(newSpeed float64) float64 { } func (r *Rsync) parseProgressLine(line string) { + r.mu.Lock() + defer r.mu.Unlock() + fileName, isDir, ok := parseFileName(line) if ok { @@ -101,6 +104,7 @@ func (r *Rsync) parseProgressLine(line string) { if !isDir { r.status.CurrentFileName = fileName r.status.CurrentFileIndex++ + r.status.Message = "Processing " + fileName + "..." } r.emitStatus() @@ -124,6 +128,9 @@ func (r *Rsync) parseProgressLine(line string) { } func (r *Rsync) parsePreparationLine(line string) { + r.mu.Lock() + defer r.mu.Unlock() + r.status.FilesTotal = parseNumberOfFiles(line, r.status.FilesTotal) r.status.FilesDiffTotal = parseNumberOfCreatedFiles(line, r.status.FilesDiffTotal) r.status.BytesTotal = parseTotalFileSize(line, r.status.BytesTotal) @@ -135,19 +142,22 @@ func (r *Rsync) parsePreparationLine(line string) { } func (r *Rsync) handleStderrLine(line string) { + r.mu.Lock() + defer r.mu.Unlock() + if r.stderrHandler != nil { r.stderrHandler(line) } } -func (r *Rsync) doPreparation() error { - r.status.Step = "Preparing" +func (r *Rsync) doPreparation(operationArgs []string) error { + r.status.Message = "Preparing..." r.emitStatus() args := r.getArgs([]string{ "--dry-run", "--stats", - }) + }, operationArgs) r.cmd = cmd.NewCmd() r.cmd.OnStdout(r.parsePreparationLine) @@ -158,13 +168,14 @@ func (r *Rsync) doPreparation() error { if err != nil { r.status.Finished = true - r.status.Error = err.Error() + r.status.Message = err.Error() r.status.ExitCode = exitCode r.emitStatus() } else if r.status.BytesDiffTotal == 0 { fmt.Println("No bytes found that needs transfer, will do nothing") r.status.Progress = 100 r.status.Finished = true + r.status.Message = "No work needed!" r.status.ExitCode = exitCode r.emitStatus() } @@ -172,14 +183,14 @@ func (r *Rsync) doPreparation() error { return err } -func (r *Rsync) doSync() error { - r.status.Step = "Syncing" +func (r *Rsync) doSync(operationArgs []string) error { + r.status.Message = "Processing..." r.emitStatus() args := r.getArgs([]string{ "--progress", "--out-format=__file:%n", - }) + }, operationArgs) r.cmd = cmd.NewCmd() r.cmd.OnStdout(r.parseProgressLine) @@ -192,7 +203,9 @@ func (r *Rsync) doSync() error { r.status.ExitCode = exitCode if err != nil { - r.status.Error = err.Error() + r.status.Message = err.Error() + } else { + r.status.Message = "Finished successfully!" 
} r.emitStatus() @@ -206,14 +219,14 @@ func (r *Rsync) emitStatus() { } } -func (r *Rsync) Start() { - err := r.doPreparation() +func (r *Rsync) Copy(operationArgs []string) { + err := r.doPreparation(operationArgs) if err != nil || r.status.Finished { return } - _ = r.doSync() + _ = r.doSync(operationArgs) } func (r *Rsync) Abort() error { diff --git a/internal/runner/init.go b/internal/runner/init.go index 71be62f..93e8b2f 100644 --- a/internal/runner/init.go +++ b/internal/runner/init.go @@ -1,15 +1,13 @@ package runner import ( - "barf/internal/journal" + "barf/internal/coordinator" ) -func init() { - journal.OnOperationStart(start) - journal.OnOperationAbort(abort) -} +// Start reads the journal and starts operations +func Start() error { + coordinator.OnOperationStart(start) + coordinator.OnOperationAbort(abort) -// StartRunner reads the journal and starts operations -func StartRunner() error { - return journal.ReadFromDisk() + return coordinator.Initialize() } diff --git a/internal/runner/runners.go b/internal/runner/runners.go index 676af2b..e089340 100644 --- a/internal/runner/runners.go +++ b/internal/runner/runners.go @@ -2,13 +2,15 @@ package runner import ( "fmt" + "sync" - "barf/internal/journal" + "barf/internal/coordinator" "barf/internal/op" "barf/internal/runner/runners" ) var activeRunners []runners.Runner +var activeRunnersMu sync.Mutex func createRunner(operation *op.Operation) error { r, err := runners.NewRunner(operation) @@ -19,18 +21,18 @@ func createRunner(operation *op.Operation) error { r.OnStdout(func(line string) { fmt.Println("Stdout["+r.OperationID()+"]: ", line) - // TODO: Write to file - // TODO: Send to clients? + + coordinator.WriteOperationStdout(r.OperationID(), line) }) r.OnStderr(func(line string) { fmt.Println("Stderr["+r.OperationID()+"]: ", line) - // TODO: Write to file - // TODO: Send to clients? 
+ + coordinator.WriteOperationStderr(r.OperationID(), line) }) r.OnStatus(func(status *op.OperationStatus) { - err := journal.UpdateOperationStatus(r.OperationID(), status) + err := coordinator.UpdateOperationStatus(r.OperationID(), status) if err != nil { fmt.Println(err) @@ -41,7 +43,9 @@ func createRunner(operation *op.Operation) error { } }) + activeRunnersMu.Lock() activeRunners = append(activeRunners, r) + defer activeRunnersMu.Unlock() go r.Start() @@ -49,6 +53,9 @@ func createRunner(operation *op.Operation) error { } func removeRunner(operationID op.OperationID) { + activeRunnersMu.Lock() + defer activeRunnersMu.Unlock() + for i, r := range activeRunners { if r.OperationID() == operationID { copy(activeRunners[i:], activeRunners[i+1:]) @@ -60,6 +67,9 @@ func removeRunner(operationID op.OperationID) { } func getRunner(operationID op.OperationID) runners.Runner { + activeRunnersMu.Lock() + defer activeRunnersMu.Unlock() + for _, r := range activeRunners { if r.OperationID() == operationID { return r diff --git a/internal/runner/runners/copy.go b/internal/runner/runners/copy.go index 88208d3..241dada 100644 --- a/internal/runner/runners/copy.go +++ b/internal/runner/runners/copy.go @@ -19,25 +19,24 @@ type copyRunner struct { func (r *copyRunner) init(operation *op.Operation) { r.operation = operation + r.rsync = rsync.NewRsync() + r.rsync.OnStdout(r.handleStdout) + r.rsync.OnStderr(r.handleStderr) + r.rsync.OnStatus(r.handleStatus) +} +func (r *copyRunner) Start() { args := []string{} - fromArray, _ := typeconv.ToArray(operation.Args["from"]) + fromArray, _ := typeconv.ToArray(r.operation.Args["from"]) from := typeconv.ToStringArray(fromArray) for _, value := range from { args = append(args, value) } - args = append(args, operation.Args["to"].(string)) + args = append(args, r.operation.Args["to"].(string)) - r.rsync = rsync.NewRsync(args) - r.rsync.OnStdout(r.handleStdout) - r.rsync.OnStderr(r.handleStderr) - r.rsync.OnStatus(r.handleStatus) -} - -func (r *copyRunner) Start() { - r.rsync.Start() + r.rsync.Copy(args) } func (r *copyRunner) Abort() error { @@ -70,7 +69,7 @@ func (r *copyRunner) handleStderr(line string) { func (r *copyRunner) handleStatus(status *rsync.RsyncStatus) { r.statusHandler(&op.OperationStatus{ - Step: status.Step, + Message: status.Message, BytesDiffTotal: status.BytesDiffTotal, BytesTotal: status.BytesTotal, BytesDone: status.BytesDoneTotal, @@ -83,6 +82,5 @@ func (r *copyRunner) handleStatus(status *rsync.RsyncStatus) { FileName: status.CurrentFileName, Finished: status.Finished, ExitCode: status.ExitCode, - Error: status.Error, }) } diff --git a/internal/runner/runners/dummy.go b/internal/runner/runners/dummy.go index 7e23328..259d4ed 100644 --- a/internal/runner/runners/dummy.go +++ b/internal/runner/runners/dummy.go @@ -92,6 +92,6 @@ func (r *dummyRunner) handleFinish(exitCode int, errorMsg string) { r.statusHandler(&op.OperationStatus{ Finished: true, ExitCode: exitCode, - Error: errorMsg, + Message: errorMsg, }) } diff --git a/internal/ui/progress.go b/internal/ui/progress.go index b323520..dfbb89a 100644 --- a/internal/ui/progress.go +++ b/internal/ui/progress.go @@ -41,26 +41,14 @@ func getIndex(o *operationWithStatus) string { return fmt.Sprintf("%d", o.operation.Index) } -func getStep(o *operationWithStatus) string { - return o.status.Step +func getMessage(o *operationWithStatus) string { + return o.status.Message } func getFileName(o *operationWithStatus) string { return o.status.FileName } -func getFinished(o *operationWithStatus) string { - if 
o.status.Finished { - if o.status.ExitCode > 0 { - return fmt.Sprintf("Failed [code=%d]: %s", o.status.ExitCode, o.status.Error) - } - - return "Completed successfully!" - } - - return "" -} - func getByteProgress(o *operationWithStatus) string { totalDiffStr := utils.ByteCountSI(o.status.BytesDiffTotal) totalStr := utils.ByteCountSI(o.status.BytesTotal) @@ -149,37 +137,23 @@ func getProgressBar(o *operationWithStatus, width int, text string) string { func update() { mu.Lock() + defer mu.Unlock() + writer.Clear() for n, o := range operations { index := getIndex(o) title := getTitle(o) - step := getStep(o) - fileName := getFileName(o) - finished := getFinished(o) + message := getMessage(o) sizeProgress := getByteProgress(o) fileProgress := getFileProgress(o) progress := getProgress(o) speed := getSpeed(o) timeInfo := getTimeInfo(o) - barText := "" - - if len(step) > 0 { - barText = fmt.Sprintf("%s...", step) - } - - if len(fileName) > 0 { - barText = fmt.Sprintf("%s %s...", step, fileName) - } - - if len(finished) > 0 { - barText = fmt.Sprintf("%s", finished) - } - progressPrefix := fmt.Sprintf(" %s. %s ", index, title) progressSuffix := fmt.Sprintf(" %s | %s | %s | %s | %s ", progress, speed, sizeProgress, fileProgress, timeInfo) progressWidth := width - sLen(progressPrefix) - sLen(progressSuffix) - progressBar := getProgressBar(o, progressWidth, barText) + progressBar := getProgressBar(o, progressWidth, message) fmt.Fprintf(writer, "%s%s%s\n", progressPrefix, progressBar, progressSuffix) @@ -187,7 +161,5 @@ func update() { fmt.Fprintf(writer, "\n") } } - writer.Print() - mu.Unlock() } diff --git a/internal/utils/fs/fs.go b/internal/utils/fs/fs.go new file mode 100644 index 0000000..944d0d4 --- /dev/null +++ b/internal/utils/fs/fs.go @@ -0,0 +1,81 @@ +package fs + +import ( + "encoding/json" + "io/ioutil" + "os" + "path" +) + +func ensureDirectoryFor(filename string) error { + dirname := path.Dir(filename) + return os.MkdirAll(dirname, 0700) +} + +func writeTempFile(filename string, bytes []byte) error { + tmpfilename := filename + "~" + + return ioutil.WriteFile(tmpfilename, bytes, 0600) +} + +func moveTempFile(filename string) error { + tmpfilename := filename + "~" + + stat, err := os.Stat(tmpfilename) + + if err == nil { + if stat.Size() > 0 { + return os.Rename(tmpfilename, filename) + } + + return os.Remove(tmpfilename) + } + + return nil +} + +// WriteJSONFile writes the supplied structure as JSON via a temporary file for safety +func WriteJSONFile(filename string, b interface{}) error { + err := ensureDirectoryFor(filename) + + if err != nil { + return err + } + + bytes, err := json.Marshal(b) + + if err != nil { + return err + } + + err = writeTempFile(filename, bytes) + + if err != nil { + return err + } + + return moveTempFile(filename) +} + +// ReadJSONFile reads a JSON file into the supplied structure, first checks if there is a temporary file +func ReadJSONFile(filename string, b interface{}) error { + err := ensureDirectoryFor(filename) + + if err != nil { + return err + } + + err = moveTempFile(filename) + + if err != nil { + return err + } + + data, err := ioutil.ReadFile(filename) + + if err != nil { + return err + } + + return json.Unmarshal(data, b) +} diff --git a/internal/utils/median.go b/internal/utils/median.go index c841b5b..f738328 100644 --- a/internal/utils/median.go +++ b/internal/utils/median.go @@ -4,13 +4,16 @@ import "sort" // Median returns the median value of the supplied numbers func Median(numbers []float64) float64 { - sort.Float64s(numbers) + 
worknumbers := make([]float64, len(numbers)) + copy(worknumbers, numbers) - index := len(numbers) / 2 + sort.Float64s(worknumbers) - if len(numbers)%2 != 0 { - return numbers[index] + index := len(worknumbers) / 2 + + if len(worknumbers)%2 != 0 { + return worknumbers[index] } - return (numbers[index-1] + numbers[index]) / 2 + return (worknumbers[index-1] + worknumbers[index]) / 2 } diff --git a/scripts/scenarios.sh b/scripts/scenarios.sh index c0e19a0..ae431b7 100755 --- a/scripts/scenarios.sh +++ b/scripts/scenarios.sh @@ -24,7 +24,7 @@ function scenario { echo " - Generating data..." gendata - echo " - Running script and creating SVG..." + echo " - Running scenario and creating SVG..." svg-term --out $OUTDIR/$1.svg --command="bash $SDIR/$1.sh $TMPDIR" --window --height $2 --width $WIDTH --no-cursor echo " - Removing data..." @@ -40,10 +40,10 @@ function scenario { # scenario $name # done -scenario copy-normal 20 +scenario copy-normal 23 scenario copy-monitor 10 scenario copy-monitor-many 22 scenario copy-remote 16 -scenario daemon-journal 42 +scenario daemon-journal 14 rm -rf $TMPDIR diff --git a/scripts/scenarios/copy-monitor-many.sh b/scripts/scenarios/copy-monitor-many.sh index bcc9243..f84e050 100644 --- a/scripts/scenarios/copy-monitor-many.sh +++ b/scripts/scenarios/copy-monitor-many.sh @@ -9,17 +9,15 @@ mkdir -p $TMPDIR/remote2 mkdir -p $TMPDIR/remote3 prompt 'barf copy ~/local/* ~/remote1/' -timeout 3 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote1/ +timeout 2 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote1/ echo "^C" prompt 'barf copy ~/local/* ~/remote2/' -timeout 3 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote2/ +timeout 2 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote2/ echo "^C" prompt 'barf copy ~/local/* ~/remote3/' -timeout 3 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote3/ +timeout 2 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote3/ echo "^C" -sleep 0.2 - prompt 'barf monitor' barf -w 132 monitor diff --git a/scripts/scenarios/copy-normal.sh b/scripts/scenarios/copy-normal.sh index 2ec0944..42b1ded 100644 --- a/scripts/scenarios/copy-normal.sh +++ b/scripts/scenarios/copy-normal.sh @@ -12,4 +12,7 @@ barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote/ listSizes $TMPDIR/local local listSizes $TMPDIR/remote remote +prompt 'barf copy ~/local/* ~/remote/' +barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote/ + finish diff --git a/scripts/scenarios/daemon-journal.sh b/scripts/scenarios/daemon-journal.sh index 0864b3f..3517c25 100644 --- a/scripts/scenarios/daemon-journal.sh +++ b/scripts/scenarios/daemon-journal.sh @@ -10,7 +10,6 @@ prompt 'barf copy ~/local/* ~/remote/' timeout 4 barf -w 132 copy $TMPDIR/local/* $TMPDIR/remote/ echo "^C" -prompt 'cat ~/.config/barf/journal/active/*.json | jq' -cat ~/.config/barf/journal/active/*.json | sed "s|/*$TMPDIR|~|g" | jq +listTree ~/.config/barf/journal/active ~/.config/barf/journal/active finish diff --git a/scripts/scenarios/lib/helpers.sh b/scripts/scenarios/lib/helpers.sh index 9c7d913..85c15bd 100644 --- a/scripts/scenarios/lib/helpers.sh +++ b/scripts/scenarios/lib/helpers.sh @@ -23,6 +23,13 @@ function listSizes { popd &> /dev/null } +function listTree { + prompt "find $2" + pushd $1 &> /dev/null + find . + popd &> /dev/null +} + function finish { prompt '' sleep 6