From 2b6e8230ad957231973029f2453f02c28caf272f Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sat, 31 Oct 2020 22:37:18 +0200 Subject: [PATCH 01/24] BenchmarkM3TSZDecode --- src/dbnode/encoding/istream.go | 2 +- .../encoding/m3tsz/decoder_benchmark_test.go | 79 +++++++++++++++++++ .../encoding/m3tsz/timestamp_iterator.go | 2 +- 3 files changed, 81 insertions(+), 2 deletions(-) create mode 100644 src/dbnode/encoding/m3tsz/decoder_benchmark_test.go diff --git a/src/dbnode/encoding/istream.go b/src/dbnode/encoding/istream.go index f4eeba5426..509a6c36b6 100644 --- a/src/dbnode/encoding/istream.go +++ b/src/dbnode/encoding/istream.go @@ -35,7 +35,7 @@ type istream struct { remaining uint // bits remaining in current to be read } -// NewIStream creates a new Istream +// NewIStream creates a new IStream func NewIStream(reader io.Reader, bufioSize int) IStream { return &istream{ r: bufio.NewReaderSize(reader, bufioSize), diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go new file mode 100644 index 0000000000..a461eaad2d --- /dev/null +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -0,0 +1,79 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +package m3tsz + +import ( + "bytes" + "encoding/base64" + "math/rand" + "testing" + + "github.com/m3db/m3/src/dbnode/encoding" + + "github.com/stretchr/testify/require" +) + +var ( + sampleSeriesBase64 = []string{ + "FiYqRnIdAACAQEAAAAArkizADVrDlnvgAATiGAAEbAABZgABkwAA6DFXAxnaGOwDF2ON7Yw85trFGksvYiyRjTFW3MeYs21wLHm9t/YkxtjbHW5vCYi6JwTF2LMcYsGI2DGdTRBjsCxRi7bHdsRZI2ZjDdGQsfbs15ijGHosPYqxNjjPGnMcYu29jbJmusVY03FibeGkMYY8xVizVHHsXY+3BjTR2NMYcE2ti7V2yMZb63hi7dmdMYdoxpizgGxMWa805ljgGMsVY4zRiLiHWslZo11lLOGLMdY61Zkjd2uMRZi1BljI2ostbo1hmDfHasVZUytjTeWOshZK3BjTdGtsWYwxdjwYjgMZpNwzLKM8+btsqGOwjHGMNubIxtnTVWVt1bUxRtLWmWtnY+x1nLU2YtjcuzJw7VWbMfYu0RjLVWbM6aY4lpjT2LtVaS0NqTGGJNeYq3torFWMNJaS1ZrTRWpuCYw1xjLFmItCaExJkDWGZMWZg6xjLMGLtiZmxps7EWLNlYw6NjzFmLtvZaxhi7GGNBiPAxmK8DRM0yj8uq2TKMk0DZOu+rPMsyjQumGOxTgGMNzaaxVrLEWLMUZk0xoDy2QN3Y8yNvLNGmM0boxRtrxGNMcY20dy7G2fM2bqyBjrXmHNyY4xlvzGWJsXcIxdt7H2LtIY2xRq7gGJsbZoxRiTVWVtvaey92LdGKMeYsxoMR+GM9WgZcMdsWKNrcIxNibl2KMaY0x5mTOWOvecYxRuDbGLsubWxJpjaWKsebExZv7JGKsucAxVu7HGOMfbkxdtjdGLMZY8xBkjH2Kt1d2xVtzIGLuCYyyBjTJ2KstbWxVtDbmMMzY6xF4bPWJtxdgxJvrJWMsdaGxhuzTWJs1egxRt7ZmItNYuxRpzFmOtvdyw9kTZ2LtzdaxZiTV2LsabYxJmTXWJtzZCx5pTH2Lt4cQxdtTiWNNea4xNn7imLtccaxVjTZmLMYYuxZnDSmNM0euxVmjU2KtwcWxRjrj2JsbdsxhjjHWNhiOAxW9rhjOwMdl2LN3aczRjbsmOOCbkxhkDa2LN3Zo1xtjGGMtxbexNmLJWJsZbQ19jDU2LNydwxZnLIGONwbI1xuTNGLNqYwxNnbVmQMdcg15uDF2NtKbaxdq7SWKtqa015jbbmNMib2x9mrHmMtxZA1htrWmLNzZGxNoLQmONzbA1drbGmJt0ZCxRjLIWJt0Y41lsDNWJtiaqxFjzF2OuEbk1ltjRGKNYZUxRtjI2MN/eI11vbe2Jsob4xljrJmKttaM19j7HGKuEaOxJkLdmJOIcW1hmLbWNMvY6xZmTHmMs9b82Fk7TmKM7cKxtijW2LMuYy2BpLQ2NNacOxpjbg2OODaSxp4LVmJtfbux1vcAA", + "FiYqRnIdAACAQEAAAAArkizADAfgAATiCSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSTAAA=", + 
"FiYqRnIdAACAQEAAAABDnTBYE/Bho3fYmHbfAAAnEMAB+lf3Y8nesGNjT6sqdklzsPFHkkSBobeKPJIkDQy3ijySJA0MlgE3WlChWbTdaUKFZkudjtB7gub4oltoPcFzfFEkkksFUt2Tfa6Fqpbsm+10lzsPqTuES/mJJJLpW9i+c6zi+SW7F851nF9uxfOdZxfLdi+c6zi+SSXOw8DYDYkTFt4GwGxImLLeBsBsSJi28DYDYkTFkulb2L5zrOL5JdC/dMVuc3q9t0xW5zer23TFbnN6vLbpitzm9XtumK3Ob1eW3TFbnN6vJLbpitzm9Xufh7R1X1eVLLJJaw/0a8y0ktYf6NebSS1h/o15lpJaw/0a82klrD/RrzLSS1h/o15lz8PaOq+rypZJYpkiO1dsA1MkR2rtkunPPlZJttpJc/D/fBmnRHWWSS1h/o15lpJaw/0a82klrD/RrzLSS1h/o15kloYescF8rGhh6xwXytz8P1pjNBhIbfrTGaDCQ/AAAnZn///2ERxWSw4LulSvs8twXdKlfZ7cF3SpX2e6F98YLEnhMbXxgsSeExtfGCxJ4TGWvjBYk8JjJJLn3cL98PJ8jbhfvh5PkZLoX3sr7uILjlr2V93EFxySS593C/fDyfI24X74eT5G6F97K+7iC47Xsr7uILjteyvu4guOWvZX3cQXHJJJa9lfdxBcdr2V93EFx3Pw9tAaypmht7aA1lTNDLe2gNZUzQyS3toDWVM0MktDJI57e/ac7HxxmbkR/pJYIWOVrdpMJJJaFjla3aTLQscrW7SbQscrW7SZaFjla3aTJJJaFjla3aTdC+AoWxZUHMtRGb6NHgwWojN9GjwZJaiM30aPBkudh5g7Stcc3JJbzB2la45u3mDtK1xzckksJxZZl2POLTiyzLsedpxZZl2POWnFlmXY85JJc7DzB2la45u3mDtK1xzdhOLLMux5xdK3UGgGFJIS2oNAMKSQkujZOLLMux5yXOw8wdpWuObt5g7Stcc3LeYO0rXHN28wdpWuObksEiGQkVkWJJLo3X2kSbTyRdCywmW9XXelz8OPQTV9E75bj0E1fRO+Szv8H06YXklzsf957cWnANv957cWnANv957cWnAMklv957cWnAMlhn8hhg0Dol0L4gCZVqxQ3Pw49BNX0Tvtx6CavonfJZEATKtWKGSWiAJlWrFDLRAEyrVihtEATKtWKG59+bUdK+4kLZtR0r7iQkls2o6V9xIS2bUdK+4kLZtR0r7iQls2o6V9xIWzajpX3EhLn4dmc/ehX1W7M5+9Cvqkkt2Zz96FfVbszn70K+qW7M5+9Cvqt2Zz96FfVYLZCxIudM1shYkXOmS5+HZnP3oV9Ukt2Zz96FfVLdmc/ehX1W7M5+9CvqtMWFPI9/rJJJYsdtxTlpY1jtuKctLLRg2ocIN1owbUOEG66F3R+2WXEH26P2yy4g+SSS3R+2WXEHy3R+2WXEH26P2yy4g+3R+2WXEH2Rg2ocIN0SSWPcAhr1yze4BDXrlkujZGDahwg3WjBtQ4QbpdCnzVjlG88kkklz7KKnPV58+602dsW5NrbO2Lcm1kklz7KKnPV58+1FTnq8+fJdabO2Lcm1ls7YtybWSSSS59lFTnq8+faipz1efPlqKnPV58+1FTnq8+fJJLrTZ2xbk2stnbFuTa2zti3Jtbo3f04J9i5nZEamPsK4pLo2wbtt7vTWS59MzEoWOlrTMShY6Wklz8PEbYJKYAbnYY8FgdCtcyWH3EI5E2HN9xCORNhyXOwx4LA6Fa5kuhfokiJV00GS59MzEoWOlpJLTMShY6WkuhfokiJV00GSSWG4TfVnMCbcJvqzmBLbhN9WcwLbhN9WcwJbcJvqzmBLnY+85UiQZUpb3nKkSDKlJYG4TfVnMCJdC+kcG+4Y9stSODfcMe2XNxxwbSeDL2XNy6dODPdz6pJJLCdLyOrAioktOl5HVgRVz7ultcsuJ3kkl0bsl+P9BB4kuhZtpdeDGBS6F9/6WHcIbJJZkvx/oIPEl0LZGbL+mLngLftIzZf0xc8B0jNl/TFzl0L/UkMAGrbSSSWZL8f6CDxLZL8f6CDxbJfj/QQeJdC+6q01qmjEkklzsdKzCAxSsUtpWYQGKVit3MU5BmXyliJeKJtHI8kks9H3goTte3o+8FCdry3o+8FCdr29H3goTteXQviXiibRyPJLeaMpHsCQFvNGUj2BLeaMpHsCS3mjKR7AlvNGUj2BJbzRlI9gSSSXRu0uvhH+2y2l18I/22S2l18I/22W0uvhH+2yW0uvhH+2ySSSS52H18ZprwH9vr4zTXgP5b6+M014D+SSSSSXRuii6kmXyCSSS0UXUky+QS0UXUky+QSXNy0u9Sg9bxrZzy5yJx+gl0L+SxCJdwZS3JYhEu4MrcliES7gyluSxCJdwZSSSSS3JYhEu4MpbksQiXcGUktyWIRLuDKSyLVs+rG4paLVs+rG4rRatn1Y3FLRatn1Y3FJaLVs+rG4pJJc/CxW6lPyeuSWhvJm7oR4Ekkl0L8VpabU7JWxWlptTslLYrS02p2SksWdn1v2KcS6NfcU2S5Ky3cU2S5KyS6VlnZ9b9inLWdn1v2KcktZ2fW/YpySWs7PrfsU7Wdn1v2KctZ2fW/YpyXPw8jCvW8FNtk3WHmchTJY7imyXJWQW7imyXJWW7imyXJWSSW7imyXJWSS3cU2S5KyW7imyXJWW7imyXJWS3cU2S5KyS6dV0H/Ok0skuhZXSD4UAdy6Ft3YTNCVqtd2EzQlapa7sJmhK1YAA", + 
"FiYqRnIdAACAQEAAAAAWlSx4Dadc6Q14AAE4hgAGQBgAGP9gAGTpgAGFcMxyJvHg8gDyAvFs8e7yAPIC8f7yAPFu8fLyCvH28gL2yu0G8gDyAPII8f7yAPB88YryAQHx9X7yEQHx+vH68U7x+vH+8gLyAPGQ8YTyAPIA8fzyBPDC8iLyAvII8e7yBvGw8ijyCvHw8gTyAvHi8ezx4vIA8hDx9vHw8e7yAPH68gDyBvGA8cDyAPIA8gDyCvE68PTx+vIK8fbyCPBE25jqmPIW8fbyAPCk8gTx/vIC8gjx9vAe7VrpJPIG8gDx+u/i8gjyBvH48g8W8ftU8S8W8WTyAvH+8grx8PD+8erx9PH88f7yCPEu8fryAvH48gbx/Pco7K7x+PIG8gD26u0E8gDyAPIA8grx9vIK8cbyAPIA8gL3GuoU8cDyEPH28gDx+vF08dzx+vIG8gDyAPF88ibx1vIK8fzyAPE68b7yAvH+8fryBvH48g7x+PH88jTx0vHi72bx8PH+8gLyBvA48fzyAvIA8f7yAPIW8eryAPIA8gDyBvGy8fLx+vII8fjyAPHc8EryBvHy8gLx7PHe8XTyAPH68gbyCu+28fTx+PIA8gbyAPCY8fLx/PIE8gbx9PGU1/zt7PH48hLx7vHE8gDyAPIG8gDx+vVU7Sjx/vIC8f7x+vGC8frx9vIE8gLyAPEc8hLx+PH68gjx/vH68f7x+vIG8gDx+vD48fryBPIA8grx+PEi8fjx+PIG8grx9vCY3tTx+PIA8grx8vHu8ezyAPH+8gDx/vJg8PryAPIC8fjyAPE68aLyAvII8fjyAPHY8YzyAPIA8gDyAPDk8fbx8PIG8fryBvEo8ijx9vH88gzyAPHA8ijx6vIU8gDx+vFRAfHvfvH/AfIK8f7yAvHy8gzyAPIK8fDyDvDo8WbyHPIE8fjyDvDq6hbyAPH68gbyAPEw8fryAPIA8grx6PHK8fLyAPIG8gbx9PFC8fLyAQPyAXvyAQHyCwHxauyQ8gDyAPH48gjyCvHw8hDx9vH68gbwqOlO7NLx+PII8fDxZvGy8e7yAwHyAX7yBwHyAPG+8fTyAvH48g0B8OV+8cMB8gDx+vIG8frx0vGU8gbx+PII8gDzAPAo8f7yCPH88fjw3vGM8gTx8vIG8gDyHvHI8fsB8gd+8gMB8gjw4PH28fryAPIG8gjxIvIU8fbx+vIG8gDw1uxE8fryBvIC8f7xpPHY8gbyAPIA8gEC8Mt88UcC8hLx+PIBBfIHdvEhBfHQ8gDx+vIC8gTxkPH+8gLx/vIG8fsB8el+8WkB8fDyCvIQ8fbx/PHw8gbyAvH+8gD1bO0s8gLyBvIA8gTxavIA8gDyAvII8fDxKPEM8fDyAPIG8fzw0vHq8gLx/vIA8gD2Ju0W8fryEPHw8gbxgPH28gbyKPHO8gzxzN0s6q7yBvII9tTtDvIG8gbyAPIC8f7xaPIA8fDyEPH28grx+PFo8fryDPH68frxhPIC8gbyAPIB4AABWoM1ggP///qWNF28AAArU2PFsgDY/WyCNj/bWtAdYnv2yCQHY/myCQXbebtrM0F2OdsfDZAGyANkDbEU2LZsgDY/WyCNkBbH+2PlsgbZAGyCtsKa1u2P1sgbZAGyAtj8bGu2PtsgDY/WyANkDgOx237Y+IDsgDZBmx/Ntuaq+zvVsirY+2yAtkAbFC2PhsfbZAGx+tkAbE42QBsf7ZAGyCtj7bKu2JhsgbY/WyBtj9a5O2QdsfzY/2yCNj9bGu2O1sfzY/2yBtkAbG02MZsgDZA2x+tkDbHo2QBsgbY/WyBtkAbGm2CxsgrY+2x+tj/bFk2EVsfrY/2yCNkBbIg2O9sfrZA2x+tkAbEG1UlsfrZA2yANkBbIUztpsQzZAWx/tj9bE22QNsgbZBWx9tkBbEs2N9sgLY/2yANkDbHO2QdsgrY+2yANkFbFlAdjrv2yAQHZAGyCtj+a9W2QJsgDZAWx/tkAbGw2QFsgbZAGx+tkDbHu2PZsgDZAGx+NkEbGq2FVsgjY/Gx+QXZA7tseEF2QlsgDZAGx+tkWguy7XbYcoLsfkAA=", + "FiYqRnIdAACAQEAAAAAnPgFYA+AABOIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJPFcvVHPFcpJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJPgAAGhz4AABno////L/JJJJJJJJJgAA==", + 
"FiYqRnIdAACAQEAAAAAWlSx4DYa+fHfgAATiGAAgoYABJ1gABamAAJ9DLWSWyU46nCsw5GX05SG/Y2OWyCwqmgq2cGuK2q25IwqGLg1gmuS2uWlAypm0+2QIDrJX7YLgOxGyh2lo3k2/a1eW3Gwqmdq3BWvU4I2/6w1GXS3eXH01VGm0woGYu0jXEC02G1mxlGTY47Gyi18WpMwbGjW2zm0Kz8Wx2wy2fY1wmoO1wHGiwy2XO3fm3O1RmzEwKWCAzp2382R2/cwj2fw21nE81KWtmwVGGoxKG5Y0/moSxTmVg1lmri2P4tk9VTEhi2RA1OG7c1vmsKxnmWYzy238202rgwCmuS2C2wU0h252xbWQw4EWtO4dHA4wNWhM3hWc81fG+oyLGqS26G5s2dWpIxTmZK3/m0I2129CwX2kwzgGxY1EnBMxKmtG5Bmxy2D2rKxa2fS1GW1+11G64xNmmW1DG1e1Im86wImlA1rWpg2k20gwPGbO2SXI625nCQwqWgM1BXAw2nHMOxQmqA1x2ps2+2qUxbWZU2tGvi2s2tiwsmYo1Hmqo3bW6swhmSC2dWnY0rW7owRWgW2CWi62Qm5kwPmGKzyWuC1UHPAw5m9M3jm524Vmk8w7WSe2XHDC4TW7KwbGqE3BGsG1S3AQxpWXEz6Gto1MmyGwbmZU2H2+419mxMwHWcAzWHCO3AGsWxNWm82jW3W1KmxEwLmJQ2825wyzGsQw0Gae3v2oI0sm5ywa2Ue1ymve4FHEoxRWfQ38m2I2vWpmxI2ee1mWoe1R2yQw9Wos1AGvs0nW7gyAWZu2BGfg1+2sGxiGkW2GXlg3IHCqx4IDoyX7e5gOsg0/mruwQGhG1eW8c3Gm8Kws2ZS4ZnCy30WjKwX2UI2Bmui1oHCeyRmxk19mrI1W25Uw5WWo2HWvm2u2nuwrmr8zXoHqg3vgygOzlAcD9ikNTRxANflu2sYFjFtSJqRNJhrmsH9hTMnBtvNW1vKMK9i7t0huMQHWYv3DvAcXpqktctxBuGhruwHCxv2PPAdTRr0NHFtbsM1kytcZsaN7ZsNMN9oYNThsdtw9o9sLplmuHBrUuRttnMbln6tX+A6F9+0dIDwvMDJkWt4Rlwt2ZyDsklloN21r8tahndMIBq2OChq7thBu5sbRnzNhNvCtmpqrQLCTvmiLAtlxvXtqCC7bl2wQILovtoBpUNWFshsWtptNt9yCNqpsAwHBIv2n/AdmdpktyJtFMHBrANk9xRtWRsOMLpmQtt1qhNUBvqsO5lstDVqktmluDsfdq2NvNrVtw5sPsUFkrNWRyXumxscsRhiotmVoBtmVsgsEFmHtmtspNwBsSsGphKtDptKOctuOsV9mht69tFNpFqIMexlUtbNrONLJx6Mq5ldti5tQNAxqYMvhlAte5rCtczwAACtTYy///+pYxJvAAAK1Mc9wJtrlohuG5hLwHN6v2s9AdtpuNQXXtu2CTBdR1vgtkJtutEVhQsn5t+NV5jutQliKNGVwftxdsANuhgKtExtlNDlp7N1RhOMstuCNetr9uR6A4p1+ynIDs4NI1ubOB9gmMH9rttN9qGNwRgWM3pqOtdZo/OWlhvNI5stNhNr7txZjRsvtqgN1ZpcNUBjhseRwKt7lytNzBgStANl2NQBu0NLxh6NQJtMtmlqkuEhjstlBsDuHZqkNixhEs7lqnNppt5NuNiXMPxt4NyNvDNb9hyNU9rztxhsKNUNhAsi9rMtuNvINqdgMMM1lFuM5u5NlVgUs+BpAtydqyNstk4MlppstXZtTt8hhSs0NrBtcxqWNQtiOQHV3v2ihAdVRpONw1lLtZhrJNzRrLtntjRMlJrGNc5qCtF1izMtZtAtOdtoNhJhEMrRrxtFVv0QXW4u2MnBczBsfuYtrrtLmC4oN21nYLoqQAA==", + "FiYqRnIdAACAQEAAAAAPGEnQDAfgAATiCSSSSSSSSSSSSSSSSSSSSSSSfAAAnEH///2PBJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJMBb8wFJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJgAA==", + "FiYqRnIdAACAQEAAAAAarE7gA+AABOIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJMAAA==", + 
"FiYqRnIdAACAQEAAAAA94IfIDfYY0GwPwAAJxDAAH592AASFBMAAeXVYAA/OkGiuzWt4Dtu8tNrDNvACt9dFxQKt9lRx22OG9JwIlN+YFwUkN241uCOOBi1tzUN8F1u9zOC6pv9fNrUVvQot5dNv2vtzelurVuBihvh2t6cdv+Pt3lRwKeOKG1uHjN/bCA79Rd+3xD4DvJgtxP9vwEt8rlut/t5p9vjCOLHdwTTN0k1u2QN5j1wVWN5/RxHnt0OVuQkN7KFwsetm9xxfeN4pBuxpOBGtuE8NzABwc2OHS1wc9OAtZvmPt6N1wXJNtN5vjWt+DVsstt45Bv98t2w5xRTN2UxvLnt5/hvD3ODCJvWluAQBxadOBxhv/AOAGVxGgODJpvXcOB3VwapN7xJxN4uH7pvK5OI1Rt2/uAoiB704V631N4HwsjN6gJwmkuMEZwKZN7G9ujTNz7lwnkN6AtvLwN4Xhvv5t7ZJw6UtvS5w8xN/O5u6Stxi5wU5N25VwkaNy4xu3cOAEZwK6NvnBvWZN8KFuBXN6mFwvqN9/Zw5EOEZ5vJBN5yBw9IuTW5ueOt9BRv/aOCLVvxst99Jv6dtvNNu5juA5VwxVN8lJuYtt56tvwlt+BJw9vt79dwAqN5PhuyMNnopuzcN8WpvbKN6MZuaBNx09yyzt1blxItt6lpw50OEGtxZSN2LBwvat8BVvv+tyM5xGxt5xNwjSuGR1wV8OEuxv0Stz1Bx30ODlNvu0t14pvqzN66Rv85t2WFvhzuBBBvJgN6G9vlBNz4hv06N77NvFOt2/hvWZuD1ZuzmNu25vqEOR1Vu1StzdFv4SuKDluEON4mlu6juC4JvPTORxVvgdN8G1v7fN6epvTsN7pltzKuOeRuwyODvVw0tOB71vxGt+hVvYvt8plxfzN+UNwBoOJgtxZCt9R5woct6gpxBNt9U9wRdt4P1vkeNsP1wmzN43VvlYNzbtuwxt0jJuqQOG0hvzwN27Rv2uuGQNuaHOIeVvWVOEtBtgHN8NtybruAM9ws9N/s5w2ot711wEzN5n9w6ZN84JvaetoexwmhOI5puccNv1VuLaOBlRxAft04VupXN6d9v9ktvchv87tswVvieN4EBvVvuJ99vTvOHhlv1SOH81u7jNwFtuABN1lFxUZN0GNxMzN5PtwGmN9T9wE9t9plup6t/QRwV6OOltt5PN3Iluszt+elwZlt95BwmOuK1tvzaOC1duCANyk5wHfuD5RvT9wTeFLvG8BtBOCqRwZ4uKQBwszt/ZBv/pt7AZvDFtq/xtwSN9PaA7jy1+4nOIDuXWt3fJvnwNsoRvILNsG1v8UOBWFvlct9utwqMOKOxujKN4tZvmAtwhhvjgOMWhvQoN1xRu/Ct629uXtt3EhwAhN7CBvgwNwPhwpWttsBwexNwj1xGPN9sRu00N+Ydu4Zt/OtwFht9rxwzktwZ1w8vNyHZwzLtwLVuseN6hRu69t/ptvMct5tZuv/uEstwa8t/lluvTOEmpws3N9TRumCuAUhtc8NzvlvhCt7ftvYvt4YBurmt2gJv3jN1idv4eN6cxve9N2MVuliuPUpvg5t8V5vjxtuWJvE0t9ZFvKQOB91wsYt9CqA73Gjcd3vu7tvAt1SRwoHtu19xTbOTKdtsPuKkBvdft6ChxqHN8WdugRNy9VvEbOErBwBVt7kFwNpuHixvF9t8xNvjyt2KtuUltondvSftzpttyLt3zNu8nN0QZvEhN4WZxNrt2Jtt2NN3AtvoNNyUlvWouN1Fw+iN/ZBvs8tl65xiDtz1Zwm0NyxduGSOBsJua9t4otvEjt1G1vYVNyQduiFOCxdw+KuGLtuyrN6s9wTztyHhvy0uKfJueHuMZpvIHN0pZxU1N7KhuliuBtxwFgt5PJty0QLeb2vm8RtAt615v8TN2IdwJ4tzWBtHFuIYdvYwuATlx6ptxC1wD7t8D1v0RN9w5utAt8d9wmJN4kVxPrOGGVvM1t/BVvRON3cZyJiN3ltv5QN/FtvLLtrYBw3ZuHj5wiwt/v1usHN5e1wu+N3Wdw1qN9D1w8Yt52VwkQt4iZxJctvfVvvqOLyhxIBNyPRujWuD6htvQt9StvI+t/utw2Vt8ppxLAttYpvjeN/ShvpXNvChvfst7cVvuTuHlRt/aN53hv6JN44RwyYynfEh2u3sVZTvzIuAd5u7CON7BvBiNyMJvoGOQHBuneuDzltU1t4hVwDqN2yBwJ8N88Bu49t1mdvNfOBbZwS3uPfJwaNNzzdwPIN63xukQN+ItviCwHesev21zxAeDhRwiLtqQJuoFt7cFw2QtsBhwYMuPyVuklNtDVwfwt8P5wZxNzNpu+RtkcVwBLNp+Vvmzt/8Rwo/OIq5uKcNtLJuQxuBhxvMLN/r5wl1Nyzpts7tseZvmDuGLRt5LN8FhwlmORGxvqtOBPRvkIt6FBu64t8MdwSbt8S1woEt0Ctt7otuI9wyut0GRwNiOFppuoxuBAFwJAOKkZuNrOBqtt+KOA3NxaYN5WdtXxOCzdwrNN4uBwyzN9qlwjkN5CBwBHOQJFtnlOD6pw5tt2K9xl3ONI1wx+uC8mAA", + 
"FiYqRnIdAACAQEAAAABGId9oDfcTD+KTwAAJxDAAJ9nmAAgRqMAAyOLYABNsmGkqKtGlSxai45t0OWyDGakfNqCDmpc7a1FhtxUW0JAagCBq4kWiuPaWIVuiqWx1DaZ8dpS42tOuau7NtH526t8adBRqMy2iVZabJtuBPoDuMH79pdk4DqL6Gn02aggVucnYFt0QL7qE0Wme4gOlusabFdtjBmuwRado5pei4Dp9lr9qEkIDuFg2yO4aVwppYX2oPOaYWFtkKm2OWaT3BpbpmnylanWZuI5m2XBab15puEWrJ4aSDBtt7W8VuayVFqRSGo68aX9RvhI22xIaNUVqXlmneXaOlpuUQGsIqaWoNqHvGlF1afUFtA8mwQ3aoENp6pWqtIay+1ssXG1j2aiLFqnZ2qTXaYa5uSFm9nYaW/tqnLmsU0ay+Rt2sG+vCaK2dqG4movralhdt+vm6W+aro9psSGi3/aXIZv2A21//aitVqG0YJps07xp5hIJtyDGq4lahdZrPB2omPabEhs3HG0nQacY9re2IDo9xb9qOc4DtrTm1Q3aNUVqrHWocCabaxtp7WtdEaYflq4FYDpn5L9pt04DuSY22gTawH9pMP2ng1gOqgMv25S4gO1kgaiLdpf62l0xaaTtu0ZG2Jwal7RpDT2lwaajbWA7kwq/bk5OA6Y8xqtWmnWOaaYls0J2xFVgOrKov2l0bgOlylaUGVvBlm5XIaiDlqyj2mHyareRuR92we7aPPJq44Wm0pakR9sznG6g3amm5qZ+2rCuarOVvZnG4NoaP19rOYGoXpameRt1bG05SafENpkjWmimafb5uFwW0Acagj9qIY2kdhap3tvQWmwEKadOFouV2sIraX+1tEcWwZ7a6+dqePmkkfgOk4dv24fIgWpYDv2nBvgOqscabjlqFd23mfgOylbv2oj2gWokrv2l5OgOpn4cAMxsNxWquhamiNqY+Gr6ubqCRsSsmpuvakdpp7aYDqnmr9vStYDrdPmqk6gOozNv2qYLgOsQxbwpNvfsGoc6alFFpT+mpnubhB1tEE2lTSav/hoYVWky0b72tuuammghak85qbDGqJBbbN9uOgoDqZW79qbJYDqg0mjIwbmVJtKpWrszgOpmuv2oi4gOnmbbYyFtYa2o6Yawi5pjvGojnbbG5ub/2pycafIxpZY2mJBgO/Szv2vr6gOmuzgOk8tv2nmBgOoEibcKdsfzmkSHahcVqN5Wicub5jxuoi2pqIgOrbpv2oM/gOn7abgLtuBN2nunazp1o0/GtrEbcRZt/0oDqkib9pHMIDqHxGmRvbsCtq88GrROaN2SA6Y0m/aVMeA7tBxuQvWoKMas3hrPTmojKbqsRq202lpxartdqZKYDqjr79s/7YDvOymnglao+5q5qmrC/g23crumwa4g2qgqaehtobo2mKEbwzlsWsGrlRaReNpOkGoYmbMD1vYDGi0zafyNq+32j5ybJT9uW9mrLSaVmlqAbWqzIbic9q1BWqDIahZJpWZ2nNhbkOttxMWjWKaf8hpnAWkA1bcd5stqmjjXal3ppTR4DrJeL9tieYDtL82o3YacAZq3B2plQbgM5thJGljMakTZorqmqSnbiFpr2qWn5ran9pp/qGk9gbgH9sd1mojAawPFpzL2nKDbYfhuYuGqdRaRa5qihGiwWbg21uRDmmFWajuZpnMGmipbmjBt222k90abMVqieWktEbB59vTxWq69aq4xqX22mAdbeXKA60BG/afuaA6TK5qNxGtW/b58dr+B2le2adQhpi1mnpEbLaptC+WxijaTpdqrRGrHbbpUBtuY2p4QaYuKCaXBq8aexeCboedqfCGn2Maq+KCaxO28abA6CbbRhty0mnV5aOa1oqvIDqvsr9sPNoDs532m+QaX7hpa/moolbaBRwqB2kGDaXnBpKHGqRQa+Z5u5UGqqba6w5pycGow2bHhxtVV2lTsaq4Vp3BWkh/bQNNta2WoGgacIVp1DGnfGbPZht5PWsiyaoWRqEwGmdlgO4Aov21sQgOmNBamh5rJv2jKvbtyps7AmqsoaphRqA7WnIlbVyRu4kWmb5aMNJpiJ2oyKa6F9uNi2mfDakyaA6Z7+/aX9qA7VVRsvkmrJuaqwmA6Lda/ayU2A7Xp9r+22oKdal5dqY24Dp0tb9uUtYDtSommY3achJpnNmjiJbYyNusUWlqpamUBqHYWo5/bW79s27mrZmahtFpqEmm8ubhK5ujlGnGHanJdpdYWsXXbza5vQ2mnqgamiFpxZGoLRb5/9uApmmaFacUBpiVmmmbb3dhrWRmnBAapKVpudGre6gOyNuv2uhPgOo3KahcBpgmGndWbcXluzgGpq7abVlp2poDqcFr9tdVYDuIbGmtqakJ9qT02pfvbg/dthuGkrPaV7FqCM2sNHbX9Vt1r4Douu79qKjYDqaPmrwybtK1se0GooVaSj9qDnmrVUb0/eA7AL6/asHmBaSge/6jS6/6hceA7RxdtDQmoK5aefRo/eGqwVborls13mk0/ak8qA6vRW/aS6GA7Z55ukIWmsWaLzlqve2rXJbRSVtZ62modaGH9qWs2jQAbzFxvBWWtgsalnVo/72iaEbSbOBbPYu+agDOBacCpqSLoJporLxsXrIJs8HWuUoaZPtpHzWlefbdO5u58mm7jat7Rpj5Gne3beCdorCmn3OafItqKYmmTTbHw9s2k2q4ZaeXBrIF4FqYcr5t8q4FuKkmm4Aa2Cpol42q7fbsKdtK2GtYGae19ouOWsWEbWTFuNfmpClaiXZqCnWrYgbWVJu9n2kt6apzFrDcmmGAb2lhsT3IDqR3b9qSSYAA=", + } +) + +// BenchmarkM3TSZDecode-12 10000 108797 ns/op +func BenchmarkM3TSZDecode(b *testing.B) { + var ( + sampleSeriesCount = len(sampleSeriesBase64) + sampleSeries = make([][]byte, 0, sampleSeriesCount) + encodingOpts = encoding.NewOptions() + reader = bytes.NewReader(nil) + rnd = rand.New(rand.NewSource(42)) + ) + + for _, b64 := range sampleSeriesBase64 { + data, err := base64.StdEncoding.DecodeString(b64) + require.NoError(b, err) + sampleSeries = append(sampleSeries, data) + } + + seriesRun := make([][]byte, 0, b.N) + for i := 0; i < b.N; i++ { + seriesRun = append(seriesRun, sampleSeries[rnd.Intn(sampleSeriesCount)]) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + 
reader.Reset(seriesRun[i]) + iter := NewReaderIterator(reader, true, encodingOpts) + for iter.Next() { + _, _, _ = iter.Current() + } + require.NoError(b, iter.Err()) + } +} diff --git a/src/dbnode/encoding/m3tsz/timestamp_iterator.go b/src/dbnode/encoding/m3tsz/timestamp_iterator.go index ca3402607c..8e436fb65f 100644 --- a/src/dbnode/encoding/m3tsz/timestamp_iterator.go +++ b/src/dbnode/encoding/m3tsz/timestamp_iterator.go @@ -31,7 +31,7 @@ import ( ) // TimestampIterator encapsulates all the state required for iterating over -// delta-of-delta compresed timestamps. +// delta-of-delta compressed timestamps. type TimestampIterator struct { PrevTime xtime.UnixNano PrevTimeDelta time.Duration From 9eed46a35cda4ab4bc35dbc25bc6a77a74be35d3 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Mon, 2 Nov 2020 00:19:58 +0200 Subject: [PATCH 02/24] istream64 + benchmark --- src/dbnode/encoding/istream64.go | 168 ++++++++++++++++++ src/dbnode/encoding/istream64_test.go | 144 +++++++++++++++ src/dbnode/encoding/m3tsz/decoder.go | 4 +- .../encoding/m3tsz/decoder_benchmark_test.go | 36 +++- src/dbnode/encoding/m3tsz/iterator.go | 9 + 5 files changed, 357 insertions(+), 4 deletions(-) create mode 100644 src/dbnode/encoding/istream64.go create mode 100644 src/dbnode/encoding/istream64_test.go diff --git a/src/dbnode/encoding/istream64.go b/src/dbnode/encoding/istream64.go new file mode 100644 index 0000000000..d0dddf209b --- /dev/null +++ b/src/dbnode/encoding/istream64.go @@ -0,0 +1,168 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package encoding + +import ( + "encoding/binary" + "io" + "io/ioutil" +) + +// istream64 encapsulates a readable stream based directly on []byte slice and operating in 64 bit words. 
+type istream64 struct { + data []byte // encoded data + err error // error encountered + current uint64 // current uint64 we are working off of + index int // current index within data slice + remaining uint // bits remaining in current to be read +} + +// NewIStream64 creates a new istream64 +func NewIStream64(data []byte) IStream { + return &istream64{data: data} +} + +func (is *istream64) ReadBit() (Bit, error) { + res, err := is.ReadBits(1) + return Bit(res), err +} + +func (is *istream64) Read(b []byte) (int, error) { + var i int + for ; i < len(b); i++ { + res, err := is.ReadBits(8) + if err != nil { + return i, err + } + b[i] = byte(res) + } + return i, nil +} + +func (is *istream64) ReadByte() (byte, error) { + res, err := is.ReadBits(8) + return byte(res), err +} + +func (is *istream64) ReadBits(numBits uint) (uint64, error) { + if is.err != nil { + return 0, is.err + } + if numBits <= is.remaining { + return is.consumeBuffer(numBits), nil + } + res := readBitsInWord(is.current, is.remaining) + bitsNeeded := numBits - is.remaining + if err := is.readWordFromStream(); err != nil { + return 0, err + } + if is.remaining < bitsNeeded { + return 0, io.EOF + } + return (res << bitsNeeded) | is.consumeBuffer(bitsNeeded), nil +} + +func (is *istream64) PeekBits(numBits uint) (uint64, error) { + if numBits <= is.remaining { + return readBitsInWord(is.current, numBits), nil + } + res := readBitsInWord(is.current, is.remaining) + bitsNeeded := numBits - is.remaining + next, rem, err := is.peekWordFromStream() + if err != nil { + return 0, err + } + if rem < bitsNeeded { + return 0, io.EOF + } + return (res << bitsNeeded) | readBitsInWord(next, bitsNeeded), nil +} + +//TODO: tests +func (is *istream64) RemainingBitsInCurrentByte() uint { + return is.remaining % 8 +} + +// readBitsInWord reads the first numBits in word w. +func readBitsInWord(w uint64, numBits uint) uint64 { + return w >> (64 - numBits) +} + +// consumeBuffer consumes numBits in is.current. +func (is *istream64) consumeBuffer(numBits uint) uint64 { + res := readBitsInWord(is.current, numBits) + is.current <<= numBits + is.remaining -= numBits + return res +} + +func (is *istream64) peekWordFromStream() (uint64, uint, error) { + if is.index+8 <= len(is.data) { + return binary.BigEndian.Uint64(is.data[is.index:]), 64, nil + } + if is.index >= len(is.data) { + return 0, 0, io.EOF + } + var res uint64 + var rem uint + for i := is.index; i < len(is.data); i++ { + res = (res << 8) | uint64(is.data[i]) + rem += 8 + } + return res << (64 - rem), rem, nil +} + +func (is *istream64) readWordFromStream() error { + if is.index+8 <= len(is.data) { + is.current = binary.BigEndian.Uint64(is.data[is.index:]) + is.remaining = 64 + is.index += 8 + return is.err + } + if is.index >= len(is.data) { + is.current = 0 + is.err = io.EOF + return is.err + } + var res uint64 + var rem uint + for ; is.index < len(is.data); is.index++ { + res = (res << 8) | uint64(is.data[is.index]) + rem += 8 + } + is.remaining = rem + is.current = res << (64 - rem) + return nil +} + +func (is *istream64) Reset(r io.Reader) { + is.err = nil + is.current = 0 + is.remaining = 0 + is.index = 0 + if r == nil { + is.data = nil + is.err = nil + return + } + //FIXME: this is slow and should accept a slice of bytes directly as an argument instead. 
+ is.data, is.err = ioutil.ReadAll(r) +} diff --git a/src/dbnode/encoding/istream64_test.go b/src/dbnode/encoding/istream64_test.go new file mode 100644 index 0000000000..e4d4dd722d --- /dev/null +++ b/src/dbnode/encoding/istream64_test.go @@ -0,0 +1,144 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package encoding + +import ( + "bytes" + "io" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestIStream64ReadBits(t *testing.T) { + byteStream := []byte{ + 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, + 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, + } + + o := NewIStream64(byteStream) + is := o.(*istream64) + numBits := []uint{1, 3, 4, 8, 7, 2, 64, 64} + var res []uint64 + for _, v := range numBits { + read, err := is.ReadBits(v) + require.NoError(t, err) + res = append(res, read) + } + expected := []uint64{0x1, 0x4, 0xa, 0xfe, 0x7e, 0x3, 0x1234567890abcdef, 0x1} + require.Equal(t, expected, res) + + _, err := is.ReadBits(8) + require.EqualError(t, err, io.EOF.Error()) +} + +func TestIStream64ReadByte(t *testing.T) { + byteStream := []byte{ + 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, + 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, + } + + o := NewIStream64(byteStream) + is := o.(*istream64) + var res []byte + for range byteStream { + read, err := is.ReadByte() + require.NoError(t, err) + res = append(res, read) + } + require.Equal(t, byteStream, res) + + _, err := is.ReadByte() + require.EqualError(t, err, io.EOF.Error()) +} + +func TestIStream64PeekBitsSuccess(t *testing.T) { + byteStream := []byte{0xa9, 0xfe, 0xfe, 0xdf, 0x9b, 0x57, 0x21, 0xf1} + o := NewIStream64(byteStream) + is := o.(*istream64) + inputs := []struct { + numBits uint + expected uint64 + }{ + {0, 0}, + {1, 0x1}, + {8, 0xa9}, + {10, 0x2a7}, + {13, 0x153f}, + {16, 0xa9fe}, + {32, 0xa9fefedf}, + {64, 0xa9fefedf9b5721f1}, + } + for _, input := range inputs { + res, err := is.PeekBits(input.numBits) + require.NoError(t, err) + require.Equal(t, input.expected, res) + } + require.Equal(t, uint64(0), is.current) + require.Equal(t, 0, int(is.remaining)) +} + +func TestIStream64PeekBitsError(t *testing.T) { + byteStream := []byte{0x1, 0x2} + o := NewIStream64(byteStream) + is := o.(*istream64) + res, err := is.PeekBits(20) + require.EqualError(t, err, io.EOF.Error()) + require.Equal(t, uint64(0), res) +} + +func TestIStream64ReadAfterPeekBits(t *testing.T) { + byteStream := []byte{0xab, 
0xcd} + o := NewIStream64(byteStream) + is := o.(*istream64) + res, err := is.PeekBits(10) + require.NoError(t, err) + require.Equal(t, uint64(0x2af), res) + _, err = is.PeekBits(20) + require.EqualError(t, err, io.EOF.Error()) + + inputs := []struct { + numBits uint + expected uint64 + }{ + {2, 0x2}, + {9, 0x15e}, + } + for _, input := range inputs { + res, err := is.ReadBits(input.numBits) + require.NoError(t, err) + require.Equal(t, input.expected, res) + } + _, err = is.ReadBits(8) + require.EqualError(t, err, io.EOF.Error()) +} + +func TestIStream64ResetIStream(t *testing.T) { + o := NewIStream64([]byte{0xff}) + is := o.(*istream64) + is.ReadBits(8) + is.ReadBits(1) + is.Reset(bytes.NewReader(nil)) + require.Equal(t, uint64(0), is.current) + require.Equal(t, uint(0), is.remaining) + require.Equal(t, 0, is.index) + require.NoError(t, is.err) +} diff --git a/src/dbnode/encoding/m3tsz/decoder.go b/src/dbnode/encoding/m3tsz/decoder.go index e29812dc31..b1d7a408e3 100644 --- a/src/dbnode/encoding/m3tsz/decoder.go +++ b/src/dbnode/encoding/m3tsz/decoder.go @@ -41,5 +41,7 @@ func NewDecoder(intOptimized bool, opts encoding.Options) encoding.Decoder { // Decode decodes the encoded data captured by the reader. func (dec *decoder) Decode(reader io.Reader) encoding.ReaderIterator { - return NewReaderIterator(reader, dec.intOptimized, dec.opts) + it := NewReaderIterator64(nil, dec.intOptimized, dec.opts) + it.Reset(reader, nil) + return it } diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index a461eaad2d..cf07bf2728 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -51,9 +51,9 @@ func BenchmarkM3TSZDecode(b *testing.B) { var ( sampleSeriesCount = len(sampleSeriesBase64) sampleSeries = make([][]byte, 0, sampleSeriesCount) - encodingOpts = encoding.NewOptions() - reader = bytes.NewReader(nil) - rnd = rand.New(rand.NewSource(42)) + encodingOpts = encoding.NewOptions() + reader = bytes.NewReader(nil) + rnd = rand.New(rand.NewSource(42)) ) for _, b64 := range sampleSeriesBase64 { @@ -77,3 +77,33 @@ func BenchmarkM3TSZDecode(b *testing.B) { require.NoError(b, iter.Err()) } } + +// BenchmarkM3TSZDecode64-12 17222 69151 ns/op +func BenchmarkM3TSZDecode64(b *testing.B) { + var ( + sampleSeriesCount = len(sampleSeriesBase64) + sampleSeries = make([][]byte, 0, sampleSeriesCount) + encodingOpts = encoding.NewOptions() + rnd = rand.New(rand.NewSource(42)) + ) + + for _, b64 := range sampleSeriesBase64 { + data, err := base64.StdEncoding.DecodeString(b64) + require.NoError(b, err) + sampleSeries = append(sampleSeries, data) + } + + seriesRun := make([][]byte, 0, b.N) + for i := 0; i < b.N; i++ { + seriesRun = append(seriesRun, sampleSeries[rnd.Intn(sampleSeriesCount)]) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + iter := NewReaderIterator64(seriesRun[i], true, encodingOpts) + for iter.Next() { + _, _, _ = iter.Current() + } + require.NoError(b, iter.Err()) + } +} diff --git a/src/dbnode/encoding/m3tsz/iterator.go b/src/dbnode/encoding/m3tsz/iterator.go index b2f7e69fa2..07517d4f09 100644 --- a/src/dbnode/encoding/m3tsz/iterator.go +++ b/src/dbnode/encoding/m3tsz/iterator.go @@ -60,6 +60,15 @@ func NewReaderIterator(reader io.Reader, intOptimized bool, opts encoding.Option } } +func NewReaderIterator64(data []byte, intOptimized bool, opts encoding.Options) encoding.ReaderIterator { + return &readerIterator{ + is: encoding.NewIStream64(data), + opts: opts, + 
tsIterator: NewTimestampIterator(opts, false), + intOptimized: intOptimized, + } +} + // Next moves to the next item func (it *readerIterator) Next() bool { if !it.hasNext() { From 1ae4a01449f623ab6f09eee70257f5be49ea985f Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Mon, 2 Nov 2020 08:02:28 +0200 Subject: [PATCH 03/24] Clean up benchmark code --- src/dbnode/encoding/istream64.go | 10 ++-- .../encoding/m3tsz/decoder_benchmark_test.go | 56 +++++++++---------- 2 files changed, 30 insertions(+), 36 deletions(-) diff --git a/src/dbnode/encoding/istream64.go b/src/dbnode/encoding/istream64.go index d0dddf209b..cb4b050eb1 100644 --- a/src/dbnode/encoding/istream64.go +++ b/src/dbnode/encoding/istream64.go @@ -40,11 +40,6 @@ func NewIStream64(data []byte) IStream { return &istream64{data: data} } -func (is *istream64) ReadBit() (Bit, error) { - res, err := is.ReadBits(1) - return Bit(res), err -} - func (is *istream64) Read(b []byte) (int, error) { var i int for ; i < len(b); i++ { @@ -62,6 +57,11 @@ func (is *istream64) ReadByte() (byte, error) { return byte(res), err } +func (is *istream64) ReadBit() (Bit, error) { + res, err := is.ReadBits(1) + return Bit(res), err +} + func (is *istream64) ReadBits(numBits uint) (uint64, error) { if is.err != nil { return 0, is.err diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index cf07bf2728..8ad3efd31d 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -49,28 +49,15 @@ var ( // BenchmarkM3TSZDecode-12 10000 108797 ns/op func BenchmarkM3TSZDecode(b *testing.B) { var ( - sampleSeriesCount = len(sampleSeriesBase64) - sampleSeries = make([][]byte, 0, sampleSeriesCount) - encodingOpts = encoding.NewOptions() - reader = bytes.NewReader(nil) - rnd = rand.New(rand.NewSource(42)) + encodingOpts = encoding.NewOptions() + reader = bytes.NewReader(nil) + seriesRun = prepareSampleSeriesRun(b) ) - for _, b64 := range sampleSeriesBase64 { - data, err := base64.StdEncoding.DecodeString(b64) - require.NoError(b, err) - sampleSeries = append(sampleSeries, data) - } - - seriesRun := make([][]byte, 0, b.N) - for i := 0; i < b.N; i++ { - seriesRun = append(seriesRun, sampleSeries[rnd.Intn(sampleSeriesCount)]) - } - b.ResetTimer() for i := 0; i < b.N; i++ { reader.Reset(seriesRun[i]) - iter := NewReaderIterator(reader, true, encodingOpts) + iter := NewReaderIterator(reader, DefaultIntOptimizationEnabled, encodingOpts) for iter.Next() { _, _, _ = iter.Current() } @@ -81,10 +68,25 @@ func BenchmarkM3TSZDecode(b *testing.B) { // BenchmarkM3TSZDecode64-12 17222 69151 ns/op func BenchmarkM3TSZDecode64(b *testing.B) { var ( - sampleSeriesCount = len(sampleSeriesBase64) - sampleSeries = make([][]byte, 0, sampleSeriesCount) - encodingOpts = encoding.NewOptions() - rnd = rand.New(rand.NewSource(42)) + encodingOpts = encoding.NewOptions() + seriesRun = prepareSampleSeriesRun(b) + ) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + iter := NewReaderIterator64(seriesRun[i], DefaultIntOptimizationEnabled, encodingOpts) + for iter.Next() { + _, _, _ = iter.Current() + } + require.NoError(b, iter.Err()) + } +} + +func prepareSampleSeriesRun(b *testing.B) [][]byte { + var ( + rnd = rand.New(rand.NewSource(42)) + sampleSeries = make([][]byte, 0, len(sampleSeriesBase64)) + seriesRun = make([][]byte, 0, b.N) ) for _, b64 := range sampleSeriesBase64 { @@ -93,17 +95,9 @@ func BenchmarkM3TSZDecode64(b *testing.B) { sampleSeries = 
append(sampleSeries, data) } - seriesRun := make([][]byte, 0, b.N) for i := 0; i < b.N; i++ { - seriesRun = append(seriesRun, sampleSeries[rnd.Intn(sampleSeriesCount)]) + seriesRun = append(seriesRun, sampleSeries[rnd.Intn(len(sampleSeries))]) } - b.ResetTimer() - for i := 0; i < b.N; i++ { - iter := NewReaderIterator64(seriesRun[i], true, encodingOpts) - for iter.Next() { - _, _, _ = iter.Current() - } - require.NoError(b, iter.Err()) - } + return seriesRun } From b98c2daa9f4ea26c61fca2292357d8f9eb4f6dff Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Mon, 2 Nov 2020 10:53:25 +0200 Subject: [PATCH 04/24] Improve tests --- src/dbnode/encoding/encoding_mock.go | 14 +++++ src/dbnode/encoding/istream64.go | 5 +- src/dbnode/encoding/istream64_test.go | 58 ++++++++++++++++++--- src/dbnode/encoding/m3tsz/decoder.go | 9 ++-- src/dbnode/encoding/m3tsz/roundtrip_test.go | 20 +++++-- src/dbnode/encoding/scheme.go | 2 +- src/dbnode/encoding/types.go | 3 ++ 7 files changed, 92 insertions(+), 19 deletions(-) diff --git a/src/dbnode/encoding/encoding_mock.go b/src/dbnode/encoding/encoding_mock.go index 526895fc58..6fa164f95a 100644 --- a/src/dbnode/encoding/encoding_mock.go +++ b/src/dbnode/encoding/encoding_mock.go @@ -1508,6 +1508,20 @@ func (mr *MockDecoderMockRecorder) Decode(reader interface{}) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Decode", reflect.TypeOf((*MockDecoder)(nil).Decode), reader) } +// Decode64 mocks base method +func (m *MockDecoder) Decode64(data []byte) ReaderIterator { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Decode64", data) + ret0, _ := ret[0].(ReaderIterator) + return ret0 +} + +// Decode64 indicates an expected call of Decode64 +func (mr *MockDecoderMockRecorder) Decode64(data interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Decode64", reflect.TypeOf((*MockDecoder)(nil).Decode64), data) +} + // MockIStream is a mock of IStream interface type MockIStream struct { ctrl *gomock.Controller diff --git a/src/dbnode/encoding/istream64.go b/src/dbnode/encoding/istream64.go index cb4b050eb1..d9ed470a2e 100644 --- a/src/dbnode/encoding/istream64.go +++ b/src/dbnode/encoding/istream64.go @@ -96,7 +96,6 @@ func (is *istream64) PeekBits(numBits uint) (uint64, error) { return (res << bitsNeeded) | readBitsInWord(next, bitsNeeded), nil } -//TODO: tests func (is *istream64) RemainingBitsInCurrentByte() uint { return is.remaining % 8 } @@ -116,6 +115,7 @@ func (is *istream64) consumeBuffer(numBits uint) uint64 { func (is *istream64) peekWordFromStream() (uint64, uint, error) { if is.index+8 <= len(is.data) { + // NB: this compiles to a single 64 bit load followed by a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). return binary.BigEndian.Uint64(is.data[is.index:]), 64, nil } if is.index >= len(is.data) { @@ -132,10 +132,11 @@ func (is *istream64) peekWordFromStream() (uint64, uint, error) { func (is *istream64) readWordFromStream() error { if is.index+8 <= len(is.data) { + // NB: this compiles to a single 64 bit load followed by a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). 
is.current = binary.BigEndian.Uint64(is.data[is.index:]) is.remaining = 64 is.index += 8 - return is.err + return nil } if is.index >= len(is.data) { is.current = 0 diff --git a/src/dbnode/encoding/istream64_test.go b/src/dbnode/encoding/istream64_test.go index e4d4dd722d..83d0c9e1df 100644 --- a/src/dbnode/encoding/istream64_test.go +++ b/src/dbnode/encoding/istream64_test.go @@ -34,8 +34,7 @@ func TestIStream64ReadBits(t *testing.T) { 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, } - o := NewIStream64(byteStream) - is := o.(*istream64) + is := NewIStream64(byteStream) numBits := []uint{1, 3, 4, 8, 7, 2, 64, 64} var res []uint64 for _, v := range numBits { @@ -56,8 +55,7 @@ func TestIStream64ReadByte(t *testing.T) { 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, } - o := NewIStream64(byteStream) - is := o.(*istream64) + is := NewIStream64(byteStream) var res []byte for range byteStream { read, err := is.ReadByte() @@ -98,8 +96,7 @@ func TestIStream64PeekBitsSuccess(t *testing.T) { func TestIStream64PeekBitsError(t *testing.T) { byteStream := []byte{0x1, 0x2} - o := NewIStream64(byteStream) - is := o.(*istream64) + is := NewIStream64(byteStream) res, err := is.PeekBits(20) require.EqualError(t, err, io.EOF.Error()) require.Equal(t, uint64(0), res) @@ -107,8 +104,7 @@ func TestIStream64PeekBitsError(t *testing.T) { func TestIStream64ReadAfterPeekBits(t *testing.T) { byteStream := []byte{0xab, 0xcd} - o := NewIStream64(byteStream) - is := o.(*istream64) + is := NewIStream64(byteStream) res, err := is.PeekBits(10) require.NoError(t, err) require.Equal(t, uint64(0x2af), res) @@ -131,6 +127,52 @@ func TestIStream64ReadAfterPeekBits(t *testing.T) { require.EqualError(t, err, io.EOF.Error()) } +func TestIStream64PeekAfterReadBits(t *testing.T) { + byteStream := []byte{0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA} + is := NewIStream64(byteStream) + + res, err := is.ReadBits(16) + require.NoError(t, err) + require.Equal(t, uint64(0x102), res) + + res, err = is.PeekBits(63) + require.NoError(t, err) + require.Equal(t, uint64(0x30405060708090A)>>1, res) + + res, err = is.PeekBits(64) + require.NoError(t, err) + require.Equal(t, uint64(0x30405060708090A), res) + + res, err = is.ReadBits(1) + require.NoError(t, err) + require.Equal(t, uint64(0), res) + + res, err = is.PeekBits(63) + require.NoError(t, err) + require.Equal(t, uint64(0x30405060708090A), res) + + res, err = is.PeekBits(64) + require.EqualError(t, err, io.EOF.Error()) +} + +func TestIStream64RemainingBitsInCurrentByte(t *testing.T) { + byteStream := []byte{0xff, 0, 0x42} + is := NewIStream64(byteStream) + for _, b := range byteStream { + for i := 0; i < 8; i++ { + var expected uint + if i > 0 { + expected = uint(8 - i) + } + require.Equal(t, expected, is.RemainingBitsInCurrentByte()) + bit, err := is.ReadBit() + require.NoError(t, err) + expectedBit := Bit(b>>i)&1 + require.Equal(t, expectedBit, bit) + } + } +} + func TestIStream64ResetIStream(t *testing.T) { o := NewIStream64([]byte{0xff}) is := o.(*istream64) diff --git a/src/dbnode/encoding/m3tsz/decoder.go b/src/dbnode/encoding/m3tsz/decoder.go index b1d7a408e3..ce1351a524 100644 --- a/src/dbnode/encoding/m3tsz/decoder.go +++ b/src/dbnode/encoding/m3tsz/decoder.go @@ -41,7 +41,10 @@ func NewDecoder(intOptimized bool, opts encoding.Options) encoding.Decoder { // Decode decodes the encoded data captured by the reader. 
func (dec *decoder) Decode(reader io.Reader) encoding.ReaderIterator { - it := NewReaderIterator64(nil, dec.intOptimized, dec.opts) - it.Reset(reader, nil) - return it + return NewReaderIterator(reader, dec.intOptimized, dec.opts) +} + +// Decode decodes the encoded slice of bytes. +func (dec *decoder) Decode64(data []byte) encoding.ReaderIterator { + return NewReaderIterator64(data, dec.intOptimized, dec.opts) } diff --git a/src/dbnode/encoding/m3tsz/roundtrip_test.go b/src/dbnode/encoding/m3tsz/roundtrip_test.go index e386c383cc..a404259312 100644 --- a/src/dbnode/encoding/m3tsz/roundtrip_test.go +++ b/src/dbnode/encoding/m3tsz/roundtrip_test.go @@ -22,11 +22,13 @@ package m3tsz import ( "bytes" + "io/ioutil" "math" "math/rand" "testing" "time" + "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/testgen" "github.com/m3db/m3/src/dbnode/ts" "github.com/m3db/m3/src/x/context" @@ -97,11 +99,13 @@ func TestIntOverflow(t *testing.T) { } func testRoundTrip(t *testing.T, input []ts.Datapoint) { - validateRoundTrip(t, input, true) - validateRoundTrip(t, input, false) + validateRoundTrip(t, input, true, false) + validateRoundTrip(t, input, false, false) + validateRoundTrip(t, input, true, true) + validateRoundTrip(t, input, false, true) } -func validateRoundTrip(t *testing.T, input []ts.Datapoint, intOpt bool) { +func validateRoundTrip(t *testing.T, input []ts.Datapoint, intOpt bool, useDecode64 bool) { ctx := context.NewContext() defer ctx.Close() @@ -146,8 +150,14 @@ func validateRoundTrip(t *testing.T, input []ts.Datapoint, intOpt bool) { stream, ok := encoder.Stream(ctx) require.True(t, ok) - it := decoder.Decode(stream) - require.True(t, ok) + var it encoding.ReaderIterator + if useDecode64 { + data, err := ioutil.ReadAll(stream) + require.NoError(t, err) + it = decoder.Decode64(data) + } else { + it = decoder.Decode(stream) + } defer it.Close() i := 0 diff --git a/src/dbnode/encoding/scheme.go b/src/dbnode/encoding/scheme.go index efcbcee138..1a02ff602f 100644 --- a/src/dbnode/encoding/scheme.go +++ b/src/dbnode/encoding/scheme.go @@ -141,7 +141,7 @@ func newTimeEncodingSchemes(schemes map[xtime.Unit]TimeEncodingScheme) TimeEncod } // newTimeEncodingScheme creates a new time encoding scheme. -// NB(xichen): numValueBitsForBbuckets should be ordered by value in ascending order (smallest value first). +// NB(xichen): numValueBitsForBuckets should be ordered by value in ascending order (smallest value first). func newTimeEncodingScheme(numValueBitsForBuckets []int, numValueBitsForDefault int) TimeEncodingScheme { numBuckets := len(numValueBitsForBuckets) buckets := make([]TimeBucket, 0, numBuckets) diff --git a/src/dbnode/encoding/types.go b/src/dbnode/encoding/types.go index 93af1b4746..c57222008c 100644 --- a/src/dbnode/encoding/types.go +++ b/src/dbnode/encoding/types.go @@ -329,6 +329,9 @@ type MutableSeriesIterators interface { type Decoder interface { // Decode decodes the encoded data in the reader. Decode(reader io.Reader) ReaderIterator + + // Decode64 decodes the encoded data slice of bytes. + Decode64(data []byte) ReaderIterator } // NewDecoderFn creates a new decoder. 
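A minimal usage sketch of the decoder API as it stands after this commit, assuming `encodedBytes` holds a valid m3tsz-encoded stream (for example, one of the base64 samples above after decoding); the `decodeBoth` helper and package name are illustrative only, while the constructor, option, and iterator calls follow the diffs in this series. Decode keeps the existing io.Reader path, whereas Decode64 hands the raw []byte straight to istream64 so it can consume whole 64-bit words without the bufio indirection, which is where the benchmark improvement above comes from.

package example

import (
	"bytes"
	"log"

	"github.com/m3db/m3/src/dbnode/encoding"
	"github.com/m3db/m3/src/dbnode/encoding/m3tsz"
)

// decodeBoth is a hypothetical helper showing the two decode paths side by side.
func decodeBoth(encodedBytes []byte) {
	opts := encoding.NewOptions()
	dec := m3tsz.NewDecoder(m3tsz.DefaultIntOptimizationEnabled, opts)

	// Existing path: Decode consumes an io.Reader backed by the bufio-based istream.
	it := dec.Decode(bytes.NewReader(encodedBytes))
	for it.Next() {
		_, _, _ = it.Current() // (ts.Datapoint, xtime.Unit, ts.Annotation)
	}
	if err := it.Err(); err != nil {
		log.Fatal(err)
	}
	it.Close()

	// New path introduced in this commit: Decode64 takes the raw bytes and
	// decodes via istream64, as exercised by the roundtrip test above.
	it64 := dec.Decode64(encodedBytes)
	for it64.Next() {
		_, _, _ = it64.Current()
	}
	if err := it64.Err(); err != nil {
		log.Fatal(err)
	}
	it64.Close()
}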
From 85ad1dd25fbd4099c7d4810b0e9db95c656e3006 Mon Sep 17 00:00:00 2001 From: Vilius Pranckaitis Date: Mon, 9 Nov 2020 17:15:25 +0200 Subject: [PATCH 05/24] one less shift --- src/dbnode/encoding/istream64.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dbnode/encoding/istream64.go b/src/dbnode/encoding/istream64.go index d9ed470a2e..d754bbee42 100644 --- a/src/dbnode/encoding/istream64.go +++ b/src/dbnode/encoding/istream64.go @@ -69,7 +69,7 @@ func (is *istream64) ReadBits(numBits uint) (uint64, error) { if numBits <= is.remaining { return is.consumeBuffer(numBits), nil } - res := readBitsInWord(is.current, is.remaining) + res := readBitsInWord(is.current, numBits) bitsNeeded := numBits - is.remaining if err := is.readWordFromStream(); err != nil { return 0, err @@ -77,14 +77,14 @@ func (is *istream64) ReadBits(numBits uint) (uint64, error) { if is.remaining < bitsNeeded { return 0, io.EOF } - return (res << bitsNeeded) | is.consumeBuffer(bitsNeeded), nil + return res | is.consumeBuffer(bitsNeeded), nil } func (is *istream64) PeekBits(numBits uint) (uint64, error) { if numBits <= is.remaining { return readBitsInWord(is.current, numBits), nil } - res := readBitsInWord(is.current, is.remaining) + res := readBitsInWord(is.current, numBits) bitsNeeded := numBits - is.remaining next, rem, err := is.peekWordFromStream() if err != nil { @@ -93,7 +93,7 @@ func (is *istream64) PeekBits(numBits uint) (uint64, error) { if rem < bitsNeeded { return 0, io.EOF } - return (res << bitsNeeded) | readBitsInWord(next, bitsNeeded), nil + return res | readBitsInWord(next, bitsNeeded), nil } func (is *istream64) RemainingBitsInCurrentByte() uint { From 3eb6e55b7b9ccdddb966472343b8c0db62edccfc Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sat, 28 Nov 2020 12:40:14 +0200 Subject: [PATCH 06/24] Align with changes in master --- .../encoding/m3tsz/decoder_benchmark_test.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index 8ad3efd31d..ecc18ac1f1 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -31,21 +31,6 @@ import ( "github.com/stretchr/testify/require" ) -var ( - sampleSeriesBase64 = []string{ - 
"FiYqRnIdAACAQEAAAAArkizADVrDlnvgAATiGAAEbAABZgABkwAA6DFXAxnaGOwDF2ON7Yw85trFGksvYiyRjTFW3MeYs21wLHm9t/YkxtjbHW5vCYi6JwTF2LMcYsGI2DGdTRBjsCxRi7bHdsRZI2ZjDdGQsfbs15ijGHosPYqxNjjPGnMcYu29jbJmusVY03FibeGkMYY8xVizVHHsXY+3BjTR2NMYcE2ti7V2yMZb63hi7dmdMYdoxpizgGxMWa805ljgGMsVY4zRiLiHWslZo11lLOGLMdY61Zkjd2uMRZi1BljI2ostbo1hmDfHasVZUytjTeWOshZK3BjTdGtsWYwxdjwYjgMZpNwzLKM8+btsqGOwjHGMNubIxtnTVWVt1bUxRtLWmWtnY+x1nLU2YtjcuzJw7VWbMfYu0RjLVWbM6aY4lpjT2LtVaS0NqTGGJNeYq3torFWMNJaS1ZrTRWpuCYw1xjLFmItCaExJkDWGZMWZg6xjLMGLtiZmxps7EWLNlYw6NjzFmLtvZaxhi7GGNBiPAxmK8DRM0yj8uq2TKMk0DZOu+rPMsyjQumGOxTgGMNzaaxVrLEWLMUZk0xoDy2QN3Y8yNvLNGmM0boxRtrxGNMcY20dy7G2fM2bqyBjrXmHNyY4xlvzGWJsXcIxdt7H2LtIY2xRq7gGJsbZoxRiTVWVtvaey92LdGKMeYsxoMR+GM9WgZcMdsWKNrcIxNibl2KMaY0x5mTOWOvecYxRuDbGLsubWxJpjaWKsebExZv7JGKsucAxVu7HGOMfbkxdtjdGLMZY8xBkjH2Kt1d2xVtzIGLuCYyyBjTJ2KstbWxVtDbmMMzY6xF4bPWJtxdgxJvrJWMsdaGxhuzTWJs1egxRt7ZmItNYuxRpzFmOtvdyw9kTZ2LtzdaxZiTV2LsabYxJmTXWJtzZCx5pTH2Lt4cQxdtTiWNNea4xNn7imLtccaxVjTZmLMYYuxZnDSmNM0euxVmjU2KtwcWxRjrj2JsbdsxhjjHWNhiOAxW9rhjOwMdl2LN3aczRjbsmOOCbkxhkDa2LN3Zo1xtjGGMtxbexNmLJWJsZbQ19jDU2LNydwxZnLIGONwbI1xuTNGLNqYwxNnbVmQMdcg15uDF2NtKbaxdq7SWKtqa015jbbmNMib2x9mrHmMtxZA1htrWmLNzZGxNoLQmONzbA1drbGmJt0ZCxRjLIWJt0Y41lsDNWJtiaqxFjzF2OuEbk1ltjRGKNYZUxRtjI2MN/eI11vbe2Jsob4xljrJmKttaM19j7HGKuEaOxJkLdmJOIcW1hmLbWNMvY6xZmTHmMs9b82Fk7TmKM7cKxtijW2LMuYy2BpLQ2NNacOxpjbg2OODaSxp4LVmJtfbux1vcAA", - "FiYqRnIdAACAQEAAAAArkizADAfgAATiCSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSTAAA=", - 
"FiYqRnIdAACAQEAAAABDnTBYE/Bho3fYmHbfAAAnEMAB+lf3Y8nesGNjT6sqdklzsPFHkkSBobeKPJIkDQy3ijySJA0MlgE3WlChWbTdaUKFZkudjtB7gub4oltoPcFzfFEkkksFUt2Tfa6Fqpbsm+10lzsPqTuES/mJJJLpW9i+c6zi+SW7F851nF9uxfOdZxfLdi+c6zi+SSXOw8DYDYkTFt4GwGxImLLeBsBsSJi28DYDYkTFkulb2L5zrOL5JdC/dMVuc3q9t0xW5zer23TFbnN6vLbpitzm9XtumK3Ob1eW3TFbnN6vJLbpitzm9Xufh7R1X1eVLLJJaw/0a8y0ktYf6NebSS1h/o15lpJaw/0a82klrD/RrzLSS1h/o15lz8PaOq+rypZJYpkiO1dsA1MkR2rtkunPPlZJttpJc/D/fBmnRHWWSS1h/o15lpJaw/0a82klrD/RrzLSS1h/o15kloYescF8rGhh6xwXytz8P1pjNBhIbfrTGaDCQ/AAAnZn///2ERxWSw4LulSvs8twXdKlfZ7cF3SpX2e6F98YLEnhMbXxgsSeExtfGCxJ4TGWvjBYk8JjJJLn3cL98PJ8jbhfvh5PkZLoX3sr7uILjlr2V93EFxySS593C/fDyfI24X74eT5G6F97K+7iC47Xsr7uILjteyvu4guOWvZX3cQXHJJJa9lfdxBcdr2V93EFx3Pw9tAaypmht7aA1lTNDLe2gNZUzQyS3toDWVM0MktDJI57e/ac7HxxmbkR/pJYIWOVrdpMJJJaFjla3aTLQscrW7SbQscrW7SZaFjla3aTJJJaFjla3aTdC+AoWxZUHMtRGb6NHgwWojN9GjwZJaiM30aPBkudh5g7Stcc3JJbzB2la45u3mDtK1xzckksJxZZl2POLTiyzLsedpxZZl2POWnFlmXY85JJc7DzB2la45u3mDtK1xzdhOLLMux5xdK3UGgGFJIS2oNAMKSQkujZOLLMux5yXOw8wdpWuObt5g7Stcc3LeYO0rXHN28wdpWuObksEiGQkVkWJJLo3X2kSbTyRdCywmW9XXelz8OPQTV9E75bj0E1fRO+Szv8H06YXklzsf957cWnANv957cWnANv957cWnAMklv957cWnAMlhn8hhg0Dol0L4gCZVqxQ3Pw49BNX0Tvtx6CavonfJZEATKtWKGSWiAJlWrFDLRAEyrVihtEATKtWKG59+bUdK+4kLZtR0r7iQkls2o6V9xIS2bUdK+4kLZtR0r7iQls2o6V9xIWzajpX3EhLn4dmc/ehX1W7M5+9Cvqkkt2Zz96FfVbszn70K+qW7M5+9Cvqt2Zz96FfVYLZCxIudM1shYkXOmS5+HZnP3oV9Ukt2Zz96FfVLdmc/ehX1W7M5+9CvqtMWFPI9/rJJJYsdtxTlpY1jtuKctLLRg2ocIN1owbUOEG66F3R+2WXEH26P2yy4g+SSS3R+2WXEHy3R+2WXEH26P2yy4g+3R+2WXEH2Rg2ocIN0SSWPcAhr1yze4BDXrlkujZGDahwg3WjBtQ4QbpdCnzVjlG88kkklz7KKnPV58+602dsW5NrbO2Lcm1kklz7KKnPV58+1FTnq8+fJdabO2Lcm1ls7YtybWSSSS59lFTnq8+faipz1efPlqKnPV58+1FTnq8+fJJLrTZ2xbk2stnbFuTa2zti3Jtbo3f04J9i5nZEamPsK4pLo2wbtt7vTWS59MzEoWOlrTMShY6Wklz8PEbYJKYAbnYY8FgdCtcyWH3EI5E2HN9xCORNhyXOwx4LA6Fa5kuhfokiJV00GS59MzEoWOlpJLTMShY6WkuhfokiJV00GSSWG4TfVnMCbcJvqzmBLbhN9WcwLbhN9WcwJbcJvqzmBLnY+85UiQZUpb3nKkSDKlJYG4TfVnMCJdC+kcG+4Y9stSODfcMe2XNxxwbSeDL2XNy6dODPdz6pJJLCdLyOrAioktOl5HVgRVz7ultcsuJ3kkl0bsl+P9BB4kuhZtpdeDGBS6F9/6WHcIbJJZkvx/oIPEl0LZGbL+mLngLftIzZf0xc8B0jNl/TFzl0L/UkMAGrbSSSWZL8f6CDxLZL8f6CDxbJfj/QQeJdC+6q01qmjEkklzsdKzCAxSsUtpWYQGKVit3MU5BmXyliJeKJtHI8kks9H3goTte3o+8FCdry3o+8FCdr29H3goTteXQviXiibRyPJLeaMpHsCQFvNGUj2BLeaMpHsCS3mjKR7AlvNGUj2BJbzRlI9gSSSXRu0uvhH+2y2l18I/22S2l18I/22W0uvhH+2yW0uvhH+2ySSSS52H18ZprwH9vr4zTXgP5b6+M014D+SSSSSXRuii6kmXyCSSS0UXUky+QS0UXUky+QSXNy0u9Sg9bxrZzy5yJx+gl0L+SxCJdwZS3JYhEu4MrcliES7gyluSxCJdwZSSSSS3JYhEu4MpbksQiXcGUktyWIRLuDKSyLVs+rG4paLVs+rG4rRatn1Y3FLRatn1Y3FJaLVs+rG4pJJc/CxW6lPyeuSWhvJm7oR4Ekkl0L8VpabU7JWxWlptTslLYrS02p2SksWdn1v2KcS6NfcU2S5Ky3cU2S5KyS6VlnZ9b9inLWdn1v2KcktZ2fW/YpySWs7PrfsU7Wdn1v2KctZ2fW/YpyXPw8jCvW8FNtk3WHmchTJY7imyXJWQW7imyXJWW7imyXJWSSW7imyXJWSS3cU2S5KyW7imyXJWW7imyXJWS3cU2S5KyS6dV0H/Ok0skuhZXSD4UAdy6Ft3YTNCVqtd2EzQlapa7sJmhK1YAA", - 
"FiYqRnIdAACAQEAAAAAWlSx4Dadc6Q14AAE4hgAGQBgAGP9gAGTpgAGFcMxyJvHg8gDyAvFs8e7yAPIC8f7yAPFu8fLyCvH28gL2yu0G8gDyAPII8f7yAPB88YryAQHx9X7yEQHx+vH68U7x+vH+8gLyAPGQ8YTyAPIA8fzyBPDC8iLyAvII8e7yBvGw8ijyCvHw8gTyAvHi8ezx4vIA8hDx9vHw8e7yAPH68gDyBvGA8cDyAPIA8gDyCvE68PTx+vIK8fbyCPBE25jqmPIW8fbyAPCk8gTx/vIC8gjx9vAe7VrpJPIG8gDx+u/i8gjyBvH48g8W8ftU8S8W8WTyAvH+8grx8PD+8erx9PH88f7yCPEu8fryAvH48gbx/Pco7K7x+PIG8gD26u0E8gDyAPIA8grx9vIK8cbyAPIA8gL3GuoU8cDyEPH28gDx+vF08dzx+vIG8gDyAPF88ibx1vIK8fzyAPE68b7yAvH+8fryBvH48g7x+PH88jTx0vHi72bx8PH+8gLyBvA48fzyAvIA8f7yAPIW8eryAPIA8gDyBvGy8fLx+vII8fjyAPHc8EryBvHy8gLx7PHe8XTyAPH68gbyCu+28fTx+PIA8gbyAPCY8fLx/PIE8gbx9PGU1/zt7PH48hLx7vHE8gDyAPIG8gDx+vVU7Sjx/vIC8f7x+vGC8frx9vIE8gLyAPEc8hLx+PH68gjx/vH68f7x+vIG8gDx+vD48fryBPIA8grx+PEi8fjx+PIG8grx9vCY3tTx+PIA8grx8vHu8ezyAPH+8gDx/vJg8PryAPIC8fjyAPE68aLyAvII8fjyAPHY8YzyAPIA8gDyAPDk8fbx8PIG8fryBvEo8ijx9vH88gzyAPHA8ijx6vIU8gDx+vFRAfHvfvH/AfIK8f7yAvHy8gzyAPIK8fDyDvDo8WbyHPIE8fjyDvDq6hbyAPH68gbyAPEw8fryAPIA8grx6PHK8fLyAPIG8gbx9PFC8fLyAQPyAXvyAQHyCwHxauyQ8gDyAPH48gjyCvHw8hDx9vH68gbwqOlO7NLx+PII8fDxZvGy8e7yAwHyAX7yBwHyAPG+8fTyAvH48g0B8OV+8cMB8gDx+vIG8frx0vGU8gbx+PII8gDzAPAo8f7yCPH88fjw3vGM8gTx8vIG8gDyHvHI8fsB8gd+8gMB8gjw4PH28fryAPIG8gjxIvIU8fbx+vIG8gDw1uxE8fryBvIC8f7xpPHY8gbyAPIA8gEC8Mt88UcC8hLx+PIBBfIHdvEhBfHQ8gDx+vIC8gTxkPH+8gLx/vIG8fsB8el+8WkB8fDyCvIQ8fbx/PHw8gbyAvH+8gD1bO0s8gLyBvIA8gTxavIA8gDyAvII8fDxKPEM8fDyAPIG8fzw0vHq8gLx/vIA8gD2Ju0W8fryEPHw8gbxgPH28gbyKPHO8gzxzN0s6q7yBvII9tTtDvIG8gbyAPIC8f7xaPIA8fDyEPH28grx+PFo8fryDPH68frxhPIC8gbyAPIB4AABWoM1ggP///qWNF28AAArU2PFsgDY/WyCNj/bWtAdYnv2yCQHY/myCQXbebtrM0F2OdsfDZAGyANkDbEU2LZsgDY/WyCNkBbH+2PlsgbZAGyCtsKa1u2P1sgbZAGyAtj8bGu2PtsgDY/WyANkDgOx237Y+IDsgDZBmx/Ntuaq+zvVsirY+2yAtkAbFC2PhsfbZAGx+tkAbE42QBsf7ZAGyCtj7bKu2JhsgbY/WyBtj9a5O2QdsfzY/2yCNj9bGu2O1sfzY/2yBtkAbG02MZsgDZA2x+tkDbHo2QBsgbY/WyBtkAbGm2CxsgrY+2x+tj/bFk2EVsfrY/2yCNkBbIg2O9sfrZA2x+tkAbEG1UlsfrZA2yANkBbIUztpsQzZAWx/tj9bE22QNsgbZBWx9tkBbEs2N9sgLY/2yANkDbHO2QdsgrY+2yANkFbFlAdjrv2yAQHZAGyCtj+a9W2QJsgDZAWx/tkAbGw2QFsgbZAGx+tkDbHu2PZsgDZAGx+NkEbGq2FVsgjY/Gx+QXZA7tseEF2QlsgDZAGx+tkWguy7XbYcoLsfkAA=", - "FiYqRnIdAACAQEAAAAAnPgFYA+AABOIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJPFcvVHPFcpJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJPgAAGhz4AABno////L/JJJJJJJJJgAA==", - 
"FiYqRnIdAACAQEAAAAAWlSx4DYa+fHfgAATiGAAgoYABJ1gABamAAJ9DLWSWyU46nCsw5GX05SG/Y2OWyCwqmgq2cGuK2q25IwqGLg1gmuS2uWlAypm0+2QIDrJX7YLgOxGyh2lo3k2/a1eW3Gwqmdq3BWvU4I2/6w1GXS3eXH01VGm0woGYu0jXEC02G1mxlGTY47Gyi18WpMwbGjW2zm0Kz8Wx2wy2fY1wmoO1wHGiwy2XO3fm3O1RmzEwKWCAzp2382R2/cwj2fw21nE81KWtmwVGGoxKG5Y0/moSxTmVg1lmri2P4tk9VTEhi2RA1OG7c1vmsKxnmWYzy238202rgwCmuS2C2wU0h252xbWQw4EWtO4dHA4wNWhM3hWc81fG+oyLGqS26G5s2dWpIxTmZK3/m0I2129CwX2kwzgGxY1EnBMxKmtG5Bmxy2D2rKxa2fS1GW1+11G64xNmmW1DG1e1Im86wImlA1rWpg2k20gwPGbO2SXI625nCQwqWgM1BXAw2nHMOxQmqA1x2ps2+2qUxbWZU2tGvi2s2tiwsmYo1Hmqo3bW6swhmSC2dWnY0rW7owRWgW2CWi62Qm5kwPmGKzyWuC1UHPAw5m9M3jm524Vmk8w7WSe2XHDC4TW7KwbGqE3BGsG1S3AQxpWXEz6Gto1MmyGwbmZU2H2+419mxMwHWcAzWHCO3AGsWxNWm82jW3W1KmxEwLmJQ2825wyzGsQw0Gae3v2oI0sm5ywa2Ue1ymve4FHEoxRWfQ38m2I2vWpmxI2ee1mWoe1R2yQw9Wos1AGvs0nW7gyAWZu2BGfg1+2sGxiGkW2GXlg3IHCqx4IDoyX7e5gOsg0/mruwQGhG1eW8c3Gm8Kws2ZS4ZnCy30WjKwX2UI2Bmui1oHCeyRmxk19mrI1W25Uw5WWo2HWvm2u2nuwrmr8zXoHqg3vgygOzlAcD9ikNTRxANflu2sYFjFtSJqRNJhrmsH9hTMnBtvNW1vKMK9i7t0huMQHWYv3DvAcXpqktctxBuGhruwHCxv2PPAdTRr0NHFtbsM1kytcZsaN7ZsNMN9oYNThsdtw9o9sLplmuHBrUuRttnMbln6tX+A6F9+0dIDwvMDJkWt4Rlwt2ZyDsklloN21r8tahndMIBq2OChq7thBu5sbRnzNhNvCtmpqrQLCTvmiLAtlxvXtqCC7bl2wQILovtoBpUNWFshsWtptNt9yCNqpsAwHBIv2n/AdmdpktyJtFMHBrANk9xRtWRsOMLpmQtt1qhNUBvqsO5lstDVqktmluDsfdq2NvNrVtw5sPsUFkrNWRyXumxscsRhiotmVoBtmVsgsEFmHtmtspNwBsSsGphKtDptKOctuOsV9mht69tFNpFqIMexlUtbNrONLJx6Mq5ldti5tQNAxqYMvhlAte5rCtczwAACtTYy///+pYxJvAAAK1Mc9wJtrlohuG5hLwHN6v2s9AdtpuNQXXtu2CTBdR1vgtkJtutEVhQsn5t+NV5jutQliKNGVwftxdsANuhgKtExtlNDlp7N1RhOMstuCNetr9uR6A4p1+ynIDs4NI1ubOB9gmMH9rttN9qGNwRgWM3pqOtdZo/OWlhvNI5stNhNr7txZjRsvtqgN1ZpcNUBjhseRwKt7lytNzBgStANl2NQBu0NLxh6NQJtMtmlqkuEhjstlBsDuHZqkNixhEs7lqnNppt5NuNiXMPxt4NyNvDNb9hyNU9rztxhsKNUNhAsi9rMtuNvINqdgMMM1lFuM5u5NlVgUs+BpAtydqyNstk4MlppstXZtTt8hhSs0NrBtcxqWNQtiOQHV3v2ihAdVRpONw1lLtZhrJNzRrLtntjRMlJrGNc5qCtF1izMtZtAtOdtoNhJhEMrRrxtFVv0QXW4u2MnBczBsfuYtrrtLmC4oN21nYLoqQAA==", - "FiYqRnIdAACAQEAAAAAPGEnQDAfgAATiCSSSSSSSSSSSSSSSSSSSSSSSfAAAnEH///2PBJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJMBb8wFJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJgAA==", - "FiYqRnIdAACAQEAAAAAarE7gA+AABOIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJMAAA==", - 
"FiYqRnIdAACAQEAAAAA94IfIDfYY0GwPwAAJxDAAH592AASFBMAAeXVYAA/OkGiuzWt4Dtu8tNrDNvACt9dFxQKt9lRx22OG9JwIlN+YFwUkN241uCOOBi1tzUN8F1u9zOC6pv9fNrUVvQot5dNv2vtzelurVuBihvh2t6cdv+Pt3lRwKeOKG1uHjN/bCA79Rd+3xD4DvJgtxP9vwEt8rlut/t5p9vjCOLHdwTTN0k1u2QN5j1wVWN5/RxHnt0OVuQkN7KFwsetm9xxfeN4pBuxpOBGtuE8NzABwc2OHS1wc9OAtZvmPt6N1wXJNtN5vjWt+DVsstt45Bv98t2w5xRTN2UxvLnt5/hvD3ODCJvWluAQBxadOBxhv/AOAGVxGgODJpvXcOB3VwapN7xJxN4uH7pvK5OI1Rt2/uAoiB704V631N4HwsjN6gJwmkuMEZwKZN7G9ujTNz7lwnkN6AtvLwN4Xhvv5t7ZJw6UtvS5w8xN/O5u6Stxi5wU5N25VwkaNy4xu3cOAEZwK6NvnBvWZN8KFuBXN6mFwvqN9/Zw5EOEZ5vJBN5yBw9IuTW5ueOt9BRv/aOCLVvxst99Jv6dtvNNu5juA5VwxVN8lJuYtt56tvwlt+BJw9vt79dwAqN5PhuyMNnopuzcN8WpvbKN6MZuaBNx09yyzt1blxItt6lpw50OEGtxZSN2LBwvat8BVvv+tyM5xGxt5xNwjSuGR1wV8OEuxv0Stz1Bx30ODlNvu0t14pvqzN66Rv85t2WFvhzuBBBvJgN6G9vlBNz4hv06N77NvFOt2/hvWZuD1ZuzmNu25vqEOR1Vu1StzdFv4SuKDluEON4mlu6juC4JvPTORxVvgdN8G1v7fN6epvTsN7pltzKuOeRuwyODvVw0tOB71vxGt+hVvYvt8plxfzN+UNwBoOJgtxZCt9R5woct6gpxBNt9U9wRdt4P1vkeNsP1wmzN43VvlYNzbtuwxt0jJuqQOG0hvzwN27Rv2uuGQNuaHOIeVvWVOEtBtgHN8NtybruAM9ws9N/s5w2ot711wEzN5n9w6ZN84JvaetoexwmhOI5puccNv1VuLaOBlRxAft04VupXN6d9v9ktvchv87tswVvieN4EBvVvuJ99vTvOHhlv1SOH81u7jNwFtuABN1lFxUZN0GNxMzN5PtwGmN9T9wE9t9plup6t/QRwV6OOltt5PN3Iluszt+elwZlt95BwmOuK1tvzaOC1duCANyk5wHfuD5RvT9wTeFLvG8BtBOCqRwZ4uKQBwszt/ZBv/pt7AZvDFtq/xtwSN9PaA7jy1+4nOIDuXWt3fJvnwNsoRvILNsG1v8UOBWFvlct9utwqMOKOxujKN4tZvmAtwhhvjgOMWhvQoN1xRu/Ct629uXtt3EhwAhN7CBvgwNwPhwpWttsBwexNwj1xGPN9sRu00N+Ydu4Zt/OtwFht9rxwzktwZ1w8vNyHZwzLtwLVuseN6hRu69t/ptvMct5tZuv/uEstwa8t/lluvTOEmpws3N9TRumCuAUhtc8NzvlvhCt7ftvYvt4YBurmt2gJv3jN1idv4eN6cxve9N2MVuliuPUpvg5t8V5vjxtuWJvE0t9ZFvKQOB91wsYt9CqA73Gjcd3vu7tvAt1SRwoHtu19xTbOTKdtsPuKkBvdft6ChxqHN8WdugRNy9VvEbOErBwBVt7kFwNpuHixvF9t8xNvjyt2KtuUltondvSftzpttyLt3zNu8nN0QZvEhN4WZxNrt2Jtt2NN3AtvoNNyUlvWouN1Fw+iN/ZBvs8tl65xiDtz1Zwm0NyxduGSOBsJua9t4otvEjt1G1vYVNyQduiFOCxdw+KuGLtuyrN6s9wTztyHhvy0uKfJueHuMZpvIHN0pZxU1N7KhuliuBtxwFgt5PJty0QLeb2vm8RtAt615v8TN2IdwJ4tzWBtHFuIYdvYwuATlx6ptxC1wD7t8D1v0RN9w5utAt8d9wmJN4kVxPrOGGVvM1t/BVvRON3cZyJiN3ltv5QN/FtvLLtrYBw3ZuHj5wiwt/v1usHN5e1wu+N3Wdw1qN9D1w8Yt52VwkQt4iZxJctvfVvvqOLyhxIBNyPRujWuD6htvQt9StvI+t/utw2Vt8ppxLAttYpvjeN/ShvpXNvChvfst7cVvuTuHlRt/aN53hv6JN44RwyYynfEh2u3sVZTvzIuAd5u7CON7BvBiNyMJvoGOQHBuneuDzltU1t4hVwDqN2yBwJ8N88Bu49t1mdvNfOBbZwS3uPfJwaNNzzdwPIN63xukQN+ItviCwHesev21zxAeDhRwiLtqQJuoFt7cFw2QtsBhwYMuPyVuklNtDVwfwt8P5wZxNzNpu+RtkcVwBLNp+Vvmzt/8Rwo/OIq5uKcNtLJuQxuBhxvMLN/r5wl1Nyzpts7tseZvmDuGLRt5LN8FhwlmORGxvqtOBPRvkIt6FBu64t8MdwSbt8S1woEt0Ctt7otuI9wyut0GRwNiOFppuoxuBAFwJAOKkZuNrOBqtt+KOA3NxaYN5WdtXxOCzdwrNN4uBwyzN9qlwjkN5CBwBHOQJFtnlOD6pw5tt2K9xl3ONI1wx+uC8mAA", - 
"FiYqRnIdAACAQEAAAABGId9oDfcTD+KTwAAJxDAAJ9nmAAgRqMAAyOLYABNsmGkqKtGlSxai45t0OWyDGakfNqCDmpc7a1FhtxUW0JAagCBq4kWiuPaWIVuiqWx1DaZ8dpS42tOuau7NtH526t8adBRqMy2iVZabJtuBPoDuMH79pdk4DqL6Gn02aggVucnYFt0QL7qE0Wme4gOlusabFdtjBmuwRado5pei4Dp9lr9qEkIDuFg2yO4aVwppYX2oPOaYWFtkKm2OWaT3BpbpmnylanWZuI5m2XBab15puEWrJ4aSDBtt7W8VuayVFqRSGo68aX9RvhI22xIaNUVqXlmneXaOlpuUQGsIqaWoNqHvGlF1afUFtA8mwQ3aoENp6pWqtIay+1ssXG1j2aiLFqnZ2qTXaYa5uSFm9nYaW/tqnLmsU0ay+Rt2sG+vCaK2dqG4movralhdt+vm6W+aro9psSGi3/aXIZv2A21//aitVqG0YJps07xp5hIJtyDGq4lahdZrPB2omPabEhs3HG0nQacY9re2IDo9xb9qOc4DtrTm1Q3aNUVqrHWocCabaxtp7WtdEaYflq4FYDpn5L9pt04DuSY22gTawH9pMP2ng1gOqgMv25S4gO1kgaiLdpf62l0xaaTtu0ZG2Jwal7RpDT2lwaajbWA7kwq/bk5OA6Y8xqtWmnWOaaYls0J2xFVgOrKov2l0bgOlylaUGVvBlm5XIaiDlqyj2mHyareRuR92we7aPPJq44Wm0pakR9sznG6g3amm5qZ+2rCuarOVvZnG4NoaP19rOYGoXpameRt1bG05SafENpkjWmimafb5uFwW0Acagj9qIY2kdhap3tvQWmwEKadOFouV2sIraX+1tEcWwZ7a6+dqePmkkfgOk4dv24fIgWpYDv2nBvgOqscabjlqFd23mfgOylbv2oj2gWokrv2l5OgOpn4cAMxsNxWquhamiNqY+Gr6ubqCRsSsmpuvakdpp7aYDqnmr9vStYDrdPmqk6gOozNv2qYLgOsQxbwpNvfsGoc6alFFpT+mpnubhB1tEE2lTSav/hoYVWky0b72tuuammghak85qbDGqJBbbN9uOgoDqZW79qbJYDqg0mjIwbmVJtKpWrszgOpmuv2oi4gOnmbbYyFtYa2o6Yawi5pjvGojnbbG5ub/2pycafIxpZY2mJBgO/Szv2vr6gOmuzgOk8tv2nmBgOoEibcKdsfzmkSHahcVqN5Wicub5jxuoi2pqIgOrbpv2oM/gOn7abgLtuBN2nunazp1o0/GtrEbcRZt/0oDqkib9pHMIDqHxGmRvbsCtq88GrROaN2SA6Y0m/aVMeA7tBxuQvWoKMas3hrPTmojKbqsRq202lpxartdqZKYDqjr79s/7YDvOymnglao+5q5qmrC/g23crumwa4g2qgqaehtobo2mKEbwzlsWsGrlRaReNpOkGoYmbMD1vYDGi0zafyNq+32j5ybJT9uW9mrLSaVmlqAbWqzIbic9q1BWqDIahZJpWZ2nNhbkOttxMWjWKaf8hpnAWkA1bcd5stqmjjXal3ppTR4DrJeL9tieYDtL82o3YacAZq3B2plQbgM5thJGljMakTZorqmqSnbiFpr2qWn5ran9pp/qGk9gbgH9sd1mojAawPFpzL2nKDbYfhuYuGqdRaRa5qihGiwWbg21uRDmmFWajuZpnMGmipbmjBt222k90abMVqieWktEbB59vTxWq69aq4xqX22mAdbeXKA60BG/afuaA6TK5qNxGtW/b58dr+B2le2adQhpi1mnpEbLaptC+WxijaTpdqrRGrHbbpUBtuY2p4QaYuKCaXBq8aexeCboedqfCGn2Maq+KCaxO28abA6CbbRhty0mnV5aOa1oqvIDqvsr9sPNoDs532m+QaX7hpa/moolbaBRwqB2kGDaXnBpKHGqRQa+Z5u5UGqqba6w5pycGow2bHhxtVV2lTsaq4Vp3BWkh/bQNNta2WoGgacIVp1DGnfGbPZht5PWsiyaoWRqEwGmdlgO4Aov21sQgOmNBamh5rJv2jKvbtyps7AmqsoaphRqA7WnIlbVyRu4kWmb5aMNJpiJ2oyKa6F9uNi2mfDakyaA6Z7+/aX9qA7VVRsvkmrJuaqwmA6Lda/ayU2A7Xp9r+22oKdal5dqY24Dp0tb9uUtYDtSommY3achJpnNmjiJbYyNusUWlqpamUBqHYWo5/bW79s27mrZmahtFpqEmm8ubhK5ujlGnGHanJdpdYWsXXbza5vQ2mnqgamiFpxZGoLRb5/9uApmmaFacUBpiVmmmbb3dhrWRmnBAapKVpudGre6gOyNuv2uhPgOo3KahcBpgmGndWbcXluzgGpq7abVlp2poDqcFr9tdVYDuIbGmtqakJ9qT02pfvbg/dthuGkrPaV7FqCM2sNHbX9Vt1r4Douu79qKjYDqaPmrwybtK1se0GooVaSj9qDnmrVUb0/eA7AL6/asHmBaSge/6jS6/6hceA7RxdtDQmoK5aefRo/eGqwVborls13mk0/ak8qA6vRW/aS6GA7Z55ukIWmsWaLzlqve2rXJbRSVtZ62modaGH9qWs2jQAbzFxvBWWtgsalnVo/72iaEbSbOBbPYu+agDOBacCpqSLoJporLxsXrIJs8HWuUoaZPtpHzWlefbdO5u58mm7jat7Rpj5Gne3beCdorCmn3OafItqKYmmTTbHw9s2k2q4ZaeXBrIF4FqYcr5t8q4FuKkmm4Aa2Cpol42q7fbsKdtK2GtYGae19ouOWsWEbWTFuNfmpClaiXZqCnWrYgbWVJu9n2kt6apzFrDcmmGAb2lhsT3IDqR3b9qSSYAA=", - } -) - // BenchmarkM3TSZDecode-12 10000 108797 ns/op func BenchmarkM3TSZDecode(b *testing.B) { var ( From 938f244701df116b49a0419227ec4d816b54303b Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 20:21:05 +0200 Subject: [PATCH 07/24] Replace IStream with IStream64 --- go.mod | 1 + .../main/parser/series_iterator_builder.go | 6 +- src/dbnode/client/config.go | 16 +- ...ch_tagged_results_accumulator_misc_test.go | 4 +- src/dbnode/client/options.go | 10 +- .../client/session_fetch_bulk_blocks_test.go | 48 +++-- src/dbnode/encoding/encoding.go | 2 +- src/dbnode/encoding/encoding_mock.go | 31 +-- src/dbnode/encoding/istream.go | 181 ++++++----------- 
 src/dbnode/encoding/istream64.go | 169 ----------------
 src/dbnode/encoding/istream64_test.go | 186 ------------------
 src/dbnode/encoding/istream_test.go | 125 +++++++++---
 src/dbnode/encoding/iterator_test.go | 16 +-
 src/dbnode/encoding/m3tsz/decoder.go | 10 +-
 .../encoding/m3tsz/decoder_benchmark_test.go | 22 +--
 .../encoding/m3tsz/encoder_benchmark_test.go | 4 +-
 src/dbnode/encoding/m3tsz/encoder_test.go | 11 +-
 .../encoding/m3tsz/float_encoder_iterator.go | 4 +-
 src/dbnode/encoding/m3tsz/iterator.go | 25 +--
 src/dbnode/encoding/m3tsz/iterator_test.go | 12 +-
 src/dbnode/encoding/m3tsz/roundtrip_test.go | 19 +-
 .../encoding/m3tsz/timestamp_iterator.go | 19 +-
 .../encoding/multi_reader_iterator_test.go | 9 +-
 src/dbnode/encoding/null.go | 9 +-
 .../encoding/proto/int_encoder_iterator.go | 6 +-
 src/dbnode/encoding/proto/iterator.go | 13 +-
 src/dbnode/encoding/proto/round_trip_test.go | 14 +-
 .../series_iterator_split_into_blocks_test.go | 6 +-
 src/dbnode/encoding/types.go | 24 +--
 src/dbnode/persist/fs/merger_test.go | 9 +-
 src/dbnode/persist/fs/retriever.go | 14 +-
 src/dbnode/server/server.go | 4 +-
 .../storage/block/merged_block_reader.go | 14 +-
 src/dbnode/storage/block/options.go | 8 +-
 .../commitlog/source_data_test.go | 7 +-
 .../bootstrapper/fs/source_data_test.go | 9 +-
 src/dbnode/storage/bootstrap/util.go | 2 +-
 src/dbnode/storage/options.go | 5 +-
 src/dbnode/storage/series/buffer_test.go | 6 +-
 src/dbnode/storage/series/series_test.go | 6 +-
 src/dbnode/testdata/prototest/pools.go | 10 +-
 src/dbnode/x/xio/block_reader_test.go | 40 ++--
 src/dbnode/x/xio/io_mock.go | 35 +++-
 src/dbnode/x/xio/null.go | 3 +-
 src/dbnode/x/xio/segment_reader.go | 119 ++++++++---
 src/dbnode/x/xio/segment_reader_test.go | 13 +-
 src/dbnode/x/xio/types.go | 14 +-
 src/query/pools/query_pools.go | 7 +-
 src/query/remote/compressed_codecs.go | 5 +-
 src/query/ts/m3db/options.go | 4 +-
 50 files changed, 513 insertions(+), 823 deletions(-)
 delete mode 100644 src/dbnode/encoding/istream64.go
 delete mode 100644 src/dbnode/encoding/istream64_test.go

diff --git a/go.mod b/go.mod
index 8fa319d98c..2ce2f55fa7 100644
--- a/go.mod
+++ b/go.mod
@@ -100,6 +100,7 @@ require (
 	github.com/streadway/quantile v0.0.0-20150917103942-b0c588724d25 // indirect
 	github.com/stretchr/testify v1.6.1
 	github.com/subosito/gotenv v1.2.1-0.20190917103637-de67a6614a4d // indirect
+	github.com/tj/assert v0.0.0-20171129193455-018094318fb0
 	github.com/twotwotwo/sorts v0.0.0-20160814051341-bf5c1f2b8553
 	github.com/uber-go/tally v3.3.13+incompatible
 	github.com/uber/jaeger-client-go v2.25.0+incompatible
diff --git a/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go b/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go
index c38252f465..aa46fe138c 100644
--- a/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go
+++ b/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go
@@ -21,7 +21,6 @@ package parser
 
 import (
-	"io"
 	"time"
 
 	"github.com/m3db/m3/src/dbnode/encoding"
@@ -34,9 +33,6 @@ import (
 	xtime "github.com/m3db/m3/src/x/time"
 )
 
-const sep rune = '!'
-const tagSep rune = '.'
-
 // Data is a set of datapoints.
type Data []ts.Datapoint @@ -46,7 +42,7 @@ type IngestSeries struct { Tags Tags } -var iterAlloc = func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { +var iterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) } diff --git a/src/dbnode/client/config.go b/src/dbnode/client/config.go index 474346f2f6..15c366b8fb 100644 --- a/src/dbnode/client/config.go +++ b/src/dbnode/client/config.go @@ -23,7 +23,6 @@ package client import ( "errors" "fmt" - "io" "time" "github.com/m3db/m3/src/dbnode/encoding" @@ -31,6 +30,7 @@ import ( "github.com/m3db/m3/src/dbnode/environment" "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/topology" + "github.com/m3db/m3/src/dbnode/x/xio" xerrors "github.com/m3db/m3/src/x/errors" "github.com/m3db/m3/src/x/ident" "github.com/m3db/m3/src/x/instrument" @@ -43,11 +43,6 @@ const ( asyncWriteWorkerPoolDefaultSize = 128 ) -var ( - errConfigurationMustSupplyConfig = errors.New( - "must supply config when no topology initializer parameter supplied") -) - // Configuration is a configuration that can be used to construct a client. type Configuration struct { // The environment (static or dynamic) configuration. @@ -298,8 +293,8 @@ func (c Configuration) NewAdminClient( syncTopoInit = params.TopologyInitializer syncClientOverrides environment.ClientOverrides syncNsInit namespace.Initializer - asyncTopoInits = []topology.Initializer{} - asyncClientOverrides = []environment.ClientOverrides{} + asyncTopoInits []topology.Initializer + asyncClientOverrides []environment.ClientOverrides ) var buildAsyncPool bool @@ -412,9 +407,8 @@ func (c Configuration) NewAdminClient( encodingOpts = encoding.NewOptions() } - v = v.SetReaderIteratorAllocate(func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { - intOptimized := m3tsz.DefaultIntOptimizationEnabled - return m3tsz.NewReaderIterator(r, intOptimized, encodingOpts) + v = v.SetReaderIteratorAllocate(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) if c.Proto != nil && c.Proto.Enabled { diff --git a/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go b/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go index 2eb8d6837b..df9456a5ce 100644 --- a/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go +++ b/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go @@ -22,7 +22,6 @@ package client import ( "fmt" - "io" "math/rand" "os" "sort" @@ -34,6 +33,7 @@ import ( "github.com/m3db/m3/src/dbnode/generated/thrift/rpc" "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/storage/index" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/dbnode/x/xpool" "github.com/m3db/m3/src/x/ident" "github.com/m3db/m3/src/x/pool" @@ -272,7 +272,7 @@ func initTestFetchTaggedPools() *testFetchTaggedPools { pools.readerSlices.Init() pools.multiReader = encoding.NewMultiReaderIteratorPool(opts) - pools.multiReader.Init(func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + pools.multiReader.Init(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) }) diff --git a/src/dbnode/client/options.go b/src/dbnode/client/options.go index c8e350b199..6271cbadc5 100644 --- 
a/src/dbnode/client/options.go +++ b/src/dbnode/client/options.go @@ -22,7 +22,6 @@ package client import ( "errors" - "io" "math" "runtime" "time" @@ -37,6 +36,7 @@ import ( m3dbruntime "github.com/m3db/m3/src/dbnode/runtime" "github.com/m3db/m3/src/dbnode/storage/index" "github.com/m3db/m3/src/dbnode/topology" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/clock" "github.com/m3db/m3/src/x/context" "github.com/m3db/m3/src/x/ident" @@ -48,7 +48,7 @@ import ( "github.com/m3db/m3/src/x/serialize" xsync "github.com/m3db/m3/src/x/sync" - tchannel "github.com/uber/tchannel-go" + "github.com/uber/tchannel-go" "github.com/uber/tchannel-go/thrift" ) @@ -187,7 +187,7 @@ var ( defaultFetchSeriesBlocksBatchConcurrency = int(math.Max(1, float64(runtime.NumCPU())/2)) // defaultSeriesIteratorArrayPoolBuckets is the default pool buckets for the series iterator array pool - defaultSeriesIteratorArrayPoolBuckets = []pool.Bucket{} + defaultSeriesIteratorArrayPoolBuckets []pool.Bucket // defaulWriteRetrier is the default write retrier for write attempts defaultWriteRetrier = xretry.NewRetrier( @@ -445,7 +445,7 @@ func (o *options) Validate() error { func (o *options) SetEncodingM3TSZ() Options { opts := *o - opts.readerIteratorAllocate = func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + opts.readerIteratorAllocate = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) } opts.isProtoEnabled = false @@ -454,7 +454,7 @@ func (o *options) SetEncodingM3TSZ() Options { func (o *options) SetEncodingProto(encodingOpts encoding.Options) Options { opts := *o - opts.readerIteratorAllocate = func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + opts.readerIteratorAllocate = func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return proto.NewIterator(r, descr, encodingOpts) } opts.isProtoEnabled = true diff --git a/src/dbnode/client/session_fetch_bulk_blocks_test.go b/src/dbnode/client/session_fetch_bulk_blocks_test.go index db4d1afecd..3503948a6c 100644 --- a/src/dbnode/client/session_fetch_bulk_blocks_test.go +++ b/src/dbnode/client/session_fetch_bulk_blocks_test.go @@ -24,7 +24,6 @@ import ( "bytes" "fmt" "io" - "io/ioutil" "math" "sort" "sync" @@ -61,8 +60,8 @@ var ( blockSize = 2 * time.Hour nsID = ident.StringID("testNs1") nsRetentionOpts = retention.NewOptions(). - SetBlockSize(blockSize). - SetRetentionPeriod(48 * blockSize) + SetBlockSize(blockSize). 
+ SetRetentionPeriod(48 * blockSize) testTagDecodingPool = serialize.NewTagDecoderPool( serialize.NewTagDecoderOptions(serialize.TagDecoderOptionsConfig{}), pool.NewObjectPoolOptions().SetSize(1)) @@ -101,7 +100,7 @@ func testsNsMetadata(t *testing.T) namespace.Metadata { func newSessionTestMultiReaderIteratorPool() encoding.MultiReaderIteratorPool { p := encoding.NewMultiReaderIteratorPool(nil) - p.Init(func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + p.Init(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) }) return p @@ -1455,10 +1454,9 @@ func TestStreamBlocksBatchFromPeerVerifiesBlockErr(t *testing.T) { require.True(t, ok) segment, err := reader.Segment() require.NoError(t, err) - rawBlockData := make([]byte, segment.Len()) - n, err := reader.Read(rawBlockData) - require.NoError(t, err) - require.Equal(t, len(rawBlockData), n) + rawBlockData, err := xio.ToBytes(reader) + require.Equal(t, io.EOF, err) + require.Equal(t, len(rawBlockData), segment.Len()) rawBlockLen := int64(len(rawBlockData)) var ( @@ -1510,8 +1508,8 @@ func TestStreamBlocksBatchFromPeerVerifiesBlockErr(t *testing.T) { Return(&rpc.FetchBlocksRawResult_{ Elements: []*rpc.Blocks{ // First foo block intact - &rpc.Blocks{ID: []byte("foo"), Blocks: []*rpc.Block{ - &rpc.Block{Start: start.UnixNano(), Segments: &rpc.Segments{ + {ID: []byte("foo"), Blocks: []*rpc.Block{ + {Start: start.UnixNano(), Segments: &rpc.Segments{ Merged: &rpc.Segment{ Head: rawBlockData[:len(rawBlockData)-1], Tail: []byte{rawBlockData[len(rawBlockData)-1]}, @@ -1519,16 +1517,16 @@ func TestStreamBlocksBatchFromPeerVerifiesBlockErr(t *testing.T) { }}, }}, // First bar block intact, second with error - &rpc.Blocks{ID: []byte("bar"), Blocks: []*rpc.Block{ - &rpc.Block{Start: start.UnixNano(), Segments: &rpc.Segments{ + {ID: []byte("bar"), Blocks: []*rpc.Block{ + {Start: start.UnixNano(), Segments: &rpc.Segments{ Merged: &rpc.Segment{ Head: rawBlockData[:len(rawBlockData)-1], Tail: []byte{rawBlockData[len(rawBlockData)-1]}, }, }}, }}, - &rpc.Blocks{ID: []byte("bar"), Blocks: []*rpc.Block{ - &rpc.Block{Start: start.Add(blockSize).UnixNano(), Err: &rpc.Error{ + {ID: []byte("bar"), Blocks: []*rpc.Block{ + {Start: start.Add(blockSize).UnixNano(), Err: &rpc.Error{ Type: rpc.ErrorType_INTERNAL_ERROR, Message: "an error", }}, @@ -1606,10 +1604,9 @@ func TestStreamBlocksBatchFromPeerVerifiesBlockChecksum(t *testing.T) { require.True(t, ok) segment, err := reader.Segment() require.NoError(t, err) - rawBlockData := make([]byte, segment.Len()) - n, err := reader.Read(rawBlockData) - require.NoError(t, err) - require.Equal(t, len(rawBlockData), n) + rawBlockData, err := xio.ToBytes(reader) + require.Equal(t, io.EOF, err) + require.Equal(t, len(rawBlockData), segment.Len()) rawBlockLen := int64(len(rawBlockData)) var ( @@ -1666,26 +1663,26 @@ func TestStreamBlocksBatchFromPeerVerifiesBlockChecksum(t *testing.T) { Return(&rpc.FetchBlocksRawResult_{ Elements: []*rpc.Blocks{ // valid foo block - &rpc.Blocks{ID: []byte("foo"), Blocks: []*rpc.Block{ - &rpc.Block{Start: start.UnixNano(), Checksum: &validChecksum, Segments: &rpc.Segments{ + {ID: []byte("foo"), Blocks: []*rpc.Block{ + {Start: start.UnixNano(), Checksum: &validChecksum, Segments: &rpc.Segments{ Merged: &rpc.Segment{ Head: head, Tail: tail, }, }}, }}, - &rpc.Blocks{ID: []byte("bar"), Blocks: []*rpc.Block{ + {ID: []byte("bar"), Blocks: []*rpc.Block{ // invalid bar block - 
&rpc.Block{Start: start.UnixNano(), Checksum: &invalidChecksum, Segments: &rpc.Segments{ + {Start: start.UnixNano(), Checksum: &invalidChecksum, Segments: &rpc.Segments{ Merged: &rpc.Segment{ Head: head, Tail: tail, }, }}, }}, - &rpc.Blocks{ID: []byte("bar"), Blocks: []*rpc.Block{ + {ID: []byte("bar"), Blocks: []*rpc.Block{ // valid bar block, no checksum - &rpc.Block{Start: start.Add(blockSize).UnixNano(), Segments: &rpc.Segments{ + {Start: start.Add(blockSize).UnixNano(), Segments: &rpc.Segments{ Merged: &rpc.Segment{ Head: head, Tail: tail, @@ -1769,8 +1766,7 @@ func TestBlocksResultAddBlockFromPeerReadMerged(t *testing.T) { require.NoError(t, err) // Assert block has data - data, err := ioutil.ReadAll(xio.NewSegmentReader(seg)) - require.NoError(t, err) + data, err := xio.ToBytes(xio.NewSegmentReader(seg)) assert.Equal(t, []byte{1, 2, 3}, data) } diff --git a/src/dbnode/encoding/encoding.go b/src/dbnode/encoding/encoding.go index f6623124ec..0a0389b179 100644 --- a/src/dbnode/encoding/encoding.go +++ b/src/dbnode/encoding/encoding.go @@ -43,7 +43,7 @@ func LeadingAndTrailingZeros(v uint64) (int, int) { } // SignExtend sign extends the highest bit of v which has numBits (<=64). -func SignExtend(v uint64, numBits uint) int64 { +func SignExtend(v uint64, numBits uint8) int64 { shift := 64 - numBits return (int64(v) << shift) >> shift } diff --git a/src/dbnode/encoding/encoding_mock.go b/src/dbnode/encoding/encoding_mock.go index 6fa164f95a..ec892c5aaa 100644 --- a/src/dbnode/encoding/encoding_mock.go +++ b/src/dbnode/encoding/encoding_mock.go @@ -25,7 +25,6 @@ package encoding import ( - "io" "reflect" "time" @@ -707,7 +706,7 @@ func (mr *MockReaderIteratorMockRecorder) Close() *gomock.Call { } // Reset mocks base method -func (m *MockReaderIterator) Reset(reader io.Reader, schema namespace.SchemaDescr) { +func (m *MockReaderIterator) Reset(reader xio.Reader64, schema namespace.SchemaDescr) { m.ctrl.T.Helper() m.ctrl.Call(m, "Reset", reader, schema) } @@ -1495,7 +1494,7 @@ func (m *MockDecoder) EXPECT() *MockDecoderMockRecorder { } // Decode mocks base method -func (m *MockDecoder) Decode(reader io.Reader) ReaderIterator { +func (m *MockDecoder) Decode(reader xio.Reader64) ReaderIterator { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Decode", reader) ret0, _ := ret[0].(ReaderIterator) @@ -1508,20 +1507,6 @@ func (mr *MockDecoderMockRecorder) Decode(reader interface{}) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Decode", reflect.TypeOf((*MockDecoder)(nil).Decode), reader) } -// Decode64 mocks base method -func (m *MockDecoder) Decode64(data []byte) ReaderIterator { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Decode64", data) - ret0, _ := ret[0].(ReaderIterator) - return ret0 -} - -// Decode64 indicates an expected call of Decode64 -func (mr *MockDecoderMockRecorder) Decode64(data interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Decode64", reflect.TypeOf((*MockDecoder)(nil).Decode64), data) -} - // MockIStream is a mock of IStream interface type MockIStream struct { ctrl *gomock.Controller @@ -1591,7 +1576,7 @@ func (mr *MockIStreamMockRecorder) ReadByte() *gomock.Call { } // ReadBits mocks base method -func (m *MockIStream) ReadBits(numBits uint) (uint64, error) { +func (m *MockIStream) ReadBits(numBits uint8) (uint64, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ReadBits", numBits) ret0, _ := ret[0].(uint64) @@ -1606,7 +1591,7 @@ func (mr *MockIStreamMockRecorder) ReadBits(numBits interface{}) 
*gomock.Call { } // PeekBits mocks base method -func (m *MockIStream) PeekBits(numBits uint) (uint64, error) { +func (m *MockIStream) PeekBits(numBits uint8) (uint64, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PeekBits", numBits) ret0, _ := ret[0].(uint64) @@ -1635,15 +1620,15 @@ func (mr *MockIStreamMockRecorder) RemainingBitsInCurrentByte() *gomock.Call { } // Reset mocks base method -func (m *MockIStream) Reset(r io.Reader) { +func (m *MockIStream) Reset(reader xio.Reader64) { m.ctrl.T.Helper() - m.ctrl.Call(m, "Reset", r) + m.ctrl.Call(m, "Reset", reader) } // Reset indicates an expected call of Reset -func (mr *MockIStreamMockRecorder) Reset(r interface{}) *gomock.Call { +func (mr *MockIStreamMockRecorder) Reset(reader interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Reset", reflect.TypeOf((*MockIStream)(nil).Reset), r) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Reset", reflect.TypeOf((*MockIStream)(nil).Reset), reader) } // MockOStream is a mock of OStream interface diff --git a/src/dbnode/encoding/istream.go b/src/dbnode/encoding/istream.go index 509a6c36b6..f72a2a7a63 100644 --- a/src/dbnode/encoding/istream.go +++ b/src/dbnode/encoding/istream.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2020 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,169 +21,112 @@ package encoding import ( - "bufio" "io" - "math" -) -// istream encapsulates a readable stream. -type istream struct { - r *bufio.Reader // encoded stream - err error // error encountered - current byte // current byte we are working off of - buffer []byte // buffer for reading in multiple bytes - remaining uint // bits remaining in current to be read -} + "github.com/m3db/m3/src/dbnode/x/xio" +) -// NewIStream creates a new IStream -func NewIStream(reader io.Reader, bufioSize int) IStream { - return &istream{ - r: bufio.NewReaderSize(reader, bufioSize), - // Buffer meant to hold uint64 size of bytes. - buffer: make([]byte, 8), - } +// iStream encapsulates a readable stream based directly on []byte slice and operating in 64 bit words. +type iStream struct { + r xio.Reader64 + err error // error encountered + current uint64 // current uint64 we are working off of + index int // current index within data slice + remaining uint8 // bits remaining in current to be read } -func (is *istream) ReadBit() (Bit, error) { - if is.err != nil { - return 0, is.err - } - if is.remaining == 0 { - if err := is.readByteFromStream(); err != nil { - return 0, err - } - } - return Bit(is.consumeBuffer(1)), nil +// NewIStream creates a new iStream +func NewIStream(reader64 xio.Reader64) IStream { + return &iStream{r: reader64} } -func (is *istream) Read(b []byte) (int, error) { - if is.remaining == 0 { - // Optimized path for when the iterator is already aligned on a byte boundary. Avoids - // all the bit manipulation and ReadByte() function calls. - // Use ReadFull because the bufferedReader may not return the requested number of bytes. 
- return io.ReadFull(is.r, b) - } - - var ( - i int - err error - ) - +func (is *iStream) Read(b []byte) (int, error) { + var i int for ; i < len(b); i++ { - b[i], err = is.ReadByte() + res, err := is.ReadBits(8) if err != nil { return i, err } + b[i] = byte(res) } return i, nil } -func (is *istream) ReadByte() (byte, error) { - if is.err != nil { - return 0, is.err - } - remaining := is.remaining - res := is.consumeBuffer(remaining) - if remaining == 8 { - return res, nil - } - if err := is.readByteFromStream(); err != nil { - return 0, err - } - res = (res << uint(8-remaining)) | is.consumeBuffer(8-remaining) - return res, nil +func (is *iStream) ReadByte() (byte, error) { + res, err := is.ReadBits(8) + return byte(res), err +} + +func (is *iStream) ReadBit() (Bit, error) { + res, err := is.ReadBits(1) + return Bit(res), err } -func (is *istream) ReadBits(numBits uint) (uint64, error) { +func (is *iStream) ReadBits(numBits uint8) (uint64, error) { if is.err != nil { return 0, is.err } - var res uint64 - numBytes := numBits / 8 - if numBytes > 0 { - // Use Read call rather than individual ReadByte calls since it has - // optimized path for when the iterator is aligned on a byte boundary. - bytes := is.buffer[0:numBytes] - _, err := is.Read(bytes) - if err != nil { - return 0, err - } - for _, b := range bytes { - res = (res << 8) | uint64(b) - } + if numBits <= is.remaining { + return is.consumeBuffer(numBits), nil } - - numBits = numBits % 8 - for numBits > 0 { - // This is equivalent to calling is.ReadBit() in a loop but some manual inlining - // has been performed to optimize this loop as its heavily used in the hot path. - if is.remaining == 0 { - if err := is.readByteFromStream(); err != nil { - return 0, err - } - } - - numToRead := numBits - if is.remaining < numToRead { - numToRead = is.remaining - } - bits := is.current >> (8 - numToRead) - is.current <<= numToRead - is.remaining -= numToRead - res = (res << uint64(numToRead)) | uint64(bits) - numBits -= numToRead + res := readBitsInWord(is.current, numBits) + bitsNeeded := numBits - is.remaining + if err := is.readWordFromStream(); err != nil { + return 0, err + } + if is.remaining < bitsNeeded { + return 0, io.EOF } - return res, nil + return res | is.consumeBuffer(bitsNeeded), nil } -func (is *istream) PeekBits(numBits uint) (uint64, error) { - // check the last byte first +func (is *iStream) PeekBits(numBits uint8) (uint64, error) { if numBits <= is.remaining { - return uint64(readBitsInByte(is.current, numBits)), nil + return readBitsInWord(is.current, numBits), nil } - // now check the bytes buffered and read more if necessary. 
- numBitsRead := is.remaining - res := uint64(readBitsInByte(is.current, is.remaining)) - numBytesToRead := int(math.Ceil(float64(numBits-numBitsRead) / 8)) - bytesRead, err := is.r.Peek(numBytesToRead) + res := readBitsInWord(is.current, numBits) + bitsNeeded := numBits - is.remaining + next, bytes, err := is.r.Peek64() if err != nil { return 0, err } - for i := 0; i < numBytesToRead-1; i++ { - res = (res << 8) | uint64(bytesRead[i]) - numBitsRead += 8 + rem := 8 * bytes + if rem < bitsNeeded { + return 0, io.EOF } - remainder := readBitsInByte(bytesRead[numBytesToRead-1], numBits-numBitsRead) - res = (res << (numBits - numBitsRead)) | uint64(remainder) - return res, nil + return res | readBitsInWord(next, bitsNeeded), nil } -func (is *istream) RemainingBitsInCurrentByte() uint { - return is.remaining +func (is *iStream) RemainingBitsInCurrentByte() uint { + return uint(is.remaining % 8) } -// readBitsInByte reads numBits in byte b. -func readBitsInByte(b byte, numBits uint) byte { - return b >> (8 - numBits) +// readBitsInWord reads the first numBits in word w. +func readBitsInWord(w uint64, numBits uint8) uint64 { + return w >> (64 - numBits) } // consumeBuffer consumes numBits in is.current. -func (is *istream) consumeBuffer(numBits uint) byte { - res := readBitsInByte(is.current, numBits) +func (is *iStream) consumeBuffer(numBits uint8) uint64 { + res := readBitsInWord(is.current, numBits) is.current <<= numBits is.remaining -= numBits return res } -func (is *istream) readByteFromStream() error { - is.current, is.err = is.r.ReadByte() - is.remaining = 8 - return is.err +func (is *iStream) readWordFromStream() error { + current, bytes, err := is.r.Read64() + is.current = current + is.remaining = 8 * bytes + is.err = err + + return err } -func (is *istream) Reset(r io.Reader) { - is.r.Reset(r) +func (is *iStream) Reset(reader xio.Reader64) { is.err = nil is.current = 0 is.remaining = 0 + is.index = 0 + is.r = reader } diff --git a/src/dbnode/encoding/istream64.go b/src/dbnode/encoding/istream64.go deleted file mode 100644 index d754bbee42..0000000000 --- a/src/dbnode/encoding/istream64.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright (c) 2020 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package encoding - -import ( - "encoding/binary" - "io" - "io/ioutil" -) - -// istream64 encapsulates a readable stream based directly on []byte slice and operating in 64 bit words. 
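// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): how a caller drives the
// consolidated 64-bit IStream introduced above. NewIStream, ReadBits, PeekBits
// and xio.NewBytesReader64 are taken from the diff; the example function itself
// is hypothetical. A Reader64 returns up to eight bytes at a time packed into
// the high bits of a uint64 plus a byte count, and the stream serves bit reads
// out of that buffered word, refilling one word at a time.
// ---------------------------------------------------------------------------
package example

import (
	"fmt"

	"github.com/m3db/m3/src/dbnode/encoding"
	"github.com/m3db/m3/src/dbnode/x/xio"
)

func exampleReadBits() error {
	// 0xCA = 1100 1010, so two 4-bit reads should yield 0xC and then 0xA.
	is := encoding.NewIStream(xio.NewBytesReader64([]byte{0xCA, 0xFE}))

	hi, err := is.ReadBits(4) // consumes the top 4 bits of the buffered word
	if err != nil {
		return err
	}
	lo, err := is.ReadBits(4)
	if err != nil {
		return err
	}

	next, err := is.PeekBits(8) // peeks at 0xFE without consuming it
	if err != nil {
		return err
	}

	fmt.Printf("%#x %#x %#x\n", hi, lo, next) // 0xc 0xa 0xfe
	return nil
}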
-type istream64 struct { - data []byte // encoded data - err error // error encountered - current uint64 // current uint64 we are working off of - index int // current index within data slice - remaining uint // bits remaining in current to be read -} - -// NewIStream64 creates a new istream64 -func NewIStream64(data []byte) IStream { - return &istream64{data: data} -} - -func (is *istream64) Read(b []byte) (int, error) { - var i int - for ; i < len(b); i++ { - res, err := is.ReadBits(8) - if err != nil { - return i, err - } - b[i] = byte(res) - } - return i, nil -} - -func (is *istream64) ReadByte() (byte, error) { - res, err := is.ReadBits(8) - return byte(res), err -} - -func (is *istream64) ReadBit() (Bit, error) { - res, err := is.ReadBits(1) - return Bit(res), err -} - -func (is *istream64) ReadBits(numBits uint) (uint64, error) { - if is.err != nil { - return 0, is.err - } - if numBits <= is.remaining { - return is.consumeBuffer(numBits), nil - } - res := readBitsInWord(is.current, numBits) - bitsNeeded := numBits - is.remaining - if err := is.readWordFromStream(); err != nil { - return 0, err - } - if is.remaining < bitsNeeded { - return 0, io.EOF - } - return res | is.consumeBuffer(bitsNeeded), nil -} - -func (is *istream64) PeekBits(numBits uint) (uint64, error) { - if numBits <= is.remaining { - return readBitsInWord(is.current, numBits), nil - } - res := readBitsInWord(is.current, numBits) - bitsNeeded := numBits - is.remaining - next, rem, err := is.peekWordFromStream() - if err != nil { - return 0, err - } - if rem < bitsNeeded { - return 0, io.EOF - } - return res | readBitsInWord(next, bitsNeeded), nil -} - -func (is *istream64) RemainingBitsInCurrentByte() uint { - return is.remaining % 8 -} - -// readBitsInWord reads the first numBits in word w. -func readBitsInWord(w uint64, numBits uint) uint64 { - return w >> (64 - numBits) -} - -// consumeBuffer consumes numBits in is.current. -func (is *istream64) consumeBuffer(numBits uint) uint64 { - res := readBitsInWord(is.current, numBits) - is.current <<= numBits - is.remaining -= numBits - return res -} - -func (is *istream64) peekWordFromStream() (uint64, uint, error) { - if is.index+8 <= len(is.data) { - // NB: this compiles to a single 64 bit load followed by a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - return binary.BigEndian.Uint64(is.data[is.index:]), 64, nil - } - if is.index >= len(is.data) { - return 0, 0, io.EOF - } - var res uint64 - var rem uint - for i := is.index; i < len(is.data); i++ { - res = (res << 8) | uint64(is.data[i]) - rem += 8 - } - return res << (64 - rem), rem, nil -} - -func (is *istream64) readWordFromStream() error { - if is.index+8 <= len(is.data) { - // NB: this compiles to a single 64 bit load followed by a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - is.current = binary.BigEndian.Uint64(is.data[is.index:]) - is.remaining = 64 - is.index += 8 - return nil - } - if is.index >= len(is.data) { - is.current = 0 - is.err = io.EOF - return is.err - } - var res uint64 - var rem uint - for ; is.index < len(is.data); is.index++ { - res = (res << 8) | uint64(is.data[is.index]) - rem += 8 - } - is.remaining = rem - is.current = res << (64 - rem) - return nil -} - -func (is *istream64) Reset(r io.Reader) { - is.err = nil - is.current = 0 - is.remaining = 0 - is.index = 0 - if r == nil { - is.data = nil - is.err = nil - return - } - //FIXME: this is slow and should accept a slice of bytes directly as an argument instead. 
- is.data, is.err = ioutil.ReadAll(r) -} diff --git a/src/dbnode/encoding/istream64_test.go b/src/dbnode/encoding/istream64_test.go deleted file mode 100644 index 83d0c9e1df..0000000000 --- a/src/dbnode/encoding/istream64_test.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright (c) 2020 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package encoding - -import ( - "bytes" - "io" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestIStream64ReadBits(t *testing.T) { - byteStream := []byte{ - 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, - 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, - } - - is := NewIStream64(byteStream) - numBits := []uint{1, 3, 4, 8, 7, 2, 64, 64} - var res []uint64 - for _, v := range numBits { - read, err := is.ReadBits(v) - require.NoError(t, err) - res = append(res, read) - } - expected := []uint64{0x1, 0x4, 0xa, 0xfe, 0x7e, 0x3, 0x1234567890abcdef, 0x1} - require.Equal(t, expected, res) - - _, err := is.ReadBits(8) - require.EqualError(t, err, io.EOF.Error()) -} - -func TestIStream64ReadByte(t *testing.T) { - byteStream := []byte{ - 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, - 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, - } - - is := NewIStream64(byteStream) - var res []byte - for range byteStream { - read, err := is.ReadByte() - require.NoError(t, err) - res = append(res, read) - } - require.Equal(t, byteStream, res) - - _, err := is.ReadByte() - require.EqualError(t, err, io.EOF.Error()) -} - -func TestIStream64PeekBitsSuccess(t *testing.T) { - byteStream := []byte{0xa9, 0xfe, 0xfe, 0xdf, 0x9b, 0x57, 0x21, 0xf1} - o := NewIStream64(byteStream) - is := o.(*istream64) - inputs := []struct { - numBits uint - expected uint64 - }{ - {0, 0}, - {1, 0x1}, - {8, 0xa9}, - {10, 0x2a7}, - {13, 0x153f}, - {16, 0xa9fe}, - {32, 0xa9fefedf}, - {64, 0xa9fefedf9b5721f1}, - } - for _, input := range inputs { - res, err := is.PeekBits(input.numBits) - require.NoError(t, err) - require.Equal(t, input.expected, res) - } - require.Equal(t, uint64(0), is.current) - require.Equal(t, 0, int(is.remaining)) -} - -func TestIStream64PeekBitsError(t *testing.T) { - byteStream := []byte{0x1, 0x2} - is := NewIStream64(byteStream) - res, err := is.PeekBits(20) - require.EqualError(t, err, io.EOF.Error()) - require.Equal(t, uint64(0), res) -} - -func TestIStream64ReadAfterPeekBits(t *testing.T) { - byteStream := []byte{0xab, 0xcd} - is := NewIStream64(byteStream) - res, err := 
is.PeekBits(10) - require.NoError(t, err) - require.Equal(t, uint64(0x2af), res) - _, err = is.PeekBits(20) - require.EqualError(t, err, io.EOF.Error()) - - inputs := []struct { - numBits uint - expected uint64 - }{ - {2, 0x2}, - {9, 0x15e}, - } - for _, input := range inputs { - res, err := is.ReadBits(input.numBits) - require.NoError(t, err) - require.Equal(t, input.expected, res) - } - _, err = is.ReadBits(8) - require.EqualError(t, err, io.EOF.Error()) -} - -func TestIStream64PeekAfterReadBits(t *testing.T) { - byteStream := []byte{0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA} - is := NewIStream64(byteStream) - - res, err := is.ReadBits(16) - require.NoError(t, err) - require.Equal(t, uint64(0x102), res) - - res, err = is.PeekBits(63) - require.NoError(t, err) - require.Equal(t, uint64(0x30405060708090A)>>1, res) - - res, err = is.PeekBits(64) - require.NoError(t, err) - require.Equal(t, uint64(0x30405060708090A), res) - - res, err = is.ReadBits(1) - require.NoError(t, err) - require.Equal(t, uint64(0), res) - - res, err = is.PeekBits(63) - require.NoError(t, err) - require.Equal(t, uint64(0x30405060708090A), res) - - res, err = is.PeekBits(64) - require.EqualError(t, err, io.EOF.Error()) -} - -func TestIStream64RemainingBitsInCurrentByte(t *testing.T) { - byteStream := []byte{0xff, 0, 0x42} - is := NewIStream64(byteStream) - for _, b := range byteStream { - for i := 0; i < 8; i++ { - var expected uint - if i > 0 { - expected = uint(8 - i) - } - require.Equal(t, expected, is.RemainingBitsInCurrentByte()) - bit, err := is.ReadBit() - require.NoError(t, err) - expectedBit := Bit(b>>i)&1 - require.Equal(t, expectedBit, bit) - } - } -} - -func TestIStream64ResetIStream(t *testing.T) { - o := NewIStream64([]byte{0xff}) - is := o.(*istream64) - is.ReadBits(8) - is.ReadBits(1) - is.Reset(bytes.NewReader(nil)) - require.Equal(t, uint64(0), is.current) - require.Equal(t, uint(0), is.remaining) - require.Equal(t, 0, is.index) - require.NoError(t, is.err) -} diff --git a/src/dbnode/encoding/istream_test.go b/src/dbnode/encoding/istream_test.go index 4841590b01..a80c744094 100644 --- a/src/dbnode/encoding/istream_test.go +++ b/src/dbnode/encoding/istream_test.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2020 Uber Technologies, Inc. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,21 +21,22 @@ package encoding import ( - "bytes" + "io" "testing" "github.com/stretchr/testify/require" + + "github.com/m3db/m3/src/dbnode/x/xio" ) -func TestReadBits(t *testing.T) { +func TestIStreamReadBits(t *testing.T) { byteStream := []byte{ 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, } - o := NewIStream(bytes.NewReader(byteStream), 16) - is := o.(*istream) - numBits := []uint{1, 3, 4, 8, 7, 2, 64, 64} + is := NewIStream(xio.NewBytesReader64(byteStream)) + numBits := []byte{1, 3, 4, 8, 7, 2, 64, 64} var res []uint64 for _, v := range numBits { read, err := is.ReadBits(v) @@ -46,15 +47,34 @@ func TestReadBits(t *testing.T) { require.Equal(t, expected, res) _, err := is.ReadBits(8) - require.Error(t, err) + require.EqualError(t, err, io.EOF.Error()) } -func TestPeekBitsSuccess(t *testing.T) { +func TestIStreamReadByte(t *testing.T) { + byteStream := []uint8{ + 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, + 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, + } + + is := NewIStream(xio.NewBytesReader64(byteStream)) + var res []byte + for range byteStream { + read, err := is.ReadByte() + require.NoError(t, err) + res = append(res, read) + } + require.Equal(t, byteStream, res) + + _, err := is.ReadByte() + require.EqualError(t, err, io.EOF.Error()) +} + +func TestIStreamPeekBitsSuccess(t *testing.T) { byteStream := []byte{0xa9, 0xfe, 0xfe, 0xdf, 0x9b, 0x57, 0x21, 0xf1} - o := NewIStream(bytes.NewReader(byteStream), 16) - is := o.(*istream) + o := NewIStream(xio.NewBytesReader64(byteStream)) + is := o.(*iStream) inputs := []struct { - numBits uint + numBits uint8 expected uint64 }{ {0, 0}, @@ -71,31 +91,29 @@ func TestPeekBitsSuccess(t *testing.T) { require.NoError(t, err) require.Equal(t, input.expected, res) } - require.Equal(t, byte(0), is.current) + require.Equal(t, uint64(0), is.current) require.Equal(t, 0, int(is.remaining)) } -func TestPeekBitsError(t *testing.T) { +func TestIStreamPeekBitsError(t *testing.T) { byteStream := []byte{0x1, 0x2} - o := NewIStream(bytes.NewReader(byteStream), 16) - is := o.(*istream) + is := NewIStream(xio.NewBytesReader64(byteStream)) res, err := is.PeekBits(20) - require.Error(t, err) + require.EqualError(t, err, io.EOF.Error()) require.Equal(t, uint64(0), res) } -func TestReadAfterPeekBits(t *testing.T) { +func TestIStreamReadAfterPeekBits(t *testing.T) { byteStream := []byte{0xab, 0xcd} - o := NewIStream(bytes.NewReader(byteStream), 16) - is := o.(*istream) + is := NewIStream(xio.NewBytesReader64(byteStream)) res, err := is.PeekBits(10) require.NoError(t, err) require.Equal(t, uint64(0x2af), res) _, err = is.PeekBits(20) - require.Error(t, err) + require.EqualError(t, err, io.EOF.Error()) inputs := []struct { - numBits uint + numBits uint8 expected uint64 }{ {2, 0x2}, @@ -107,14 +125,63 @@ func TestReadAfterPeekBits(t *testing.T) { require.Equal(t, input.expected, res) } _, err = is.ReadBits(8) - require.Error(t, err) + require.EqualError(t, err, io.EOF.Error()) } -func TestResetIStream(t *testing.T) { - o := NewIStream(bytes.NewReader(nil), 16) - is := o.(*istream) - is.ReadBits(1) - is.Reset(bytes.NewReader(nil)) - require.Equal(t, byte(0), is.current) - require.Equal(t, 0, int(is.remaining)) +func TestIStreamPeekAfterReadBits(t *testing.T) { + byteStream := []byte{0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 
0x7, 0x8, 0x9, 0xA} + is := NewIStream(xio.NewBytesReader64(byteStream)) + + res, err := is.ReadBits(16) + require.NoError(t, err) + require.Equal(t, uint64(0x102), res) + + res, err = is.PeekBits(63) + require.NoError(t, err) + require.Equal(t, uint64(0x30405060708090A)>>1, res) + + res, err = is.PeekBits(64) + require.NoError(t, err) + require.Equal(t, uint64(0x30405060708090A), res) + + res, err = is.ReadBits(1) + require.NoError(t, err) + require.Equal(t, uint64(0), res) + + res, err = is.PeekBits(63) + require.NoError(t, err) + require.Equal(t, uint64(0x30405060708090A), res) + + res, err = is.PeekBits(64) + require.EqualError(t, err, io.EOF.Error()) +} + +func TestIStreamRemainingBitsInCurrentByte(t *testing.T) { + byteStream := []byte{0xff, 0, 0x42} + is := NewIStream(xio.NewBytesReader64(byteStream)) + for _, b := range byteStream { + for i := 0; i < 8; i++ { + var expected uint + if i > 0 { + expected = uint(8 - i) + } + require.Equal(t, expected, is.RemainingBitsInCurrentByte()) + bit, err := is.ReadBit() + require.NoError(t, err) + expectedBit := Bit(b>>i)&1 + require.Equal(t, expectedBit, bit) + } + } +} + +func TestIStreamReset(t *testing.T) { + o := NewIStream(xio.NewBytesReader64([]byte{0xff})) + is := o.(*iStream) + _, _ = is.ReadBits(8) + _, _ = is.ReadBits(1) + is.Reset(xio.NewBytesReader64(nil)) + require.Equal(t, uint64(0), is.current) + require.Equal(t, uint8(0), is.remaining) + require.Equal(t, 0, is.index) + require.NoError(t, is.err) } diff --git a/src/dbnode/encoding/iterator_test.go b/src/dbnode/encoding/iterator_test.go index f0a33c9357..ec6f4b065d 100644 --- a/src/dbnode/encoding/iterator_test.go +++ b/src/dbnode/encoding/iterator_test.go @@ -21,7 +21,6 @@ package encoding import ( - "io" "time" "github.com/m3db/m3/src/dbnode/namespace" @@ -44,7 +43,7 @@ type testIterator struct { closed bool err error onNext func(oldIdx, newIdx int) - onReset func(r io.Reader, descr namespace.SchemaDescr) + onReset func(r xio.Reader64, descr namespace.SchemaDescr) } func newTestIterator(values []testValue) ReaderIterator { @@ -72,7 +71,7 @@ func (it *testIterator) Current() (ts.Datapoint, xtime.Unit, ts.Annotation) { } v := it.values[idx] dp := ts.Datapoint{Timestamp: v.t, TimestampNanos: xtime.ToUnixNano(v.t), Value: v.value} - return dp, v.unit, ts.Annotation(v.annotation) + return dp, v.unit, v.annotation } func (it *testIterator) Err() error { @@ -83,7 +82,7 @@ func (it *testIterator) Close() { it.closed = true } -func (it *testIterator) Reset(r io.Reader, descr namespace.SchemaDescr) { +func (it *testIterator) Reset(r xio.Reader64, descr namespace.SchemaDescr) { it.onReset(r, descr) } @@ -101,7 +100,7 @@ type testMultiIterator struct { closed bool err error onNext func(oldIdx, newIdx int) - onReset func(r io.Reader) + onReset func(r xio.Reader64) } func newTestMultiIterator(values []testValue, err error) MultiReaderIterator { @@ -129,7 +128,7 @@ func (it *testMultiIterator) Current() (ts.Datapoint, xtime.Unit, ts.Annotation) } v := it.values[idx] dp := ts.Datapoint{Timestamp: v.t, TimestampNanos: xtime.ToUnixNano(v.t), Value: v.value} - return dp, v.unit, ts.Annotation(v.annotation) + return dp, v.unit, v.annotation } func (it *testMultiIterator) Err() error { @@ -215,10 +214,11 @@ func (it *testReaderSliceOfSlicesIterator) arrayIdx() int { } type testNoopReader struct { - n int // return for "n", also required so that each struct construction has its address + n byte // return for "n", also required so that each struct construction has its address } -func (r 
*testNoopReader) Read(p []byte) (int, error) { return r.n, nil } +func (r *testNoopReader) Read64() (word uint64, n byte, err error) { return 0, r.n, nil } +func (r *testNoopReader) Peek64() (word uint64, n byte, err error) { return 0, r.n, nil } func (r *testNoopReader) Segment() (ts.Segment, error) { return ts.Segment{}, nil } func (r *testNoopReader) Reset(ts.Segment) {} func (r *testNoopReader) Finalize() {} diff --git a/src/dbnode/encoding/m3tsz/decoder.go b/src/dbnode/encoding/m3tsz/decoder.go index ce1351a524..e9d0d38e06 100644 --- a/src/dbnode/encoding/m3tsz/decoder.go +++ b/src/dbnode/encoding/m3tsz/decoder.go @@ -21,9 +21,8 @@ package m3tsz import ( - "io" - "github.com/m3db/m3/src/dbnode/encoding" + "github.com/m3db/m3/src/dbnode/x/xio" ) type decoder struct { @@ -40,11 +39,6 @@ func NewDecoder(intOptimized bool, opts encoding.Options) encoding.Decoder { } // Decode decodes the encoded data captured by the reader. -func (dec *decoder) Decode(reader io.Reader) encoding.ReaderIterator { +func (dec *decoder) Decode(reader xio.Reader64) encoding.ReaderIterator { return NewReaderIterator(reader, dec.intOptimized, dec.opts) } - -// Decode decodes the encoded slice of bytes. -func (dec *decoder) Decode64(data []byte) encoding.ReaderIterator { - return NewReaderIterator64(data, dec.intOptimized, dec.opts) -} diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index ecc18ac1f1..d595a9012f 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -21,12 +21,12 @@ package m3tsz import ( - "bytes" "encoding/base64" "math/rand" "testing" "github.com/m3db/m3/src/dbnode/encoding" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/stretchr/testify/require" ) @@ -35,7 +35,7 @@ import ( func BenchmarkM3TSZDecode(b *testing.B) { var ( encodingOpts = encoding.NewOptions() - reader = bytes.NewReader(nil) + reader = xio.NewBytesReader64(nil) seriesRun = prepareSampleSeriesRun(b) ) @@ -49,24 +49,6 @@ func BenchmarkM3TSZDecode(b *testing.B) { require.NoError(b, iter.Err()) } } - -// BenchmarkM3TSZDecode64-12 17222 69151 ns/op -func BenchmarkM3TSZDecode64(b *testing.B) { - var ( - encodingOpts = encoding.NewOptions() - seriesRun = prepareSampleSeriesRun(b) - ) - - b.ResetTimer() - for i := 0; i < b.N; i++ { - iter := NewReaderIterator64(seriesRun[i], DefaultIntOptimizationEnabled, encodingOpts) - for iter.Next() { - _, _, _ = iter.Current() - } - require.NoError(b, iter.Err()) - } -} - func prepareSampleSeriesRun(b *testing.B) [][]byte { var ( rnd = rand.New(rand.NewSource(42)) diff --git a/src/dbnode/encoding/m3tsz/encoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/encoder_benchmark_test.go index 67c62b4660..02e5cf9f68 100644 --- a/src/dbnode/encoding/m3tsz/encoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/encoder_benchmark_test.go @@ -21,7 +21,6 @@ package m3tsz import ( - "bytes" "encoding/base64" "math/rand" "testing" @@ -31,6 +30,7 @@ import ( "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" xtime "github.com/m3db/m3/src/x/time" ) @@ -75,7 +75,7 @@ func prepareSampleSeriesEncRun(b *testing.B) [][]ts.Datapoint { sampleSeries = make([][]byte, 0, len(sampleSeriesBase64)) seriesRun = make([][]ts.Datapoint, b.N) encodingOpts = encoding.NewOptions() - reader = bytes.NewReader(nil) + reader = xio.NewBytesReader64(nil) ) for _, b64 := range sampleSeriesBase64 { diff --git 
a/src/dbnode/encoding/m3tsz/encoder_test.go b/src/dbnode/encoding/m3tsz/encoder_test.go index c9496cf893..4dde03f865 100644 --- a/src/dbnode/encoding/m3tsz/encoder_test.go +++ b/src/dbnode/encoding/m3tsz/encoder_test.go @@ -21,12 +21,14 @@ package m3tsz import ( + "io" "math/rand" "testing" "time" "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/checked" "github.com/m3db/m3/src/x/context" xtime "github.com/m3db/m3/src/x/time" @@ -160,10 +162,11 @@ func getBytes(t *testing.T, e encoding.Encoder) []byte { if !ok { return nil } - var b [1000]byte - n, err := r.Read(b[:]) - require.NoError(t, err) - return b[:n] + + bytes, err := xio.ToBytes(r) + assert.Equal(t, io.EOF, err) + + return bytes } func TestWriteTimeUnit(t *testing.T) { diff --git a/src/dbnode/encoding/m3tsz/float_encoder_iterator.go b/src/dbnode/encoding/m3tsz/float_encoder_iterator.go index b27f959ac2..a8ad564b1b 100644 --- a/src/dbnode/encoding/m3tsz/float_encoder_iterator.go +++ b/src/dbnode/encoding/m3tsz/float_encoder_iterator.go @@ -134,7 +134,7 @@ func (eit *FloatEncoderAndIterator) readNextFloat(stream encoding.IStream) error cb = (cb << 1) | nextCB if cb == opcodeContainedValueXOR { previousLeading, previousTrailing := encoding.LeadingAndTrailingZeros(eit.PrevXOR) - numMeaningfulBits := uint(64 - previousLeading - previousTrailing) + numMeaningfulBits := uint8(64 - previousLeading - previousTrailing) meaningfulBits, err := stream.ReadBits(numMeaningfulBits) if err != nil { return err @@ -153,7 +153,7 @@ func (eit *FloatEncoderAndIterator) readNextFloat(stream encoding.IStream) error numLeadingZeros := (numLeadingZeroesAndNumMeaningfulBits & bits12To6Mask) >> 6 numMeaningfulBits := (numLeadingZeroesAndNumMeaningfulBits & bits6To0Mask) + 1 - meaningfulBits, err := stream.ReadBits(uint(numMeaningfulBits)) + meaningfulBits, err := stream.ReadBits(uint8(numMeaningfulBits)) if err != nil { return err } diff --git a/src/dbnode/encoding/m3tsz/iterator.go b/src/dbnode/encoding/m3tsz/iterator.go index 07517d4f09..09dc2fd7ae 100644 --- a/src/dbnode/encoding/m3tsz/iterator.go +++ b/src/dbnode/encoding/m3tsz/iterator.go @@ -21,12 +21,12 @@ package m3tsz import ( - "io" "math" "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" xtime "github.com/m3db/m3/src/x/time" ) @@ -51,18 +51,13 @@ type readerIterator struct { } // NewReaderIterator returns a new iterator for a given reader -func NewReaderIterator(reader io.Reader, intOptimized bool, opts encoding.Options) encoding.ReaderIterator { +func NewReaderIterator( + reader xio.Reader64, + intOptimized bool, + opts encoding.Options, +) encoding.ReaderIterator { return &readerIterator{ - is: encoding.NewIStream(reader, opts.IStreamReaderSizeM3TSZ()), - opts: opts, - tsIterator: NewTimestampIterator(opts, false), - intOptimized: intOptimized, - } -} - -func NewReaderIterator64(data []byte, intOptimized bool, opts encoding.Options) encoding.ReaderIterator { - return &readerIterator{ - is: encoding.NewIStream64(data), + is: encoding.NewIStream(reader), opts: opts, tsIterator: NewTimestampIterator(opts, false), intOptimized: intOptimized, @@ -174,10 +169,10 @@ func (it *readerIterator) readIntValDiff() { sign = 1.0 } - it.intVal += sign * float64(it.readBits(uint(it.sig))) + it.intVal += sign * float64(it.readBits(it.sig)) } -func (it *readerIterator) readBits(numBits uint) uint64 { +func (it 
*readerIterator) readBits(numBits uint8) uint64 { if !it.hasNext() { return 0 } @@ -227,7 +222,7 @@ func (it *readerIterator) hasNext() bool { } // Reset resets the ReadIterator for reuse. -func (it *readerIterator) Reset(reader io.Reader, schema namespace.SchemaDescr) { +func (it *readerIterator) Reset(reader xio.Reader64, schema namespace.SchemaDescr) { it.is.Reset(reader) it.tsIterator = NewTimestampIterator(it.opts, it.tsIterator.SkipMarkers) it.err = nil diff --git a/src/dbnode/encoding/m3tsz/iterator_test.go b/src/dbnode/encoding/m3tsz/iterator_test.go index d5550e59dd..cc09a2d093 100644 --- a/src/dbnode/encoding/m3tsz/iterator_test.go +++ b/src/dbnode/encoding/m3tsz/iterator_test.go @@ -21,19 +21,19 @@ package m3tsz import ( - "bytes" "testing" "time" "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" xtime "github.com/m3db/m3/src/x/time" "github.com/stretchr/testify/require" ) func getTestReaderIterator(rawBytes []byte) *readerIterator { - return NewReaderIterator(bytes.NewReader(rawBytes), false, encoding.NewOptions()).(*readerIterator) + return NewReaderIterator(xio.NewBytesReader64(rawBytes), false, encoding.NewOptions()).(*readerIterator) } func TestReaderIteratorReadNextTimestamp(t *testing.T) { @@ -57,7 +57,7 @@ func TestReaderIteratorReadNextTimestamp(t *testing.T) { } for _, input := range inputs { - stream := encoding.NewIStream(bytes.NewBuffer(input.rawBytes), 16) + stream := encoding.NewIStream(xio.NewBytesReader64(input.rawBytes)) it := NewTimestampIterator(encoding.NewOptions(), false) it.TimeUnit = input.timeUnit @@ -68,7 +68,7 @@ func TestReaderIteratorReadNextTimestamp(t *testing.T) { require.Equal(t, input.expectedTimeDelta, it.PrevTimeDelta) } - stream := encoding.NewIStream(bytes.NewBuffer([]byte{0x1}), 16) + stream := encoding.NewIStream(xio.NewBytesReader64([]byte{0x1})) it := NewTimestampIterator(encoding.NewOptions(), false) err := it.readNextTimestamp(stream) require.Error(t, err) @@ -127,7 +127,7 @@ func TestReaderIteratorReadAnnotation(t *testing.T) { }, } for _, input := range inputs { - stream := encoding.NewIStream(bytes.NewBuffer(input.rawBytes), 16) + stream := encoding.NewIStream(xio.NewBytesReader64(input.rawBytes)) it := NewTimestampIterator(encoding.NewOptions(), false) err := it.readAnnotation(stream) @@ -157,7 +157,7 @@ func TestReaderIteratorReadTimeUnit(t *testing.T) { }, } for _, input := range inputs { - stream := encoding.NewIStream(bytes.NewBuffer(input.rawBytes), 16) + stream := encoding.NewIStream(xio.NewBytesReader64(input.rawBytes)) it := NewTimestampIterator(encoding.NewOptions(), false) it.TimeUnit = input.timeUnit diff --git a/src/dbnode/encoding/m3tsz/roundtrip_test.go b/src/dbnode/encoding/m3tsz/roundtrip_test.go index a404259312..9031f43048 100644 --- a/src/dbnode/encoding/m3tsz/roundtrip_test.go +++ b/src/dbnode/encoding/m3tsz/roundtrip_test.go @@ -22,13 +22,11 @@ package m3tsz import ( "bytes" - "io/ioutil" "math" "math/rand" "testing" "time" - "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/testgen" "github.com/m3db/m3/src/dbnode/ts" "github.com/m3db/m3/src/x/context" @@ -99,13 +97,11 @@ func TestIntOverflow(t *testing.T) { } func testRoundTrip(t *testing.T, input []ts.Datapoint) { - validateRoundTrip(t, input, true, false) - validateRoundTrip(t, input, false, false) - validateRoundTrip(t, input, true, true) - validateRoundTrip(t, input, false, true) + validateRoundTrip(t, input, true) + validateRoundTrip(t, input, false) } -func 
validateRoundTrip(t *testing.T, input []ts.Datapoint, intOpt bool, useDecode64 bool) { +func validateRoundTrip(t *testing.T, input []ts.Datapoint, intOpt bool) { ctx := context.NewContext() defer ctx.Close() @@ -150,14 +146,7 @@ func validateRoundTrip(t *testing.T, input []ts.Datapoint, intOpt bool, useDecod stream, ok := encoder.Stream(ctx) require.True(t, ok) - var it encoding.ReaderIterator - if useDecode64 { - data, err := ioutil.ReadAll(stream) - require.NoError(t, err) - it = decoder.Decode64(data) - } else { - it = decoder.Decode(stream) - } + it := decoder.Decode(stream) defer it.Close() i := 0 diff --git a/src/dbnode/encoding/m3tsz/timestamp_iterator.go b/src/dbnode/encoding/m3tsz/timestamp_iterator.go index 8e436fb65f..415733c0fb 100644 --- a/src/dbnode/encoding/m3tsz/timestamp_iterator.go +++ b/src/dbnode/encoding/m3tsz/timestamp_iterator.go @@ -41,6 +41,8 @@ type TimestampIterator struct { Opts encoding.Options + markerEncodingScheme encoding.MarkerEncodingScheme + TimeUnitChanged bool Done bool @@ -49,9 +51,8 @@ type TimestampIterator struct { // for situations where looking ahead is not safe. SkipMarkers bool - numValueBits uint - numBits uint - markerEncodingScheme encoding.MarkerEncodingScheme + numValueBits uint8 + numBits uint8 } // NewTimestampIterator creates a new TimestampIterator. @@ -60,8 +61,8 @@ func NewTimestampIterator(opts encoding.Options, skipMarkers bool) TimestampIter return TimestampIterator{ Opts: opts, SkipMarkers: skipMarkers, - numValueBits: uint(mes.NumValueBits()), - numBits: uint(mes.NumOpcodeBits() + mes.NumValueBits()), + numValueBits: uint8(mes.NumValueBits()), + numBits: uint8(mes.NumOpcodeBits() + mes.NumValueBits()), markerEncodingScheme: mes, } } @@ -254,12 +255,12 @@ func (it *TimestampIterator) readDeltaOfDelta( cb = (cb << 1) | nextCB if cb == buckets[i].Opcode() { - dodBits, err := stream.ReadBits(uint(buckets[i].NumValueBits())) + dodBits, err := stream.ReadBits(uint8(buckets[i].NumValueBits())) if err != nil { return 0, err } - dod := encoding.SignExtend(dodBits, uint(buckets[i].NumValueBits())) + dod := encoding.SignExtend(dodBits, uint8(buckets[i].NumValueBits())) timeUnit, err := it.TimeUnit.Value() if err != nil { return 0, nil @@ -269,7 +270,7 @@ func (it *TimestampIterator) readDeltaOfDelta( } } - numValueBits := uint(tes.DefaultBucket().NumValueBits()) + numValueBits := uint8(tes.DefaultBucket().NumValueBits()) dodBits, err := stream.ReadBits(numValueBits) if err != nil { return 0, err @@ -316,7 +317,7 @@ func (it *TimestampIterator) readVarint(stream encoding.IStream) (int, error) { return int(res), err } -func (it *TimestampIterator) tryPeekBits(stream encoding.IStream, numBits uint) (uint64, bool) { +func (it *TimestampIterator) tryPeekBits(stream encoding.IStream, numBits uint8) (uint64, bool) { res, err := stream.PeekBits(numBits) if err != nil { return 0, false diff --git a/src/dbnode/encoding/multi_reader_iterator_test.go b/src/dbnode/encoding/multi_reader_iterator_test.go index a82ab1549f..2ee2e44612 100644 --- a/src/dbnode/encoding/multi_reader_iterator_test.go +++ b/src/dbnode/encoding/multi_reader_iterator_test.go @@ -22,7 +22,6 @@ package encoding import ( "fmt" - "io" "testing" "time" @@ -287,7 +286,7 @@ func assertTestMultiReaderIterator( test testMultiReader, ) { type readerEntries struct { - reader io.Reader + reader xio.Reader64 entries *testMultiReaderEntries } @@ -315,8 +314,8 @@ func assertTestMultiReaderIterator( } var testIterators []*testIterator - var iteratorAlloc func(reader io.Reader, descr 
namespace.SchemaDescr) ReaderIterator - iteratorAlloc = func(reader io.Reader, descr namespace.SchemaDescr) ReaderIterator { + var iteratorAlloc func(reader xio.Reader64, descr namespace.SchemaDescr) ReaderIterator + iteratorAlloc = func(reader xio.Reader64, descr namespace.SchemaDescr) ReaderIterator { for i := range entriesByReader { if reader != entriesByReader[i].reader { continue @@ -331,7 +330,7 @@ func assertTestMultiReaderIterator( } } } - it.onReset = func(r io.Reader, descr namespace.SchemaDescr) { + it.onReset = func(r xio.Reader64, descr namespace.SchemaDescr) { newIt := iteratorAlloc(r, descr).(*testIterator) *it = *newIt // We close this here as we never actually use this iterator diff --git a/src/dbnode/encoding/null.go b/src/dbnode/encoding/null.go index 2dc914dcd6..576ddac1ae 100644 --- a/src/dbnode/encoding/null.go +++ b/src/dbnode/encoding/null.go @@ -22,7 +22,6 @@ package encoding import ( "fmt" - "io" "time" "github.com/m3db/m3/src/dbnode/namespace" @@ -74,7 +73,7 @@ func NewNullReaderIterator() ReaderIterator { func (r *nullReaderIterator) Current() (ts.Datapoint, xtime.Unit, ts.Annotation) { return ts.Datapoint{}, xtime.Unit(0), nil } -func (r *nullReaderIterator) Next() bool { return false } -func (r *nullReaderIterator) Err() error { return fmt.Errorf("not implemented") } -func (r *nullReaderIterator) Close() {} -func (r *nullReaderIterator) Reset(reader io.Reader, descr namespace.SchemaDescr) {} +func (r *nullReaderIterator) Next() bool { return false } +func (r *nullReaderIterator) Err() error { return fmt.Errorf("not implemented") } +func (r *nullReaderIterator) Close() {} +func (r *nullReaderIterator) Reset(xio.Reader64, namespace.SchemaDescr) {} diff --git a/src/dbnode/encoding/proto/int_encoder_iterator.go b/src/dbnode/encoding/proto/int_encoder_iterator.go index 53dc4074eb..f81267bc75 100644 --- a/src/dbnode/encoding/proto/int_encoder_iterator.go +++ b/src/dbnode/encoding/proto/int_encoder_iterator.go @@ -27,10 +27,6 @@ import ( "github.com/m3db/m3/src/dbnode/encoding/m3tsz" ) -const ( - opcodeZeroSig = 0x0 -) - type intEncoderAndIterator struct { prevIntBits uint64 intSigBitsTracker m3tsz.IntSigBitsTracker @@ -224,7 +220,7 @@ func (eit *intEncoderAndIterator) readIntValDiff(stream encoding.IStream) error itErrPrefix, err) } - numSig := uint(eit.intSigBitsTracker.NumSig) + numSig := eit.intSigBitsTracker.NumSig diffSigBits, err := stream.ReadBits(numSig) if err != nil { return fmt.Errorf( diff --git a/src/dbnode/encoding/proto/iterator.go b/src/dbnode/encoding/proto/iterator.go index 54dbb68eda..925f6fff7d 100644 --- a/src/dbnode/encoding/proto/iterator.go +++ b/src/dbnode/encoding/proto/iterator.go @@ -30,6 +30,7 @@ import ( "github.com/m3db/m3/src/dbnode/encoding/m3tsz" "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/checked" "github.com/m3db/m3/src/x/ident" "github.com/m3db/m3/src/x/instrument" @@ -79,11 +80,11 @@ type iterator struct { // NewIterator creates a new iterator. 
func NewIterator( - reader io.Reader, + reader xio.Reader64, descr namespace.SchemaDescr, opts encoding.Options, ) encoding.ReaderIterator { - stream := encoding.NewIStream(reader, opts.IStreamReaderSizeProto()) + stream := encoding.NewIStream(reader) i := &iterator{ opts: opts, @@ -237,7 +238,7 @@ func (it *iterator) Err() error { return it.err } -func (it *iterator) Reset(reader io.Reader, descr namespace.SchemaDescr) { +func (it *iterator) Reset(reader xio.Reader64, descr namespace.SchemaDescr) { it.resetSchema(descr) it.stream.Reset(reader) it.tsIterator = m3tsz.NewTimestampIterator(it.opts, true) @@ -336,7 +337,7 @@ func (it *iterator) readCustomFieldsSchema() error { } for i := 1; i <= int(numCustomFields); i++ { - fieldTypeBits, err := it.stream.ReadBits(uint(numBitsToEncodeCustomType)) + fieldTypeBits, err := it.stream.ReadBits(uint8(numBitsToEncodeCustomType)) if err != nil { return err } @@ -546,7 +547,7 @@ func (it *iterator) readBytesValue(i int, customField customFieldState) error { if valueInDictControlBit == opCodeInterpretSubsequentBitsAsLRUIndex { dictIdxBits, err := it.stream.ReadBits( - uint(numBitsRequiredForNumUpToN(it.byteFieldDictLRUSize))) + uint8(numBitsRequiredForNumUpToN(it.byteFieldDictLRUSize))) if err != nil { return fmt.Errorf( "%s error trying to read bytes dict idx: %v", @@ -861,7 +862,7 @@ func (it *iterator) nextToBeEvicted(fieldIdx int) []byte { return dict[0] } -func (it *iterator) readBits(numBits uint) (uint64, error) { +func (it *iterator) readBits(numBits uint8) (uint64, error) { res, err := it.stream.ReadBits(numBits) if err != nil { return 0, err diff --git a/src/dbnode/encoding/proto/round_trip_test.go b/src/dbnode/encoding/proto/round_trip_test.go index 7e0cb64f5c..0e3e6ceb92 100644 --- a/src/dbnode/encoding/proto/round_trip_test.go +++ b/src/dbnode/encoding/proto/round_trip_test.go @@ -21,7 +21,6 @@ package proto import ( - "bytes" "errors" "fmt" "testing" @@ -30,6 +29,7 @@ import ( "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/pool" xtime "github.com/m3db/m3/src/x/time" @@ -162,7 +162,7 @@ func TestRoundTrip(t *testing.T) { require.NoError(t, err) require.Equal(t, numExpectedBytes, len(rawBytes)) - buff := bytes.NewBuffer(rawBytes) + buff := xio.NewBytesReader64(rawBytes) iter := NewIterator(buff, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) i := 0 @@ -212,9 +212,9 @@ func TestRoundTripMidStreamSchemaChanges(t *testing.T) { vl2WriteTime := vl1WriteTime.Add(time.Second) err = enc.Encode(ts.Datapoint{Timestamp: vl2WriteTime}, xtime.Second, marshalledVL) - require.Equal(t, - "proto encoder: error unmarshalling message: encountered unknown field with field number: 6", - err.Error()) + require.EqualError(t, + err, + "proto encoder: error unmarshalling message: encountered unknown field with field number: 6") enc.SetSchema(namespace.GetTestSchemaDescr(testVL2Schema)) err = enc.Encode(ts.Datapoint{Timestamp: vl2WriteTime}, xtime.Second, marshalledVL) @@ -224,7 +224,7 @@ func TestRoundTripMidStreamSchemaChanges(t *testing.T) { require.NoError(t, err) // Try reading the stream just using the vl1 schema. 
- buff := bytes.NewBuffer(rawBytes) + buff := xio.NewBytesReader64(rawBytes) iter := NewIterator(buff, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) require.True(t, iter.Next(), "iter err: %v", iter.Err()) @@ -260,7 +260,7 @@ func TestRoundTripMidStreamSchemaChanges(t *testing.T) { require.NoError(t, iter.Err()) // Try reading the stream just using the vl2 schema. - buff = bytes.NewBuffer(rawBytes) + buff = xio.NewBytesReader64(rawBytes) iter = NewIterator(buff, namespace.GetTestSchemaDescr(testVL2Schema), testEncodingOptions) require.True(t, iter.Next(), "iter err: %v", iter.Err()) diff --git a/src/dbnode/encoding/series_iterator_split_into_blocks_test.go b/src/dbnode/encoding/series_iterator_split_into_blocks_test.go index 5ebc2aadd8..5ef4e7d7a2 100644 --- a/src/dbnode/encoding/series_iterator_split_into_blocks_test.go +++ b/src/dbnode/encoding/series_iterator_split_into_blocks_test.go @@ -22,7 +22,6 @@ package encoding_test import ( - "io" "testing" "time" @@ -34,9 +33,10 @@ import ( "github.com/m3db/m3/src/x/ident" xtime "github.com/m3db/m3/src/x/time" - "github.com/m3db/m3/src/dbnode/namespace" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/m3db/m3/src/dbnode/namespace" ) type Series struct { @@ -69,7 +69,7 @@ func TestDeconstructAndReconstruct(t *testing.T) { i++ } - iterAlloc := func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + iterAlloc := func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { iter := m3tsz.NewDecoder(true, encoding.NewOptions()) return iter.Decode(r) } diff --git a/src/dbnode/encoding/types.go b/src/dbnode/encoding/types.go index ad014b2f66..aece7a217a 100644 --- a/src/dbnode/encoding/types.go +++ b/src/dbnode/encoding/types.go @@ -21,7 +21,6 @@ package encoding import ( - "io" "time" "github.com/m3db/m3/src/dbnode/namespace" @@ -151,19 +150,19 @@ type Options interface { // ByteFieldDictionaryLRUSize returns the ByteFieldDictionaryLRUSize. ByteFieldDictionaryLRUSize() int - // SetIStreamReaderSizeM3TSZ sets the istream bufio reader size + // SetIStreamReaderSizeM3TSZ sets the iStream bufio reader size // for m3tsz encoding iteration. SetIStreamReaderSizeM3TSZ(value int) Options - // IStreamReaderSizeM3TSZ returns the istream bufio reader size + // IStreamReaderSizeM3TSZ returns the iStream bufio reader size // for m3tsz encoding iteration. IStreamReaderSizeM3TSZ() int - // SetIStreamReaderSizeProto sets the istream bufio reader size + // SetIStreamReaderSizeProto sets the iStream bufio reader size // for proto encoding iteration. SetIStreamReaderSizeProto(value int) Options - // SetIStreamReaderSizeProto returns the istream bufio reader size + // SetIStreamReaderSizeProto returns the iStream bufio reader size // for proto encoding iteration. IStreamReaderSizeProto() int } @@ -191,7 +190,7 @@ type ReaderIterator interface { // Reset resets the iterator to read from a new reader with // a new schema (for schema aware iterators). - Reset(reader io.Reader, schema namespace.SchemaDescr) + Reset(reader xio.Reader64, schema namespace.SchemaDescr) } // MultiReaderIterator is an iterator that iterates in order over @@ -328,10 +327,7 @@ type MutableSeriesIterators interface { // Decoder is the generic interface for different types of decoders. type Decoder interface { // Decode decodes the encoded data in the reader. - Decode(reader io.Reader) ReaderIterator - - // Decode64 decodes the encoded data slice of bytes. 
- Decode64(data []byte) ReaderIterator + Decode(reader xio.Reader64) ReaderIterator } // NewDecoderFn creates a new decoder. @@ -341,7 +337,7 @@ type NewDecoderFn func() Decoder type EncoderAllocate func() Encoder // ReaderIteratorAllocate allocates a ReaderIterator for a pool. -type ReaderIteratorAllocate func(reader io.Reader, descr namespace.SchemaDescr) ReaderIterator +type ReaderIteratorAllocate func(reader xio.Reader64, descr namespace.SchemaDescr) ReaderIterator // IStream encapsulates a readable stream. type IStream interface { @@ -355,17 +351,17 @@ type IStream interface { ReadByte() (byte, error) // ReadBits reads the next Bits. - ReadBits(numBits uint) (uint64, error) + ReadBits(numBits uint8) (uint64, error) // PeekBits looks at the next Bits, but doesn't move the pos. - PeekBits(numBits uint) (uint64, error) + PeekBits(numBits uint8) (uint64, error) // RemainingBitsInCurrentByte returns the number of bits remaining to // be read in the current byte. RemainingBitsInCurrentByte() uint // Reset resets the IStream. - Reset(r io.Reader) + Reset(reader xio.Reader64) } // OStream encapsulates a writable stream. diff --git a/src/dbnode/persist/fs/merger_test.go b/src/dbnode/persist/fs/merger_test.go index df9531c479..7aab2e493f 100644 --- a/src/dbnode/persist/fs/merger_test.go +++ b/src/dbnode/persist/fs/merger_test.go @@ -75,7 +75,7 @@ func init() { srPool = xio.NewSegmentReaderPool(poolOpts) srPool.Init() multiIterPool = encoding.NewMultiReaderIteratorPool(poolOpts) - multiIterPool.Init(func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + multiIterPool.Init(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) }) bytesPool := pool.NewCheckedBytesPool(nil, poolOpts, func(s []pool.Bucket) pool.BytesPool { @@ -618,11 +618,10 @@ func datapointsToCheckedBytes(t *testing.T, dps []ts.Datapoint) checked.Bytes { r, ok := encoder.Stream(ctx) require.True(t, ok) - var b [1000]byte - n, err := r.Read(b[:]) - require.NoError(t, err) + bytes, err := xio.ToBytes(r) + require.Equal(t, io.EOF, err) - copied := append([]byte(nil), b[:n]...) + copied := append([]byte(nil), bytes...) 
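(Editorial note on the contract the assertions above rely on: the new xio.Reader64 interface packs up to eight bytes big-endian into the most significant bytes of a 64-bit word, reports how many of those bytes are valid, and returns io.EOF once the stream is exhausted; for example, reading a final three bytes 0x0A 0x0B 0x0C yields the word 0x0A0B0C0000000000 with n = 3. That is why a fully drained reader is expected to end with io.EOF rather than a nil error. A minimal consumer of that contract is sketched below; readAll is illustrative only and is not part of the patch, and unlike the xio.ToBytes helper introduced elsewhere in this series it treats io.EOF as success. It assumes imports of "encoding/binary", "io" and the xio package.

	// readAll drains an xio.Reader64 into a byte slice.
	func readAll(r xio.Reader64) ([]byte, error) {
		var (
			out []byte
			buf [8]byte
		)
		for {
			word, n, err := r.Read64()
			if err == io.EOF {
				return out, nil // io.EOF marks the end of a fully read stream
			}
			if err != nil {
				return nil, err
			}
			// The word is left-aligned and big-endian: only the first n bytes are meaningful.
			binary.BigEndian.PutUint64(buf[:], word)
			out = append(out, buf[:n]...)
		}
	})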
cb := checked.NewBytes(copied, nil) return cb } diff --git a/src/dbnode/persist/fs/retriever.go b/src/dbnode/persist/fs/retriever.go index 256eca347c..101feb63ad 100644 --- a/src/dbnode/persist/fs/retriever.go +++ b/src/dbnode/persist/fs/retriever.go @@ -923,12 +923,20 @@ func (req *retrieveRequest) BlockSize() time.Duration { return req.blockSize } -func (req *retrieveRequest) Read(b []byte) (int, error) { +func (req *retrieveRequest) Read64() (word uint64, n byte, err error) { req.resultWg.Wait() if req.err != nil { - return 0, req.err + return 0, 0, req.err } - return req.reader.Read(b) + return req.reader.Read64() +} + +func (req *retrieveRequest) Peek64() (word uint64, n byte, err error) { + req.resultWg.Wait() + if req.err != nil { + return 0, 0, req.err + } + return req.reader.Peek64() } func (req *retrieveRequest) Segment() (ts.Segment, error) { diff --git a/src/dbnode/server/server.go b/src/dbnode/server/server.go index 72422ee912..deee83c25b 100644 --- a/src/dbnode/server/server.go +++ b/src/dbnode/server/server.go @@ -1515,14 +1515,14 @@ func withEncodingAndPoolingOptions( return m3tsz.NewEncoder(time.Time{}, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - iteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + iteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { if cfg.Proto != nil && cfg.Proto.Enabled { return proto.NewIterator(r, descr, encodingOpts) } return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + multiIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { iter := iteratorPool.Get() iter.Reset(r, descr) return iter diff --git a/src/dbnode/storage/block/merged_block_reader.go b/src/dbnode/storage/block/merged_block_reader.go index 04461f261f..cd3b90e39e 100644 --- a/src/dbnode/storage/block/merged_block_reader.go +++ b/src/dbnode/storage/block/merged_block_reader.go @@ -177,12 +177,20 @@ func (r *dbMergedBlockReader) BlockSize() time.Duration { return r.blockSize } -func (r *dbMergedBlockReader) Read(b []byte) (int, error) { +func (r *dbMergedBlockReader) Read64() (word uint64, n byte, err error) { reader, err := r.mergedReader() if err != nil { - return 0, err + return 0, 0, err } - return reader.Read(b) + return reader.Read64() +} + +func (r *dbMergedBlockReader) Peek64() (word uint64, n byte, err error) { + reader, err := r.mergedReader() + if err != nil { + return 0, 0, err + } + return reader.Peek64() } func (r *dbMergedBlockReader) Segment() (ts.Segment, error) { diff --git a/src/dbnode/storage/block/options.go b/src/dbnode/storage/block/options.go index 70abd04930..f44deef887 100644 --- a/src/dbnode/storage/block/options.go +++ b/src/dbnode/storage/block/options.go @@ -21,17 +21,15 @@ package block import ( - "io" - "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/m3tsz" + "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/ts" "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/clock" "github.com/m3db/m3/src/x/context" "github.com/m3db/m3/src/x/pool" xsync "github.com/m3db/m3/src/x/sync" - "github.com/m3db/m3/src/dbnode/namespace" ) const ( @@ -93,10 +91,10 @@ func NewOptions() Options { o.encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - 
o.readerIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + o.readerIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - o.multiReaderIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + o.multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { it := o.readerIteratorPool.Get() it.Reset(r, descr) return it diff --git a/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_data_test.go b/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_data_test.go index a747a65ae4..c3d576305b 100644 --- a/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_data_test.go +++ b/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_data_test.go @@ -36,6 +36,7 @@ import ( "github.com/m3db/m3/src/dbnode/storage/series" "github.com/m3db/m3/src/dbnode/topology" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/checked" "github.com/m3db/m3/src/x/context" "github.com/m3db/m3/src/x/ident" @@ -441,9 +442,9 @@ func testItMergesSnapshotsAndCommitLogs(t *testing.T, opts Options, seg, err := reader.Segment() require.NoError(t, err) - bytes := make([]byte, seg.Len()) - _, err = reader.Read(bytes) - require.NoError(t, err) + bytes, err := xio.ToBytes(reader) + require.Equal(t, io.EOF, err) + require.Equal(t, seg.Len(), len(bytes)) mockReader.EXPECT().Read().Return( foo.ID, diff --git a/src/dbnode/storage/bootstrap/bootstrapper/fs/source_data_test.go b/src/dbnode/storage/bootstrap/bootstrapper/fs/source_data_test.go index 6e2b509339..5f150d0fa2 100644 --- a/src/dbnode/storage/bootstrap/bootstrapper/fs/source_data_test.go +++ b/src/dbnode/storage/bootstrap/bootstrapper/fs/source_data_test.go @@ -23,6 +23,7 @@ package fs import ( "errors" "fmt" + "io" "io/ioutil" "os" "path" @@ -44,6 +45,7 @@ import ( "github.com/m3db/m3/src/dbnode/storage/index" "github.com/m3db/m3/src/dbnode/storage/index/compaction" "github.com/m3db/m3/src/dbnode/storage/series" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/m3ninx/index/segment/fst" "github.com/m3db/m3/src/x/checked" "github.com/m3db/m3/src/x/context" @@ -557,11 +559,10 @@ func validateReadResults( readerAtTime := seriesReaders[0] assert.Equal(t, times[i], readerAtTime.Start) ctx := context.NewContext() - var b [100]byte - n, err := readerAtTime.Reader.Read(b[:]) + bytes, err := xio.ToBytes(readerAtTime.Reader) ctx.Close() - require.NoError(t, err) - require.Equal(t, data[i], b[:n]) + require.Equal(t, io.EOF, err) + require.Equal(t, data[i], bytes) } tester.EnsureNoWrites() diff --git a/src/dbnode/storage/bootstrap/util.go b/src/dbnode/storage/bootstrap/util.go index 215d33136d..ae79579b18 100644 --- a/src/dbnode/storage/bootstrap/util.go +++ b/src/dbnode/storage/bootstrap/util.go @@ -317,7 +317,7 @@ type NamespacesTester struct { func buildDefaultIterPool() encoding.MultiReaderIteratorPool { iterPool := encoding.NewMultiReaderIteratorPool(pool.NewObjectPoolOptions()) iterPool.Init( - func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) diff --git a/src/dbnode/storage/options.go b/src/dbnode/storage/options.go index 05bf1764e0..d2cecafc32 100644 --- 
a/src/dbnode/storage/options.go +++ b/src/dbnode/storage/options.go @@ -23,7 +23,6 @@ package storage import ( "errors" "fmt" - "io" "math" "runtime" "time" @@ -501,14 +500,14 @@ func (o *options) SetEncodingM3TSZPooled() Options { opts.encoderPool = encoderPool // initialize single reader iterator pool - readerIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + readerIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) opts.readerIteratorPool = readerIteratorPool // initialize multi reader iterator pool multiReaderIteratorPool := encoding.NewMultiReaderIteratorPool(opts.poolOpts) - multiReaderIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) opts.multiReaderIteratorPool = multiReaderIteratorPool diff --git a/src/dbnode/storage/series/buffer_test.go b/src/dbnode/storage/series/buffer_test.go index 61141c8bae..b3939e6f19 100644 --- a/src/dbnode/storage/series/buffer_test.go +++ b/src/dbnode/storage/series/buffer_test.go @@ -21,7 +21,6 @@ package series import ( - "io" "sort" "strings" "testing" @@ -42,9 +41,10 @@ import ( xtime "github.com/m3db/m3/src/x/time" "github.com/golang/mock/gomock" - "github.com/m3db/m3/src/dbnode/namespace" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/m3db/m3/src/dbnode/namespace" ) var ( @@ -60,7 +60,7 @@ func newBufferTestOptions() Options { encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiReaderIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) diff --git a/src/dbnode/storage/series/series_test.go b/src/dbnode/storage/series/series_test.go index 3d9441e923..34ffd7e6b7 100644 --- a/src/dbnode/storage/series/series_test.go +++ b/src/dbnode/storage/series/series_test.go @@ -22,7 +22,6 @@ package series import ( "errors" - "io" "sort" "testing" "time" @@ -43,9 +42,10 @@ import ( xtime "github.com/m3db/m3/src/x/time" "github.com/golang/mock/gomock" - "github.com/m3db/m3/src/dbnode/namespace" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/m3db/m3/src/dbnode/namespace" ) func newSeriesTestOptions() Options { @@ -57,7 +57,7 @@ func newSeriesTestOptions() Options { encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiReaderIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) diff --git a/src/dbnode/testdata/prototest/pools.go b/src/dbnode/testdata/prototest/pools.go index 6b23f0e132..ad07221680 100644 --- a/src/dbnode/testdata/prototest/pools.go +++ b/src/dbnode/testdata/prototest/pools.go @@ -21,12 +21,12 @@ package prototest import ( - "io" 
"time" - "github.com/m3db/m3/src/x/pool" - "github.com/m3db/m3/src/dbnode/encoding/proto" "github.com/m3db/m3/src/dbnode/encoding" + "github.com/m3db/m3/src/dbnode/encoding/proto" + "github.com/m3db/m3/src/dbnode/x/xio" + "github.com/m3db/m3/src/x/pool" xtime "github.com/m3db/m3/src/x/time" "github.com/m3db/m3/src/dbnode/namespace" @@ -63,10 +63,10 @@ func newPools() Pools { encoderPool.Init(func() encoding.Encoder { return proto.NewEncoder(timeZero, encodingOpts) }) - readerIterPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + readerIterPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return proto.NewIterator(r, descr, encodingOpts) }) - multiReaderIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { i := readerIterPool.Get() i.Reset(r, descr) return i diff --git a/src/dbnode/x/xio/block_reader_test.go b/src/dbnode/x/xio/block_reader_test.go index fd7371a7cb..746822391a 100644 --- a/src/dbnode/x/xio/block_reader_test.go +++ b/src/dbnode/x/xio/block_reader_test.go @@ -50,16 +50,15 @@ func TestCloneBlock(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() - var p []byte seg := ts.Segment{} reader := NewMockSegmentReader(ctrl) - reader.EXPECT().Read(p).Return(0, errTest).Times(1) - reader.EXPECT().Read(p).Return(100, nil).Times(1) + reader.EXPECT().Read64().Return(uint64(0), byte(0), errTest).Times(1) + reader.EXPECT().Read64().Return(uint64(123456), byte(10), nil).Times(1) reader.EXPECT().Reset(seg).Return().Times(1) clonedReader := NewMockSegmentReader(ctrl) - clonedReader.EXPECT().Read(p).Return(1337, nil).Times(1) + clonedReader.EXPECT().Read64().Return(uint64(1337), byte(2), nil).Times(1) reader.EXPECT().Clone(nil).Return(clonedReader, nil).Times(1) @@ -69,12 +68,14 @@ func TestCloneBlock(t *testing.T) { BlockSize: blockSize, } - read, err := b.Read(p) - require.Equal(t, read, 0) - require.Equal(t, err, errTest) + word, n, err := b.Read64() + require.Equal(t, uint64(0), word) + require.Equal(t, byte(0), n) + require.Equal(t, errTest, err) - read, err = b.Read(p) - require.Equal(t, read, 100) + word, n, err = b.Read64() + require.Equal(t, uint64(123456), word) + require.Equal(t, byte(10), n) require.NoError(t, err) b2, err := b.CloneBlock(nil) @@ -90,9 +91,10 @@ func TestCloneBlock(t *testing.T) { require.Equal(t, b2.Start, start) require.Equal(t, b2.BlockSize, blockSize) - read, err = b2.Read(p) + word, n, err = b2.Read64() - require.Equal(t, read, 1337) + require.Equal(t, uint64(1337), word) + require.Equal(t, byte(2), n) require.NoError(t, err) } @@ -121,16 +123,16 @@ func TestBlockReaderClone(t *testing.T) { func TestBlockReaderRead(t *testing.T) { br, sr := buildBlock(t) - var p []byte - - sr.EXPECT().Read(p).Return(0, errTest).Times(1) - read, err := br.Read(p) - require.Equal(t, read, 0) + sr.EXPECT().Read64().Return(uint64(0), byte(0), errTest).Times(1) + word, n, err := br.Read64() + require.Equal(t, uint64(0), word) + require.Equal(t, byte(0), n) require.Equal(t, err, errTest) - sr.EXPECT().Read(p).Return(100, nil).Times(1) - read, err = br.Read(p) - require.Equal(t, read, 100) + sr.EXPECT().Read64().Return(uint64(100), byte(1), nil).Times(1) + word, n, err = br.Read64() + require.Equal(t, uint64(100), word) + require.Equal(t, byte(1), n) require.NoError(t, err) } diff --git a/src/dbnode/x/xio/io_mock.go b/src/dbnode/x/xio/io_mock.go index 
daa99ba481..12f1c91cc7 100644 --- a/src/dbnode/x/xio/io_mock.go +++ b/src/dbnode/x/xio/io_mock.go @@ -83,19 +83,36 @@ func (mr *MockSegmentReaderMockRecorder) Finalize() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Finalize", reflect.TypeOf((*MockSegmentReader)(nil).Finalize)) } -// Read mocks base method -func (m *MockSegmentReader) Read(arg0 []byte) (int, error) { +// Peek64 mocks base method +func (m *MockSegmentReader) Peek64() (uint64, byte, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Read", arg0) - ret0, _ := ret[0].(int) - ret1, _ := ret[1].(error) - return ret0, ret1 + ret := m.ctrl.Call(m, "Peek64") + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(byte) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// Peek64 indicates an expected call of Peek64 +func (mr *MockSegmentReaderMockRecorder) Peek64() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Peek64", reflect.TypeOf((*MockSegmentReader)(nil).Peek64)) +} + +// Read64 mocks base method +func (m *MockSegmentReader) Read64() (uint64, byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Read64") + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(byte) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 } -// Read indicates an expected call of Read -func (mr *MockSegmentReaderMockRecorder) Read(arg0 interface{}) *gomock.Call { +// Read64 indicates an expected call of Read64 +func (mr *MockSegmentReaderMockRecorder) Read64() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Read", reflect.TypeOf((*MockSegmentReader)(nil).Read), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Read64", reflect.TypeOf((*MockSegmentReader)(nil).Read64)) } // Reset mocks base method diff --git a/src/dbnode/x/xio/null.go b/src/dbnode/x/xio/null.go index 81fdebad47..146b12519f 100644 --- a/src/dbnode/x/xio/null.go +++ b/src/dbnode/x/xio/null.go @@ -27,7 +27,8 @@ import ( type nullSegmentReader struct{} -func (r nullSegmentReader) Read(_ []byte) (n int, err error) { return 0, nil } +func (r nullSegmentReader) Read64() (word uint64, n byte, err error) { return 0, 0, nil } +func (r nullSegmentReader) Peek64() (word uint64, n byte, err error) { return 0, 0, nil } func (r nullSegmentReader) Segment() (ts.Segment, error) { return ts.Segment{}, nil } func (r nullSegmentReader) Reset(_ ts.Segment) {} func (r nullSegmentReader) Finalize() {} diff --git a/src/dbnode/x/xio/segment_reader.go b/src/dbnode/x/xio/segment_reader.go index 352c8feeea..536416635f 100644 --- a/src/dbnode/x/xio/segment_reader.go +++ b/src/dbnode/x/xio/segment_reader.go @@ -21,6 +21,7 @@ package xio import ( + "encoding/binary" "io" "github.com/m3db/m3/src/dbnode/ts" @@ -46,40 +47,101 @@ func (sr *segmentReader) Clone( return NewSegmentReader(sr.segment.Clone(pool)), nil } -func (sr *segmentReader) Read(b []byte) (int, error) { - if len(b) == 0 { - return 0, nil +func (sr *segmentReader) Read64() (word uint64, n byte, err error) { + sr.lazyInit() + + var ( + nh = len(sr.lazyHead) + nht = nh + len(sr.lazyTail) + res uint64 + bytes byte + ) + + if sr.si >= nht { + return 0, 0, io.EOF } - if b := sr.segment.Head; b != nil && len(sr.lazyHead) == 0 { - sr.lazyHead = b.Bytes() + if sr.si+8 < nh { + // NB: this compiles to a single 64 bit load followed by + // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). 
+ res := binary.BigEndian.Uint64(sr.lazyHead[sr.si:]) + sr.si += 8 + return res, 8, nil } - if b := sr.segment.Tail; b != nil && len(sr.lazyTail) == 0 { - sr.lazyTail = b.Bytes() + + if sr.si < nh { + for ; sr.si < nh; sr.si++ { + res = (res << 8) | uint64(sr.lazyHead[sr.si]) + bytes++ + } + for ; sr.si < nht && bytes < 8; sr.si++ { + res = (res << 8) | uint64(sr.lazyTail[sr.si-nh]) + bytes++ + } + return res << (64 - 8*bytes), bytes, nil } - nh, nt := len(sr.lazyHead), len(sr.lazyTail) - if sr.si >= nh+nt { - return 0, io.EOF + if sr.si+8 < nht { + // NB: this compiles to a single 64 bit load followed by + // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). + res := binary.BigEndian.Uint64(sr.lazyTail[sr.si-nh:]) + sr.si += 8 + return res, 8, nil } - n := 0 - if sr.si < nh { - nRead := copy(b, sr.lazyHead[sr.si:]) - sr.si += nRead - n += nRead - if n == len(b) { - return n, nil + + for ; sr.si < nht && bytes < 8; sr.si++ { + res = (res << 8) | uint64(sr.lazyTail[sr.si-nh]) + bytes++ + } + return res << (64 - 8*bytes), bytes, nil +} + +func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { + sr.lazyInit() + + var ( + nh = len(sr.lazyHead) + nht = nh + len(sr.lazyTail) + i = sr.si + res uint64 + bytes byte + ) + + if i >= nht { + return 0, 0, io.EOF + } + + if i+8 < nh { + // NB: this compiles to a single 64 bit load followed by + // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). + res := binary.BigEndian.Uint64(sr.lazyHead[i:]) + return res, 8, nil + } + + if i < nh { + for ; i < nh; i++ { + res = (res << 8) | uint64(sr.lazyHead[i]) + bytes++ + } + for ; i < nht && bytes < 8; i++ { + res = (res << 8) | uint64(sr.lazyTail[i-nh]) + bytes++ } + return res << (64 - 8*bytes), bytes, nil } - if sr.si < nh+nt { - nRead := copy(b[n:], sr.lazyTail[sr.si-nh:]) - sr.si += nRead - n += nRead + + if i+8 < nht { + // NB: this compiles to a single 64 bit load followed by + // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). 
+ res := binary.BigEndian.Uint64(sr.lazyTail[i-nh:]) + return res, 8, nil } - if n == 0 { - return 0, io.EOF + + for ; i < nht && bytes < 8; i++ { + res = (res << 8) | uint64(sr.lazyTail[i-nh]) + bytes++ } - return n, nil + return res << (64 - 8*bytes), bytes, nil } func (sr *segmentReader) Segment() (ts.Segment, error) { @@ -102,3 +164,12 @@ func (sr *segmentReader) Finalize() { pool.Put(sr) } } + +func (sr *segmentReader) lazyInit() { + if b := sr.segment.Head; b != nil && len(sr.lazyHead) == 0 { + sr.lazyHead = b.Bytes() + } + if b := sr.segment.Tail; b != nil && len(sr.lazyTail) == 0 { + sr.lazyTail = b.Bytes() + } +} diff --git a/src/dbnode/x/xio/segment_reader_test.go b/src/dbnode/x/xio/segment_reader_test.go index ef0f704fb0..7d58932ce7 100644 --- a/src/dbnode/x/xio/segment_reader_test.go +++ b/src/dbnode/x/xio/segment_reader_test.go @@ -60,15 +60,9 @@ func testSegmentReader( checksum := uint32(10) segment := ts.NewSegment(checkd(head), checkd(tail), checksum, ts.FinalizeNone) r := NewSegmentReader(segment) - var b [100]byte - n, err := r.Read(b[:]) - require.NoError(t, err) - require.Equal(t, len(expected), n) - require.Equal(t, expected, b[:n]) - - n, err = r.Read(b[:]) + bytes, err := ToBytes(r) require.Equal(t, io.EOF, err) - require.Equal(t, 0, n) + require.Equal(t, expected, bytes) seg, err := r.Segment() require.NoError(t, err) @@ -92,13 +86,14 @@ func testSegmentReader( cloned.Finalize() segment.Finalize() } + func TestSegmentReaderNoPool(t *testing.T) { checkd := func(d []byte) checked.Bytes { return checked.NewBytes(d, nil) } testSegmentReader(t, checkd, nil) } func TestSegmentReaderWithPool(t *testing.T) { - bytesPool := pool.NewCheckedBytesPool([]pool.Bucket{pool.Bucket{ + bytesPool := pool.NewCheckedBytesPool([]pool.Bucket{{ Capacity: 1024, Count: 10, }}, nil, func(s []pool.Bucket) pool.BytesPool { diff --git a/src/dbnode/x/xio/types.go b/src/dbnode/x/xio/types.go index fd02692fed..7576902b07 100644 --- a/src/dbnode/x/xio/types.go +++ b/src/dbnode/x/xio/types.go @@ -21,7 +21,6 @@ package xio import ( - "io" "time" "github.com/m3db/m3/src/dbnode/ts" @@ -42,7 +41,7 @@ var EmptyBlockReader = BlockReader{} // SegmentReader implements the io reader interface backed by a segment. type SegmentReader interface { - io.Reader + Reader64 xresource.Finalizer // Segment gets the segment read by this reader. @@ -103,3 +102,14 @@ type ReaderSliceOfSlicesFromBlockReadersIterator interface { // Reset resets the iterator with a new array of block readers arrays. Reset(blocks [][]BlockReader) } + + +// Reader64 is a reader for reading 64 bit words. +type Reader64 interface { + + // Read64 reads and returns a 64 bit word plus a number of bytes (up to 8) actually read. + Read64() (word uint64, n byte, err error) + + // Read64 peeks and returns the next 64 bit word plus a number of bytes (up to 8) available. + Peek64() (word uint64, n byte, err error) +} diff --git a/src/query/pools/query_pools.go b/src/query/pools/query_pools.go index 65c5c7ad99..664a431bd0 100644 --- a/src/query/pools/query_pools.go +++ b/src/query/pools/query_pools.go @@ -21,11 +21,10 @@ package pools import ( - "io" - "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/m3tsz" "github.com/m3db/m3/src/dbnode/namespace" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/dbnode/x/xpool" xconfig "github.com/m3db/m3/src/x/config" "github.com/m3db/m3/src/x/ident" @@ -212,12 +211,12 @@ func BuildIteratorPools( encodingOpts := encoding.NewOptions(). 
SetReaderIteratorPool(readerIteratorPool) - readerIteratorPool.Init(func(r io.Reader, descr namespace.SchemaDescr) encoding.ReaderIterator { + readerIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) pools.multiReaderIterator = encoding.NewMultiReaderIteratorPool(defaultPerSeriesPoolOpts) - pools.multiReaderIterator.Init(func(r io.Reader, s namespace.SchemaDescr) encoding.ReaderIterator { + pools.multiReaderIterator.Init(func(r xio.Reader64, s namespace.SchemaDescr) encoding.ReaderIterator { iter := readerIteratorPool.Get() iter.Reset(r, s) return iter diff --git a/src/query/remote/compressed_codecs.go b/src/query/remote/compressed_codecs.go index 166cefcd2c..d5a64a345c 100644 --- a/src/query/remote/compressed_codecs.go +++ b/src/query/remote/compressed_codecs.go @@ -22,7 +22,6 @@ package remote import ( "fmt" - "io" "sync" "time" @@ -47,14 +46,14 @@ func initializeVars() { b.Reset(nil) })) - iterAlloc = func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + iterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) } } var ( opts checked.BytesOptions - iterAlloc func(r io.Reader, d namespace.SchemaDescr) encoding.ReaderIterator + iterAlloc func(r xio.Reader64, d namespace.SchemaDescr) encoding.ReaderIterator initialize sync.Once ) diff --git a/src/query/ts/m3db/options.go b/src/query/ts/m3db/options.go index 43867c654e..2b4985367f 100644 --- a/src/query/ts/m3db/options.go +++ b/src/query/ts/m3db/options.go @@ -23,13 +23,13 @@ package m3db import ( "errors" "fmt" - "io" "time" "github.com/m3db/m3/src/dbnode/client" "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/m3tsz" "github.com/m3db/m3/src/dbnode/namespace" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/query/models" "github.com/m3db/m3/src/query/pools" queryconsolidator "github.com/m3db/m3/src/query/storage/m3/consolidators" @@ -43,7 +43,7 @@ var ( defaultCount = 10 defaultLookbackDuration = time.Duration(0) defaultConsolidationFn = consolidators.TakeLast - defaultIterAlloc = func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + defaultIterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) } defaultIteratorBatchingFn = iteratorBatchingFn From c216a2abf432fca02bbc3eef5cb1664d73381022 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 20:21:59 +0200 Subject: [PATCH 08/24] git add --- src/dbnode/x/xio/reader64.go | 61 +++++++++++++++++++++++++++++++ src/dbnode/x/xio/reader64_test.go | 53 +++++++++++++++++++++++++++ src/dbnode/x/xio/utils.go | 18 +++++++++ 3 files changed, 132 insertions(+) create mode 100644 src/dbnode/x/xio/reader64.go create mode 100644 src/dbnode/x/xio/reader64_test.go create mode 100644 src/dbnode/x/xio/utils.go diff --git a/src/dbnode/x/xio/reader64.go b/src/dbnode/x/xio/reader64.go new file mode 100644 index 0000000000..8a389cab23 --- /dev/null +++ b/src/dbnode/x/xio/reader64.go @@ -0,0 +1,61 @@ +package xio + +import ( + "encoding/binary" + "io" +) + +type BytesReader64 struct { + data []byte + index int +} + +func NewBytesReader64(data []byte) *BytesReader64 { + return &BytesReader64{data: data} +} + +func (r *BytesReader64) Read64() (word uint64, 
n byte, err error) { + if r.index+8 <= len(r.data) { + // NB: this compiles to a single 64 bit load followed by + // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). + res := binary.BigEndian.Uint64(r.data[r.index:]) + r.index += 8 + return res, 8, nil + } + if r.index >= len(r.data) { + return 0, 0, io.EOF + } + var res uint64 + var bytes byte + for ; r.index < len(r.data); r.index++ { + res = (res << 8) | uint64(r.data[r.index]) + bytes++ + } + return res << (64 - 8*bytes), bytes, nil +} + +func (r *BytesReader64) Peek64() (word uint64, n byte, err error) { + if r.index+8 <= len(r.data) { + // NB: this compiles to a single 64 bit load followed by + // BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). + res := binary.BigEndian.Uint64(r.data[r.index:]) + return res, 8, nil + } + + if r.index >= len(r.data) { + return 0, 0, io.EOF + } + + var res uint64 + var bytes byte + for i := r.index; i < len(r.data); i++ { + res = (res << 8) | uint64(r.data[i]) + bytes++ + } + return res << (64 - 8*bytes), bytes, nil +} + +func (r *BytesReader64) Reset(data []byte) { + r.data = data + r.index = 0 +} diff --git a/src/dbnode/x/xio/reader64_test.go b/src/dbnode/x/xio/reader64_test.go new file mode 100644 index 0000000000..855c2f1c9a --- /dev/null +++ b/src/dbnode/x/xio/reader64_test.go @@ -0,0 +1,53 @@ +package xio + +import ( + "encoding/binary" + "io" + "testing" + + "github.com/stretchr/testify/require" + "github.com/tj/assert" +) + +func TestBytesReader64(t *testing.T) { + var ( + data = []byte{4, 5, 6, 7, 8, 9, 1, 2, 3, 0, 10, 11, 12, 13, 14, 15, 16, 17} + r = NewBytesReader64(nil) + ) + + for l := 0; l < len(data); l++ { + testBytesReader64(t, r, data) + } +} + +func testBytesReader64(t *testing.T, r *BytesReader64, data []byte) { + r.Reset(data) + + var ( + peeked, read []byte + buf [8]byte + word uint64 + n byte + err error + ) + + for { + word, n, err = r.Peek64() + if err != nil { + break + } + binary.BigEndian.PutUint64(buf[:], word) + peeked = append(peeked, buf[:n]...) + + word, n, err = r.Read64() + if err != nil { + break + } + binary.BigEndian.PutUint64(buf[:], word) + read = append(read, buf[:n]...) + } + + require.Equal(t, io.EOF, err) + assert.Equal(t, data, peeked) + assert.Equal(t, data, read) +} diff --git a/src/dbnode/x/xio/utils.go b/src/dbnode/x/xio/utils.go new file mode 100644 index 0000000000..d631a0e4b6 --- /dev/null +++ b/src/dbnode/x/xio/utils.go @@ -0,0 +1,18 @@ +package xio + +import "encoding/binary" + +func ToBytes(reader Reader64) ([]byte, error) { + var ( + res []byte + buf [8]byte + ) + + word, bytes, err := reader.Read64() + for ; err == nil; word, bytes, err = reader.Read64() { + binary.BigEndian.PutUint64(buf[:], word) + res = append(res, buf[:bytes]...) 
+ } + + return res, err +} From cc22a353a8f2d43966348cff8707e3c177f80382 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 20:58:12 +0200 Subject: [PATCH 09/24] Fix read_data_files --- src/cmd/tools/read_data_files/main/main.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cmd/tools/read_data_files/main/main.go b/src/cmd/tools/read_data_files/main/main.go index d48e9ec561..41c1418116 100644 --- a/src/cmd/tools/read_data_files/main/main.go +++ b/src/cmd/tools/read_data_files/main/main.go @@ -21,7 +21,6 @@ package main import ( - "bytes" "encoding/base64" "fmt" "io" @@ -35,6 +34,7 @@ import ( "github.com/m3db/m3/src/dbnode/encoding/m3tsz" "github.com/m3db/m3/src/dbnode/persist" "github.com/m3db/m3/src/dbnode/persist/fs" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/ident" "github.com/pborman/getopt" @@ -122,7 +122,7 @@ func main() { } data.IncRef() - iter := m3tsz.NewReaderIterator(bytes.NewReader(data.Bytes()), true, encodingOpts) + iter := m3tsz.NewReaderIterator(xio.NewBytesReader64(data.Bytes()), true, encodingOpts) for iter.Next() { dp, _, annotation := iter.Current() // Use fmt package so it goes to stdout instead of stderr From e42c7614f21a139eb0d99b781fd60b453276cdd4 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 22:24:58 +0200 Subject: [PATCH 10/24] lint --- .golangci.yml | 4 +++- go.mod | 1 - src/dbnode/client/config.go | 7 ++++--- src/dbnode/client/options.go | 10 +++++++-- .../client/session_fetch_bulk_blocks_test.go | 13 +++++++----- src/dbnode/encoding/istream.go | 2 +- src/dbnode/encoding/istream_test.go | 18 +++++++++------- .../encoding/m3tsz/decoder_benchmark_test.go | 7 ++++--- src/dbnode/encoding/m3tsz/iterator_test.go | 6 +++++- .../encoding/m3tsz/timestamp_iterator.go | 4 ++-- .../encoding/multi_reader_iterator_test.go | 4 ++-- src/dbnode/encoding/null.go | 21 ++++++++++--------- src/dbnode/encoding/proto/iterator.go | 9 -------- src/dbnode/encoding/scheme.go | 3 ++- src/dbnode/storage/block/options.go | 18 +++++++++------- src/dbnode/storage/options.go | 10 +++++++-- src/dbnode/storage/series/buffer_test.go | 7 ++++--- src/dbnode/storage/series/series_test.go | 7 ++++--- src/dbnode/x/xio/reader64.go | 5 +++++ src/dbnode/x/xio/reader64_test.go | 2 +- src/dbnode/x/xio/segment_reader.go | 8 +++---- src/dbnode/x/xio/types.go | 1 - src/dbnode/x/xio/utils.go | 2 ++ src/query/pools/query_pools.go | 18 +++++++++------- src/query/test/test_series_iterator.go | 5 ++--- .../ts/m3db/encoded_step_iterator_test.go | 5 ++--- 26 files changed, 112 insertions(+), 85 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 05855d38b6..ee2df8e6fd 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -222,7 +222,9 @@ linters: - godox # New line required before return would require a large fraction of the # code base to need updating, it's not worth the perceived benefit. - - nlreturn + - nlreturn + # Opinionated and sometimes wrong. 
+ - paralleltest disable-all: false presets: # bodyclose, errcheck, gosec, govet, scopelint, staticcheck, typecheck diff --git a/go.mod b/go.mod index 2ce2f55fa7..8fa319d98c 100644 --- a/go.mod +++ b/go.mod @@ -100,7 +100,6 @@ require ( github.com/streadway/quantile v0.0.0-20150917103942-b0c588724d25 // indirect github.com/stretchr/testify v1.6.1 github.com/subosito/gotenv v1.2.1-0.20190917103637-de67a6614a4d // indirect - github.com/tj/assert v0.0.0-20171129193455-018094318fb0 github.com/twotwotwo/sorts v0.0.0-20160814051341-bf5c1f2b8553 github.com/uber-go/tally v3.3.13+incompatible github.com/uber/jaeger-client-go v2.25.0+incompatible diff --git a/src/dbnode/client/config.go b/src/dbnode/client/config.go index 15c366b8fb..a9a359a6c8 100644 --- a/src/dbnode/client/config.go +++ b/src/dbnode/client/config.go @@ -407,9 +407,10 @@ func (c Configuration) NewAdminClient( encodingOpts = encoding.NewOptions() } - v = v.SetReaderIteratorAllocate(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + v = v.SetReaderIteratorAllocate( + func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) + }) if c.Proto != nil && c.Proto.Enabled { v = v.SetEncodingProto(encodingOpts) diff --git a/src/dbnode/client/options.go b/src/dbnode/client/options.go index 6271cbadc5..e18586ad1f 100644 --- a/src/dbnode/client/options.go +++ b/src/dbnode/client/options.go @@ -445,7 +445,10 @@ func (o *options) Validate() error { func (o *options) SetEncodingM3TSZ() Options { opts := *o - opts.readerIteratorAllocate = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + opts.readerIteratorAllocate = func( + r xio.Reader64, + _ namespace.SchemaDescr, + ) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) } opts.isProtoEnabled = false @@ -454,7 +457,10 @@ func (o *options) SetEncodingM3TSZ() Options { func (o *options) SetEncodingProto(encodingOpts encoding.Options) Options { opts := *o - opts.readerIteratorAllocate = func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { + opts.readerIteratorAllocate = func( + r xio.Reader64, + descr namespace.SchemaDescr, + ) encoding.ReaderIterator { return proto.NewIterator(r, descr, encodingOpts) } opts.isProtoEnabled = true diff --git a/src/dbnode/client/session_fetch_bulk_blocks_test.go b/src/dbnode/client/session_fetch_bulk_blocks_test.go index 3503948a6c..79dde3b276 100644 --- a/src/dbnode/client/session_fetch_bulk_blocks_test.go +++ b/src/dbnode/client/session_fetch_bulk_blocks_test.go @@ -57,17 +57,19 @@ import ( ) var ( - blockSize = 2 * time.Hour - nsID = ident.StringID("testNs1") - nsRetentionOpts = retention.NewOptions(). - SetBlockSize(blockSize). 
- SetRetentionPeriod(48 * blockSize) + blockSize = 2 * time.Hour + nsID = ident.StringID("testNs1") + + nsRetentionOpts = retention.NewOptions().SetBlockSize(blockSize).SetRetentionPeriod(48 * blockSize) + testTagDecodingPool = serialize.NewTagDecoderPool( serialize.NewTagDecoderOptions(serialize.TagDecoderOptionsConfig{}), pool.NewObjectPoolOptions().SetSize(1)) + testTagEncodingPool = serialize.NewTagEncoderPool( serialize.NewTagEncoderOptions(), pool.NewObjectPoolOptions().SetSize(1)) + testIDPool = newSessionTestOptions().IdentifierPool() fooID = ident.StringID("foo") fooTags checked.Bytes @@ -1767,6 +1769,7 @@ func TestBlocksResultAddBlockFromPeerReadMerged(t *testing.T) { // Assert block has data data, err := xio.ToBytes(xio.NewSegmentReader(seg)) + require.NoError(t, err) assert.Equal(t, []byte{1, 2, 3}, data) } diff --git a/src/dbnode/encoding/istream.go b/src/dbnode/encoding/istream.go index f72a2a7a63..08115b4000 100644 --- a/src/dbnode/encoding/istream.go +++ b/src/dbnode/encoding/istream.go @@ -26,7 +26,7 @@ import ( "github.com/m3db/m3/src/dbnode/x/xio" ) -// iStream encapsulates a readable stream based directly on []byte slice and operating in 64 bit words. +// iStream encapsulates a readable stream. type iStream struct { r xio.Reader64 err error // error encountered diff --git a/src/dbnode/encoding/istream_test.go b/src/dbnode/encoding/istream_test.go index a80c744094..e9fb4a1d4a 100644 --- a/src/dbnode/encoding/istream_test.go +++ b/src/dbnode/encoding/istream_test.go @@ -51,13 +51,15 @@ func TestIStreamReadBits(t *testing.T) { } func TestIStreamReadByte(t *testing.T) { - byteStream := []uint8{ - 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, - 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, - } + var ( + byteStream = []uint8{ + 0xca, 0xfe, 0xfd, 0x89, 0x1a, 0x2b, 0x3c, 0x48, 0x55, 0xe6, 0xf7, + 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, + } + is = NewIStream(xio.NewBytesReader64(byteStream)) + res = make([]byte, 0, len(byteStream)) + ) - is := NewIStream(xio.NewBytesReader64(byteStream)) - var res []byte for range byteStream { read, err := is.ReadByte() require.NoError(t, err) @@ -152,7 +154,7 @@ func TestIStreamPeekAfterReadBits(t *testing.T) { require.NoError(t, err) require.Equal(t, uint64(0x30405060708090A), res) - res, err = is.PeekBits(64) + _, err = is.PeekBits(64) require.EqualError(t, err, io.EOF.Error()) } @@ -168,7 +170,7 @@ func TestIStreamRemainingBitsInCurrentByte(t *testing.T) { require.Equal(t, expected, is.RemainingBitsInCurrentByte()) bit, err := is.ReadBit() require.NoError(t, err) - expectedBit := Bit(b>>i)&1 + expectedBit := Bit(b>>i) & 1 require.Equal(t, expectedBit, bit) } } diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index d595a9012f..b38982c3e2 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -25,10 +25,10 @@ import ( "math/rand" "testing" + "github.com/stretchr/testify/require" + "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/x/xio" - - "github.com/stretchr/testify/require" ) // BenchmarkM3TSZDecode-12 10000 108797 ns/op @@ -49,9 +49,10 @@ func BenchmarkM3TSZDecode(b *testing.B) { require.NoError(b, iter.Err()) } } + func prepareSampleSeriesRun(b *testing.B) [][]byte { var ( - rnd = rand.New(rand.NewSource(42)) + rnd = rand.New(rand.NewSource(42)) // nolint: gosec sampleSeries = make([][]byte, 0, len(sampleSeriesBase64)) seriesRun = make([][]byte, 0, 
b.N) ) diff --git a/src/dbnode/encoding/m3tsz/iterator_test.go b/src/dbnode/encoding/m3tsz/iterator_test.go index cc09a2d093..212ff1eb13 100644 --- a/src/dbnode/encoding/m3tsz/iterator_test.go +++ b/src/dbnode/encoding/m3tsz/iterator_test.go @@ -33,7 +33,11 @@ import ( ) func getTestReaderIterator(rawBytes []byte) *readerIterator { - return NewReaderIterator(xio.NewBytesReader64(rawBytes), false, encoding.NewOptions()).(*readerIterator) + return NewReaderIterator( + xio.NewBytesReader64(rawBytes), + false, + encoding.NewOptions(), + ).(*readerIterator) } func TestReaderIteratorReadNextTimestamp(t *testing.T) { diff --git a/src/dbnode/encoding/m3tsz/timestamp_iterator.go b/src/dbnode/encoding/m3tsz/timestamp_iterator.go index 415733c0fb..ace3f8d8a6 100644 --- a/src/dbnode/encoding/m3tsz/timestamp_iterator.go +++ b/src/dbnode/encoding/m3tsz/timestamp_iterator.go @@ -51,8 +51,8 @@ type TimestampIterator struct { // for situations where looking ahead is not safe. SkipMarkers bool - numValueBits uint8 - numBits uint8 + numValueBits uint8 + numBits uint8 } // NewTimestampIterator creates a new TimestampIterator. diff --git a/src/dbnode/encoding/multi_reader_iterator_test.go b/src/dbnode/encoding/multi_reader_iterator_test.go index 2ee2e44612..c065d24489 100644 --- a/src/dbnode/encoding/multi_reader_iterator_test.go +++ b/src/dbnode/encoding/multi_reader_iterator_test.go @@ -314,8 +314,8 @@ func assertTestMultiReaderIterator( } var testIterators []*testIterator - var iteratorAlloc func(reader xio.Reader64, descr namespace.SchemaDescr) ReaderIterator - iteratorAlloc = func(reader xio.Reader64, descr namespace.SchemaDescr) ReaderIterator { + var iteratorAlloc func(xio.Reader64, namespace.SchemaDescr) ReaderIterator + iteratorAlloc = func(reader xio.Reader64, _ namespace.SchemaDescr) ReaderIterator { for i := range entriesByReader { if reader != entriesByReader[i].reader { continue diff --git a/src/dbnode/encoding/null.go b/src/dbnode/encoding/null.go index 576ddac1ae..35ce276912 100644 --- a/src/dbnode/encoding/null.go +++ b/src/dbnode/encoding/null.go @@ -53,12 +53,12 @@ func (e *nullEncoder) LastEncoded() (ts.Datapoint, error) { func (e *nullEncoder) LastAnnotationChecksum() (uint64, error) { return 0, fmt.Errorf("not implemented") } -func (e *nullEncoder) Len() int { return 0 } -func (e *nullEncoder) Seal() { e.sealed = true } -func (e *nullEncoder) Reset(t time.Time, capacity int, descr namespace.SchemaDescr) {} -func (e *nullEncoder) Close() {} -func (e *nullEncoder) Discard() ts.Segment { return ts.Segment{} } -func (e *nullEncoder) DiscardReset(t time.Time, capacity int, descr namespace.SchemaDescr) ts.Segment { +func (e *nullEncoder) Len() int { return 0 } +func (e *nullEncoder) Seal() { e.sealed = true } +func (e *nullEncoder) Reset(time.Time, int, namespace.SchemaDescr) {} +func (e *nullEncoder) Close() {} +func (e *nullEncoder) Discard() ts.Segment { return ts.Segment{} } +func (e *nullEncoder) DiscardReset(time.Time, int, namespace.SchemaDescr) ts.Segment { return ts.Segment{} } func (e *nullEncoder) SetSchema(_ namespace.SchemaDescr) {} @@ -73,7 +73,8 @@ func NewNullReaderIterator() ReaderIterator { func (r *nullReaderIterator) Current() (ts.Datapoint, xtime.Unit, ts.Annotation) { return ts.Datapoint{}, xtime.Unit(0), nil } -func (r *nullReaderIterator) Next() bool { return false } -func (r *nullReaderIterator) Err() error { return fmt.Errorf("not implemented") } -func (r *nullReaderIterator) Close() {} -func (r *nullReaderIterator) Reset(xio.Reader64, namespace.SchemaDescr) {} +func 
(r *nullReaderIterator) Next() bool { return false } +func (r *nullReaderIterator) Err() error { return fmt.Errorf("not implemented") } +func (r *nullReaderIterator) Close() {} +func (r *nullReaderIterator) Reset(xio.Reader64, namespace.SchemaDescr) { +} diff --git a/src/dbnode/encoding/proto/iterator.go b/src/dbnode/encoding/proto/iterator.go index 925f6fff7d..738a90ed49 100644 --- a/src/dbnode/encoding/proto/iterator.go +++ b/src/dbnode/encoding/proto/iterator.go @@ -862,15 +862,6 @@ func (it *iterator) nextToBeEvicted(fieldIdx int) []byte { return dict[0] } -func (it *iterator) readBits(numBits uint8) (uint64, error) { - res, err := it.stream.ReadBits(numBits) - if err != nil { - return 0, err - } - - return res, nil -} - func (it *iterator) resetUnmarshalProtoBuffer(n int) { if it.unmarshalProtoBuf != nil && it.unmarshalProtoBuf.Cap() >= n { // If the existing one is big enough, just resize it. diff --git a/src/dbnode/encoding/scheme.go b/src/dbnode/encoding/scheme.go index 1a02ff602f..0c8327aea8 100644 --- a/src/dbnode/encoding/scheme.go +++ b/src/dbnode/encoding/scheme.go @@ -141,7 +141,8 @@ func newTimeEncodingSchemes(schemes map[xtime.Unit]TimeEncodingScheme) TimeEncod } // newTimeEncodingScheme creates a new time encoding scheme. -// NB(xichen): numValueBitsForBuckets should be ordered by value in ascending order (smallest value first). +// NB(xichen): numValueBitsForBuckets should be ordered by value +// in ascending order (smallest value first). func newTimeEncodingScheme(numValueBitsForBuckets []int, numValueBitsForDefault int) TimeEncodingScheme { numBuckets := len(numValueBitsForBuckets) buckets := make([]TimeBucket, 0, numBuckets) diff --git a/src/dbnode/storage/block/options.go b/src/dbnode/storage/block/options.go index f44deef887..888cafc468 100644 --- a/src/dbnode/storage/block/options.go +++ b/src/dbnode/storage/block/options.go @@ -91,14 +91,16 @@ func NewOptions() Options { o.encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - o.readerIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) - o.multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { - it := o.readerIteratorPool.Get() - it.Reset(r, descr) - return it - }) + o.readerIteratorPool.Init( + func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) + }) + o.multiReaderIteratorPool.Init( + func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { + it := o.readerIteratorPool.Get() + it.Reset(r, descr) + return it + }) o.segmentReaderPool.Init() o.bytesPool.Init() return o diff --git a/src/dbnode/storage/options.go b/src/dbnode/storage/options.go index d2cecafc32..a018623c4b 100644 --- a/src/dbnode/storage/options.go +++ b/src/dbnode/storage/options.go @@ -500,14 +500,20 @@ func (o *options) SetEncodingM3TSZPooled() Options { opts.encoderPool = encoderPool // initialize single reader iterator pool - readerIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { + readerIteratorPool.Init(func( + r xio.Reader64, + _ namespace.SchemaDescr, + ) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) opts.readerIteratorPool = readerIteratorPool 
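A usage sketch (not part of the patch itself) of what the Reader64-based allocate functions above imply for a caller: decoding an m3tsz payload now starts from an xio.Reader64 such as xio.NewBytesReader64 rather than an io.Reader. Only constructors and methods visible in these diffs are used; the package and function names are illustrative.

package example

import (
    "github.com/m3db/m3/src/dbnode/encoding"
    "github.com/m3db/m3/src/dbnode/encoding/m3tsz"
    "github.com/m3db/m3/src/dbnode/x/xio"
)

// decodeAll walks an m3tsz-encoded payload via the word-oriented reader.
func decodeAll(data []byte) error {
    iter := m3tsz.NewReaderIterator(
        xio.NewBytesReader64(data), // []byte exposed as an xio.Reader64
        m3tsz.DefaultIntOptimizationEnabled,
        encoding.NewOptions(),
    )
    defer iter.Close()

    for iter.Next() {
        dp, unit, annotation := iter.Current()
        _, _, _ = dp, unit, annotation // consume the decoded datapoint
    }
    return iter.Err()
}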
// initialize multi reader iterator pool multiReaderIteratorPool := encoding.NewMultiReaderIteratorPool(opts.poolOpts) - multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { + multiReaderIteratorPool.Init(func( + r xio.Reader64, + _ namespace.SchemaDescr, + ) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) opts.multiReaderIteratorPool = multiReaderIteratorPool diff --git a/src/dbnode/storage/series/buffer_test.go b/src/dbnode/storage/series/buffer_test.go index b3939e6f19..d9d3e74c08 100644 --- a/src/dbnode/storage/series/buffer_test.go +++ b/src/dbnode/storage/series/buffer_test.go @@ -60,9 +60,10 @@ func newBufferTestOptions() Options { encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + multiReaderIteratorPool.Init( + func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) + }) bufferBucketPool := NewBufferBucketPool(nil) bufferBucketVersionsPool := NewBufferBucketVersionsPool(nil) diff --git a/src/dbnode/storage/series/series_test.go b/src/dbnode/storage/series/series_test.go index 34ffd7e6b7..2e68929f93 100644 --- a/src/dbnode/storage/series/series_test.go +++ b/src/dbnode/storage/series/series_test.go @@ -57,9 +57,10 @@ func newSeriesTestOptions() Options { encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiReaderIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + multiReaderIteratorPool.Init( + func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) + }) bufferBucketPool := NewBufferBucketPool(nil) bufferBucketVersionsPool := NewBufferBucketVersionsPool(nil) diff --git a/src/dbnode/x/xio/reader64.go b/src/dbnode/x/xio/reader64.go index 8a389cab23..0342756a81 100644 --- a/src/dbnode/x/xio/reader64.go +++ b/src/dbnode/x/xio/reader64.go @@ -5,15 +5,18 @@ import ( "io" ) +// BytesReader64 implements a Reader64 over a slice of bytes. type BytesReader64 struct { data []byte index int } +// NewBytesReader64 creates a new BytesReader64. func NewBytesReader64(data []byte) *BytesReader64 { return &BytesReader64{data: data} } +// Read64 reads and returns a 64 bit word plus a number of bytes (up to 8) actually read. func (r *BytesReader64) Read64() (word uint64, n byte, err error) { if r.index+8 <= len(r.data) { // NB: this compiles to a single 64 bit load followed by @@ -34,6 +37,7 @@ func (r *BytesReader64) Read64() (word uint64, n byte, err error) { return res << (64 - 8*bytes), bytes, nil } +// Peek64 peeks and returns the next 64 bit word plus a number of bytes (up to 8) available. 
func (r *BytesReader64) Peek64() (word uint64, n byte, err error) { if r.index+8 <= len(r.data) { // NB: this compiles to a single 64 bit load followed by @@ -55,6 +59,7 @@ func (r *BytesReader64) Peek64() (word uint64, n byte, err error) { return res << (64 - 8*bytes), bytes, nil } +// Reset resets the BytesReader64 for reuse. func (r *BytesReader64) Reset(data []byte) { r.data = data r.index = 0 diff --git a/src/dbnode/x/xio/reader64_test.go b/src/dbnode/x/xio/reader64_test.go index 855c2f1c9a..002d664703 100644 --- a/src/dbnode/x/xio/reader64_test.go +++ b/src/dbnode/x/xio/reader64_test.go @@ -5,8 +5,8 @@ import ( "io" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/tj/assert" ) func TestBytesReader64(t *testing.T) { diff --git a/src/dbnode/x/xio/segment_reader.go b/src/dbnode/x/xio/segment_reader.go index 536416635f..27cfd1586f 100644 --- a/src/dbnode/x/xio/segment_reader.go +++ b/src/dbnode/x/xio/segment_reader.go @@ -64,7 +64,7 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { if sr.si+8 < nh { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - res := binary.BigEndian.Uint64(sr.lazyHead[sr.si:]) + res = binary.BigEndian.Uint64(sr.lazyHead[sr.si:]) sr.si += 8 return res, 8, nil } @@ -84,7 +84,7 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { if sr.si+8 < nht { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - res := binary.BigEndian.Uint64(sr.lazyTail[sr.si-nh:]) + res = binary.BigEndian.Uint64(sr.lazyTail[sr.si-nh:]) sr.si += 8 return res, 8, nil } @@ -114,7 +114,7 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { if i+8 < nh { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - res := binary.BigEndian.Uint64(sr.lazyHead[i:]) + res = binary.BigEndian.Uint64(sr.lazyHead[i:]) return res, 8, nil } @@ -133,7 +133,7 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { if i+8 < nht { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - res := binary.BigEndian.Uint64(sr.lazyTail[i-nh:]) + res = binary.BigEndian.Uint64(sr.lazyTail[i-nh:]) return res, 8, nil } diff --git a/src/dbnode/x/xio/types.go b/src/dbnode/x/xio/types.go index 7576902b07..28cd107f0c 100644 --- a/src/dbnode/x/xio/types.go +++ b/src/dbnode/x/xio/types.go @@ -103,7 +103,6 @@ type ReaderSliceOfSlicesFromBlockReadersIterator interface { Reset(blocks [][]BlockReader) } - // Reader64 is a reader for reading 64 bit words. type Reader64 interface { diff --git a/src/dbnode/x/xio/utils.go b/src/dbnode/x/xio/utils.go index d631a0e4b6..35a64234aa 100644 --- a/src/dbnode/x/xio/utils.go +++ b/src/dbnode/x/xio/utils.go @@ -2,6 +2,8 @@ package xio import "encoding/binary" +// ToBytes reads and returns the contents of Reader64 as a slice of bytes. +// Should normally return io.EOF as an error. func ToBytes(reader Reader64) ([]byte, error) { var ( res []byte diff --git a/src/query/pools/query_pools.go b/src/query/pools/query_pools.go index 664a431bd0..153c91d34d 100644 --- a/src/query/pools/query_pools.go +++ b/src/query/pools/query_pools.go @@ -211,16 +211,18 @@ func BuildIteratorPools( encodingOpts := encoding.NewOptions(). 
SetReaderIteratorPool(readerIteratorPool) - readerIteratorPool.Init(func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + readerIteratorPool.Init( + func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { + return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) + }) pools.multiReaderIterator = encoding.NewMultiReaderIteratorPool(defaultPerSeriesPoolOpts) - pools.multiReaderIterator.Init(func(r xio.Reader64, s namespace.SchemaDescr) encoding.ReaderIterator { - iter := readerIteratorPool.Get() - iter.Reset(r, s) - return iter - }) + pools.multiReaderIterator.Init( + func(r xio.Reader64, s namespace.SchemaDescr) encoding.ReaderIterator { + iter := readerIteratorPool.Get() + iter.Reset(r, s) + return iter + }) pools.seriesIterator = encoding.NewSeriesIteratorPool(defaultPerSeriesPoolOpts) pools.seriesIterator.Init() diff --git a/src/query/test/test_series_iterator.go b/src/query/test/test_series_iterator.go index a2adb81b66..7f980062ef 100644 --- a/src/query/test/test_series_iterator.go +++ b/src/query/test/test_series_iterator.go @@ -22,7 +22,6 @@ package test import ( "fmt" - "io" "sort" "time" @@ -53,7 +52,7 @@ var ( // End is the expected end time for the generated series End time.Time - testIterAlloc func(r io.Reader, d namespace.SchemaDescr) encoding.ReaderIterator + testIterAlloc func(r xio.Reader64, d namespace.SchemaDescr) encoding.ReaderIterator ) func init() { @@ -68,7 +67,7 @@ func init() { Middle = Start.Add(BlockSize) End = Middle.Add(BlockSize) - testIterAlloc = func(r io.Reader, _ namespace.SchemaDescr) encoding.ReaderIterator { + testIterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) } } diff --git a/src/query/ts/m3db/encoded_step_iterator_test.go b/src/query/ts/m3db/encoded_step_iterator_test.go index 6b9a91f690..85b5ad166e 100644 --- a/src/query/ts/m3db/encoded_step_iterator_test.go +++ b/src/query/ts/m3db/encoded_step_iterator_test.go @@ -22,7 +22,6 @@ package m3db import ( "fmt" - "io" "os" "runtime" "sync" @@ -44,8 +43,8 @@ import ( "github.com/m3db/m3/src/x/pool" xsync "github.com/m3db/m3/src/x/sync" xtime "github.com/m3db/m3/src/x/time" - "github.com/pkg/profile" + "github.com/pkg/profile" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -478,7 +477,7 @@ func setupBlock(b *testing.B, iterations int, t iterType) (block.Block, reset, s m3tsz.DefaultIntOptimizationEnabled, encodingOpts) iterAlloc := func( - r io.Reader, + r xio.Reader64, d namespace.SchemaDescr, ) encoding.ReaderIterator { readerIter.Reset(r, d) From 4c44fdad41ac60bb5cc5754410232ab2491c89cd Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 22:35:29 +0200 Subject: [PATCH 11/24] TestSegmentReader64 --- src/dbnode/x/xio/segment_reader_test.go | 55 ++++++++++++++++++++++++- 1 file changed, 53 insertions(+), 2 deletions(-) diff --git a/src/dbnode/x/xio/segment_reader_test.go b/src/dbnode/x/xio/segment_reader_test.go index 7d58932ce7..ec3478fa92 100644 --- a/src/dbnode/x/xio/segment_reader_test.go +++ b/src/dbnode/x/xio/segment_reader_test.go @@ -21,6 +21,7 @@ package xio import ( + "encoding/binary" "io" "testing" @@ -48,6 +49,8 @@ var ( 0xc0, 0x1, 0xf4, 0x1, 0x0, 0x0, 0x0, 0x2, 0x1, 0x2, 0x7, 0x10, 0x1e, 0x0, 0x1, 0x0, 0xe0, 0x65, 0x58, 0xcd, 0x3, 0x0, 0x0, 0x0, 0x0, } + + 
checkdNoPool = func(d []byte) checked.Bytes { return checked.NewBytes(d, nil) } ) type byteFunc func(d []byte) checked.Bytes @@ -88,8 +91,7 @@ func testSegmentReader( } func TestSegmentReaderNoPool(t *testing.T) { - checkd := func(d []byte) checked.Bytes { return checked.NewBytes(d, nil) } - testSegmentReader(t, checkd, nil) + testSegmentReader(t, checkdNoPool, nil) } func TestSegmentReaderWithPool(t *testing.T) { @@ -110,3 +112,52 @@ func TestSegmentReaderWithPool(t *testing.T) { testSegmentReader(t, checkd, bytesPool) } + +func TestSegmentReader64(t *testing.T) { + data := make([]byte, 32) + for i := range data { + data[i] = 100 + byte(i) + } + + for headLen := 0; headLen < len(data); headLen++ { + for tailLen := 0; tailLen < len(data)-headLen; tailLen++ { + testSegmentReader64(t, data[:headLen], data[headLen:headLen+tailLen]) + } + } +} + +func testSegmentReader64(t *testing.T, head []byte, tail []byte) { + var expected []byte + expected = append(expected, head...) + expected = append(expected, tail...) + + var ( + segment = ts.NewSegment(checkdNoPool(head), checkdNoPool(tail), 0, ts.FinalizeNone) + r = NewSegmentReader(segment) + peeked, read []byte + buf [8]byte + word uint64 + n byte + err error + ) + + for { + word, n, err = r.Peek64() + if err != nil { + break + } + binary.BigEndian.PutUint64(buf[:], word) + peeked = append(peeked, buf[:n]...) + + word, n, err = r.Read64() + if err != nil { + break + } + binary.BigEndian.PutUint64(buf[:], word) + read = append(read, buf[:n]...) + } + + require.Equal(t, io.EOF, err) + require.Equal(t, expected, peeked) + require.Equal(t, expected, read) +} From dd42c215712db8fe84b6e1d2b3d0c2f61b3409c7 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 22:42:33 +0200 Subject: [PATCH 12/24] Fix disk_flush_helpers.go --- src/dbnode/integration/disk_flush_helpers.go | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/dbnode/integration/disk_flush_helpers.go b/src/dbnode/integration/disk_flush_helpers.go index be56b6a327..661eda8518 100644 --- a/src/dbnode/integration/disk_flush_helpers.go +++ b/src/dbnode/integration/disk_flush_helpers.go @@ -23,7 +23,6 @@ package integration import ( - "bytes" "errors" "fmt" "testing" @@ -36,6 +35,7 @@ import ( "github.com/m3db/m3/src/dbnode/persist/fs" "github.com/m3db/m3/src/dbnode/sharding" "github.com/m3db/m3/src/dbnode/storage" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/ident" "github.com/m3db/m3/src/x/ident/testutil" xtime "github.com/m3db/m3/src/x/time" @@ -184,14 +184,6 @@ func waitUntilFileSetFilesExist( return waitUntilFileSetFilesExistOrNot(filePathPrefix, files, true, timeout) } -func waitUntilFileSetFilesNotExist( - filePathPrefix string, - files []fs.FileSetFileIdentifier, - timeout time.Duration, -) error { - return waitUntilFileSetFilesExistOrNot(filePathPrefix, files, false, timeout) -} - func waitUntilFileSetFilesExistOrNot( filePathPrefix string, files []fs.FileSetFileIdentifier, @@ -309,7 +301,7 @@ func checkForTime( var datapoints []generate.TestValue it := iteratorPool.Get() - it.Reset(bytes.NewBuffer(data.Bytes()), nsCtx.Schema) + it.Reset(xio.NewBytesReader64(data.Bytes()), nsCtx.Schema) for it.Next() { dp, _, ann := it.Current() datapoints = append(datapoints, generate.TestValue{Datapoint: dp, Annotation: ann}) From 41e3e6c8cebf3c59dbba5397c044bb02fb8d9a11 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 22:43:41 +0200 Subject: [PATCH 13/24] Update benchmark result --- 
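Note: TestSegmentReader64 above drains a segment through Peek64/Read64, and xio.ToBytes does the same internally, which is why a clean end of stream surfaces as io.EOF rather than nil. A sketch of that draining loop under the semantics stated in the Read64 doc comment (big-endian word, left-aligned, n valid bytes); the helper name is illustrative, not part of the patch.

package example

import (
    "encoding/binary"
    "io"

    "github.com/m3db/m3/src/dbnode/x/xio"
)

// drain copies the remaining contents of a Reader64 into a byte slice,
// treating io.EOF as the normal end of stream.
func drain(r xio.Reader64) ([]byte, error) {
    var (
        out []byte
        buf [8]byte
    )
    for {
        word, n, err := r.Read64()
        if err == io.EOF {
            return out, nil
        }
        if err != nil {
            return nil, err
        }
        // Valid bytes sit in the high-order positions of the word.
        binary.BigEndian.PutUint64(buf[:], word)
        out = append(out, buf[:n]...)
    }
}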
src/dbnode/encoding/m3tsz/decoder_benchmark_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index b38982c3e2..27f0c0e87a 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -31,7 +31,7 @@ import ( "github.com/m3db/m3/src/dbnode/x/xio" ) -// BenchmarkM3TSZDecode-12 10000 108797 ns/op +// BenchmarkM3TSZDecode-12 16813 71793 ns/op func BenchmarkM3TSZDecode(b *testing.B) { var ( encodingOpts = encoding.NewOptions() From 40f1dbb56fa677b391d54ac79a77607d4906c1fc Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 23:01:08 +0200 Subject: [PATCH 14/24] Drop IStream interface --- src/dbnode/encoding/encoding_mock.go | 124 ------------------ src/dbnode/encoding/istream.go | 35 +++-- src/dbnode/encoding/istream_test.go | 6 +- .../encoding/m3tsz/decoder_benchmark_test.go | 2 +- .../encoding/m3tsz/float_encoder_iterator.go | 6 +- src/dbnode/encoding/m3tsz/iterator.go | 2 +- .../encoding/m3tsz/timestamp_iterator.go | 25 ++-- .../encoding/proto/int_encoder_iterator.go | 6 +- src/dbnode/encoding/proto/iterator.go | 2 +- src/dbnode/encoding/types.go | 33 +---- 10 files changed, 51 insertions(+), 190 deletions(-) diff --git a/src/dbnode/encoding/encoding_mock.go b/src/dbnode/encoding/encoding_mock.go index ec892c5aaa..201a89c74c 100644 --- a/src/dbnode/encoding/encoding_mock.go +++ b/src/dbnode/encoding/encoding_mock.go @@ -1507,130 +1507,6 @@ func (mr *MockDecoderMockRecorder) Decode(reader interface{}) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Decode", reflect.TypeOf((*MockDecoder)(nil).Decode), reader) } -// MockIStream is a mock of IStream interface -type MockIStream struct { - ctrl *gomock.Controller - recorder *MockIStreamMockRecorder -} - -// MockIStreamMockRecorder is the mock recorder for MockIStream -type MockIStreamMockRecorder struct { - mock *MockIStream -} - -// NewMockIStream creates a new mock instance -func NewMockIStream(ctrl *gomock.Controller) *MockIStream { - mock := &MockIStream{ctrl: ctrl} - mock.recorder = &MockIStreamMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIStream) EXPECT() *MockIStreamMockRecorder { - return m.recorder -} - -// Read mocks base method -func (m *MockIStream) Read(arg0 []byte) (int, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Read", arg0) - ret0, _ := ret[0].(int) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Read indicates an expected call of Read -func (mr *MockIStreamMockRecorder) Read(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Read", reflect.TypeOf((*MockIStream)(nil).Read), arg0) -} - -// ReadBit mocks base method -func (m *MockIStream) ReadBit() (Bit, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ReadBit") - ret0, _ := ret[0].(Bit) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ReadBit indicates an expected call of ReadBit -func (mr *MockIStreamMockRecorder) ReadBit() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReadBit", reflect.TypeOf((*MockIStream)(nil).ReadBit)) -} - -// ReadByte mocks base method -func (m *MockIStream) ReadByte() (byte, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ReadByte") - ret0, _ := ret[0].(byte) - ret1, _ := ret[1].(error) - 
return ret0, ret1 -} - -// ReadByte indicates an expected call of ReadByte -func (mr *MockIStreamMockRecorder) ReadByte() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReadByte", reflect.TypeOf((*MockIStream)(nil).ReadByte)) -} - -// ReadBits mocks base method -func (m *MockIStream) ReadBits(numBits uint8) (uint64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ReadBits", numBits) - ret0, _ := ret[0].(uint64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ReadBits indicates an expected call of ReadBits -func (mr *MockIStreamMockRecorder) ReadBits(numBits interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReadBits", reflect.TypeOf((*MockIStream)(nil).ReadBits), numBits) -} - -// PeekBits mocks base method -func (m *MockIStream) PeekBits(numBits uint8) (uint64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "PeekBits", numBits) - ret0, _ := ret[0].(uint64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// PeekBits indicates an expected call of PeekBits -func (mr *MockIStreamMockRecorder) PeekBits(numBits interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PeekBits", reflect.TypeOf((*MockIStream)(nil).PeekBits), numBits) -} - -// RemainingBitsInCurrentByte mocks base method -func (m *MockIStream) RemainingBitsInCurrentByte() uint { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RemainingBitsInCurrentByte") - ret0, _ := ret[0].(uint) - return ret0 -} - -// RemainingBitsInCurrentByte indicates an expected call of RemainingBitsInCurrentByte -func (mr *MockIStreamMockRecorder) RemainingBitsInCurrentByte() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RemainingBitsInCurrentByte", reflect.TypeOf((*MockIStream)(nil).RemainingBitsInCurrentByte)) -} - -// Reset mocks base method -func (m *MockIStream) Reset(reader xio.Reader64) { - m.ctrl.T.Helper() - m.ctrl.Call(m, "Reset", reader) -} - -// Reset indicates an expected call of Reset -func (mr *MockIStreamMockRecorder) Reset(reader interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Reset", reflect.TypeOf((*MockIStream)(nil).Reset), reader) -} - // MockOStream is a mock of OStream interface type MockOStream struct { ctrl *gomock.Controller diff --git a/src/dbnode/encoding/istream.go b/src/dbnode/encoding/istream.go index 08115b4000..f87d78e20d 100644 --- a/src/dbnode/encoding/istream.go +++ b/src/dbnode/encoding/istream.go @@ -26,8 +26,8 @@ import ( "github.com/m3db/m3/src/dbnode/x/xio" ) -// iStream encapsulates a readable stream. -type iStream struct { +// IStream encapsulates a readable stream. +type IStream struct { r xio.Reader64 err error // error encountered current uint64 // current uint64 we are working off of @@ -35,12 +35,13 @@ type iStream struct { remaining uint8 // bits remaining in current to be read } -// NewIStream creates a new iStream -func NewIStream(reader64 xio.Reader64) IStream { - return &iStream{r: reader64} +// NewIStream creates a new IStream +func NewIStream(reader64 xio.Reader64) *IStream { + return &IStream{r: reader64} } -func (is *iStream) Read(b []byte) (int, error) { +// Read reads len(b) bytes. 
+func (is *IStream) Read(b []byte) (int, error) { var i int for ; i < len(b); i++ { res, err := is.ReadBits(8) @@ -52,17 +53,20 @@ func (is *iStream) Read(b []byte) (int, error) { return i, nil } -func (is *iStream) ReadByte() (byte, error) { +// ReadByte reads the next Byte. +func (is *IStream) ReadByte() (byte, error) { res, err := is.ReadBits(8) return byte(res), err } -func (is *iStream) ReadBit() (Bit, error) { +// ReadBit reads the next Bit. +func (is *IStream) ReadBit() (Bit, error) { res, err := is.ReadBits(1) return Bit(res), err } -func (is *iStream) ReadBits(numBits uint8) (uint64, error) { +// ReadBits reads the next Bits. +func (is *IStream) ReadBits(numBits uint8) (uint64, error) { if is.err != nil { return 0, is.err } @@ -80,7 +84,8 @@ func (is *iStream) ReadBits(numBits uint8) (uint64, error) { return res | is.consumeBuffer(bitsNeeded), nil } -func (is *iStream) PeekBits(numBits uint8) (uint64, error) { +// PeekBits looks at the next Bits, but doesn't move the pos. +func (is *IStream) PeekBits(numBits uint8) (uint64, error) { if numBits <= is.remaining { return readBitsInWord(is.current, numBits), nil } @@ -97,7 +102,8 @@ func (is *iStream) PeekBits(numBits uint8) (uint64, error) { return res | readBitsInWord(next, bitsNeeded), nil } -func (is *iStream) RemainingBitsInCurrentByte() uint { +// RemainingBitsInCurrentByte returns the number of bits remaining to be read in the current byte. +func (is *IStream) RemainingBitsInCurrentByte() uint { return uint(is.remaining % 8) } @@ -107,14 +113,14 @@ func readBitsInWord(w uint64, numBits uint8) uint64 { } // consumeBuffer consumes numBits in is.current. -func (is *iStream) consumeBuffer(numBits uint8) uint64 { +func (is *IStream) consumeBuffer(numBits uint8) uint64 { res := readBitsInWord(is.current, numBits) is.current <<= numBits is.remaining -= numBits return res } -func (is *iStream) readWordFromStream() error { +func (is *IStream) readWordFromStream() error { current, bytes, err := is.r.Read64() is.current = current is.remaining = 8 * bytes @@ -123,7 +129,8 @@ func (is *iStream) readWordFromStream() error { return err } -func (is *iStream) Reset(reader xio.Reader64) { +// Reset resets the IStream. 
+func (is *IStream) Reset(reader xio.Reader64) { is.err = nil is.current = 0 is.remaining = 0 diff --git a/src/dbnode/encoding/istream_test.go b/src/dbnode/encoding/istream_test.go index e9fb4a1d4a..b0de83fb22 100644 --- a/src/dbnode/encoding/istream_test.go +++ b/src/dbnode/encoding/istream_test.go @@ -73,8 +73,7 @@ func TestIStreamReadByte(t *testing.T) { func TestIStreamPeekBitsSuccess(t *testing.T) { byteStream := []byte{0xa9, 0xfe, 0xfe, 0xdf, 0x9b, 0x57, 0x21, 0xf1} - o := NewIStream(xio.NewBytesReader64(byteStream)) - is := o.(*iStream) + is := NewIStream(xio.NewBytesReader64(byteStream)) inputs := []struct { numBits uint8 expected uint64 @@ -177,8 +176,7 @@ func TestIStreamRemainingBitsInCurrentByte(t *testing.T) { } func TestIStreamReset(t *testing.T) { - o := NewIStream(xio.NewBytesReader64([]byte{0xff})) - is := o.(*iStream) + is := NewIStream(xio.NewBytesReader64([]byte{0xff})) _, _ = is.ReadBits(8) _, _ = is.ReadBits(1) is.Reset(xio.NewBytesReader64(nil)) diff --git a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go index 27f0c0e87a..0f70c7b4da 100644 --- a/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go +++ b/src/dbnode/encoding/m3tsz/decoder_benchmark_test.go @@ -31,7 +31,7 @@ import ( "github.com/m3db/m3/src/dbnode/x/xio" ) -// BenchmarkM3TSZDecode-12 16813 71793 ns/op +// BenchmarkM3TSZDecode-12 16867 69272 ns/op func BenchmarkM3TSZDecode(b *testing.B) { var ( encodingOpts = encoding.NewOptions() diff --git a/src/dbnode/encoding/m3tsz/float_encoder_iterator.go b/src/dbnode/encoding/m3tsz/float_encoder_iterator.go index a8ad564b1b..b885cf1c1b 100644 --- a/src/dbnode/encoding/m3tsz/float_encoder_iterator.go +++ b/src/dbnode/encoding/m3tsz/float_encoder_iterator.go @@ -55,7 +55,7 @@ func (eit *FloatEncoderAndIterator) WriteFloat(stream encoding.OStream, val floa } // ReadFloat reads a compressed float from the stream. -func (eit *FloatEncoderAndIterator) ReadFloat(stream encoding.IStream) error { +func (eit *FloatEncoderAndIterator) ReadFloat(stream *encoding.IStream) error { if eit.NotFirst { return eit.readNextFloat(stream) } @@ -102,7 +102,7 @@ func (eit *FloatEncoderAndIterator) writeXOR(stream encoding.OStream, currXOR ui stream.WriteBits(currXOR>>uint(curTrailing), numMeaningfulBits) } -func (eit *FloatEncoderAndIterator) readFullFloat(stream encoding.IStream) error { +func (eit *FloatEncoderAndIterator) readFullFloat(stream *encoding.IStream) error { vb, err := stream.ReadBits(64) if err != nil { return err @@ -114,7 +114,7 @@ func (eit *FloatEncoderAndIterator) readFullFloat(stream encoding.IStream) error return nil } -func (eit *FloatEncoderAndIterator) readNextFloat(stream encoding.IStream) error { +func (eit *FloatEncoderAndIterator) readNextFloat(stream *encoding.IStream) error { cb, err := stream.ReadBits(1) if err != nil { return err diff --git a/src/dbnode/encoding/m3tsz/iterator.go b/src/dbnode/encoding/m3tsz/iterator.go index 09dc2fd7ae..b70ce36011 100644 --- a/src/dbnode/encoding/m3tsz/iterator.go +++ b/src/dbnode/encoding/m3tsz/iterator.go @@ -33,7 +33,7 @@ import ( // readerIterator provides an interface for clients to incrementally // read datapoints off of an encoded stream. 
type readerIterator struct { - is encoding.IStream + is *encoding.IStream opts encoding.Options err error // current error diff --git a/src/dbnode/encoding/m3tsz/timestamp_iterator.go b/src/dbnode/encoding/m3tsz/timestamp_iterator.go index ace3f8d8a6..a9da57e72b 100644 --- a/src/dbnode/encoding/m3tsz/timestamp_iterator.go +++ b/src/dbnode/encoding/m3tsz/timestamp_iterator.go @@ -68,7 +68,7 @@ func NewTimestampIterator(opts encoding.Options, skipMarkers bool) TimestampIter } // ReadTimestamp reads the first or next timestamp. -func (it *TimestampIterator) ReadTimestamp(stream encoding.IStream) (bool, bool, error) { +func (it *TimestampIterator) ReadTimestamp(stream *encoding.IStream) (bool, bool, error) { it.PrevAnt = nil var ( @@ -98,7 +98,7 @@ func (it *TimestampIterator) ReadTimestamp(stream encoding.IStream) (bool, bool, // ReadTimeUnit reads an encoded time unit and updates the iterator's state // accordingly. It is exposed as a public method so that callers can control // the encoding / decoding of the time unit on their own if they choose. -func (it *TimestampIterator) ReadTimeUnit(stream encoding.IStream) error { +func (it *TimestampIterator) ReadTimeUnit(stream *encoding.IStream) error { tuBits, err := stream.ReadByte() if err != nil { return err @@ -113,7 +113,7 @@ func (it *TimestampIterator) ReadTimeUnit(stream encoding.IStream) error { return nil } -func (it *TimestampIterator) readFirstTimestamp(stream encoding.IStream) error { +func (it *TimestampIterator) readFirstTimestamp(stream *encoding.IStream) error { ntBits, err := stream.ReadBits(64) if err != nil { return err @@ -134,7 +134,7 @@ func (it *TimestampIterator) readFirstTimestamp(stream encoding.IStream) error { return nil } -func (it *TimestampIterator) readNextTimestamp(stream encoding.IStream) error { +func (it *TimestampIterator) readNextTimestamp(stream *encoding.IStream) error { dod, err := it.readMarkerOrDeltaOfDelta(stream) if err != nil { return err @@ -145,7 +145,8 @@ func (it *TimestampIterator) readNextTimestamp(stream encoding.IStream) error { return nil } -func (it *TimestampIterator) tryReadMarker(stream encoding.IStream) (time.Duration, bool, error) { +// nolint: gocyclo +func (it *TimestampIterator) tryReadMarker(stream *encoding.IStream) (time.Duration, bool, error) { opcodeAndValue, success := it.tryPeekBits(stream, it.numBits) if !success { return 0, false, nil @@ -201,7 +202,9 @@ func (it *TimestampIterator) tryReadMarker(stream encoding.IStream) (time.Durati } } -func (it *TimestampIterator) readMarkerOrDeltaOfDelta(stream encoding.IStream) (time.Duration, error) { +func (it *TimestampIterator) readMarkerOrDeltaOfDelta( + stream *encoding.IStream, +) (time.Duration, error) { if !it.SkipMarkers { dod, success, err := it.tryReadMarker(stream) if err != nil { @@ -225,7 +228,9 @@ func (it *TimestampIterator) readMarkerOrDeltaOfDelta(stream encoding.IStream) ( } func (it *TimestampIterator) readDeltaOfDelta( - stream encoding.IStream, tes encoding.TimeEncodingScheme) (time.Duration, error) { + stream *encoding.IStream, + tes encoding.TimeEncodingScheme, +) (time.Duration, error) { if it.TimeUnitChanged { // NB(xichen): if the time unit has changed, always read 64 bits as normalized // dod in nanoseconds. 
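The timestamp iterator methods in these hunks pull opcodes and deltas bit by bit from the now-exported *encoding.IStream. A short sketch of that bit-level API, using only NewIStream, PeekBits and ReadBits as defined in istream.go above; the bit widths and names chosen here are illustrative only.

package example

import (
    "fmt"

    "github.com/m3db/m3/src/dbnode/encoding"
    "github.com/m3db/m3/src/dbnode/x/xio"
)

func readSomeBits(data []byte) error {
    is := encoding.NewIStream(xio.NewBytesReader64(data))

    // Peek without consuming, e.g. to check for a marker opcode first.
    opcode, err := is.PeekBits(4)
    if err != nil {
        return err
    }
    fmt.Printf("next 4 bits: %04b\n", opcode)

    // Then consume bits for real.
    value, err := is.ReadBits(13)
    if err != nil {
        return err
    }
    fmt.Printf("13-bit value: %d\n", value)
    return nil
}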
@@ -284,7 +289,7 @@ func (it *TimestampIterator) readDeltaOfDelta( return xtime.FromNormalizedDuration(dod, timeUnit), nil } -func (it *TimestampIterator) readAnnotation(stream encoding.IStream) error { +func (it *TimestampIterator) readAnnotation(stream *encoding.IStream) error { antLen, err := it.readVarint(stream) if err != nil { return err @@ -312,12 +317,12 @@ func (it *TimestampIterator) readAnnotation(stream encoding.IStream) error { return nil } -func (it *TimestampIterator) readVarint(stream encoding.IStream) (int, error) { +func (it *TimestampIterator) readVarint(stream *encoding.IStream) (int, error) { res, err := binary.ReadVarint(stream) return int(res), err } -func (it *TimestampIterator) tryPeekBits(stream encoding.IStream, numBits uint8) (uint64, bool) { +func (it *TimestampIterator) tryPeekBits(stream *encoding.IStream, numBits uint8) (uint64, bool) { res, err := stream.PeekBits(numBits) if err != nil { return 0, false diff --git a/src/dbnode/encoding/proto/int_encoder_iterator.go b/src/dbnode/encoding/proto/int_encoder_iterator.go index f81267bc75..d15e7c472e 100644 --- a/src/dbnode/encoding/proto/int_encoder_iterator.go +++ b/src/dbnode/encoding/proto/int_encoder_iterator.go @@ -144,7 +144,7 @@ func (eit *intEncoderAndIterator) encodeIntValDiff(stream encoding.OStream, valB stream.WriteBits(valBits, int(numSig)) } -func (eit *intEncoderAndIterator) readIntValue(stream encoding.IStream) error { +func (eit *intEncoderAndIterator) readIntValue(stream *encoding.IStream) error { if eit.hasEncodedFirst { changeExistsControlBit, err := stream.ReadBit() if err != nil { @@ -178,7 +178,7 @@ func (eit *intEncoderAndIterator) readIntValue(stream encoding.IStream) error { return nil } -func (eit *intEncoderAndIterator) readIntSig(stream encoding.IStream) error { +func (eit *intEncoderAndIterator) readIntSig(stream *encoding.IStream) error { updateControlBit, err := stream.ReadBit() if err != nil { return fmt.Errorf( @@ -212,7 +212,7 @@ func (eit *intEncoderAndIterator) readIntSig(stream encoding.IStream) error { return nil } -func (eit *intEncoderAndIterator) readIntValDiff(stream encoding.IStream) error { +func (eit *intEncoderAndIterator) readIntValDiff(stream *encoding.IStream) error { negativeControlBit, err := stream.ReadBit() if err != nil { return fmt.Errorf( diff --git a/src/dbnode/encoding/proto/iterator.go b/src/dbnode/encoding/proto/iterator.go index 738a90ed49..d1c81ab575 100644 --- a/src/dbnode/encoding/proto/iterator.go +++ b/src/dbnode/encoding/proto/iterator.go @@ -56,7 +56,7 @@ type iterator struct { err error schema *desc.MessageDescriptor schemaDesc namespace.SchemaDescr - stream encoding.IStream + stream *encoding.IStream marshaller customFieldMarshaller byteFieldDictLRUSize int // TODO(rartoul): Update these as we traverse the stream if we encounter diff --git a/src/dbnode/encoding/types.go b/src/dbnode/encoding/types.go index aece7a217a..567d908016 100644 --- a/src/dbnode/encoding/types.go +++ b/src/dbnode/encoding/types.go @@ -150,19 +150,19 @@ type Options interface { // ByteFieldDictionaryLRUSize returns the ByteFieldDictionaryLRUSize. ByteFieldDictionaryLRUSize() int - // SetIStreamReaderSizeM3TSZ sets the iStream bufio reader size + // SetIStreamReaderSizeM3TSZ sets the IStream bufio reader size // for m3tsz encoding iteration. SetIStreamReaderSizeM3TSZ(value int) Options - // IStreamReaderSizeM3TSZ returns the iStream bufio reader size + // IStreamReaderSizeM3TSZ returns the IStream bufio reader size // for m3tsz encoding iteration. 
IStreamReaderSizeM3TSZ() int - // SetIStreamReaderSizeProto sets the iStream bufio reader size + // SetIStreamReaderSizeProto sets the IStream bufio reader size // for proto encoding iteration. SetIStreamReaderSizeProto(value int) Options - // SetIStreamReaderSizeProto returns the iStream bufio reader size + // SetIStreamReaderSizeProto returns the IStream bufio reader size // for proto encoding iteration. IStreamReaderSizeProto() int } @@ -339,31 +339,6 @@ type EncoderAllocate func() Encoder // ReaderIteratorAllocate allocates a ReaderIterator for a pool. type ReaderIteratorAllocate func(reader xio.Reader64, descr namespace.SchemaDescr) ReaderIterator -// IStream encapsulates a readable stream. -type IStream interface { - // Read reads len(b) bytes. - Read([]byte) (int, error) - - // ReadBit reads the next Bit. - ReadBit() (Bit, error) - - // ReadByte reads the next Byte. - ReadByte() (byte, error) - - // ReadBits reads the next Bits. - ReadBits(numBits uint8) (uint64, error) - - // PeekBits looks at the next Bits, but doesn't move the pos. - PeekBits(numBits uint8) (uint64, error) - - // RemainingBitsInCurrentByte returns the number of bits remaining to - // be read in the current byte. - RemainingBitsInCurrentByte() uint - - // Reset resets the IStream. - Reset(reader xio.Reader64) -} - // OStream encapsulates a writable stream. type OStream interface { // Len returns the length of the OStream. From 129340756e2377c929156c6efca30767657ce594 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 23:01:53 +0200 Subject: [PATCH 15/24] Drop noop XOR with 0 --- src/dbnode/encoding/m3tsz/float_encoder_iterator.go | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dbnode/encoding/m3tsz/float_encoder_iterator.go b/src/dbnode/encoding/m3tsz/float_encoder_iterator.go index b885cf1c1b..9a4348b816 100644 --- a/src/dbnode/encoding/m3tsz/float_encoder_iterator.go +++ b/src/dbnode/encoding/m3tsz/float_encoder_iterator.go @@ -122,7 +122,6 @@ func (eit *FloatEncoderAndIterator) readNextFloat(stream *encoding.IStream) erro if cb == opcodeZeroValueXOR { eit.PrevXOR = 0 - eit.PrevFloatBits ^= eit.PrevXOR return nil } From bd387d27bcd2852ebaa08649facc1ad148e5bad3 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 23:18:18 +0200 Subject: [PATCH 16/24] Fix test condition --- src/dbnode/client/session_fetch_bulk_blocks_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dbnode/client/session_fetch_bulk_blocks_test.go b/src/dbnode/client/session_fetch_bulk_blocks_test.go index 79dde3b276..3482149152 100644 --- a/src/dbnode/client/session_fetch_bulk_blocks_test.go +++ b/src/dbnode/client/session_fetch_bulk_blocks_test.go @@ -1769,7 +1769,7 @@ func TestBlocksResultAddBlockFromPeerReadMerged(t *testing.T) { // Assert block has data data, err := xio.ToBytes(xio.NewSegmentReader(seg)) - require.NoError(t, err) + require.Equal(t, io.EOF, err) assert.Equal(t, []byte{1, 2, 3}, data) } From ca83b170d603cb5d99aaf240230cdf5a43f9675f Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sun, 29 Nov 2020 23:19:05 +0200 Subject: [PATCH 17/24] Copyrights --- src/dbnode/x/xio/reader64.go | 20 ++++++++++++++++++++ src/dbnode/x/xio/reader64_test.go | 20 ++++++++++++++++++++ src/dbnode/x/xio/utils.go | 20 ++++++++++++++++++++ 3 files changed, 60 insertions(+) diff --git a/src/dbnode/x/xio/reader64.go b/src/dbnode/x/xio/reader64.go index 0342756a81..37ccf7a404 100644 --- a/src/dbnode/x/xio/reader64.go +++ b/src/dbnode/x/xio/reader64.go @@ -1,3 +1,23 @@ +// Copyright (c) 
2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + package xio import ( diff --git a/src/dbnode/x/xio/reader64_test.go b/src/dbnode/x/xio/reader64_test.go index 002d664703..cdc7867383 100644 --- a/src/dbnode/x/xio/reader64_test.go +++ b/src/dbnode/x/xio/reader64_test.go @@ -1,3 +1,23 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + package xio import ( diff --git a/src/dbnode/x/xio/utils.go b/src/dbnode/x/xio/utils.go index 35a64234aa..8374bc7af9 100644 --- a/src/dbnode/x/xio/utils.go +++ b/src/dbnode/x/xio/utils.go @@ -1,3 +1,23 @@ +// Copyright (c) 2020 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + package xio import "encoding/binary" From a01bd7122088021fab428b06dbacf6b9ca3ac7c0 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Mon, 30 Nov 2020 00:16:21 +0200 Subject: [PATCH 18/24] Fix some more tests --- src/dbnode/encoding/proto/corruption_prop_test.go | 7 ++++--- src/dbnode/encoding/proto/round_trip_test.go | 12 ++++++------ .../bootstrapper/commitlog/source_prop_test.go | 12 ++++-------- 3 files changed, 14 insertions(+), 17 deletions(-) diff --git a/src/dbnode/encoding/proto/corruption_prop_test.go b/src/dbnode/encoding/proto/corruption_prop_test.go index 7f5ec5e5a9..c58ae0f78a 100644 --- a/src/dbnode/encoding/proto/corruption_prop_test.go +++ b/src/dbnode/encoding/proto/corruption_prop_test.go @@ -23,7 +23,6 @@ package proto import ( - "bytes" "os" "testing" "time" @@ -31,7 +30,9 @@ import ( "github.com/leanovate/gopter" "github.com/leanovate/gopter/gen" "github.com/leanovate/gopter/prop" + "github.com/m3db/m3/src/dbnode/namespace" + "github.com/m3db/m3/src/dbnode/x/xio" ) // TestIteratorHandlesCorruptStreams ensures that the protobuf iterator never panics when reading corrupt streams. @@ -48,8 +49,8 @@ func TestIteratorHandlesCorruptStreams(t *testing.T) { parameters.Rng.Seed(seed) props.Property("Iterator should handle corrupt streams", prop.ForAll(func(input corruptionPropTestInput) (bool, error) { - buff := bytes.NewBuffer(input.bytes) - iter := NewIterator(buff, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) + r := xio.NewBytesReader64(input.bytes) + iter := NewIterator(r, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) for iter.Next() { } return true, nil diff --git a/src/dbnode/encoding/proto/round_trip_test.go b/src/dbnode/encoding/proto/round_trip_test.go index 0e3e6ceb92..3efdf25129 100644 --- a/src/dbnode/encoding/proto/round_trip_test.go +++ b/src/dbnode/encoding/proto/round_trip_test.go @@ -162,8 +162,8 @@ func TestRoundTrip(t *testing.T) { require.NoError(t, err) require.Equal(t, numExpectedBytes, len(rawBytes)) - buff := xio.NewBytesReader64(rawBytes) - iter := NewIterator(buff, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) + r := xio.NewBytesReader64(rawBytes) + iter := NewIterator(r, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) i := 0 for iter.Next() { @@ -224,8 +224,8 @@ func TestRoundTripMidStreamSchemaChanges(t *testing.T) { require.NoError(t, err) // Try reading the stream just using the vl1 schema. - buff := xio.NewBytesReader64(rawBytes) - iter := NewIterator(buff, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) + r := xio.NewBytesReader64(rawBytes) + iter := NewIterator(r, namespace.GetTestSchemaDescr(testVLSchema), testEncodingOptions) require.True(t, iter.Next(), "iter err: %v", iter.Err()) dp, unit, annotation := iter.Current() @@ -260,8 +260,8 @@ func TestRoundTripMidStreamSchemaChanges(t *testing.T) { require.NoError(t, iter.Err()) // Try reading the stream just using the vl2 schema. 
- buff = xio.NewBytesReader64(rawBytes) - iter = NewIterator(buff, namespace.GetTestSchemaDescr(testVL2Schema), testEncodingOptions) + r = xio.NewBytesReader64(rawBytes) + iter = NewIterator(r, namespace.GetTestSchemaDescr(testVL2Schema), testEncodingOptions) require.True(t, iter.Next(), "iter err: %v", iter.Err()) dp, unit, annotation = iter.Current() diff --git a/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_prop_test.go b/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_prop_test.go index 6dea040bbe..a8f14a788e 100644 --- a/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_prop_test.go +++ b/src/dbnode/storage/bootstrap/bootstrapper/commitlog/source_prop_test.go @@ -24,6 +24,7 @@ package commitlog import ( "fmt" + "io" "io/ioutil" "os" "reflect" @@ -45,6 +46,7 @@ import ( "github.com/m3db/m3/src/dbnode/topology" tu "github.com/m3db/m3/src/dbnode/topology/testutil" "github.com/m3db/m3/src/dbnode/ts" + "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/x/checked" "github.com/m3db/m3/src/x/context" "github.com/m3db/m3/src/x/ident" @@ -191,14 +193,8 @@ func TestCommitLogSourcePropCorrectlyBootstrapsFromCommitlog(t *testing.T) { ctx := context.NewContext() reader, ok := encoder.Stream(ctx) if ok { - seg, err := reader.Segment() - if err != nil { - return false, err - } - - bytes := make([]byte, seg.Len()) - _, err = reader.Read(bytes) - if err != nil { + bytes, err := xio.ToBytes(reader) + if err != io.EOF { return false, err } encodersBySeries[seriesID] = bytes From 8530b9e507304cf7963e49518953f93ad77a3809 Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Tue, 1 Dec 2020 21:54:48 +0200 Subject: [PATCH 19/24] Address PR feedback --- src/dbnode/x/xio/reader64_test.go | 13 +++++++------ src/dbnode/x/xio/segment_reader.go | 10 +++++----- src/dbnode/x/xio/segment_reader_test.go | 6 +++--- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/src/dbnode/x/xio/reader64_test.go b/src/dbnode/x/xio/reader64_test.go index cdc7867383..98d29bdb11 100644 --- a/src/dbnode/x/xio/reader64_test.go +++ b/src/dbnode/x/xio/reader64_test.go @@ -36,7 +36,7 @@ func TestBytesReader64(t *testing.T) { ) for l := 0; l < len(data); l++ { - testBytesReader64(t, r, data) + testBytesReader64(t, r, data[:l]) } } @@ -44,11 +44,12 @@ func testBytesReader64(t *testing.T, r *BytesReader64, data []byte) { r.Reset(data) var ( - peeked, read []byte - buf [8]byte - word uint64 - n byte - err error + peeked = []byte{} + read = []byte{} + buf [8]byte + word uint64 + n byte + err error ) for { diff --git a/src/dbnode/x/xio/segment_reader.go b/src/dbnode/x/xio/segment_reader.go index 27cfd1586f..0f8b98f681 100644 --- a/src/dbnode/x/xio/segment_reader.go +++ b/src/dbnode/x/xio/segment_reader.go @@ -61,7 +61,7 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { return 0, 0, io.EOF } - if sr.si+8 < nh { + if sr.si+8 <= nh { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). res = binary.BigEndian.Uint64(sr.lazyHead[sr.si:]) @@ -81,7 +81,7 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { return res << (64 - 8*bytes), bytes, nil } - if sr.si+8 < nht { + if sr.si+8 <= nht { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). 
res = binary.BigEndian.Uint64(sr.lazyTail[sr.si-nh:]) @@ -89,7 +89,7 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { return res, 8, nil } - for ; sr.si < nht && bytes < 8; sr.si++ { + for ; sr.si < nht; sr.si++ { res = (res << 8) | uint64(sr.lazyTail[sr.si-nh]) bytes++ } @@ -111,7 +111,7 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { return 0, 0, io.EOF } - if i+8 < nh { + if i+8 <= nh { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). res = binary.BigEndian.Uint64(sr.lazyHead[i:]) @@ -130,7 +130,7 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { return res << (64 - 8*bytes), bytes, nil } - if i+8 < nht { + if i+8 <= nht { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). res = binary.BigEndian.Uint64(sr.lazyTail[i-nh:]) diff --git a/src/dbnode/x/xio/segment_reader_test.go b/src/dbnode/x/xio/segment_reader_test.go index ec3478fa92..7612a40fc2 100644 --- a/src/dbnode/x/xio/segment_reader_test.go +++ b/src/dbnode/x/xio/segment_reader_test.go @@ -50,7 +50,7 @@ var ( 0x0, 0x1, 0x0, 0xe0, 0x65, 0x58, 0xcd, 0x3, 0x0, 0x0, 0x0, 0x0, } - checkdNoPool = func(d []byte) checked.Bytes { return checked.NewBytes(d, nil) } + checkedNoPool = func(d []byte) checked.Bytes { return checked.NewBytes(d, nil) } ) type byteFunc func(d []byte) checked.Bytes @@ -91,7 +91,7 @@ func testSegmentReader( } func TestSegmentReaderNoPool(t *testing.T) { - testSegmentReader(t, checkdNoPool, nil) + testSegmentReader(t, checkedNoPool, nil) } func TestSegmentReaderWithPool(t *testing.T) { @@ -132,7 +132,7 @@ func testSegmentReader64(t *testing.T, head []byte, tail []byte) { expected = append(expected, tail...) 
var ( - segment = ts.NewSegment(checkdNoPool(head), checkdNoPool(tail), 0, ts.FinalizeNone) + segment = ts.NewSegment(checkedNoPool(head), checkedNoPool(tail), 0, ts.FinalizeNone) r = NewSegmentReader(segment) peeked, read []byte buf [8]byte From ffe5e4076f3dc30d8af8aa3e85f2e397fc80279c Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sat, 5 Dec 2020 22:33:39 +0200 Subject: [PATCH 20/24] Apply some DRY --- .../main/parser/series_iterator_builder.go | 5 +---- src/dbnode/client/config.go | 6 +----- .../fetch_tagged_results_accumulator_misc_test.go | 6 +----- src/dbnode/client/options.go | 7 +------ .../client/session_fetch_bulk_blocks_test.go | 4 +--- src/dbnode/encoding/m3tsz/iterator.go | 9 +++++++++ src/dbnode/persist/fs/merger_test.go | 4 +--- src/dbnode/storage/block/options.go | 5 +---- src/dbnode/storage/bootstrap/util.go | 7 +------ src/dbnode/storage/options.go | 14 ++------------ src/dbnode/storage/series/buffer_test.go | 5 +---- src/dbnode/storage/series/series_test.go | 5 +---- src/query/pools/query_pools.go | 7 +------ src/query/remote/compressed_codecs.go | 4 +--- src/query/test/test_series_iterator.go | 4 +--- src/query/ts/m3db/options.go | 6 +----- 16 files changed, 25 insertions(+), 73 deletions(-) diff --git a/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go b/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go index aa46fe138c..6bfaec2736 100644 --- a/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go +++ b/src/cmd/services/m3comparator/main/parser/series_iterator_builder.go @@ -25,7 +25,6 @@ import ( "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/m3tsz" - "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/ts" "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/query/models" @@ -42,9 +41,7 @@ type IngestSeries struct { Tags Tags } -var iterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) -} +var iterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) func buildBlockReader( block Data, diff --git a/src/dbnode/client/config.go b/src/dbnode/client/config.go index a9a359a6c8..5ff2b60982 100644 --- a/src/dbnode/client/config.go +++ b/src/dbnode/client/config.go @@ -30,7 +30,6 @@ import ( "github.com/m3db/m3/src/dbnode/environment" "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/topology" - "github.com/m3db/m3/src/dbnode/x/xio" xerrors "github.com/m3db/m3/src/x/errors" "github.com/m3db/m3/src/x/ident" "github.com/m3db/m3/src/x/instrument" @@ -407,10 +406,7 @@ func (c Configuration) NewAdminClient( encodingOpts = encoding.NewOptions() } - v = v.SetReaderIteratorAllocate( - func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + v = v.SetReaderIteratorAllocate(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) if c.Proto != nil && c.Proto.Enabled { v = v.SetEncodingProto(encodingOpts) diff --git a/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go b/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go index df9456a5ce..66b6e14019 100644 --- a/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go +++ b/src/dbnode/client/fetch_tagged_results_accumulator_misc_test.go @@ -31,9 +31,7 @@ import ( "github.com/m3db/m3/src/dbnode/encoding" 
"github.com/m3db/m3/src/dbnode/encoding/m3tsz" "github.com/m3db/m3/src/dbnode/generated/thrift/rpc" - "github.com/m3db/m3/src/dbnode/namespace" "github.com/m3db/m3/src/dbnode/storage/index" - "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/dbnode/x/xpool" "github.com/m3db/m3/src/x/ident" "github.com/m3db/m3/src/x/pool" @@ -272,9 +270,7 @@ func initTestFetchTaggedPools() *testFetchTaggedPools { pools.readerSlices.Init() pools.multiReader = encoding.NewMultiReaderIteratorPool(opts) - pools.multiReader.Init(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - }) + pools.multiReader.Init(m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions())) pools.seriesIter = encoding.NewSeriesIteratorPool(opts) pools.seriesIter.Init() diff --git a/src/dbnode/client/options.go b/src/dbnode/client/options.go index 1caa44e10f..84250f5143 100644 --- a/src/dbnode/client/options.go +++ b/src/dbnode/client/options.go @@ -452,12 +452,7 @@ func (o *options) Validate() error { func (o *options) SetEncodingM3TSZ() Options { opts := *o - opts.readerIteratorAllocate = func( - r xio.Reader64, - _ namespace.SchemaDescr, - ) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - } + opts.readerIteratorAllocate = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) opts.isProtoEnabled = false return &opts } diff --git a/src/dbnode/client/session_fetch_bulk_blocks_test.go b/src/dbnode/client/session_fetch_bulk_blocks_test.go index 3482149152..f81531c9d6 100644 --- a/src/dbnode/client/session_fetch_bulk_blocks_test.go +++ b/src/dbnode/client/session_fetch_bulk_blocks_test.go @@ -102,9 +102,7 @@ func testsNsMetadata(t *testing.T) namespace.Metadata { func newSessionTestMultiReaderIteratorPool() encoding.MultiReaderIteratorPool { p := encoding.NewMultiReaderIteratorPool(nil) - p.Init(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - }) + p.Init(m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions())) return p } diff --git a/src/dbnode/encoding/m3tsz/iterator.go b/src/dbnode/encoding/m3tsz/iterator.go index b70ce36011..22a1be963a 100644 --- a/src/dbnode/encoding/m3tsz/iterator.go +++ b/src/dbnode/encoding/m3tsz/iterator.go @@ -30,6 +30,15 @@ import ( xtime "github.com/m3db/m3/src/x/time" ) +// DefaultReaderIteratorAllocFn returns a function for allocating NewReaderIterator. +func DefaultReaderIteratorAllocFn( + opts encoding.Options, +) func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { + return NewReaderIterator(r, DefaultIntOptimizationEnabled, opts) + } +} + // readerIterator provides an interface for clients to incrementally // read datapoints off of an encoded stream. 
type readerIterator struct { diff --git a/src/dbnode/persist/fs/merger_test.go b/src/dbnode/persist/fs/merger_test.go index 7aab2e493f..2b018ee882 100644 --- a/src/dbnode/persist/fs/merger_test.go +++ b/src/dbnode/persist/fs/merger_test.go @@ -75,9 +75,7 @@ func init() { srPool = xio.NewSegmentReaderPool(poolOpts) srPool.Init() multiIterPool = encoding.NewMultiReaderIteratorPool(poolOpts) - multiIterPool.Init(func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - }) + multiIterPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions())) bytesPool := pool.NewCheckedBytesPool(nil, poolOpts, func(s []pool.Bucket) pool.BytesPool { return pool.NewBytesPool(s, poolOpts) }) diff --git a/src/dbnode/storage/block/options.go b/src/dbnode/storage/block/options.go index 888cafc468..14e1581db7 100644 --- a/src/dbnode/storage/block/options.go +++ b/src/dbnode/storage/block/options.go @@ -91,10 +91,7 @@ func NewOptions() Options { o.encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - o.readerIteratorPool.Init( - func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + o.readerIteratorPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) o.multiReaderIteratorPool.Init( func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { it := o.readerIteratorPool.Get() diff --git a/src/dbnode/storage/bootstrap/util.go b/src/dbnode/storage/bootstrap/util.go index ae79579b18..a164f52b90 100644 --- a/src/dbnode/storage/bootstrap/util.go +++ b/src/dbnode/storage/bootstrap/util.go @@ -316,12 +316,7 @@ type NamespacesTester struct { func buildDefaultIterPool() encoding.MultiReaderIteratorPool { iterPool := encoding.NewMultiReaderIteratorPool(pool.NewObjectPoolOptions()) - iterPool.Init( - func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, - m3tsz.DefaultIntOptimizationEnabled, - encoding.NewOptions()) - }) + iterPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions())) return iterPool } diff --git a/src/dbnode/storage/options.go b/src/dbnode/storage/options.go index 54f08064ba..0d0199e310 100644 --- a/src/dbnode/storage/options.go +++ b/src/dbnode/storage/options.go @@ -500,22 +500,12 @@ func (o *options) SetEncodingM3TSZPooled() Options { opts.encoderPool = encoderPool // initialize single reader iterator pool - readerIteratorPool.Init(func( - r xio.Reader64, - _ namespace.SchemaDescr, - ) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + readerIteratorPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) opts.readerIteratorPool = readerIteratorPool // initialize multi reader iterator pool multiReaderIteratorPool := encoding.NewMultiReaderIteratorPool(opts.poolOpts) - multiReaderIteratorPool.Init(func( - r xio.Reader64, - _ namespace.SchemaDescr, - ) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + multiReaderIteratorPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) opts.multiReaderIteratorPool = multiReaderIteratorPool opts.blockOpts = opts.blockOpts. 
diff --git a/src/dbnode/storage/series/buffer_test.go b/src/dbnode/storage/series/buffer_test.go index d9d3e74c08..b2d7408899 100644 --- a/src/dbnode/storage/series/buffer_test.go +++ b/src/dbnode/storage/series/buffer_test.go @@ -60,10 +60,7 @@ func newBufferTestOptions() Options { encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiReaderIteratorPool.Init( - func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + multiReaderIteratorPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) bufferBucketPool := NewBufferBucketPool(nil) bufferBucketVersionsPool := NewBufferBucketVersionsPool(nil) diff --git a/src/dbnode/storage/series/series_test.go b/src/dbnode/storage/series/series_test.go index 2e68929f93..1f43efafe3 100644 --- a/src/dbnode/storage/series/series_test.go +++ b/src/dbnode/storage/series/series_test.go @@ -57,10 +57,7 @@ func newSeriesTestOptions() Options { encoderPool.Init(func() encoding.Encoder { return m3tsz.NewEncoder(timeZero, nil, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) }) - multiReaderIteratorPool.Init( - func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + multiReaderIteratorPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) bufferBucketPool := NewBufferBucketPool(nil) bufferBucketVersionsPool := NewBufferBucketVersionsPool(nil) diff --git a/src/query/pools/query_pools.go b/src/query/pools/query_pools.go index 153c91d34d..d0e1dcd360 100644 --- a/src/query/pools/query_pools.go +++ b/src/query/pools/query_pools.go @@ -41,10 +41,8 @@ const ( defaultReplicas = 3 defaultSeriesIteratorPoolSize = 2 << 12 // ~8k defaultCheckedBytesWrapperPoolSize = 2 << 12 // ~8k - defaultBucketCapacity = 256 defaultPoolableConcurrentQueries = 64 defaultPoolableSeriesPerQuery = 4096 - defaultSeriesReplicaReaderPoolSize = defaultPoolableConcurrentQueries * defaultPoolableSeriesPerQuery * defaultReplicas ) var ( @@ -211,10 +209,7 @@ func BuildIteratorPools( encodingOpts := encoding.NewOptions(). 
SetReaderIteratorPool(readerIteratorPool) - readerIteratorPool.Init( - func(r xio.Reader64, descr namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encodingOpts) - }) + readerIteratorPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts)) pools.multiReaderIterator = encoding.NewMultiReaderIteratorPool(defaultPerSeriesPoolOpts) pools.multiReaderIterator.Init( diff --git a/src/query/remote/compressed_codecs.go b/src/query/remote/compressed_codecs.go index d5a64a345c..77f41772a6 100644 --- a/src/query/remote/compressed_codecs.go +++ b/src/query/remote/compressed_codecs.go @@ -46,9 +46,7 @@ func initializeVars() { b.Reset(nil) })) - iterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - } + iterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) } var ( diff --git a/src/query/test/test_series_iterator.go b/src/query/test/test_series_iterator.go index 7f980062ef..26b1586fe8 100644 --- a/src/query/test/test_series_iterator.go +++ b/src/query/test/test_series_iterator.go @@ -67,9 +67,7 @@ func init() { Middle = Start.Add(BlockSize) End = Middle.Add(BlockSize) - testIterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - } + testIterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) } // Builds a MultiReaderIterator representing a single replica diff --git a/src/query/ts/m3db/options.go b/src/query/ts/m3db/options.go index 2b4985367f..9a5896ad0d 100644 --- a/src/query/ts/m3db/options.go +++ b/src/query/ts/m3db/options.go @@ -28,8 +28,6 @@ import ( "github.com/m3db/m3/src/dbnode/client" "github.com/m3db/m3/src/dbnode/encoding" "github.com/m3db/m3/src/dbnode/encoding/m3tsz" - "github.com/m3db/m3/src/dbnode/namespace" - "github.com/m3db/m3/src/dbnode/x/xio" "github.com/m3db/m3/src/query/models" "github.com/m3db/m3/src/query/pools" queryconsolidator "github.com/m3db/m3/src/query/storage/m3/consolidators" @@ -43,9 +41,7 @@ var ( defaultCount = 10 defaultLookbackDuration = time.Duration(0) defaultConsolidationFn = consolidators.TakeLast - defaultIterAlloc = func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { - return m3tsz.NewReaderIterator(r, m3tsz.DefaultIntOptimizationEnabled, encoding.NewOptions()) - } + defaultIterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) defaultIteratorBatchingFn = iteratorBatchingFn defaultBlockSeriesProcessor = NewBlockSeriesProcessor() defaultInstrumented = true From 6f2f3e39f9788b374bbf743863dbfe88f3ae08ed Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sat, 5 Dec 2020 22:51:32 +0200 Subject: [PATCH 21/24] Revert some changes --- src/dbnode/client/config.go | 4 ++-- src/dbnode/client/options.go | 2 +- src/dbnode/encoding/istream.go | 2 +- src/dbnode/encoding/istream_test.go | 2 +- src/query/ts/m3db/options.go | 10 +++++----- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/dbnode/client/config.go b/src/dbnode/client/config.go index 5ff2b60982..18bc61e361 100644 --- a/src/dbnode/client/config.go +++ b/src/dbnode/client/config.go @@ -292,8 +292,8 @@ func (c Configuration) NewAdminClient( syncTopoInit = params.TopologyInitializer syncClientOverrides environment.ClientOverrides syncNsInit namespace.Initializer - asyncTopoInits []topology.Initializer - 
asyncClientOverrides []environment.ClientOverrides + asyncTopoInits = []topology.Initializer{} + asyncClientOverrides = []environment.ClientOverrides{} ) var buildAsyncPool bool diff --git a/src/dbnode/client/options.go b/src/dbnode/client/options.go index 84250f5143..abc8c28e45 100644 --- a/src/dbnode/client/options.go +++ b/src/dbnode/client/options.go @@ -187,7 +187,7 @@ var ( defaultFetchSeriesBlocksBatchConcurrency = int(math.Max(1, float64(runtime.NumCPU())/2)) // defaultSeriesIteratorArrayPoolBuckets is the default pool buckets for the series iterator array pool - defaultSeriesIteratorArrayPoolBuckets []pool.Bucket + defaultSeriesIteratorArrayPoolBuckets = []pool.Bucket{} // defaulWriteRetrier is the default write retrier for write attempts defaultWriteRetrier = xretry.NewRetrier( diff --git a/src/dbnode/encoding/istream.go b/src/dbnode/encoding/istream.go index f87d78e20d..f348f95419 100644 --- a/src/dbnode/encoding/istream.go +++ b/src/dbnode/encoding/istream.go @@ -1,4 +1,4 @@ -// Copyright (c) 2020 Uber Technologies, Inc. +// Copyright (c) 2016 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/src/dbnode/encoding/istream_test.go b/src/dbnode/encoding/istream_test.go index b0de83fb22..eddc4dd30d 100644 --- a/src/dbnode/encoding/istream_test.go +++ b/src/dbnode/encoding/istream_test.go @@ -1,4 +1,4 @@ -// Copyright (c) 2020 Uber Technologies, Inc. +// Copyright (c) 2016 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/src/query/ts/m3db/options.go b/src/query/ts/m3db/options.go index 9a5896ad0d..153df2aac7 100644 --- a/src/query/ts/m3db/options.go +++ b/src/query/ts/m3db/options.go @@ -37,11 +37,11 @@ import ( ) var ( - defaultCapacity = 1024 - defaultCount = 10 - defaultLookbackDuration = time.Duration(0) - defaultConsolidationFn = consolidators.TakeLast - defaultIterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) + defaultCapacity = 1024 + defaultCount = 10 + defaultLookbackDuration = time.Duration(0) + defaultConsolidationFn = consolidators.TakeLast + defaultIterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions()) defaultIteratorBatchingFn = iteratorBatchingFn defaultBlockSeriesProcessor = NewBlockSeriesProcessor() defaultInstrumented = true From 9f1d3c1ab4c54cce7ece32f2b849e1f9e5797c7d Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sat, 9 Jan 2021 22:15:23 +0200 Subject: [PATCH 22/24] Improve segment_reader_test --- src/dbnode/x/xio/segment_reader_test.go | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/dbnode/x/xio/segment_reader_test.go b/src/dbnode/x/xio/segment_reader_test.go index 7612a40fc2..fed14e0680 100644 --- a/src/dbnode/x/xio/segment_reader_test.go +++ b/src/dbnode/x/xio/segment_reader_test.go @@ -49,12 +49,14 @@ var ( 0xc0, 0x1, 0xf4, 0x1, 0x0, 0x0, 0x0, 0x2, 0x1, 0x2, 0x7, 0x10, 0x1e, 0x0, 0x1, 0x0, 0xe0, 0x65, 0x58, 0xcd, 0x3, 0x0, 0x0, 0x0, 0x0, } - - checkedNoPool = func(d []byte) checked.Bytes { return checked.NewBytes(d, nil) } ) type byteFunc func(d []byte) checked.Bytes +func checkedNoPool(d []byte) checked.Bytes { + return checked.NewBytes(d, nil) +} + func testSegmentReader( t *testing.T, checkd byteFunc, @@ -150,9 +152,8 @@ func testSegmentReader64(t *testing.T, head []byte, tail 
[]byte) { peeked = append(peeked, buf[:n]...) word, n, err = r.Read64() - if err != nil { - break - } + require.NoError(t, err) + binary.BigEndian.PutUint64(buf[:], word) read = append(read, buf[:n]...) } @@ -160,4 +161,7 @@ func testSegmentReader64(t *testing.T, head []byte, tail []byte) { require.Equal(t, io.EOF, err) require.Equal(t, expected, peeked) require.Equal(t, expected, read) + + _, _, err = r.Read64() + require.Equal(t, io.EOF, err) } From ccbc64c9ce570f17e81fcc6bdcd8ebc76f2fcacb Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Sat, 13 Feb 2021 22:28:03 +0200 Subject: [PATCH 23/24] Address PR feedback --- src/dbnode/encoding/istream.go | 2 ++ src/dbnode/x/xio/segment_reader.go | 50 ++++++++++++++++-------------- 2 files changed, 28 insertions(+), 24 deletions(-) diff --git a/src/dbnode/encoding/istream.go b/src/dbnode/encoding/istream.go index f348f95419..1d1d2e6a83 100644 --- a/src/dbnode/encoding/istream.go +++ b/src/dbnode/encoding/istream.go @@ -71,9 +71,11 @@ func (is *IStream) ReadBits(numBits uint8) (uint64, error) { return 0, is.err } if numBits <= is.remaining { + // Have enough bits buffered. return is.consumeBuffer(numBits), nil } res := readBitsInWord(is.current, numBits) + // Not enough bits buffered, read next word from the stream. bitsNeeded := numBits - is.remaining if err := is.readWordFromStream(); err != nil { return 0, err diff --git a/src/dbnode/x/xio/segment_reader.go b/src/dbnode/x/xio/segment_reader.go index 0f8b98f681..73ee5caf26 100644 --- a/src/dbnode/x/xio/segment_reader.go +++ b/src/dbnode/x/xio/segment_reader.go @@ -51,17 +51,18 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { sr.lazyInit() var ( - nh = len(sr.lazyHead) - nht = nh + len(sr.lazyTail) + headLen = len(sr.lazyHead) + headTailLen = headLen + len(sr.lazyTail) + res uint64 bytes byte ) - if sr.si >= nht { + if sr.si >= headTailLen { return 0, 0, io.EOF } - if sr.si+8 <= nh { + if sr.si+8 <= headLen { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). res = binary.BigEndian.Uint64(sr.lazyHead[sr.si:]) @@ -69,28 +70,28 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { return res, 8, nil } - if sr.si < nh { - for ; sr.si < nh; sr.si++ { + if sr.si < headLen { + for ; sr.si < headLen; sr.si++ { res = (res << 8) | uint64(sr.lazyHead[sr.si]) bytes++ } - for ; sr.si < nht && bytes < 8; sr.si++ { - res = (res << 8) | uint64(sr.lazyTail[sr.si-nh]) + for ; sr.si < headTailLen && bytes < 8; sr.si++ { + res = (res << 8) | uint64(sr.lazyTail[sr.si-headLen]) bytes++ } return res << (64 - 8*bytes), bytes, nil } - if sr.si+8 <= nht { + if sr.si+8 <= headTailLen { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). 
- res = binary.BigEndian.Uint64(sr.lazyTail[sr.si-nh:]) + res = binary.BigEndian.Uint64(sr.lazyTail[sr.si-headLen:]) sr.si += 8 return res, 8, nil } - for ; sr.si < nht; sr.si++ { - res = (res << 8) | uint64(sr.lazyTail[sr.si-nh]) + for ; sr.si < headTailLen; sr.si++ { + res = (res << 8) | uint64(sr.lazyTail[sr.si-headLen]) bytes++ } return res << (64 - 8*bytes), bytes, nil @@ -100,45 +101,46 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { sr.lazyInit() var ( - nh = len(sr.lazyHead) - nht = nh + len(sr.lazyTail) + headLen = len(sr.lazyHead) + headTailLen = headLen + len(sr.lazyTail) + i = sr.si res uint64 bytes byte ) - if i >= nht { + if i >= headTailLen { return 0, 0, io.EOF } - if i+8 <= nh { + if i+8 <= headLen { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). res = binary.BigEndian.Uint64(sr.lazyHead[i:]) return res, 8, nil } - if i < nh { - for ; i < nh; i++ { + if i < headLen { + for ; i < headLen; i++ { res = (res << 8) | uint64(sr.lazyHead[i]) bytes++ } - for ; i < nht && bytes < 8; i++ { - res = (res << 8) | uint64(sr.lazyTail[i-nh]) + for ; i < headTailLen && bytes < 8; i++ { + res = (res << 8) | uint64(sr.lazyTail[i-headLen]) bytes++ } return res << (64 - 8*bytes), bytes, nil } - if i+8 <= nht { + if i+8 <= headTailLen { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). - res = binary.BigEndian.Uint64(sr.lazyTail[i-nh:]) + res = binary.BigEndian.Uint64(sr.lazyTail[i-headLen:]) return res, 8, nil } - for ; i < nht && bytes < 8; i++ { - res = (res << 8) | uint64(sr.lazyTail[i-nh]) + for ; i < headTailLen && bytes < 8; i++ { + res = (res << 8) | uint64(sr.lazyTail[i-headLen]) bytes++ } return res << (64 - 8*bytes), bytes, nil From 8b87db8a63d5648a22983b0724cbd41ca37b8b1f Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Mon, 15 Feb 2021 10:07:47 +0200 Subject: [PATCH 24/24] Only compute headTailLen when needed --- src/dbnode/x/xio/segment_reader.go | 40 +++++++++++++++--------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/src/dbnode/x/xio/segment_reader.go b/src/dbnode/x/xio/segment_reader.go index 73ee5caf26..5555937cc2 100644 --- a/src/dbnode/x/xio/segment_reader.go +++ b/src/dbnode/x/xio/segment_reader.go @@ -51,17 +51,11 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { sr.lazyInit() var ( - headLen = len(sr.lazyHead) - headTailLen = headLen + len(sr.lazyTail) - - res uint64 - bytes byte + headLen = len(sr.lazyHead) + res uint64 + bytes byte ) - if sr.si >= headTailLen { - return 0, 0, io.EOF - } - if sr.si+8 <= headLen { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). 
@@ -70,6 +64,8 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { return res, 8, nil } + headTailLen := headLen + len(sr.lazyTail) + if sr.si < headLen { for ; sr.si < headLen; sr.si++ { res = (res << 8) | uint64(sr.lazyHead[sr.si]) @@ -90,6 +86,10 @@ func (sr *segmentReader) Read64() (word uint64, n byte, err error) { return res, 8, nil } + if sr.si >= headTailLen { + return 0, 0, io.EOF + } + for ; sr.si < headTailLen; sr.si++ { res = (res << 8) | uint64(sr.lazyTail[sr.si-headLen]) bytes++ @@ -101,18 +101,12 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { sr.lazyInit() var ( - headLen = len(sr.lazyHead) - headTailLen = headLen + len(sr.lazyTail) - - i = sr.si - res uint64 - bytes byte + headLen = len(sr.lazyHead) + i = sr.si + res uint64 + bytes byte ) - if i >= headTailLen { - return 0, 0, io.EOF - } - if i+8 <= headLen { // NB: this compiles to a single 64 bit load followed by // a BSWAPQ on amd64 gc 1.13 (https://godbolt.org/z/oTK1jx). @@ -120,6 +114,8 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { return res, 8, nil } + headTailLen := headLen + len(sr.lazyTail) + if i < headLen { for ; i < headLen; i++ { res = (res << 8) | uint64(sr.lazyHead[i]) @@ -139,7 +135,11 @@ func (sr *segmentReader) Peek64() (word uint64, n byte, err error) { return res, 8, nil } - for ; i < headTailLen && bytes < 8; i++ { + if i >= headTailLen { + return 0, 0, io.EOF + } + + for ; i < headTailLen; i++ { res = (res << 8) | uint64(sr.lazyTail[i-headLen]) bytes++ }
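For reference, the consolidation in patch 20 means every call site that previously spelled out the m3tsz reader iterator closure now just passes m3tsz.DefaultReaderIteratorAllocFn(...) to the relevant pool's Init. Below is a minimal usage sketch in Go, assuming only the import paths and constructors already visible in the diffs above; the package name and variable names are illustrative, not part of the change.

package main

import (
	"github.com/m3db/m3/src/dbnode/encoding"
	"github.com/m3db/m3/src/dbnode/encoding/m3tsz"
	"github.com/m3db/m3/src/x/pool"
)

func main() {
	// A single encoding.Options value is shared by every iterator the pool
	// allocates, mirroring the call sites updated in patch 20.
	encodingOpts := encoding.NewOptions()

	// DefaultReaderIteratorAllocFn replaces the hand-written closure
	//   func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator { ... }
	// that used to be repeated at each Init call site.
	iterPool := encoding.NewMultiReaderIteratorPool(pool.NewObjectPoolOptions())
	iterPool.Init(m3tsz.DefaultReaderIteratorAllocFn(encodingOpts))

	_ = iterPool // the pool is now ready to hand out multi reader iterators
}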