From 18cae4cc062f918bbf53feff7d90420a70c535d9 Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Thu, 18 Apr 2024 15:00:51 +0200 Subject: [PATCH 01/11] deepnpts_firststab --- nbs/common.scalers.ipynb | 4 +- nbs/core.ipynb | 3 +- nbs/losses.pytorch.ipynb | 1712 +++++++++++++++++++++++++++-- nbs/models.deepnpts.ipynb | 1137 +++++++++++++++++++ neuralforecast/_modidx.py | 18 + neuralforecast/common/_scalers.py | 4 +- neuralforecast/core.py | 3 + neuralforecast/losses/pytorch.py | 110 +- neuralforecast/models/__init__.py | 3 +- neuralforecast/models/deepnpts.py | 557 ++++++++++ 10 files changed, 3371 insertions(+), 180 deletions(-) create mode 100644 nbs/models.deepnpts.ipynb create mode 100644 neuralforecast/models/deepnpts.py diff --git a/nbs/common.scalers.ipynb b/nbs/common.scalers.ipynb index c06fa0da2..921d5adaf 100644 --- a/nbs/common.scalers.ipynb +++ b/nbs/common.scalers.ipynb @@ -567,8 +567,8 @@ " shape = list(x.shape)\n", " shape[dim] = 1\n", "\n", - " x_shift = torch.zeros(shape)\n", - " x_scale = torch.ones(shape)\n", + " x_shift = torch.zeros(shape, device=x.device)\n", + " x_scale = torch.ones(shape, device=x.device)\n", "\n", " return x_shift, x_scale" ] diff --git a/nbs/core.ipynb b/nbs/core.ipynb index 2c2b15c50..710fcd0b4 100644 --- a/nbs/core.ipynb +++ b/nbs/core.ipynb @@ -90,7 +90,7 @@ " Informer, Autoformer, FEDformer,\n", " StemGNN, PatchTST, TimesNet, TimeLLM, TSMixer, TSMixerx,\n", " MLPMultivariate, iTransformer,\n", - " BiTCN,\n", + " BiTCN, DeepNPTS\n", ")" ] }, @@ -233,6 +233,7 @@ " 'mlpmultivariate': MLPMultivariate, 'automlpmultivariate': MLPMultivariate,\n", " 'itransformer': iTransformer, 'autoitransformer': iTransformer,\n", " 'bitcn': BiTCN, 'autobitcn': BiTCN,\n", + " 'deepnpts': DeepNPTS, 'autodeepnpts': DeepNPTS,\n", "}" ] }, diff --git a/nbs/losses.pytorch.ipynb b/nbs/losses.pytorch.ipynb index 55cd837b3..36adfaabd 100644 --- a/nbs/losses.pytorch.ipynb +++ b/nbs/losses.pytorch.ipynb @@ -244,7 +244,61 @@ "execution_count": 
null, "id": "1d004cd0", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__init__\n", + "\n", + "> MAE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Error\n", + "\n", + "Calculates Mean Absolute Error between\n", + "`y` and `y_hat`. MAE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the\n", + "deviation of the prediction and the true\n", + "value at a given time and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__init__\n", + "\n", + "> MAE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Error\n", + "\n", + "Calculates Mean Absolute Error between\n", + "`y` and `y_hat`. MAE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the\n", + "deviation of the prediction and the true\n", + "value at a given time and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAE, name='MAE.__init__', title_level=3)" ] @@ -254,7 +308,51 @@ "execution_count": null, "id": "0a20a273", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__call__\n", + "\n", + "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mae`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__call__\n", + "\n", + "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mae`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAE.__call__, name='MAE.__call__', title_level=3)" ] @@ -328,7 +426,61 @@ "execution_count": null, "id": "e8c65b82", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__init__\n", + "\n", + "> MSE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Squared Error\n", + "\n", + "Calculates Mean Squared Error between\n", + "`y` and `y_hat`. MSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the \n", + "squared deviation of the prediction and the true\n", + "value at a given time, and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__init__\n", + "\n", + "> MSE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Squared Error\n", + "\n", + "Calculates Mean Squared Error between\n", + "`y` and `y_hat`. MSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the \n", + "squared deviation of the prediction and the true\n", + "value at a given time, and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MSE, name='MSE.__init__', title_level=3)" ] @@ -338,7 +490,51 @@ "execution_count": null, "id": "b0126a7f", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__call__\n", + "\n", + "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mse`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__call__\n", + "\n", + "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mse`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MSE.__call__, name='MSE.__call__', title_level=3)" ] @@ -416,7 +612,67 @@ "execution_count": null, "id": "d961d383", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__init__\n", + "\n", + "> RMSE.__init__ (horizon_weight=None)\n", + "\n", + "Root Mean Squared Error\n", + "\n", + "Calculates Root Mean Squared Error between\n", + "`y` and `y_hat`. RMSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the squared deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "Finally the RMSE will be in the same scale\n", + "as the original time series so its comparison with other\n", + "series is possible only if they share a common scale. \n", + "RMSE has a direct connection to the L2 norm.\n", + "\n", + "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__init__\n", + "\n", + "> RMSE.__init__ (horizon_weight=None)\n", + "\n", + "Root Mean Squared Error\n", + "\n", + "Calculates Root Mean Squared Error between\n", + "`y` and `y_hat`. RMSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the squared deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "Finally the RMSE will be in the same scale\n", + "as the original time series so its comparison with other\n", + "series is possible only if they share a common scale. \n", + "RMSE has a direct connection to the L2 norm.\n", + "\n", + "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(RMSE, name='RMSE.__init__', title_level=3)" ] @@ -426,7 +682,51 @@ "execution_count": null, "id": "d398d3e3", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__call__\n", + "\n", + "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`rmse`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__call__\n", + "\n", + "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`rmse`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(RMSE.__call__, name='RMSE.__call__', title_level=3)" ] @@ -517,7 +817,69 @@ "execution_count": null, "id": "174e8042", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__init__\n", + "\n", + "> MAPE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Percentage Error\n", + "\n", + "Calculates Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. MAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the percentual deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "The closer to zero an observed value is, the higher penalty MAPE loss\n", + "assigns to the corresponding error.\n", + "\n", + "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__init__\n", + "\n", + "> MAPE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Percentage Error\n", + "\n", + "Calculates Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. MAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the percentual deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "The closer to zero an observed value is, the higher penalty MAPE loss\n", + "assigns to the corresponding error.\n", + "\n", + "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAPE, name='MAPE.__init__', title_level=3)" ] @@ -527,7 +889,51 @@ "execution_count": null, "id": "da63f136", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__call__\n", + "\n", + "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mape`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__call__\n", + "\n", + "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mape`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAPE.__call__, name='MAPE.__call__', title_level=3)" ] @@ -609,7 +1015,73 @@ "execution_count": null, "id": "dee99fb8", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__init__\n", + "\n", + "> SMAPE.__init__ (horizon_weight=None)\n", + "\n", + "Symmetric Mean Absolute Percentage Error\n", + "\n", + "Calculates Symmetric Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. SMAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the relative deviation\n", + "of the prediction and the observed value scaled by the sum of the\n", + "absolute values for the prediction and observed value at a\n", + "given time, then averages these devations over the length\n", + "of the series. This allows the SMAPE to have bounds between\n", + "0% and 200% which is desireble compared to normal MAPE that\n", + "may be undetermined when the target is zero.\n", + "\n", + "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__init__\n", + "\n", + "> SMAPE.__init__ (horizon_weight=None)\n", + "\n", + "Symmetric Mean Absolute Percentage Error\n", + "\n", + "Calculates Symmetric Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. SMAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the relative deviation\n", + "of the prediction and the observed value scaled by the sum of the\n", + "absolute values for the prediction and observed value at a\n", + "given time, then averages these devations over the length\n", + "of the series. This allows the SMAPE to have bounds between\n", + "0% and 200% which is desireble compared to normal MAPE that\n", + "may be undetermined when the target is zero.\n", + "\n", + "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(SMAPE, name='SMAPE.__init__', title_level=3)" ] @@ -619,7 +1091,51 @@ "execution_count": null, "id": "db62a845", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__call__\n", + "\n", + "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`smape`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__call__\n", + "\n", + "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`smape`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(SMAPE.__call__, name='SMAPE.__call__', title_level=3)" ] @@ -706,7 +1222,71 @@ "execution_count": null, "id": "b6a4cf21", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__init__\n", + "\n", + "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", + "\n", + "Mean Absolute Scaled Error \n", + "Calculates the Mean Absolute Scaled Error between\n", + "`y` and `y_hat`. MASE measures the relative prediction\n", + "accuracy of a forecasting method by comparinng the mean absolute errors\n", + "of the prediction and the observed value against the mean\n", + "absolute errors of the seasonal naive model.\n", + "The MASE partially composed the Overall Weighted Average (OWA), \n", + "used in the M4 Competition.\n", + "\n", + "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", + "\n", + "**Parameters:**
\n", + "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__init__\n", + "\n", + "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", + "\n", + "Mean Absolute Scaled Error \n", + "Calculates the Mean Absolute Scaled Error between\n", + "`y` and `y_hat`. MASE measures the relative prediction\n", + "accuracy of a forecasting method by comparinng the mean absolute errors\n", + "of the prediction and the observed value against the mean\n", + "absolute errors of the seasonal naive model.\n", + "The MASE partially composed the Overall Weighted Average (OWA), \n", + "used in the M4 Competition.\n", + "\n", + "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", + "\n", + "**Parameters:**
\n", + "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MASE, name='MASE.__init__', title_level=3)" ] @@ -716,7 +1296,53 @@ "execution_count": null, "id": "32a2c11b", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__call__\n", + "\n", + "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mase`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__call__\n", + "\n", + "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mase`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MASE.__call__, name='MASE.__call__', title_level=3)" ] @@ -803,7 +1429,69 @@ "execution_count": null, "id": "edeb6f9a", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__init__\n", + "\n", + "> relMSE.__init__ (y_train, horizon_weight=None)\n", + "\n", + "Relative Mean Squared Error\n", + "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", + "as an alternative to percentage errors, to avoid measure unstability.\n", + "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", + "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", + "\n", + "**Parameters:**
\n", + "`y_train`: numpy array, Training values.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "- [Hyndman, R. J and Koehler, A. B. (2006).\n", + " \"Another look at measures of forecast accuracy\",\n", + " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", + " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__init__\n", + "\n", + "> relMSE.__init__ (y_train, horizon_weight=None)\n", + "\n", + "Relative Mean Squared Error\n", + "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", + "as an alternative to percentage errors, to avoid measure unstability.\n", + "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", + "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", + "\n", + "**Parameters:**
\n", + "`y_train`: numpy array, Training values.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "- [Hyndman, R. J and Koehler, A. B. (2006).\n", + " \"Another look at measures of forecast accuracy\",\n", + " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", + " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(relMSE, name='relMSE.__init__', title_level=3)" ] @@ -813,7 +1501,53 @@ "execution_count": null, "id": "a317b5c5", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__call__\n", + "\n", + "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`relMSE`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__call__\n", + "\n", + "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`relMSE`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(relMSE.__call__, name='relMSE.__call__', title_level=3)" ] @@ -898,7 +1632,67 @@ "execution_count": null, "id": "70bd46d9", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__init__\n", + "\n", + "> QuantileLoss.__init__ (q, horizon_weight=None)\n", + "\n", + "Quantile Loss\n", + "\n", + "Computes the quantile loss between `y` and `y_hat`.\n", + "QL measures the deviation of a quantile forecast.\n", + "By weighting the absolute deviation in a non symmetric way, the\n", + "loss pays more attention to under or over estimation.\n", + "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", + "\n", + "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", + "\n", + "**Parameters:**
\n", + "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__init__\n", + "\n", + "> QuantileLoss.__init__ (q, horizon_weight=None)\n", + "\n", + "Quantile Loss\n", + "\n", + "Computes the quantile loss between `y` and `y_hat`.\n", + "QL measures the deviation of a quantile forecast.\n", + "By weighting the absolute deviation in a non symmetric way, the\n", + "loss pays more attention to under or over estimation.\n", + "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", + "\n", + "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", + "\n", + "**Parameters:**
\n", + "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(QuantileLoss, name='QuantileLoss.__init__', title_level=3)" ] @@ -908,7 +1702,51 @@ "execution_count": null, "id": "0b1588e9", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__call__\n", + "\n", + "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`quantile_loss`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__call__\n", + "\n", + "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`quantile_loss`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(QuantileLoss.__call__, name='QuantileLoss.__call__', title_level=3)" ] @@ -1080,7 +1918,87 @@ "execution_count": null, "id": "8f42ec82", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__init__\n", + "\n", + "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", + "\n", + "Multi-Quantile loss\n", + "\n", + "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", + "MQL calculates the average multi-quantile Loss for\n", + "a given set of quantiles, based on the absolute \n", + "difference between predicted quantiles and observed values.\n", + "\n", + "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", + "\n", + "The limit behavior of MQL allows to measure the accuracy \n", + "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", + "the continuous ranked probability score (CRPS). This can be achieved \n", + "through a numerical integration technique, that discretizes the quantiles \n", + "and treats the CRPS integral with a left Riemann approximation, averaging over \n", + "uniformly distanced quantiles. \n", + "\n", + "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", + "\n", + "**Parameters:**
\n", + "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", + "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", + "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__init__\n", + "\n", + "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", + "\n", + "Multi-Quantile loss\n", + "\n", + "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", + "MQL calculates the average multi-quantile Loss for\n", + "a given set of quantiles, based on the absolute \n", + "difference between predicted quantiles and observed values.\n", + "\n", + "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", + "\n", + "The limit behavior of MQL allows to measure the accuracy \n", + "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", + "the continuous ranked probability score (CRPS). This can be achieved \n", + "through a numerical integration technique, that discretizes the quantiles \n", + "and treats the CRPS integral with a left Riemann approximation, averaging over \n", + "uniformly distanced quantiles. \n", + "\n", + "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", + "\n", + "**Parameters:**
\n", + "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", + "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", + "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MQLoss, name='MQLoss.__init__', title_level=3)" ] @@ -1090,7 +2008,51 @@ "execution_count": null, "id": "bac2237a", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__call__\n", + "\n", + "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mqloss`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__call__\n", + "\n", + "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mqloss`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MQLoss.__call__, name='MQLoss.__call__', title_level=3)" ] @@ -1109,7 +2071,17 @@ "execution_count": null, "id": "da37f2ef", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check MQLoss' stored quantiles\n", @@ -1654,7 +2626,99 @@ "execution_count": null, "id": "a462101b", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__init__\n", + "\n", + "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> **distribution_kwargs)\n", + "\n", + "DistributionLoss\n", + "\n", + "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", + "interact with NeuralForecast models modularly. It shares the negative \n", + "log-likelihood as the optimization objective and a sample method to \n", + "generate empirically the quantiles defined by the `level` list.\n", + "\n", + "Additionally, it implements a distribution transformation that factorizes the\n", + "scale-dependent likelihood parameters into a base scale and a multiplier \n", + "efficiently learnable within the network's non-linearities operating ranges.\n", + "\n", + "Available distributions:
\n", + "- Poisson
\n", + "- Normal
\n", + "- StudentT
\n", + "- NegativeBinomial
\n", + "- Tweedie
\n", + "- Bernoulli (Temporal Classifiers)\n", + "\n", + "**Parameters:**
\n", + "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", + "\n", + "**References:**
\n", + "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", + "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", + " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__init__\n", + "\n", + "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> **distribution_kwargs)\n", + "\n", + "DistributionLoss\n", + "\n", + "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", + "interact with NeuralForecast models modularly. It shares the negative \n", + "log-likelihood as the optimization objective and a sample method to \n", + "generate empirically the quantiles defined by the `level` list.\n", + "\n", + "Additionally, it implements a distribution transformation that factorizes the\n", + "scale-dependent likelihood parameters into a base scale and a multiplier \n", + "efficiently learnable within the network's non-linearities operating ranges.\n", + "\n", + "Available distributions:
\n", + "- Poisson
\n", + "- Normal
\n", + "- StudentT
\n", + "- NegativeBinomial
\n", + "- Tweedie
\n", + "- Bernoulli (Temporal Classifiers)\n", + "\n", + "**Parameters:**
\n", + "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", + "\n", + "**References:**
\n", + "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", + "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", + " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DistributionLoss, name='DistributionLoss.__init__', title_level=3)" ] @@ -1664,7 +2728,65 @@ "execution_count": null, "id": "d8c367f8", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.sample\n", + "\n", + "> DistributionLoss.sample (distr_args:torch.Tensor,\n", + "> num_samples:Optional[int]=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.sample\n", + "\n", + "> DistributionLoss.sample (distr_args:torch.Tensor,\n", + "> num_samples:Optional[int]=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DistributionLoss.sample, name='DistributionLoss.sample', title_level=3)" ] @@ -1674,7 +2796,75 @@ "execution_count": null, "id": "04e32679", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__call__\n", + "\n", + "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Computes the negative log-likelihood objective function. \n", + "To estimate the following predictive distribution:\n", + "\n", + "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", + "\n", + "where $\\theta$ represents the distributions parameters. It aditionally \n", + "summarizes the objective signal using a weighted average using the `mask` tensor. \n", + "\n", + "**Parameters**
\n", + "`y`: tensor, Actual values.
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns**
\n", + "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__call__\n", + "\n", + "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Computes the negative log-likelihood objective function. \n", + "To estimate the following predictive distribution:\n", + "\n", + "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", + "\n", + "where $\\theta$ represents the distributions parameters. It aditionally \n", + "summarizes the objective signal using a weighted average using the `mask` tensor. \n", + "\n", + "**Parameters**
\n", + "`y`: tensor, Actual values.
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns**
\n", + "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DistributionLoss.__call__, name='DistributionLoss.__call__', title_level=3)" ] @@ -1684,7 +2874,17 @@ "execution_count": null, "id": "14a7e381", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check DistributionLoss' stored quantiles\n", @@ -1764,35 +2964,42 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " self.param_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", + " lambda_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", + " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", + " self.param_names = [i for j in zip(lambda_names, weight_names) for i in j]\n", " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = n_components\n", + " self.outputsize_multiplier = 2 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " return (output,)#, weights\n", - " \n", - " def scale_decouple(self, \n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None):\n", - " \"\"\" Scale Decouple\n", + " lambdas, weights = output.chunk(2, dim=-1)\n", + " return (lambdas, weights)\n", + "\n", + " def scale_decouple(\n", + " self,\n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " ):\n", + " \"\"\"Scale Decouple\n", "\n", " Stabilizes 
model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " lambdas = output[0]\n", + " lambdas, weights = output\n", + " weights = F.softmax(weights, dim=-1)\n", + "\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(lambdas.size(dim=0), 1, -1)\n", " scale = scale.view(lambdas.size(dim=0), 1, -1)\n", " lambdas = (lambdas * scale) + loc\n", " lambdas = F.softplus(lambdas)\n", - " return (lambdas,)\n", + " return (lambdas, weights)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -1814,15 +3021,10 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", "\n", - " lambdas = distr_args[0]\n", + " lambdas, weights = distr_args\n", " B, H, K = lambdas.size()\n", " Q = len(self.quantiles)\n", "\n", - " # Sample K ~ Mult(weights)\n", - " # shared across B, H\n", - " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", - " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", - "\n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " lambdas = lambdas.flatten() \n", @@ -1860,7 +3062,7 @@ " \n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", " if mask is None: \n", " mask = (y > 0) * 1\n", @@ -1868,11 +3070,9 @@ " mask = mask * ((y > 0) * 1)\n", "\n", " eps = 1e-10\n", - " lambdas = distr_args[0]\n", + " lambdas, weights = distr_args\n", " B, H, K = lambdas.size()\n", "\n", - " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", - "\n", " y = y[:,:,None]\n", " mask = mask[:,:,None]\n", "\n", @@ -1897,7 +3097,7 @@ " return loss\n", "\n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor],\n", + " distr_args: 
Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)\n" @@ -1908,7 +3108,83 @@ "execution_count": null, "id": "62d7daba", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__init__\n", + "\n", + "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> batch_correlation=False, horizon_correlation=False)\n", + "\n", + "Poisson Mixture Mesh\n", + "\n", + "This Poisson Mixture statistical model assumes independence across groups of \n", + "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", + "\n", + "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", + "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", + "\\prod_{\\beta\\in[g_{i}]} \n", + "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", + "\n", + "**Parameters:**
\n", + "`n_components`: int=10, the number of mixture components.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", + "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", + "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", + "\n", + "**References:**
\n", + "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", + "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__init__\n", + "\n", + "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> batch_correlation=False, horizon_correlation=False)\n", + "\n", + "Poisson Mixture Mesh\n", + "\n", + "This Poisson Mixture statistical model assumes independence across groups of \n", + "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", + "\n", + "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", + "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", + "\\prod_{\\beta\\in[g_{i}]} \n", + "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", + "\n", + "**Parameters:**
\n", + "`n_components`: int=10, the number of mixture components.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", + "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", + "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", + "\n", + "**References:**
\n", + "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", + "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(PMM, name='PMM.__init__', title_level=3)" ] @@ -1918,7 +3194,63 @@ "execution_count": null, "id": "fa8da65c", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.sample\n", + "\n", + "> PMM.sample (distr_args, num_samples=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.sample\n", + "\n", + "> PMM.sample (distr_args, num_samples=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(PMM.sample, name='PMM.sample', title_level=3)" ] @@ -1928,7 +3260,39 @@ "execution_count": null, "id": "ba75717c", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__call__\n", + "\n", + "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Call self as a function." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__call__\n", + "\n", + "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Call self as a function." 
+ ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(PMM.__call__, name='PMM.__call__', title_level=3)" ] @@ -1947,7 +3311,17 @@ "execution_count": null, "id": "e4a20e21", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -1971,11 +3345,43 @@ "execution_count": null, "id": "a56a2fbe", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "lambdas.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", + "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", + "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9FUlEQVR4nO3de1hU1foH8O8GhuGm4IDcFBHF+/2SijcgA8PUTI+WWoGHvOQtw7TQU4BxpEOllpblJTDL7KamZiqWoh21BOV4ySwLUE8QKQoICiOs3x/+2MdxuM0wwzDM9/M88zzM2muv9a4ZYN5Ze++1JSGEABEREVkUK1MHQERERA2PCQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICYCJ/fDDD3jsscfQpk0bKJVKeHh4ICAgAAsXLtSoFxQUhKCgIKPHI0kSYmNjDdZe27ZtMXr0aIO1V5NDhw5BkiQcOnSoQfrTVVBQECRJgiRJsLKyQrNmzeDv74+JEyfiiy++QEVFhdY+bdu2RUREhE79HD16FLGxsbhx44ZO+93fV+Xr+cUXX+jUTk1KSkoQGxtb5XuUnJwMSZKQlZVlsP6IqHo2pg7Akn399dcYO3YsgoKCkJiYCC8vL+Tk5CAtLQ1bt27Fm2++Kdd99913TRipeejbty+OHTuGrl27mjqUarVr1w4ff/wxAKC4uBiZmZnYsWMHJk6ciGHDhmHXrl1wdnaW62/fvh3NmzfXqY+jR48iLi4OERERcHFxqfN++vSlq5KSEsTFxQGAVkL7yCOP4NixY/Dy8jJqDER0FxMAE0pMTISfnx/27dsHG5v/vRVPPPEEEhMTNeo25g81U1Or1ZAkCc2bN8egQYNMHU6N7O3ttWJ85plnkJSUhL///e+YMWMGPv30U3lbnz59jB7TrVu3YG9v3yB91aRly5Zo2bKlSWMgsiQ8BGBC165dg5ubm8aHfyUrK8235v5DAFlZWZAkCW+88QZWrFgBPz8/ODk5ISAgAMePH9dqb/369ejYsSOUSiW6du2KLVu2ICIiAm3btq01ztzcXMycOROtW7eGra0t/Pz8EBcXhzt37tR5rHv37kXfvn1hb2+Pzp0744MPPtCqc/bsWTz66KNo0aIF7Ozs0Lt3b2zatEmjTuW09ObNm7Fw4UK0atUKSqUSFy9e1DoEUPkaVfe41wcffIBevXrBzs4OKpUKjz32GM6fP69RJyIiAk5OTrh48SJGjRoFJycn+Pj4YOHChSgtLa3za1GVadOmYdSoUfj888+RnZ0tl98/LV9RUYH4+Hh06tQJ9vb2cHFxQc+ePfHWW28BAGJjY7Fo0SIAgJ+fnzzWytek8pDMtm3b0KdPH9jZ2cnfyKs73HD79m1ERUXB09MT9vb2CAwMxKlTpzTqVHeI6t7fsaysLPkDPi4uTo6tss/qDgEY+r1Zu3YtevXqBScnJzRr1gydO3fGkiVLtGInauo4A2BCAQEB2LBhA+bPn4+pU6eib9++UCgUOrXxzjvvoHPnzli1ahUA4OWXX8aoUaOQmZkpTyWvW7cOM2fOxIQJE7By5UoUFBQgLi6uTh9aubm5GDBgAKysrPDKK6+gffv2OHbsGOLj45GVlYWkpKRa2/jPf/6DhQsX4qWXXoKHhwc2bNiAyMhI+Pv7Y/jw4QCACxcuYPDgwXB3d8fbb78NV1dXfPTRR4iIiMCff/6JxYsXa7QZHR2NgIAAvPfee7CysoK7uztyc3M16nh5eeHYsWMaZX/99ReefPJJtGrVSi5LSEjAkiVLMHnyZCQkJODatWuIjY1FQEAATpw4gQ4dOsh11Wo1xo4di8jISCxcuBCHDx/Gq6++CmdnZ7zyyiu1vhY1GTt2LPbs2YMjR47A19e3yjqJiYmIjY3FP/7xDwwfPhxqtRo///yzfLz/mWeeQX5+PlavXo1t27bJ0+n3ziCdPHkS58+fxz/+8Q/
4+fnB0dGxxriWLFmCvn37YsOGDSgoKEBsbCyCgoJw6tQptGvXrs7j8/Lywt69e/Hwww8jMjISzzzzDADU+K3f0O/N1q1bMXv2bMybNw9vvPEGrKyscPHiRfz00091HgdRkyHIZK5evSqGDh0qAAgAQqFQiMGDB4uEhARRVFSkUTcwMFAEBgbKzzMzMwUA0aNHD3Hnzh25/McffxQAxCeffCKEEKK8vFx4enqKgQMHarSXnZ0tFAqF8PX11SgHIGJiYuTnM2fOFE5OTiI7O1uj3htvvCEAiHPnztU4Rl9fX2FnZ6ex/61bt4RKpRIzZ86Uy5544gmhVCrFpUuXNPYPCwsTDg4O4saNG0IIIQ4ePCgAiOHDh2v1Vbnt4MGDVcZSXFwsBgwYILy8vERWVpYQQojr168Le3t7MWrUKI26ly5dEkqlUkyZMkUuCw8PFwDEZ599plF31KhRolOnTjW+DkLcfQ+7detW7fZvvvlGABD/+te/5DJfX18RHh4uPx89erTo3bt3jf28/vrrAoDIzMzU2ubr6yusra3FhQsXqtx2b1+Vr2ffvn1FRUWFXJ6VlSUUCoV45plnNMZ27+9npfDwcI3fsb/++kvrd6xSUlKSRtzGeG/mzp0rXFxctPomskQ8BGBCrq6uOHLkCE6cOIHXXnsNjz76KH755RdER0ejR48euHr1aq1tPPLII7C2tpaf9+zZEwDkaeQLFy4gNzcXkyZN0tivTZs2GDJkSK3t7969G8HBwfD29sadO3fkR1hYGAAgNTW11jZ69+6NNm3ayM/t7OzQsWNHjanu7777DiNGjICPj4/GvhERESgpKdH6Jj9hwoRa+71XeXk5Hn/8cZw/fx579uyRv2EfO3YMt27d0pr69vHxwYMPPohvv/1Wo1ySJIwZM0ajrGfPnhpj0ZcQotY6AwYMwH/+8x/Mnj0b+/btQ2Fhoc799OzZEx07dqxz/SlTpmgcMvH19cXgwYNx8OBBnfvWhTHemwEDBuDGjRuYPHkyvvrqqzr9jRE1VUwAGoH+/fvjxRdfxOeff44//vgDzz//PLKysrROBKyKq6urxnOlUgng7oldwN3zDADAw8NDa9+qyu73559/YteuXVAoFBqPbt26AUCd/oHeH2NlnJUxVsZZ1dnf3t7eGuOopOuZ4rNmzcLevXvxxRdfoHfv3hr9Vteet7e3Vr8ODg6ws7PTGsvt27d1iqcqlR9UlWOuSnR0NN544w0cP34cYWFhcHV1xYgRI5CWllbnfnR97Tw9Passu/+1MTRjvDdPPfUUPvjgA2RnZ2PChAlwd3fHwIEDkZKSYoQREDVuTAAaGYVCgZiYGAB3T4qrr8oP3z///FNr2/3HzKvi5uaG0NBQnDhxospHZGRkvWOsjDMnJ0er/I8//pDjuNf9J/HVJDY2Fhs2bMD69esRGhqq1S+Aavu+v19j2rlzJyRJks+LqIqNjQ2ioqJw8uRJ5Ofn45NPPsHly5cxcuRIlJSU1KkfXV47oOrfk9zcXI3Ezs7OrspzSurzDdtY7820adNw9OhRFBQU4Ouvv4YQAqNHjzbILA6ROWECYEJV/WMDIJ/hXNM3wbrq1KkTPD098dlnn2mUX7p0CUePHq11/9GjR+Ps2bNo3749+vfvr/UwRIwAMGLECHz33XfyB36lDz/8EA4ODnpf3rdx40bExcVh2bJlVZ7hHhAQAHt7e3z00Uca5VeuXJEPSzSEpKQkfPPNN5g8ebLG4ZKauLi44G9/+xvmzJmD/Px8+ez5+2eB6uuTTz7RODyRnZ2No0ePapz137ZtW/zyyy8aScC1a9e0fsd0ic3Y742joyPCwsKwdOlSlJWV4dy5c/Vqj8jc8CoAExo5ciRat26NMWPGoHPnzqioqEBGRgbefPNNODk54bnnnqt3H1ZWVoiLi8PMmTPxt7/9DX//+99x48YNxMXFwcvLS+tyw/stW7YMKSkpGDx4MObPn49OnTrh9u3byMrKwp49e/D
ee++hdevW9Y4zJiZGPt/glVdegUqlwscff4yvv/4aiYmJGovj1NWxY8cwa9YsDBkyBCEhIVqXRw4aNAguLi54+eWXsWTJEjz99NOYPHkyrl27hri4ONjZ2cmzMYZy69YtOY5bt27h999/x44dO7B7924EBgbivffeq3H/MWPGoHv37ujfvz9atmyJ7OxsrFq1Cr6+vvIZ8T169AAAvPXWWwgPD4dCoUCnTp3QrFkzvWLOy8vDY489hunTp6OgoAAxMTGws7NDdHS0XOepp57C+++/jyeffBLTp0/HtWvXkJiYqLWwULNmzeDr64uvvvoKI0aMgEqlgpubW5WXoxrjvZk+fTrs7e0xZMgQeHl5ITc3FwkJCXB2dsYDDzygc3tEZs3EJyFatE8//VRMmTJFdOjQQTg5OQmFQiHatGkjnnrqKfHTTz9p1K3uKoDXX39dq11UcZb1unXrhL+/v7C1tRUdO3YUH3zwgXj00UdFnz59at33r7/+EvPnzxd+fn5CoVAIlUol+vXrJ5YuXSpu3rxZ4xh9fX3FI488olVe1VnjZ86cEWPGjBHOzs7C1tZW9OrVSyQlJWnUqTwz/fPPP9dq8/6rACrPKq/uca8NGzaInj17CltbW+Hs7CweffRRrSscwsPDhaOjo1a/MTExWu1VJTAwUKN/R0dH0a5dO/G3v/1NfP7556K8vFxrn/vPzH/zzTfF4MGDhZubm7C1tRVt2rQRkZGR8lUNlaKjo4W3t7ewsrLSeE2qez+q6qvy9dy8ebOYP3++aNmypVAqlWLYsGEiLS1Na/9NmzaJLl26CDs7O9G1a1fx6aefal0FIIQQBw4cEH369BFKpVIAkPu8/yqASoZ8bzZt2iSCg4OFh4eHsLW1Fd7e3mLSpEni9OnTVb4mRE2ZJEQdTj2mJufGjRvo2LEjxo0bh3Xr1pk6HCIiamA8BGABcnNz8c9//hPBwcFwdXVFdnY2Vq5ciaKiIoMcZiAiIvPDBMACKJVKZGVlYfbs2cjPz5dPqnvvvffky/mIiMiy8BAAERGRBeJlgERERBaICQAREZEF4jkAuHuL1T/++APNmjXTeZU0IiJ9CSFQVFQEb2/vWtfkIDI0JgC4u6zo/TehISJqKJcvXzbIglpEumACAMgrpF2+fFlr5bLqqNVq7N+/H6GhoVAoFMYMz2Sa+hib+vgAjrGxKywshI+Pj96rNBLVBxMA/O/mKM2bN9cpAXBwcEDz5s3N7p9OXTX1MTb18QEco7ngoUcyBR50IiIiskBMAIiIiCwQEwAiIiILZNJzABISErBt2zb8/PPPsLe3x+DBg/Gvf/0LnTp1kusIIRAXF4d169bh+vXrGDhwIN555x2NJWxLS0vxwgsv4JNPPsGtW7cwYsQIvPvuuzyrlojMXkVFBcrKykwdBpkBhUIBa2vrOtc3aQKQmpqKOXPm4IEHHsCdO3ewdOlShIaG4qeffoKjoyMAIDExEStWrEBycjI6duyI+Ph4hISE4MKFC/KZswsWLMCuXbuwdetWuLq6YuHChRg9ejTS09N1ejGIiBqTsrIyZGZmoqKiwtShkJlwcXGBp6dnnU4sNWkCsHfvXo3nSUlJcHd3R3p6OoYPHw4hBFatWoWlS5di/PjxAIBNmzbBw8MDW7ZswcyZM1FQUICNGzdi8+bNeOihhwAAH330EXx8fHDgwAGMHDmywcdFRFRfQgjk5OTA2toaPj4+XCiIaiSEQElJCfLy8gAAXl5ete7TqC4DLCgoAACoVCoAQGZmJnJzcxEaGirXUSqVCAwMxNGjRzFz5kykp6dDrVZr1PH29kb37t1x9OjRKhOA0tJSlJaWys8LCwsB3L2cSK1W1ynWynp1rW+OmvoYm/r4AI6xsasp5jt37qCkpATe3t5wcHBowKjIXNnb2wMA8vLy4O7uXusMeKNJAIQQiIqKwtChQ9G9e3cAd+9jDwAeHh4adT08PJCdnS3XsbW1RYsWLbTqVO5/v4SEBMT
FxWmV79+/X+c/tJSUFJ3qm6OmPsamPj6AY2ysSkpKqt1WXl4OALC1tW2ocKgJqPwMU6vV5pMAzJ07F6dPn8b333+vte3+YxlCiFqPb9RUJzo6GlFRUfLzytW4QkNDdVoIKCUlBSEhIWaz+Mg7By/qVF8S5Wh7+zdk2bXH7Ac71b6DmTHH91BXHGPjVjn7WBMuEkS60OX3pVEkAPPmzcPOnTtx+PBhjTP3PT09Adz9ln/v8Yy8vDx5VsDT0xNlZWW4fv26xixAXl4eBg8eXGV/SqUSSqVSq1yhUOj8D0SffUxFSPqdECkka7MZoz7M6T3UF8fYOJlbvNS0mPSsEiEE5s6di23btuG7776Dn5+fxnY/Pz94enpqTO2VlZUhNTVV/nDv168fFAqFRp2cnBycPXu22gSAiIjI0pl0BmDOnDnYsmULvvrqKzRr1kw+Zu/s7Ax7e3tIkoQFCxZg+fLl6NChAzp06IDly5fDwcEBU6ZMketGRkZi4cKFcHV1hUqlwgsvvIAePXrIVwUQERGRJpMmAGvXrgUABAUFaZQnJSUhIiICALB48WLcunULs2fPlhcC2r9/v8bds1auXAkbGxtMmjRJXggoOTmZawAQUZOzMuWXBu3v+ZCODdpfXQQFBaF3795YtWqVqUMxayY/BFDVo/LDH7h7QkNsbCxycnJw+/ZtpKamylcJVLKzs8Pq1atx7do1lJSUYNeuXfDx8Wng0RAREQBERERg3LhxWuWHDh2CJEm4ceNGvdrftm0bXn311Xq1YQ4OHz6MMWPGwNvbG5IkYceOHQZtnytLEBGRWahcElmlUmnMApuboKAgJCcn11qvuLgYvXr1wpo1a4wSBxMAIiIyidLSUsyfPx/u7u6ws7PD0KFDceLECXl7UFAQ5s6di6ioKLi5uSEkJEQuX7BgAQAgKysLkiRpPSoPLdfWR2V78+fPx+LFi6FSqeDp6YnY2NgaYx87dmyV/UqShJ07dxrk9QkLC0N8fLy8Eq6hMQEgIiKTWLx4Mb788kts2rQJJ0+ehL+/P0aOHIn8/Hy5zqZNm2BjY4N///vfeP/997Xa8PHxQU5Ojvw4deoUXF1dMXz48Dr3UdmPo6MjfvjhByQmJmLZsmU1Li6VlJSEnJwc/PrrrwCAPXv2yDGMGjXKEC+P0TWKdQCIiKhp2b17N5ycnDTKKlc3BO5Ob69duxbJyckICwsDAKxfvx4pKSnYuHEjFi1aBADw9/dHYmJitf1YW1vLa8bcvn0b48aNQ0BAAGJjY+vcBwD07NkTMTExAIAOHTpgzZo1+Pbbb+VZh/u5uroCAI4dOwZJkjB06FCzOyzBGQAiIjK44OBgZGRkaDw2bNggb//tt9+gVqsxZMgQuUyhUGDAgAE4f/68XNa/f/869xkZGYmioiJs2bIFVlZWde4DuJsA3MvLy0u+sU5NTp8+jbZt29b44b98+XI4OTnJjyNHjmDWrFlaZQ2NMwBERGRwjo6O8Pf31yi7cuWK/LMQAkDtS71X3hq+NvHx8di7dy9+/PFH+cO4rn0A2qsySpJUp9swnz59Wit5uN+sWbMwadIk+fnUqVMxYcIEjWP7rVq1qrUvQ+MMABERNTh/f3/Y2tpq3P9FrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp5rvlaJSqeDv7y8/7O3t4e7urlXW0DgDQEREDc7R0RHPPvssFi1aBJVKhTZt2iAxMRElJSWIjIyscztnz57F008/jRdffBHdunWTV5S1tbWFSqUySB81qaioQHZ2Nq5cuYJWrVoZ9OZNN2/exMWL/7uJW2ZmJjIyMuSx1BcTACIiM9IYV+bT12uvvYaKigo89dRTKCoqQv/+/bFv3z6t27vXJC0tDSUlJYiPj0d8fLxcHhgYiEOHDhmkj5rMnz8fM2bMQOfOnVFYWGjQBCAtLQ3BwcHy88q72IaHh9dpHYHaSKLyIIkFKywshLOzMwoKCnS6HfC
ePXswatQos7mjl65LiEqiHH63fkGmfUcsCDXMdFljYo7voa44xsatpv89t2/fRmZmJvz8/GBnZ2eiCMnc6PJ7w3MAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQFwJ0ER0XZSnUlNaBYyIiEyHMwBEREQWiDMARETm5GBCw/YXHN2w/dVBUFAQevfujVWrVpk6FLPGGQAiIjKoiIgIjBs3Tqv80KFDkCQJN27cqFf727Ztw6uvvlqvNsxBQkICHnjgATRr1gzu7u4YN24cLly4YLD2mQAQEZFZKCsrAwCoVCo0a9bMxNHoLygoqE5380tNTcWcOXNw/PhxpKSk4M6dOwgNDUVxcbFB4mACQEREJlFaWor58+fD3d0ddnZ2GDp0KE6cOCFvDwoKwty5cxEVFQU3NzeEhITI5QsWLAAAZGVlQZIkrUdQUFCd+qhsb/78+Vi8eDFUKhU8PT0RGxtbY+xjx46tsl9JkrBz506DvD579+5FREQEunXrhl69eiEpKQmXLl1Cenq6QdpnAkBERCaxePFifPnll9i0aRNOnjwJf39/jBw5Evn5+XKdTZs2wcbGBv/+97/x/vvva7Xh4+ODnJwc+XHq1Cm4urpi+PDhde6jsh9HR0f88MMPSExMxLJly5CSklJt7ElJScjJycGvv/4KANizZ48cw6hRowzx8mgpKCgAcHcGxBB4EiARERnc7t274eTkpFFWXl4u/1xcXIy1a9ciOTkZYWFhAID169cjJSUFGzduxKJFiwAA/v7+SExMrLYfa2treHp6AgBu376NcePGISAgALGxsXXuAwB69uyJmJgYAECHDh2wZs0afPvtt/Ksw/1cXV0BAMeOHYMkSRg6dKhRD0sIIRAVFYWhQ4eie/fuBmmTMwBERGRwwcHByMjI0Hhs2LBB3v7bb79BrVZjyJAhcplCocCAAQNw/vx5uax///517jMyMhJFRUXYsmULrKys6twHcDcBuJeXlxfy8vJq7fP06dNo27ZtjR/+y5cvh5OTk/w4cuQIZs2apVVWk7lz5+L06dP45JNPao2prjgDQEREBufo6Ah/f3+NsitXrsg/CyEAAJIkadQRQmiUOTo61qm/+Ph47N27Fz/++KP8YVzXPoC7icG9JElCRUVFrf2ePn1aK3m436xZszBp0iT5+dSpUzFhwgSMHz9eLmvVqlW1+8+bNw87d+7E4cOH0bp161pjqivOABARUYPz9/eHra0tvv/+e7lMrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp0411lGpVPD395cf9vb2cHd31yq7nxACc+fOxbZt2/Ddd9/Bz8/PIDFX4gwAERE1OEdHRzz77LNYtGgRVCoV2rRpg8TERJSUlCAyMrLO7Zw9exZPP/00XnzxRXTr1g25ubkAAFtbW6hUKoP0UZOKigpkZ2fjypUraNWqldbMQn3MmTMHW7ZswVdffYVmzZrJY3N2dq4yYdAVEwAiInPSCFfm09drr72GiooKPPXUUygqKkL//v2xb98+tGjRos5tpKWloaSkBPHx8YiPj5fLAwMDcejQIYP0UZP58+djxowZ6Ny5MwoLCw2aAKxduxYA5EsaKyUlJSEiIqLe7TMBICIig6pukZugoCD5uDwA2NnZ4e2338bbb79dZf1Dhw7VWh4REVHjh2FtfVTXz44dO6qtf6+wsDBcvny5TnVr6q8q975WxsBzAIiIiCwQEwAiIiILxEMAZHQrU37Ra7/nQzoaOBIiIqrEGQAiIiILxBkAavyMdf9zYQWgs3HaJjIQY58IRk1LXRYvqsQEgIioEVIoFJAkCX/99Rdatmxp0MvLqOkRQqCsrAx//fUXrKysYGtrW+s+TACIiBoha2trtG7dGleuXEFWVpapwyEz4eDggDZt2sDKqvYj/EwAiIgaKScnJ3To0AFqtdrUoZAZsLa2ho2NTZ1
ni5gAEBE1YtbW1rC2tjZ1GNQE8SoAIiIiC8QEgIiIyAKZNAE4fPgwxowZA29vb0iSpLX2ckREBCRJ0ngMGjRIo05paSnmzZsHNzc3ODo6YuzYsRr3nCYiIiJtJk0AiouL0atXL6xZs6baOg8//DBycnLkx549ezS2L1iwANu3b8fWrVvx/fff4+bNmxg9ejTKy8uNHT4REZHZMulJgGFhYQgLC6uxjlKphKenZ5XbCgoKsHHjRmzevBkPPfQQAOCjjz6Cj48PDhw4gJEjR1a5X2lpKUpLS+XnhYWFAAC1Wl3ns20r6+l7dq4k9EtQ6nM2sK59VtaXRHmD9ltJ7lMYJ09V/3+7TfkM6/r+npoDcx6jOcZMTYckGskyU5IkYfv27Rg3bpxcFhERgR07dsDW1hYuLi4IDAzEP//5T7i7uwMAvvvuO4wYMQL5+fka93bu1asXxo0bh7i4uCr7io2NrXLbli1b4ODgYNiBERFVo6SkBFOmTEFBQQGaN29u6nDIwjTqywDDwsIwceJE+Pr6IjMzEy+//DIefPBBpKenQ6lUIjc3F7a2thof/gDg4eGB3NzcatuNjo5GVFSU/LywsBA+Pj4IDQ2t8x+hWq1GSkoKQkJCoFAodB7bOwcv6rwPAMwJ9tdthyMr5B9/zMrXadcKyRr5qoFQ5f+AQb7O1VccFlX9NhhgrPeMwZDUwgopNzvq/R6ag/r+npoDcx5j5ewjkSk06gTg8ccfl3/u3r07+vfvD19fX3z99dcYP358tfsJIWpcCEGpVEKpVGqVKxQKnf+B6LMPAAhJv+t6de5L+t+60FZ6TsVbiXIopBrWl64lpnqPtaa+DUDf99CccIyNk7nFS02LWV0G6OXlBV9fX/z6668AAE9PT5SVleH69esa9fLy8uDh4WGKEImIiMyCWSUA165dw+XLl+Hl5QUA6NevHxQKBVJSUuQ6OTk5OHv2LAYPHmyqMImIiBo9kx4CuHnzJi5e/N/x4czMTGRkZEClUkGlUiE2NhYTJkyAl5cXsrKysGTJEri5ueGxxx4DADg7OyMyMhILFy6Eq6srVCoVXnjhBfTo0UO+KoCoVkdWGP0wA4Kjjds+EZGOTJoApKWlITg4WH5eeWJeeHg41q5dizNnzuDDDz/EjRs34OXlheDgYHz66ado1qyZvM/KlSthY2ODSZMm4datWxgxYgSSk5O5djYREVENTJoABAUFoaarEPft21drG3Z2dli9ejVWr15tyNCIiIiaNLM6B4CIiIgMgwkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIEa9c2AyEwcTKhx86BL1/Rs11W//YiIqFZ6JQDJycmYNGkSHBwcDB0PUdNUS5JUb1xqmIh0pNchgOjoaHh6eiIyMhJHjx41dExERERkZHrNAFy5cgVff/01kpOTERwcDD8/P0ybNg3h4eHw9PQ0dIx0j5Upv+hUX+/pdyIiatL0mgGwtrbG2LFjsW3bNly+fBkzZszAxx9/jDZt2mDs2LH46quvUFFh5LurERERkd7qfRWAu7s7hgwZgoCAAFhZWeHMmTOIiIhA+/btcejQIQOESERERIamdwLw559/4o033kC3bt0QFBSEwsJC7N69G5mZmfjjjz8wfvx4hIeHGzJWIiIiMhC9zgEYM2YM9u3bh44dO2L69Ol4+umnoVKp5O329vZYuHAhVq5cabBAiYiIyHD0SgDc3d2RmpqKgICAaut4eXkhMzNT78CIiIjIePQ6BBAYGIi+fftqlZeVleHDDz8EAEiSBF9f3/pFR0REREahVwIwbdo0FBQUaJUXFRVh2rRp9Q6KiIiIjEuvBEAIAUmStMqvXLkCZ2fnegdFRERExqXTOQB9+vSBJEmQJAkjRoyAjc3/di8vL0dmZiYefvhhgwdJREREhqVTAjBu3DgAQEZGBkaOHAknJyd5m62tLdq2bYsJEyYYNEAiIiIyPJ0SgJiYGABA27Z
t8fjjj8POzs4oQREREZFx6XUZIBf4ISIiMm91TgBUKhV++eUXuLm5oUWLFlWeBFgpPz/fIMERERGRcdQ5AVi5ciWaNWsm/1xTAkBERESNW50TgHun/SMiIowRCxERETWQOicAhYWFdW60efPmegVDREREDaPOCYCLi0ut0/6VCwSVl5fXOzAiIiIynjonAAcPHjRmHERERNSA6pwABAYGGjMOIqqPgwlVlwsrAJ2BIysAqaJ+fQRH129/ImpU6pwAnD59Gt27d4eVlRVOnz5dY92ePXvWOzAiIiIynjonAL1790Zubi7c3d3Ru3dvSJIEIYRWPZ4DQERE1PjVOQHIzMxEy5Yt5Z/Jshz7/ZqpQyAiIgOqcwLg6+tb5c9ERERkfvS6FwAAXLhwAatXr8b58+chSRI6d+6MefPmoVOnToaMj4iIiIzASp+dvvjiC3Tv3h3p6eno1asXevbsiZMnT6J79+74/PPPDR0jERERGZheMwCLFy9GdHQ0li1bplEeExODF198ERMnTjRIcERERGQces0A5Obm4umnn9Yqf/LJJ5Gbm1vvoIiIiMi49EoAgoKCcOTIEa3y77//HsOGDat3UERERGRcdT4EsHPnTvnnsWPH4sUXX0R6ejoGDRoEADh+/Dg+//xzxMXFGT5KIiIiMqg6JwDjxo3TKnv33Xfx7rvvapTNmTMHs2bNqndgREREZDx1TgAqKuq5jjgRERE1GnqdA2Aohw8fxpgxY+Dt7Q1JkrBjxw6N7UIIxMbGwtvbG/b29ggKCsK5c+c06pSWlmLevHlwc3ODo6Mjxo4diytXrjTgKIiIiMyP3gsBFRcXIzU1FZcuXUJZWZnGtvnz59e5jV69emHatGmYMGGC1vbExESsWLECycnJ6NixI+Lj4xESEoILFy6gWbNmAIAFCxZg165d2Lp1K1xdXbFw4UKMHj0a6enpsLa21nd4RERETZpeCcCpU6cwatQolJSUoLi4GCqVClevXoWDgwPc3d3rnACEhYUhLCysym1CCKxatQpLly7F+PHjAQCbNm2Ch4cHtmzZgpkzZ6KgoAAbN27E5s2b8dBDDwEAPvroI/j4+ODAgQMYOXKkPsMjIiJq8vRKAJ5//nmMGTMGa9euhYuLC44fPw6FQoEnn3wSzz33nEECy8zMRG5uLkJDQ+UypVKJwMBAHD16FDNnzkR6ejrUarVGHW9vb3Tv3h1Hjx6tNgEoLS1FaWmp/LywsBAAoFaroVar6xRfZb261r+fJBrmjokVkv6zIJX71qeN+lAL4x6hqmzf2P2YkkHHqOfvurHV92/RlMwxZmo69EoAMjIy8P7778Pa2hrW1tYoLS1Fu3btkJiYiPDwcPkbe31ULijk4eGhUe7h4YHs7Gy5jq2tLVq0aKFVp6YFiRISEqq8XHH//v1wcHDQKc6UlBSd6lfy02sv3V11HVzvNvJVAw0Qie72FDVMPyk3OzZMRyZkkDHu2VP/NoxI379FUyopKTF1CGTB9EoAFAoFJEkCcPfD9tKlS+jSpQucnZ1x6dIlgwZY2U8lIYRW2f1qqxMdHY2oqCj5eWFhIXx8fBAaGormzZvXKS61Wo2UlBSEhIRAoVDUaZ97vXPwos776OOBK8l671shWSNfNRCq/B9g1UAzFoYyoK2q1jpqYYWUmx0R4vQLFFLTvMrFoGMcFlV7HROo79+iKVXOPhKZgl4JQJ8+fZCWloaOHTsiODgYr7zyCq5evYrNmzejR48eBgnM09MTwN1v+V5eXnJ5Xl6ePCvg6emJsrIyXL9+XWMWIC8vD4MHV//NV6lUQqlUapUrFAqd/4Hosw8AiAaaVjfEB7eVKDe7BECXDzuFVNFkE4BKBhljI/9w1fdv0ZTMLV5qWvQ6MLh8+XL5Q/nVV1+Fq6srnn32WeTl5WHdunUGCczPzw+enp4a03plZWVITU2VP9z79esHhUKhUScnJwdnz56tMQEgIiKydHrNAPTv31/+uWXLltij57H
Bmzdv4uLF/02FZ2ZmIiMjAyqVCm3atMGCBQuwfPlydOjQAR06dMDy5cvh4OCAKVOmAACcnZ0RGRmJhQsXwtXVFSqVCi+88AJ69OghXxVARERE2vReBwC4O9V+4cIFSJKETp06oWXLljrtn5aWhuDgYPl55XH58PBwJCcnY/Hixbh16xZmz56N69evY+DAgdi/f7+8BgAArFy5EjY2Npg0aRJu3bqFESNGIDk5mWsAEBER1UCvBKCwsBBz5szB1q1bUV5+99iwtbU1Hn/8cbzzzjtwdnauUztBQUEQQlS7XZIkxMbGIjY2tto6dnZ2WL16NVavXq3TGIiIiCyZXucAPPPMM/jhhx+we/du3LhxAwUFBdi9ezfS0tIwffp0Q8dIREREBqbXDMDXX3+Nffv2YejQoXLZyJEjsX79ejz88MMGC46IiIiMQ68ZAFdX1yqn+Z2dnbUW5SEiIqLGR68E4B//+AeioqKQk5Mjl+Xm5mLRokV4+eWXDRYcERERGUedDwH06dNHY3W9X3/9Fb6+vmjTpg0A4NKlS1Aqlfjrr78wc+ZMw0dKREREBlPnBGDcuHFGDIOIiIgaUp0TgJiYGGPGQURERA2oXgsBpaen4/z585AkCV27dkWfPn0MFRcREREZkV4JQF5eHp544gkcOnQILi4uEEKgoKAAwcHB2Lp1q84rAhIREVHD0usqgHnz5qGwsBDnzp1Dfn4+rl+/jrNnz6KwsBDz5883dIxERERkYHrNAOzduxcHDhxAly5d5LKuXbvinXfeQWhoqMGCIyIiIuPQawagoqKiyvtYKxQKVFQ07fuqExERNQV6JQAPPvggnnvuOfzxxx9y2X//+188//zzGDFihMGCIyIiIuPQKwFYs2YNioqK0LZtW7Rv3x7+/v7w8/NDUVER78pHRERkBvQ6B8DHxwcnT55ESkoKfv75Zwgh0LVrVzz00EOGjo+IiIiMQOcE4M6dO7Czs0NGRgZCQkIQEhJijLiIiIjIiHQ+BGBjYwNfX1+Ul5cbIx4iIiJqAHrfDTA6Ohr5+fmGjoeIiIgagF7nALz99tu4ePEivL294evrC0dHR43tJ0+eNEhwREREZBx6JQDjxo2DJEkQQhg6HiIiImoAOiUAJSUlWLRoEXbs2AG1Wo0RI0Zg9erVcHNzM1Z8REREZAQ6nQMQExOD5ORkPPLII5g8eTIOHDiAZ5991lixERERkZHoNAOwbds2bNy4EU888QQAYOrUqRgyZAjKy8thbW1tlACJiIjI8HSaAbh8+TKGDRsmPx8wYABsbGw0lgQmIiKixk+nBKC8vBy2trYaZTY2Nrhz545BgyIiIiLj0ukQgBACERERUCqVctnt27cxa9YsjUsBt23bZrgIm7hBl9aZOgQiIrJAOiUA4eHhWmVPPvmkwYIhIiKihqFTApCUlGSsOIiIiKgB6bUUMBEREZk3JgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIF0uhkQEVmwgwnGbT842rjtE5EGzgAQERFZICYAREREFqhRJwCxsbGQJEnj4enpKW8XQiA2Nhbe3t6wt7dHUFAQzp07Z8KIiYiIzEOjTgAAoFu3bsjJyZEfZ86ckbclJiZixYoVWLNmDU6cOAFPT0+EhISgqKjIhBETERE1fo3+JEAbGxuNb/2VhBBYtWoVli5divHjxwMANm3aBA8PD2zZsgUzZ86sts3S0lKUlpbKzwsLCwEAarUaarW6TnFV1qtr/ftJohwAUCFZ67V/Q6iMrTHHWB21qD23raxTl7rmyqzGqOffUn3/Fk3JHGOmpkMSQghTB1Gd2NhYvP7663B2doZSqcTAgQOxfPlytGvXDr///jvat2+PkydPok+fPvI+jz76KFxcXLBp06Ya242Li9Mq37JlCxwcHIwyFiKi+5WUlGDKlCkoKChA8+bNTR0
OWZhGnQB88803KCkpQceOHfHnn38iPj4eP//8M86dO4cLFy5gyJAh+O9//wtvb295nxkzZiA7Oxv79u2rtt2qZgB8fHxw9erVOv8RqtVqpKSkICQkBAqFQuexvXPwIgDggSvJOu/bUCoka+SrBkKV/wOs/n/GwlwMaKuqtY5aWCHlZkeEOP0ChVTRAFE1PLMa47AovXar79+iKRUWFsLNzY0JAJlEoz4EEBYWJv/co0cPBAQEoH379ti0aRMGDRoEAJAkSWMfIYRW2f2USiWUSqVWuUKh0PkfiD77AID4/2l1c/hgtRLlZhHnvXT5sFNIFY3/w7GezGKM9fzw1vdv0ZTMLV5qWszgwOD/ODo6okePHvj111/l8wJyc3M16uTl5cHDw8MU4REREZkNs0oASktLcf78eXh5ecHPzw+enp5ISUmRt5eVlSE1NRWDBw82YZRERESNX6M+BPDCCy9gzJgxaNOmDfLy8hAfH4/CwkKEh4dDkiQsWLAAy5cvR4cOHdChQwcsX74cDg4OmDJliqlDJyIiatQadQJw5coVTJ48GVevXkXLli0xaNAgHD9+HL6+vgCAxYsX49atW5g9ezauX7+OgQMHYv/+/WjWrJmJIyciImrcGnUCsHXr1hq3S5KE2NhYxMbGNkxARERETYRZnQNAREREhtGoZwDMwTsHL8qX9FHjcez3a7XWqZCsAVfgx6x8jcscA9q5GjM0IqJGgTMAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFsjG1AEQNTbHfr+m974B7VwNGAkRkfFwBoCIiMgCcQaAiBqHgwn67SesAHQGjqwApIrq6wVH69c+URPFGQAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskA2pg6AiKhBHEwwfh/B0cbvg8hAOANARERkgTgDQGRAx36/ptd+Ae1cDRwJEVHNOANARERkgZgAEBERWaAmkwC8++678PPzg52dHfr164cjR46YOiQiIqJGq0kkAJ9++ikWLFiApUuX4tSpUxg2bBjCwsJw6dIlU4dGRETUKDWJBGDFihWIjIzEM888gy5dumDVqlXw8fHB2rVrTR0aERFRo2T2VwGUlZUhPT0dL730kkZ5aGgojh49WuU+paWlKC0tlZ8XFBQAAPLz86FWq+vUr1qtRklJCUrLCyAkaz2jB4pu39F7X2OrkARKSkpQdPsOrES5qcMxuMY0vms3y4zSrlpYoaSkBNekMiikCqP0YWqNaozXdLsKpKioCAAghDBGNEQ1MvsE4OrVqygvL4eHh4dGuYeHB3Jzc6vcJyEhAXFxcVrlfn5+RomRiCxFrF57FRUVwdnZ2bChENXC7BOASpIkaTwXQmiVVYqOjkZUVJT8vKKiAvn5+XB1da12n/sVFhbCx8cHly9fRvPmzfUPvBFr6mNs6uMDOMbGTgiBoqIieHt7mzoUskBmnwC4ubnB2tpa69t+Xl6e1qxAJaVSCaVSqVHm4uKiV//Nmzc3u386umrqY2zq4wM4xsaM3/zJVMz+JEBbW1v069cPKSkpGuUpKSkYPHiwiaIiIiJq3Mx+BgAAoqKi8NRTT6F///4ICAjAunXrcOnSJcyaNcvUoRERETVKTSIBePzxx3Ht2jUsW7YMOTk56N69O/bs2QNfX1+j9alUKhETE6N1KKEpaepjbOrjAzhGIqqeJHj9CRERkcUx+3MAiIiISHdMAIiIiCwQEwAiIiILxASAiIjIAjEB0ENTuvXw4cOHMWbMGHh7e0OSJOzYsUNjuxACsbGx8Pb2hr2
9PYKCgnDu3DnTBKuHhIQEPPDAA2jWrBnc3d0xbtw4XLhwQaOOuY9x7dq16Nmzp7wQTkBAAL755ht5u7mP734JCQmQJAkLFiyQy5raGIkaAhMAHTW1Ww8XFxejV69eWLNmTZXbExMTsWLFCqxZswYnTpyAp6cnQkJC5JuYNHapqamYM2cOjh8/jpSUFNy5cwehoaEoLi6W65j7GFu3bo3XXnsNaWlpSEtLw4MPPohHH31U/gA09/Hd68SJE1i3bh169uypUd6UxkjUYATpZMCAAWLWrFkaZZ07dxYvvfSSiSIyHABi+/bt8vOKigrh6ekpXnvtNbns9u3bwtnZWbz33nsmiLD+8vLyBACRmpoqhGiaYxRCiBYtWogNGzY0qfEVFRWJDh06iJSUFBEYGCiee+45IUTTfQ+JjI0zADqovPVwaGioRnlNtx42Z5mZmcjNzdUYr1KpRGBgoNmOt/LWzyqVCkDTG2N5eTm2bt2K4uJiBAQENKnxzZkzB4888ggeeughjfKmNEaihtQkVgJsKPrceticVY6pqvFmZ2ebIqR6EUIgKioKQ4cORffu3QE0nTGeOXMGAQEBuH37NpycnLB9+3Z07dpV/gA09/Ft3boVJ0+exIkTJ7S2NZX3kKihMQHQgy63Hm4Kmsp4586di9OnT+P777/X2mbuY+zUqRMyMjJw48YNfPnllwgPD0dqaqq83ZzHd/nyZTz33HPYv38/7Ozsqq1nzmMkMgUeAtCBPrceNmeenp4A0CTGO2/ePOzcuRMHDx5E69at5fKmMkZbW1v4+/ujf//+SEhIQK9evfDWW281ifGlp6cjLy8P/fr1g42NDWxsbJCamoq3334bNjY28jjMeYxEpsAEQAeWduthPz8/eHp6aoy3rKwMqampZjNeIQTmzp2Lbdu24bvvvoOfn5/G9qYwxqoIIVBaWtokxjdixAicOXMGGRkZ8qN///6YOnUqMjIy0K5dO7MfI5FJmO78Q/O0detWoVAoxMaNG8VPP/0kFixYIBwdHUVWVpapQ9NLUVGROHXqlDh16pQAIFasWCFOnTolsrOzhRBCvPbaa8LZ2Vls27ZNnDlzRkyePFl4eXmJwsJCE0deN88++6xwdnYWhw4dEjk5OfKjpKRErmPuY4yOjhaHDx8WmZmZ4vTp02LJkiXCyspK7N+/Xwhh/uOryr1XAQjRNMdIZGxMAPTwzjvvCF9fX2Frayv69u0rX1Jmjg4ePCgAaD3Cw8OFEHcvsYqJiRGenp5CqVSK4cOHizNnzpg2aB1UNTYAIikpSa5j7mP8+9//Lv8+tmzZUowYMUL+8BfC/MdXlfsTgKY4RiJj4+2AiYiILBDPASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAygKysLEiShIyMDFOHQkRUJ0wAqEkRQuChhx7CyJEjtba9++67cHZ2xqVLl0wQGRFR48IEgJoUSZKQlJSEH374Ae+//75cnpmZiRdffBFvvfUW2rRpY8IIiYgaByYA1OT4+PjgrbfewgsvvIDMzEwIIRAZGYkRI0YgIiJCq/7kyZPxxBNPaJSp1Wq4ubkhKSkJALB3714MHToULi4ucHV1xejRo/Hbb79VG0NycjJcXFw0ynbs2AFJkjTKdu3ahX79+sHOzg7t2rVDXFwc7ty5I2+PjY1FmzZtoFQq4e3tjfnz5+v4ahARVc3G1AEQGUN4eDi2b9+OadOmYcKECTh79izOnj1bZd2pU6di0qRJuHnzJpycnAAA+/btQ3FxMSZMmAAAKC4uRlRUFHr06IHi4mK88soreOyxx5CRkQErK/3y6H379uHJJ5/E22+/jWHDhuG3337DjBkzAAAxMTH44osvsHLlSmzduhXdunVDbm4u/vOf/+jVFxGRFtPejJDIeP7880/RsmVLYWVlJbZt21ZtvbKyMuHm5iY+/PBDuWzy5Mli4sSJ1e6Tl5c
nAMi3nM3MzBQAxKlTp4QQQiQlJQlnZ2eNfbZv3y7u/ZMbNmyYWL58uUadzZs3Cy8vLyGEEG+++abo2LGjKCsrq9N4iYh0wUMA1GS5u7tjxowZ6NKlCx577LFq6ykUCkycOBEff/wxgLvf9r/66itMnTpVrvPbb79hypQpaNeuHZo3bw4/Pz8AqNcJhenp6Vi2bBmcnJzkx/Tp05GTk4OSkhJMnDgRt27dQrt27TB9+nRs375d4/AAEVF98BAANWk2Njawsan913zq1KkIDAxEXl4eUlJSYGdnh7CwMHn7mDFj4OPjg/Xr18Pb2xsVFRXo3r07ysrKqmzPysoKQgiNMrVarfG8oqICcXFxGD9+vNb+dnZ28PHxwYULF5CSkoIDBw5g9uzZeP3115GamgqFQlGX4RMRVYsJABGAwYMHw8fHB59++im++eYbTJw4Eba2tgCAa9eu4fz583j//fcxbNgwAMD3339fY3stW7ZEUVERiouL4ejoCABaawT07dsXFy5cgL+/f7Xt2NvbY+zYsRg7dizmzJmDzp0748yZM+jbt289RktExASACMDdywenTJmC9957D7/88gsOHjwob2vRogVcXV2xbt06eHl54dKlS3jppZdqbG/gwIFwcHDAkiVLMG/ePPz4449ITk7WqPPKK69g9OjR8PHxwcSJE2FlZYXTp0/jzJkziI+PR3JyMsrLy+W2Nm/eDHt7e/j6+hrjJSAiC8NzAIj+39SpU/HTTz+hVatWGDJkiFxuZWWFrVu3Ij09Hd27d8fzzz+P119/vca2VCoVPvroI+zZswc9evTAJ598gtjYWI06I0eOxO7du5GSkoIHHngAgwYNwooVK+QPeBcXF6xfvx5DhgxBz5498e2332LXrl1wdXU1+NiJyPJI4v4DlURERNTkcQaAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCzQ/wGGQ4CjYVIb5gAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABXk0lEQVR4nO3deXxMV/8H8M9MMjMy2cg+WWSziy20YimxJERRtbZaRFNPn1LlQdVSBC2lpbqg9RShKFWPX7WUhIot0RJJLVEiiUhIkAiTPZOZ8/tjzE1GFjPJbJl836/XvJh779z5nizzyT33nHt5jDEGQgghhJgVvrELIIQQQojuUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwDdQVFQUeDwe97C0tISnpyemTp2Ku3fvctvFxsZy20RFRdW4r4EDB4LH48HHx0dtuY+PD3g8HoKDg2t83c6dO7l9x8bG1llv1Tp4PB4sLCzg6uqKcePG4fr161q0/Pl8fHwwfPhwne6Tx+Phvffee+52qnZW/XpERkaCx+OpbRccHFzr17U2ycnJiIyMxO3bt6utCw8Pr/b9qy9VvaqHUCiEr68vZs2ahcePH+vkPZ6Hx+MhMjKSe676ea+p7XU5cuSI2n6q8vHxQXh4eL1rJITUjAJeR7Zv3474+HjExMRg2rRp+PHHH/HSSy+hqKhIbTtbW1ts3bq12uvT09MRGxsLOzu7Gvdva2uL06dPIzU1tdq6bdu21fq62qxatQrx8fE4efIkPvzwQ8TExKBPnz5qf5Q0ZoGBgYiPj0dgYGCd223atAmbNm3Sat/JyclYvnx5jSG3ZMkSHDx4UKv9Pc/Ro0cRHx+Pw4cPY9SoUfj6668RFhYGY1xl+uWXX0Z8fDwkEolWrzty5AiWL19e47qDBw9iyZIluiiPEFIFBbyOBAQEICgoCAMGDMCyZcswf/58pKen4//+7//UtpswYQLOnj2LlJQUteXbtm2Dh4cH+vTpU+P++/btCw8PD2zbtk1teWpqKk6fPo0JEyZoVW/r1q0RFBSEfv36Yc6cOVi/fj3y8/Nr7V0AgOLiYq3ew5js7OwQFBT03D98OnTogA4dOujsff39/dGtWzed7Q8AunfvjqCgIISEhOCLL77Am2++ifPnzyMuLq7W1+jre+Xs7IygoCCIRCKd7bNbt27w9/fX2f4IIUoU8HoSFBQEAMjIyFBbHhISAi8vL7WgVigU2LFjB6ZMmQI+v+ZvCZ/Px+TJk7Fjxw4oFApu+bZt2+Dl5YXBgwfrtF5V9/ClS5cwduxYtGjRgvsQLi0txcKFC+Hr6wuhUAgPDw/MmDGj1m7jgwcPonPnzmjWrBn8/Pzw1Vdfqa0vLS3F3Llz0bVrV9jb28PBwQG9evXCL7/8Umu93333Hdq0aQORSIQOHTpg7969autr6qKvSU1d9Js3b0aXLl1gY2MDW1tbtGvXDosWLQKg7KIeN24cAGDAgAHVTrvU1EWvUCjw9ddfo2vXrrCyskLz5s0RFBSEQ4cO1VlbbZ79XgUHByMgIACnT59G7969IRaL8dZbbwEApFIp5s2bp/a9mj17drWeJalUimnTpsHR0RE2NjYYOnQobt68We29a+uiP3r0KAYNGgR7e3uIxWK0b98eq1ev5r4mGzduBAC1Uw6qfdTURX/nzh28+eabcHFxgUgkQvv27bFu3Tq1n/3bt2+Dx+Ph888/x/r16+Hr6wsbGxv06tUL58+fV9tfWloaXnvtNbi7u0MkEsHV1RWDBg1CUlKS5l94QhoZS2MXYK5u3boFQHnEUxWfz0d4eDi2bt2Kjz/+GBYWFoiOjkZWVhamTp2KWbNm1brPt956C6
tXr8axY8cQFhYGuVyOHTt2ICIiotY/DBpa7+jRo/Haa6/h3//+N4qKisAYw6hRo3DixAksXLgQL730Ei5fvoxly5YhPj4e8fHxakd3SUlJmD17NiIjI+Hm5obdu3dj1qxZKC8vx7x58wAAZWVlePToEebNmwcPDw+Ul5fj+PHjGD16NLZv347Jkyer1XTo0CGcPHkSK1asgLW1NTZt2oTXX38dlpaWGDt2bIO+Dnv37sX06dMxc+ZMfP755+Dz+bh16xaSk5MBKLuoV61ahUWLFmHjxo3cKYC6jkDDw8Oxa9cuREREYMWKFRAKhbh06ZLW57FVavpeZWdn480338T8+fOxatUq8Pl8FBcXo3///sjKysKiRYvQuXNnXLt2DUuXLsWVK1dw/Phx8Hg87nsaFxeHpUuX4oUXXsC5c+cQFhamUT1bt27FtGnT0L9/f3z77bdwcXHBzZs3cfXqVQDK0xZFRUX4+eefER8fz72utm7+hw8fonfv3igvL8fKlSvh4+OD3377DfPmzUNqamq1UyobN25Eu3btsGHDBu79hg0bhvT0dNjb2wMAhg0bBrlcjrVr16Jly5bIzc1FXFycwcYyEGIUjDTI9u3bGQB2/vx5JpPJWEFBAfvtt9+Ys7Mzs7W1ZTk5OYwxxk6ePMkAsP3797O0tDTG4/HYb7/9xhhjbNy4cSw4OJgxxtjLL7/MvL291d7D29ubvfzyy4wxxvr378/Gjh3LGGPs8OHDjMfjsfT0dLZ//34GgJ08ebLOelV17Nu3j8lkMlZcXMxOnz7NWrVqxSwsLNjff//NGGNs2bJlDABbunSp2uuPHj3KALC1a9eqLd+3bx8DwLZs2aJWN4/HY0lJSWrbhoSEMDs7O1ZUVFRjjRUVFUwmk7GIiAjWrVs3tXUAmJWVFfd1VW3frl071qpVq2rtrPr1ULWpqv79+7P+/ftzz9977z3WvHnzGutSqetrPWXKFLXv3+nTpxkAtnjx4jr3WRNVvTk5OUwmk7H8/Hy2a9cuZmVlxby8vFhJSQnXBgDsxIkTaq9fvXo14/P57MKFC2rLf/75ZwaAHTlyhDHG2O+//84AsC+//FJtu08++YQBYMuWLeOWqX7e09PTGWOMFRQUMDs7O9a3b1+mUChqbcuMGTOqfe1VvL292ZQpU7jnCxYsYADYn3/+qbbdu+++y3g8Hrtx4wZjjLH09HQGgHXq1IlVVFRw2/31118MAPvxxx8ZY4zl5uYyAGzDhg211keIOaIueh0JCgqCQCCAra0thg8fDjc3N/z+++9wdXWttq2vry+Cg4Oxbds25OXl4ZdffuG6VJ/nrbfewqFDh5CXl4etW7diwIAB9Rq1PWHCBAgEAojFYvTr1w9yuRw///wzOnfurLbdmDFj1J7/8ccfAFCtS3XcuHGwtrbGiRMn1JZ37NgRXbp0UVs2ceJESKVSXLp0iVu2f/9+9OnTBzY2NrC0tIRAIMDWrVtrHNk/aNAgta+rhYUFJkyYgFu3biErK0vzL0INXnzxRTx+/Bivv/46fvnlF+Tm5jZof7///jsAYMaMGfXeh5ubGwQCAVq0aIE333wTgYGBOHr0KJo1a8Zt06JFCwwcOFDtdb/99hsCAgLQtWtXVFRUcI8hQ4aonb44efIkAOCNN95Qe/3EiROfW1tcXBykUimmT59ebYZCff3xxx/o0KEDXnzxRbXl4eHhYIxxP4MqL7/8MiwsLLjnqp9h1SkMBwcH+Pv747PPPsP69euRmJio1tVPiLmigNeRnTt34sKFC0hMTMS9e/dw+fLlWgfMAUBERAR+/fVXrF+/HlZWVhp3LY8dOxbNmjXDF198gV9//RURERH1qnfNmjW4cOECLl26hDt37iAtLQ2jRo2qtt2z3ah5eXmwtLSs1pXP4/Hg5uaGvLw8teVubm7V9qlaptr2f//7H8aPHw8PDw/s2rUL8fHxuHDhAt566y2UlpbW+vq69llfkyZNwrZt25CRkYExY8
bAxcUFPXv2RExMTL329/DhQ1hYWNRYs6aOHz+OCxcuICkpCbm5uTh79my1gYE1dXffv38fly9fhkAgUHvY2tqCMcb98aL6njo6Oqq9XpOaHz58CADw9PSsb/OqycvLq7E97u7u3Pqqnq1bdYqopKQEgPJn88SJExgyZAjWrl2LwMBAODs74/3330dBQYHO6ibE1NA5eB1p3749evToofH2o0ePxowZM/Dpp59i2rRpsLKy0uh1YrEYr732GlavXg07OzuMHj26XvX6+flpVO+zR2WOjo6oqKjAw4cP1UKeMYacnBy88MILatvn5ORU26dqmeqDedeuXfD19cW+ffvU3q+srKzGmjTZZ0NMnToVU6dORVFREU6fPo1ly5Zh+PDhuHnzJry9vbXal7OzM+RyOXJycrSeWqbSpUsXODk51blNTUfPTk5OsLKyqjbzoup6oPJ7mpeXp/b1q+nr/CzVz0BDe06qcnR0RHZ2drXl9+7dA4Dnfi1q4u3tzU1PvXnzJn766SdERkaivLwc3377bcMKJsRE0RG8kVhZWWHp0qUYMWIE3n33Xa1e++6772LEiBFYunSpWjetIQwaNAiAMpSrOnDgAIqKirj1KteuXcPff/+ttmzPnj2wtbXlBqipLuJSNaRycnJqHUV/4sQJ3L9/n3sul8uxb98++Pv76/RI0traGmFhYVi8eDHKy8tx7do1ANWPEOuiGqi2efNmndWlqeHDhyM1NRWOjo7o0aNHtYfq1M6AAQMAALt371Z7/Z49e577Hr1794a9vT2+/fbbOufla/M1GzRoEJKTk9VO4QCVF3RS1Vtfbdq0wUcffYROnTpVew9CzAkdwRvRnDlzMGfOHK1f17Vr12rz6w0lJCQEQ4YMwYcffgipVIo+ffpwo+i7deuGSZMmqW3v7u6OkSNHIjIyEhKJBLt27UJMTAzWrFkDsVgMQBlE//vf/zB9+nSMHTsWmZmZWLlyJSQSSbXrBQDKI7iBAwdiyZIl3Cj6f/75p9pUufpQ9ab06dMHEokEOTk5WL16Nezt7bneiYCAAADAli1bYGtri2bNmsHX17fG3oOXXnoJkyZNwscff4z79+9j+PDhEIlESExMhFgsxsyZMxtcc21mz56NAwcOoF+/fvjPf/6Dzp07Q6FQ4M6dO4iOjsbcuXPRs2dPhIaGol+/fpg/fz6KiorQo0cPnDt3Dj/88MNz38PGxgbr1q3D22+/jcGDB2PatGlwdXXFrVu38Pfff+Obb74BAHTq1AmA8tRQWFgYLCws0LlzZwiFwmr7/M9//oOdO3fi5ZdfxooVK+Dt7Y3Dhw9j06ZNePfdd9GmTRutvg6XL1/Ge++9h3HjxqF169YQCoX4448/cPnyZSxYsECrfRHSmFDAE63weDz83//9HyIjI7F9+3Z88skncHJywqRJk7Bq1apqF0Dp2rUrpk6dimXLliElJQXu7u5Yv349/vOf/3DbTJ06FQ8ePMC3336Lbdu2wc/PDwsWLEBWVlaNVz8bOXIkOnbsiI8++gh37tyBv78/du/erfXFfmry0ksvISoqCj/99BPy8/Ph5OSEvn37YufOnVx3tK+vLzZs2IAvv/wSwcHBkMvl2L59e62XW42KikJgYCC2bt2KqKgoWFlZoUOHDtzcen2xtrbGmTNn8Omnn2LLli1IT0+HlZUVWrZsicGDB3NH8Hw+H4cOHcKcOXOwdu1alJeXo0+fPjhy5AjatWv33PeJiIiAu7s71qxZg7fffhuMMfj4+GDKlCncNhMnTsS5c+ewadMmrFixAowxpKen1zhA1NnZGXFxcVi4cCEWLlwIqVQKPz8/rF27tl5/ELu5ucHf3x+bNm1CZmYmeDwe/Pz8sG7dOr3+gUWIsfFYXf1qhBBCCGmU6Bw8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZsjsp8kpFArcu3cPtra2OrtWNiGEPA9jDAUFBXB3d2/w3R4JqQ+zD/
h79+7By8vL2GUQQpqozMxMnV5hkRBNmX3A29raAlD+ktnZ2Wn0GplMhujoaISGhkIgEOizPIOg9pg2c2sPYH5tqk97pFIpvLy8uM8gQgzN7ANe1S1vZ2enVcCLxWLY2dmZzYcTtcd0mVt7APNrU0PaQ6cGibHQiSFCCCHEDFHAE0IIIWaIAp4QQggxQ2Z/Dp4QQkyZXC6HTCYzdhmkkRAIBLCwsNBoWwp4QggxAsYYcnJy8PjxY2OXQhqZ5s2bw83N7bkDOCngCSHECFTh7uLiArFYTKPtyXMxxlBcXIwHDx4AACQSSZ3bU8ATQoiByeVyLtwdHR2NXQ5pRKysrAAADx48gIuLS53d9TTIjhBC6sAYUFys232qzrmLxWLd7pg0Caqfm+eN3aAjeEIIqUFJCZCbCzx6BAiF+nkP6pYn9aHpzw0FPCGEPKVQKAM9NxcoKqpcrq+AJ0SfKOAJIU1eUVHl0bpCYexqCNENOgdPCGmSKiqA+/eBa9eAf/5RBjyFu+kIDg7G7Nmzuec+Pj7YsGGDXt8zNjYWPB4PPB4Po0aN0ut71Ub1/s2bN2/wvijgCSFNilQKpKUBly8DWVlAaamxKyKauHDhAv71r38Z5L1u3LiBqKgorV4THh7OhbPqERQUpLZNWVkZZs6cCScnJ1hbW2PkyJHIyspS2yY7O1tnf8hQFz0hxOyVlwN5ecqj9PJyY1dD6sPZ2dlg7+Xi4lKvI+ihQ4di+/bt3HPhM4M3Zs+ejV9//RV79+6Fo6Mj5s6di+HDhyMhIYGb7ubm5gZ7e/sG1a9CR/CEELPEGJCfD6SkAFeuAPfumW64M8ZQVFRklAdjTOM6g4ODMXPmTMyePRstWrSAq6srtmzZgqKiIkydOhW2trbw9/fH77//rva65ORkDBs2DDY2NnB1dcWkSZOQm5vLrS8qKsLkyZNhY2MDiUSCdevWVXvvZ7vo169fj06dOsHa2hpeXl6YPn06CgsLufVRUVFo3rw5jh07hvbt28PGxgZDhw5Fdna2Ft+Z2ut79hQCAIhEIri5uXEPBwcHbt2TJ0+wdetWrFu3DoMHD0a3bt2wa9cuXLlyBcePH9e6Jk1QwBNCzEppqbLr/fJlZVe8VGrsip6vuLgYNjY2RnkUaznJf8eOHXBycsJff/2FmTNn4t1338W4cePQu3dvXLp0CUOGDMGkSZO4/WZnZ6N///7o2rUrLl68iKNHj+L+/fsYP348t88PPvgAJ0+exMGDBxEdHY3Y2FgkJCTUWQefz8dXX32Fq1evYseOHfjjjz8wf/78al/Xzz//HD/88ANOnz6NO3fuYN68eVq1V5v6YmNj4eLigjZt2mDatGncFecAICEhATKZDKGhodwyd3d3BAQEIC4uTuuaNEFd9ISQRq+26W1E97p06YKPPvoIALBw4UJ8+umncHJywrRp0wAAS5cuxebNm3H58mUEBQVh8+bNCAwMxKpVq7h9bNu2DV5eXrh58ybc3d2xdetW7Ny5EyEhIQCUf0R4enrWWUfVo2dfX1+sXLkS7777LjZt2sQtl8lk+Pbbb+Hv7w8AeO+997BixQqt2ltYWKhRfWFhYRg3bhy8vb2Rnp6OJUuWYODAgUhISIBIJEJOTg6EQiFatGih9jpXV1fk5ORoVZOmKOAJIY2Wanpbfj4glxu7mvoTi8Vq3cuGfm9tdO7cmfu/hYUFHB0d0alTJ26Zq6srAHBHrwkJCTh58iRsbGyq7Ss1NRUlJSUoLy9Hr169uOUODg5o27ZtnXWcPHkSq1atQnJyMqRSKSoqKlBaWoqioiJYW1tzbVOFO6C8dnvVo2pNpKamalTfhAkTuP8HBASgR48e8Pb2xuHDhzF69Oha988Y09sFj4zaRb9582Z07twZdnZ2sLOzQ69evdTO3TDGEBkZCXd3d1hZWSE4OBjXrl0zYsWEEGOrqAAePACSkyuntzXmcAeUU6Osra2N8tA2XAQCQbXaqy5T7U/xdM6hQqHAiB
EjkJSUpPZISUlBv379tBoDoJKRkYFhw4YhICAABw4cQEJCAjZu3AhA/fKtNdWq7fvVpz5A+ceEt7c3UlJSACgHz5WXlyM/P19tuwcPHnB/FOmaUQPe09MTn376KS5evIiLFy9i4MCBeOWVV7gQX7t2LdavX49vvvkGFy5cgJubG0JCQlBQUGDMsgkhRlB1eltmpvJSssT0BQYG4tq1a/Dx8UGrVq3UHtbW1mjVqhUEAgHOnz/PvSY/Px83b96sdZ8XL15ERUUF1q1bh6CgILRp0wb37t3TS/31qQ8A8vLykJmZyd3xrXv37hAIBIiJieG2yc7OxtWrV9G7d2+91G7UgB8xYgSGDRuGNm3aoE2bNvjkk09gY2OD8+fPgzGGDRs2YPHixRg9ejQCAgKwY8cOFBcXY8+ePcYsmxBiIOXlQHY2cPWqcjR8fr5ydDxpPGbMmIFHjx7h9ddfx19//YW0tDRER0fjrbfeglwuh42NDSIiIvDBBx/gxIkTuHr1KsLDw8Hn1x5P/v7+qKiowNdff420tDT88MMP+Pbbb/VSvyb1FRYWYt68eYiPj8ft27cRGxuLESNGwMnJCa+++ioAwN7eHhEREZg7dy5OnDiBxMREvPnmm+jUqRMGDx6sl9pN5hy8XC7H/v37UVRUhF69eiE9PR05OTlqIw5FIhH69++PuLg4vPPOOzXup6ysDGVlZdxz6dMhtDKZ7Ll33lFRbafp9qaO2mPazK09QMPaxJjyaD0vz3RGwMvl2rfHnL6fDeHu7o5z587hww8/xJAhQ1BWVgZvb28MHTqUC8nPPvsMhYWFGDlyJGxtbTF37lw8efKk1n127doV69evx5o1a7Bw4UL069cPq1evxuTJk/XShufVZ2FhgStXrmDnzp14/PgxJBIJBgwYgH379sHW1pbb7osvvoClpSXGjx+PkpISDBo0CFFRUXXe8rUheKy+Jxh05MqVK+jVqxdKS0thY2ODPXv2YNiwYYiLi0OfPn1w9+5duLu7c9v/61//QkZGBo4dO1bj/iIjI7F8+fJqy/fs2UO3ZiSEGExxcTEmTpyIJ0+ewM7OTm1daWkp0tPT4evri2bNmhmpQvKs2NhYDBgwAPn5+c+90E1wcDC6du2ql8vnRkVFYfbs2Xj8+HGN6zX9+TH6EXzbtm2RlJSEx48f48CBA5gyZQpOnTrFrX92AMjzRhwuXLgQc+bM4Z5LpVJ4eXkhNDS02i9ZbWQyGWJiYhASElJtkEZjRO0xbebWHkDzNikUwOPHyqN1U57eZmUlQ2qqdt8jqal0PxCteXp6YsSIEfjxxx8N/t42NjaoqKjQyR9+Rg94oVCIVq1aAQB69OiBCxcu4Msvv8SHH34IAMjJyeEGKQDPH3EoEokgEomqLRcIBFp/eNbnNaaM2mPazK09QO1tKipShvqjR5Uj4Os45Wp0qh5Ubb5H5va9bAp69uzJjXqvaVqfISQlJQGATrrtjR7wz2KMoaysDL6+vnBzc0NMTAy6desGACgvL8epU6ewZs0aI1dJCNGWXF55PXgaAU9MkZWVFXfA+TyxsbF6qUHT99eEUQN+0aJFCAsLg5eXFwoKCrB3717Exsbi6NGj4PF4mD17NlatWoXWrVujdevWWLVqFcRiMSZOnGjMsgkhWigoUIb648d0O1ZCDMmoAX///n1MmjQJ2dnZsLe3R+fOnXH06FHucoDz589HSUkJpk+fjvz8fPTs2RPR0dFqoxIJIaZHNYD8+vXK/xNCDMuoAb9169Y61/N4PERGRiIyMtIwBRFC6o0x4MmTyqN1ACgrM+1z64SYM5M7B08IaVzKypShnpdXebROF6MhxPgo4AkhWlMolFeVy80FjHSPFELIc1DAE0I0VlysDPWq09sIIaaJAp4QUie5vPJe68XFxq7GvG3ZYtj3+9e/tNs+ODiYuxBZYmIiunbtqvuiTJTqAmv29va1XmHO1NDwF0JIjQoKgPR05d3b7tyhcCdK06ZNQ3
Z2NgICAjTaPjY2Fq+88gokEgmsra3RtWtX7N69u9o2PB6v2uOff/5pcL017ZfH4+Gzzz7jtgkODq62/rXXXlPbT3Z2tl4uS6tPdARPCOHIZJUXo6lyzyZCOGKxGG5ubhpvHxcXh86dO+PDDz+Eq6srDh8+jMmTJ8POzg4jRoxQ2/bGjRtqlxR3dnZucL3Z2dlqz3///XdERERgzJgxasunTZuGFStWcM+trKzU1ru5ucHe3r7B9RgSBTwhTZxqeltenvJfGgFPNKW6Octvv/2GRYsW4caNG+jSpQu+//57dOrUCYDygmZVvf/++zh27BgOHjxYLeBdXFyee5OXqoKDg7mehF27dsHCwgLvvvsuVq5cyXWpP/vHyC+//IIBAwbAz89Pbbm2f7g0BtRFT0gTVVYG3L0LXLkCpKYq565TuJP6+OCDD/D555/jwoULcHFxwciRI+u8Xe6TJ0/g4OBQbXm3bt0gkUgwaNAgnDx5UqP33rFjBywtLfHnn3/iq6++whdffIHvv/++xm3v37+Pw4cPIyIiotq63bt3w8nJCR07dsS8efNQUFCg0fubMjqCJ6QJUd29LTdXeY6dEF1YtmwZdwXSHTt2wNPTEwcPHsT48eOrbfvzzz/jwoUL+O6777hlEokEW7ZsQffu3VFWVoYffvgBgwYNQmxsLPr161fne3t5eeGLL74Aj8dD27ZtceXKFXzxxReYNm1atW137NgBW1tbjB49Wm35G2+8wd3/5OrVq1i4cCH+/vtvxMTE1OfLYTIo4AlpAmh6G9GnXr16cf93cHBA27Ztcf369WrbxcbGIjw8HP/973/RsWNHbnnbtm3Rtm1btf1lZmbi888/R79+/XDmzBmEhYVx67/77ju88cYbAICgoCC1W4j36tUL69atg1wur3ZHtm3btuGNN96odivWqn8MBAQEoHXr1ujRowcuXbqEwMBAbb8cJoMCnhAzRdPbiDFVDV0AOHXqFEaMGIH169dj8uTJz319UFAQdu3aBUB5K3HVbVQB1HnL8NqcOXMGN27cwL59+567bWBgIAQCAVJSUijgCSGmo7BQGer5+XT3NmIY58+fR8uWLQEA+fn5uHnzJtq1a8etj42NxfDhw7FmzRr8S8PJ94mJiZBIJADqvo3r+fPnqz1v3bp1taP3rVu3onv37ujSpctz3/vatWuQyWTc+zdWFPCEmAHV9La8PKC01NjVkKZmxYoVcHR0hKurKxYvXgwnJyeMGjUKgDLcX375ZcyaNQtjxoxBTk4OAEAoFHID7TZs2AAfHx907NgR5eXl2LVrFw4cOIADBw48970zMzMxZ84cvPPOO7h06RK+/vprrFu3Tm0bqVSK/fv3V1sOAKmpqdi9ezeGDRsGJycnJCcnY+7cuejWrRv69OnTwK+McVHAE9JIMQZIpcqjdZreZh60vbKcqfj0008xa9YspKSkoEuXLjh06BCEQiEAICoqCsXFxVi9ejVWr17NvaZ///6IjY0FAJSXl2PevHm4e/curKys0LFjRxw+fBjDhg177ntPnjwZJSUlePHFF2FhYYGZM2dW6yXYu3cvGGN4/fXXq71eKBTixIkT+PLLL1FYWAgvLy+8/PLLWLZsWbVegMaGAp6QRqasrPJiNHSvdWIK+vbti6tXr9a4LioqClFRUXW+fv78+Zg/f3693lsgEGDDhg3YvHlzrdv861//qvXUgJeXF3f5XXNDAU9II8BY5d3baHobMaZNmzbh+++/R3x8vLFLMSgbGxtUVFRUG4FvyijgCTFhJSWV09sqKoxdDWnqdu/ejZKSEgBAy5YtERcXZ+SKDEc1ir8xddtTwBNiYlTT2/LygKIiY1dDSCUPDw+158HBwWBGHPyhOodvCLWN4jdlFPCEmIiiIuVV5mh6GyFEFyjgCTGiigrgwQPl/1NSAD7dHYIQoiMU8IQYwZMnldPb6NKxhBB9oIAnxEDKy5Whnpen/D8hhOgTBTwhesRY5d3bpFJjV0
MIaUoo4AnRA5reRggxNgp4QnREoai8extNbyP1kZBg2Pfr3l277YODg7mrviUmJqJr1666L8oIfHx8kJGRAUB5s5zmzZsbtyAdoTG7hDRQURGQkQH8/bfyXwp3Ys6mTZuG7OxsBAQEaLR9aWkpwsPD0alTJ1haWnI3odGFgoICzJ49G97e3rCyskLv3r1x4cIFtW3u37+P8PBwuLu7QywWY+jQoUhJSVHb5sKFCxrd2KaxMWrAr169Gi+88AJsbW3h4uKCUaNG4caNG2rbhIeHg8fjqT2CgoKMVDEhShUVwP37wLVrwD//KI/aae46aQrEYjHc3NxgaalZB7BcLoeVlRXef/99DB48WKe1vP3224iJicEPP/yAK1euIDQ0FIMHD8bdu3cBAIwxjBo1Cmlpafjll1+QmJgIb29vDB48GEVV/hJ3dnbm7mxnTowa8KdOncKMGTNw/vx5xMTEoKKiAqGhoWpfeAAYOnQosrOzuceRI0eMVDFp6qRSIC0NuHwZyMqiW7OSpi02NhY8Hg+HDx9Gly5d0KxZM/Ts2RNXrlzhtrG2tsbmzZsxbdo0uLm5abzv8PBwjBo1CsuXL4eLiwvs7OzwzjvvoPzpFJSSkhIcOHAAa9euRb9+/dCqVStERkbC19eXu/FMSkoKzp8/j82bN+OFF15A27ZtsWnTJhQWFuLHH3/U7RfDBBn1HPzRo0fVnm/fvh0uLi5ISEhAv379uOUikUjjH4yysjKUlZVxz6VPhy7LZDLINLz1lmo7Tbc3ddSehikvV55bf/RIP9PbFAqZ2r/mwNzaJJdr/zNnLr9vmvjggw/w5Zdfws3NDYsWLcLIkSNx8+ZNCASCBu33xIkTaNasGU6ePInbt29j6tSpcHJywieffIKKigrI5fJqN3+xsrLC2bNnAYDLgqrbWFhYQCgU4uzZs3j77bcbVJ+pM6lBdk+ePAGAal0lsbGxcHFxQfPmzdG/f3988skncHFxqXEfq1evxvLly6stj46Ohlgs1qqemJgYrbY3ddQe05aTY17tAcyvTdr8zBUXF+uxEtOybNkyhISEAAB27NgBT09PHDx4EOPHj2/QfoVCIbZt2waxWIyOHTtixYoV+OCDD7By5UrY2tqiV69eWLlyJdq3bw9XV1f8+OOP+PPPP9G6dWsAQLt27eDt7Y2FCxfiu+++g7W1NdavX4+cnBxkZ2c3uN2mzmQCnjGGOXPmoG/fvmqDN8LCwjBu3Dh4e3sjPT0dS5YswcCBA5GQkACRSFRtPwsXLsScOXO451KpFF5eXggNDYWdnZ1GtchkMsTExCAkJKTBf4GaAmqP5lT3Wjfk9DaFQoacnBi4uYWAz2/83x/A/NpkZSVDaqp2P3PSJnThg169enH/d3BwQNu2bXH9+nWNXnvnzh106NCBe75o0SIsWrQIANClSxe1A7NevXqhsLAQmZmZ8Pb2xg8//IC33noLHh4esLCwQGBgICZOnIhLly4BUN4r/sCBA4iIiICDgwMsLCwwePBghIWF6aLZJs9kAv69997D5cuXua4VlQkTJnD/DwgIQI8ePeDt7Y3Dhw9j9OjR1fYjEolqDH6BQKB1GNTnNaaM2lOz2qa3Gfq68Hy+wCzCsCpzaZPqDqHa/MyZ0+9affB4PI22c3d3527FClTvwa1r3/7+/jh16hSKiooglUohkUgwYcIE+Pr6ctt2794dSUlJePLkCcrLy+Hs7IyePXuiR48e2jWoETKJgJ85cyYOHTqE06dPw9PTs85tJRIJvL29q01zIERbRUXKUM/Pp+vBE9IQ58+fR8uWLQEo55HfvHkT7dq10+i1lpaWtd6K9e+//0ZJSQmsrKy497GxsamWE9bW1rC2tkZ+fj6OHTuGtWvXVtuXvb09AOXAu4sXL2LlypUat6+xMmrAM8Ywc+ZMHDx4ELGxsWp/ddUmLy8PmZmZkEgkBqiQmJuKisqj9ZISY1dDiHlYsWIFHB0d4erqisWLF8PJyUltvn
tycjLKy8vx6NEjFBQUcEfsz7tQTnl5OSIiIvDRRx8hIyMDy5Ytw3vvvQf+0+61Y8eOgTGGtm3b4tatW/jggw/Qtm1bTJ06ldvH/v374ezsjJYtW+LKlSuYNWsWRo0ahdDQUF1/GUyOUQN+xowZ2LNnD3755RfY2toiJycHgPIvLSsrKxQWFiIyMhJjxoyBRCLB7du3sWjRIjg5OeHVV181ZumkkZFKlaH++LHy+vCEmCJtryxnKj799FPMmjULKSkp6NKlCw4dOgShUMitHzZsGHelOADo1q0bAOVBXl0GDRqE1q1bo1+/figrK8Nrr72GyMhIbv2TJ0+wcOFCZGVlwcHBAWPGjMEnn3yidnokOzsbc+bMwf379yGRSDB58mQsWbJERy03bUYNeNVcxeDgYLXl27dvR3h4OCwsLHDlyhXs3LkTjx8/hkQiwYABA7Bv3z7Y2toaoWLSmJSXKwfM5ebS3dsI0ae+ffvi6tWrta6/fft2vfe9fPnyGmdGAcD48eOfO1L//fffx/vvv1/v92/MjN5FXxcrKyscO3bMQNUQc8CY+r3WCSG6tWnTJnz//feIj483dik607FjR6SlpRm7DJ0ziUF2hDRUaWnlvdbp7m2E6Mfu3btR8nTwSsuWLREXF2fkinTjyJEj3IWJNJ1O3RhQwJNGS6Go7IIvLDR2NYSYPw8PD7XnwcHBz+2Jra+oqCi97Lcm3t7eBnsvQ6KAJ42O6gJh167RgDlCCKkNBTxpFOTyyqN11cVo5HLDX4yGEF1S0C0IST1o+nNDAU9MWkFB5fQ2+iwk5kIoFILP5+PevXtwdnaGUCjU+MpvpOlijKG8vBwPHz4En89Xm4pYEwp4YnJkssoBc1VuDEiI2eDz+fD19UV2djbu3btn7HJIIyMWi9GyZUvugj+1oYAnJqHq9DaplM6tE9Oij5kZQqEQLVu25G57SogmLCwsYGlpqVGPDwU8Maqyssqj9SZ0+2zSCJSWAllZwN27gK0tUMsdqhuEx+OZ3U2giOmggCcGp1Aob/BC09uIqaka6nl5lcttbIxXEyH1RQFPDKa4WBnqjx7R3duI6agt1Alp7CjgiV6pprfl5VXOXyfE2CjUSVNAAU/0gqa3EVNDoU6aGgp4ojMyWeXFaGh6GzEFFOqkKaOAJw1C09uIqaka6o8e0c8kaboo4Em90PQ2Ykoo1AmpjgKeaEyhUJ5Tz81VnmMnxJhKS5WBnpVFoU5ITRoc8HK5HFeuXIG3tzdatGihi5qIiaHpbcRUqEJddU6dQp2Q2mkd8LNnz0anTp0QEREBuVyO/v37Iy4uDmKxGL/99huCg4P1UCYxNLlcGei5uTS9jRgXhToh9aN1wP/888948803AQC//vor0tPT8c8//2Dnzp1YvHgxzp07p/MiieEUFipDPT+fprcR46FQJ6ThtA743NxcuLm5AQCOHDmCcePGoU2bNoiIiMBXX32l8wKJ/qmmt+XlKT9YCTEGCnVCdEvrgHd1dUVycjIkEgmOHj2KTZs2AQCKi4thYWGh8wKJfjCmnNaWm6uc5kYfpsQYKNQJ0R+tA37q1KkYP348JBIJeDweQkJCAAB//vkn2rVrp/MCiW6VlVVejIamtxFjKC0F7t1Tjn6nUCdEf7QO+MjISAQEBCAzMxPjxo2DSCQCoLxH7YIFC3ReIGk41QforVs0YI4YhyrU795V/nFJoU6I/tVrmtzYsWMBAKVVTthOmTJFNxURnSkpUX6Y5uYqnxcWAny+cWsiTcvt2xTqhBiL1h/3crkcK1euhIeHB2xsbJCWlgYAWLJkCbZu3arzAol25HLg4UPg+nUgORl48ACoqDB2VaSpKC0F0tKAuDjl88uXlT+PFO6EGJ7WAf/JJ58gKioKa9euhVAo5JZ36tQJ33//vVb7Wr16NV544QXY2trCxcUFo0aNwo0bN9S2YYwhMjIS7u7usLKyQnBwMK5du6Zt2WavsFB5tHT5MnDnDnXFE8NRhf
qZM8DvvwNJSZW9RoQQ49E64Hfu3IktW7bgjTfeUBs137lzZ/zzzz9a7evUqVOYMWMGzp8/j5iYGFRUVCA0NBRFRUXcNmvXrsX69evxzTff4MKFC3Bzc0NISAgK6FqpqKgA7t8Hrl0DbtxQDliiuevEEGoKdTpSJ8S0aH0O/u7du2jVqlW15QqFAjIth2UfPXpU7fn27dvh4uKChIQE9OvXD4wxbNiwAYsXL8bo0aMBADt27ICrqyv27NmDd955R9vyzYLq7m00vY0YUlMcKMcYQ25uMm7ePAKJJBPDhg0zdkmEaEzrgO/YsSPOnDkDb29vteX79+9Ht27dGlTMkydPAAAODg4AgPT0dOTk5CA0NJTbRiQScZfHrSngy8rKUFblZuRSqRQAIJPJNP4DRLWdtn+w6FN5ufLSsY8eKf+vDYVCpvZvY0ftMZzSUiAnRxnsVae08XjKR234fJnav41JSUk+0tNPIC0tBqmpMSgoyAIA+Pn54dNPP9V4P6b0+UGaJq0DftmyZZg0aRLu3r0LhUKB//3vf7hx4wZ27tyJ3377rd6FMMYwZ84c9O3bFwEBAQCAnJwcAMqL61Tl6uqKjIyMGvezevVqLF++vNry6OhoiMVirWqKiYnRantTl5ND7TFlptoeoRDw8VE+tBUYaJptqkoulyMlJQWJiYlISkpCSkoKFFXOdQmFQnTo0AGBgYGIjo4Gr66/bKoopoEwxMi0DvgRI0Zg3759WLVqFXg8HpYuXYrAwED8+uuv3EVv6uO9997D5cuXcfbs2Wrrnv2FYozV+ku2cOFCzJkzh3sulUrh5eWF0NBQ2NnZaVSLTCZDTEwMQkJCIBAItGiFbpSWKo+W8vN1MwJeoZAhJycGbm4h4PMN3x5do/boXlkZkJ1d/Ui9vvh8GQIDY3DpUggUCtP7HkmlWUhNjUFaWjTS0/9AaWm+2nonp/bw9w+Fn18IWrZ8Cf7+lnBx0e4zQdV7SIix1Gse/JAhQzBkyBCdFTFz5kwcOnQIp0+fhqenJ7dcdc37nJwcSCQSbvmDBw+qHdWriEQi7uI7VQkEAq3Duj6vqS+FovLubVXGGOp03jqfLzCLQFSh9jRMWVnlFeX0dU5doRCYRMDLZCXIzDyDtLRjSEs7htxc9Zk4zZo1h4/PYPj5DYGf3xDY2XmprWdM2d2uzWeCMQ4OCKmqwfeDbwjGGGbOnImDBw8iNjYWvr6+aut9fX3h5uaGmJgY7vx+eXk5Tp06hTVr1hijZJ0rLFQeMT16RCPgif6pQv3uXfMe9c4YQ17eP08D/Sju3DmFiorKC3PxeHxIJC/Az28o/PyGwN39BfD5Rv04JETntP6J5vP5dZ6DksvlGu9rxowZ2LNnD3755RfY2tpy59zt7e1hZWUFHo+H2bNnY9WqVWjdujVat26NVatWQSwWY+LEidqWbjIqKiqvB093byP61lRCvbT0MdLTjyM9XXmULpVmqq23tfWAn98Q+PoOga/vYFhZORipUkIMQ+uAP3jwoNpzmUyGxMRE7Nixo8bBbXXZvHkzACA4OFht+fbt2xEeHg4AmD9/PkpKSjB9+nTk5+ejZ8+eiI6Ohq2trbalG53q7m2PH5vvhywxDU0h1BUKObKzLyIt7RjS04/h7t3zYKyyG8zCQoSWLftxoe7s3FHjAXKEmAOtA/6VV16ptmzs2LHo2LEj9u3bh4iICI33xTT41OHxeIiMjERkZKQ2ZZqM8nJlqOflaT+9jRBtNIVQLyi4i7S0aKSlHcXt28dRUvJIbb2jY7un59GHomXLfhAItJs5Q4g50dlJp549e2LatGm62l2jxpjyKD03V3nUToi+VA313FzzG8dRUVGKzMyzSEs7irS0Y3j48KraepHIXm1wnL19SyNVSojp0UnAl5SU4Ouvv1YbAd8UlZZWHq3TDV6IvqimtKlGv5tTqCsHx93gut0zMmJRUVFSZQse3N1fgK+vMtA9PHrS4DhCaqH1b0aLFi3Uzm
MxxlBQUACxWIxdu3bptLjGoLbpbYTokjmHemnpE9y+fYIb8S6V3lFbb2MjqTI4LgRisaORKiWkcdE64L/44gu1gOfz+XB2dkbPnj3RokULnRZnyoqKlB+0+fnKW7QSomvmGuoKhRw5OZe4bnfl4LjKXyILCyG8vF7iprA5OwfQ4DhC6kHrgFeNbm+KKioqj9ZLSp6/PSHaMtdQLyzM5i4yk54eg5KSPLX1Dg5tufPoLVv2h1BobaRKCTEfGgX85cuXNd5h586d612MqaLpbUSfVPdGio9Xjn43h1CvqCjD33//jRMnTiM19TgePFD/DBGJ7ODtPYgL9ebNfYxTKCFmTKOA79q1K3g83nOntfF4PK0udGPq7t9XdsHT9Daia+XllZeJffQICAxs3OHOGMOjRylVrhwXC5ms6s1WeJBIulcZHBcECwu6lCsh+qRRwKenp+u7DpOUna3ba8GTpq1qqFftfm+sP2NlZdIqg+OO4cmT22rrW7RoAS+v4fD1DXs6OM7JOIUS0kRpFPDP3vudEKKZ2kK9MWJM8XRwnDLQs7Liqg2O8/TsCz+/IWjVaiCGDctCQsLLJnGzGUKaonpPIE1OTsadO3dQ/kz/9ciRIxtcFCGNmTmFemFhDtLTo5+GejRKSnLV1js4tOa63b29gyEU2gBQ3i6Wx7trjJIJIU9pHfBpaWl49dVXceXKFbXz8qppLOZ0Dp4QTZlLqMvl5cjMPMdNYXvw4G+19UKhLXx8BnJT2Jo3961lT4QQY9M64GfNmgVfX18cP34cfn5++Ouvv5CXl4e5c+fi888/10eNhJgkcwh1xhjy829x3e4ZGSchk6lfscnNrTs32t3DoxcNjiOkkdA64OPj4/HHH3/A2dkZfD4ffD4fffv2xerVq/H+++8jMTFRH3USYhJUoa66oUtjDPWysgJkZPzBjXh//Fh9EK21tSt8fUOfXj0uBNbWLkaqlBDSEFoHvFwuh42N8jybk5MT7t27h7Zt28Lb2xs3btzQeYGEGFtjD3XGFLh/PwlpaceQmnoUd+/GQaGovFkCny+Ap2cf+PkNgb//ULi4dAaP10iH9hNCOFoHfEBAAC5fvgw/Pz/07NkTa9euhVAoxJYtW+Dn56ePGgkxuMYe6oWF97nBcenpMSgufqC2vkULf+48esuWwRCJbI1UqWkTiwEnJ6BVK6CgwNjVEKIdrQP+o48+QtHTu6p8/PHHGD58OF566SU4Ojpi3759Oi+QEENpzKEul5cjKyuOO5d+/776qTKh0Abe3gO5c+ktWvgbqVLTpgp0JyfA2Rmwtq5cTgFPGhutA37IkCHc//38/JCcnIxHjx5Vu8scIY2BTKYM9MYY6vn5qVUGx/2B8vJCtfWurt24QPf07A0LC6GRKjVdVQPdyQl4evaRELOgdcDv2LEDY8eOhbV15c0gHBwcdFoUIfokk1WOfm9MoV5eXoiMjJNITT2K9PRjyM9PVVsvFjvD1zcU/v5D4eMTAhsbVyNVaroo0ElTonXAz5s3D9OnT8eIESPw5ptvYujQobC0rPf1cggxCFWo370LPHjQOEJdOTju7ypXjjsHhULGrefzLbnBcX5+Q+Dq2pUGxz2DAp00ZVonc3Z2No4ePYoff/wRr732GqysrDBu3Di8+eab6N27tz5qJKReGmOoFxU9QHp6zNPBcdEoKrqvtr55cz8u0L29B0AksjNSpabJykp57pwCnZB6BLylpSWGDx+O4cOHo7i4GAcPHsSePXswYMAAeHp6IjU19fk7IURPGluoy+UyZGaewfXrP+DcueXIyVEfHCcQWMPbewA34t3BoZWRKjVNFOiE1K5BfetisRhDhgxBfn4+MjIycP36dV3VRYjGGluo5+enPT1CP4bbt/9Aebn68GxX165PLzKjHBxnaSkyUqWmhwKdEM3VK+BVR+67d+/G8ePH4eXlhddffx379+/XdX2E1Cozs3GEunJwXCwX6o8epaitF4ud0KNHBzRvPg
U+PsNgY+NmpEpNDwU6IfWndcC//vrr+PXXXyEWizFu3DjExsbSuXdiEKoj9Xv3AC8vIDHRNIOdMYYHDy5zl4LNzDxbbXCch0evp+fSh8LdPQAvvHAUFy8Oa/K3VrWyqpyDToFOSMNoHfA8Hg/79u3DkCFDaPQ80buaut/5fGXAm5Kiooe4fTuGu61qUVGO2np7ex/uPLqPz0C1wXE8nuzZ3TUZFOiE6I/WCb1nzx6dvfnp06fx2WefISEhAdnZ2Th48CBGjRrFrQ8PD8eOHTvUXtOzZ0+cP39eZzUQ09MYzqnL5TLcvXue63bPzk4AwLj1AoEY3t4DuHulOzi0pgtBgQKdEEMy6iF4UVERunTpgqlTp2LMmDE1bjN06FBs376dey4U0tW4zJFMBmRnKy8+Y6qh/vjx7SpXjjuBsjKp2noXl87c4Dgvr740OA7KQHdwoEAnxBiMGvBhYWEICwurcxuRSAQ3Nxp0ZI5MPdTLy4tw584ppKUdRVraMTx6dFNtvZWVI3x9Q552vYfCxkZipEpNR7NmyjB3dlY+DwlRnlIhhBiexgGflZUFT09PfdZSo9jYWLi4uKB58+bo378/PvnkE7i41H5/6rKyMpSVlXHPpVLlUZZMJoNMptm5TtV2VQdGNWaqdphCe2Qy4P59ZRf8s6GuaRDw+TK1f3VFOTjuCtLSYpCaGoPMzLOQy8u59TyeBTw9g+DnFwJ//1C4uXUDn29RZQ/1q0df7TGEZs0qR7g7OlbenEWhkCEnxzR+5nRBLle2Q9PPEG23JUQfeIwx9vzNgObNm+Prr7/GpEmT9FMIj1ftHPy+fftgY2MDb29vpKenY8mSJaioqEBCQgJEopq7PyMjI7F8+fJqy/fs2QOxWKyX2knjJZVK8ffffyMxMRFJSUl49OiR2npnZ2d069YN3bp1Q+fOndXuwUBIXYqLizFx4kQ8efIEdnZ0xUFieBoH/KZNm7BgwQKEhIRgy5YtcHR01G0hNQT8s7Kzs+Ht7Y29e/di9OjRNW5T0xG8l5cXcnNzNf4lk8lkiImJgZtbCPj8xj9tSXk0Zdj21HWk3lB8vgyBgTG4dClE62llCkUF7t79C6mp0UhLi8a9e+qD4ywtreDt3R/+/sqjdAeHNnofHNeQ9uib6gjd0VH5r6Z/3xjjZ06frKxkSE2NQUhICAQCzdojlUrh5OREAU+MRuMu+unTpyMsLAwRERHo2LEjtmzZgpEjR+qztmokEgm8vb2RkpJS6zYikajGo3uBQKDxL6YKny8wiw8nFX23R3VO/e5dZbjr+5y6QiHQKBCfPMngBsfdvn0CZWVP1NY7OwdwU9iUg+OacesYUz4MQdP26JPqHLpqpHtDB8WZy++QxdMzMdp8jmj7eUOIrmk1yM7X1xd//PEHvvnmG4wZMwbt27evNhf+0qVLOi2wqry8PGRmZkIiocFMpqKionJKmyFCXRMyWfHTwXHKUM/L+0dtvZWVA3x8Qp5eaCYUtrYeRqrU+FRH6KpQt7U1dkWEEF3RehR9RkYGDhw4AAcHB7zyyisNuthNYWEhbt26xT1PT09HUlISHBwc4ODggMjISIwZMwYSiQS3b9/GokWL4OTkhFdffbXe70karqKicvS7KYQ6YwwPH15Deroy0O/cOQ25vPI0DY/Hh4dHEDcnXSLp8czguKaDAp2QpkOrdP7vf/+LuXPnYvDgwbh69SqcVXNh6unixYsYMGAA93zOnDkAgClTpmDz5s24cuUKdu7cicePH0MikWDAgAHYt28fbOlTyeBUoX73Lp6OjjZuPQUFBbh27Sekpp5Aeno0Cgruqq23s/PiLgXr4zMIzZo1N06hRkaBTkjTpXHADx06FH/99Re++eYbTJ48WSdvHhwcjLrG+B07dkwn70Pqx5RCXaGowL17fz29ctxRZGdfhKJKQZaWzdCyZTB3r3RHx3ZN8spxFOiEEBWNA14ul+Py5ctGmQtPDMeUQl0qzawyOO44Sksfq6
13du4AX1/V4LiXIBBYGadQI6o6D93ZmQKdEFJJ44CPiYnRZx3EiEwl1GWyEmRmnkZq6tGng+Ouq61v1qwFfHwGw99/MF591QK3b082+qhzQ6NAJ4Roim4H10SZQqgzxpCbe527FGxm5mlUVJRy63k8PtzdX+SmsEkkL4DPtwCfL4OT0xHcvm34mg1NJKoMcwp0Qog2KOCbkIoK5ah3Y4Z6SUk+bt8+znW9FxRkqa23tfXkzqP7+AyClZWD4Ys0Igp0QoiuUMCbOdWROo8HHD2qfG5ICoUc2dkXuEC/d+9PMFb5l4WFhQgtW/bnRrw7ObVvUoPjRKLKG7MMHAjQBc8IIbpCAW+Gnu1+B4AePQx3xC6VZiE9PRqpqUefDo7LV1vv6Ngefn5D4O8/FF5e/ZrU4LiajtAVCuXFguhWqoQQXaKANxMVFcowV118Ri6vXKfv23VWVJTizp3T3FF6bu41tfUikf3T26oqu97t7Lz0W5AJqRroTk50hE4IMRwK+EasrlDXJ8YY8vL+4QL9zp1TqKgoqbIF7+ngOGWgu7u/CD6/afyoUaATQkxF0/jUNSPGCvXS0se4ffvE01A/Cqk0U229jY07dx7d13dwkxkcR4FOCDFVFPCNgDFCXTk47uLTK8cdw927f4KxyjdWDo7rBz+/IfD1HQJn545NYnCcUKh+tzUKdEKIqaKAN1HGCPWCgntVrhwXg5KSR2rrHR3bcd3uLVv2h0Ag1n9RRkaBTghprCjgTYihQ72iohSZmWe5bveHD6+qrReJ7OHjM4gLdXt7b/0WZAIo0Akh5oIC3sgMGerKwXE3kJp6FOnpx5CREVttcJxE0oO7cpyHR0+zHxwnFKqfQ7e3N3ZFhBCiG+b96W2iDBnqpaVPcOfOMfz111b8+ecsPHmSobbexkbCnUf39R0MsdhJf8WYAAp0QkhTQQFvIKpQV118Rl+hzpgC2dkJ3Ln0u3fjnxkcJ4SX10tct7uzcyezHhxHgU4Iaaoo4PXIUKFeWJiNtLRopKUdRXp6DEpK8tTWOzi0Rq9ebWBr+y94eg6CUGitn0JMgCrQVaFOgU4Iaaoo4HXMEKFeUVGGzMyzSE9XHqU/eHBZbb1QaAsfn8HcUbqDgwd69DiCixfDzO72qhTohBBSMwp4HdB3qDPG8OhRCjcnPSPjJGSy4ipb8CCRdIev75Cng+OCYGFRNchlui3IiFSj3AEgOBho3tyY1RBCiOmigK8nfYd6WZkUt2//wU1he/Lkttp6a2s3+PmFws9vKHx8BsPa2lm3BZiImo7QVTdnoSlshBBSOwp4Legz1BlTICfnktrgOIWi8t6ufL4AXl59uSlsLi6dzXJwHHW5E0KIblDAP4c+Q72wMAfp6dFPu95jUFz8UG29g0Nrrtvd2zsYQqH53U+UAp0QQvSDAr4OFy8q76uuq1CXy8uRmXmOGxx3/36S2nqh0AY+PoO4UG/Rwk83b2xCKNAJIcQwKODrcO+e8nxvQzx6dIs7j64cHFektt7NLZC7C5uHR69nBsc1flUD3cmJBsURQoihUMDrWFlZATIy/uDOpT9+nKa2Xix24aav+fqGwNraxUiV6gcFOiGEmAYK+AZiTIH795O4QM/KOldtcJynZx8u1F1du4DH4xuxYt2iQCeEENNk1IA/ffo0PvvsMyQkJCA7OxsHDx7EqFGjuPWMMSxfvhxbtmxBfn4+evbsiY0bN6Jjx47GKxpAUdEDpKdHP71pSwyKix+orW/Rwr/K4LgBEIlsjVSp7j0b6Pb2gBkO5ieEkEbPqAFfVFSELl26YOrUqRgzZky19WvXrsX69esRFRWFNm3a4OOPP0ZISAhu3LgBW1vDhaZcXo6srHikpR19OjguUW29QGANH5+B3BS2Fi38DVabvlGgE0JI42TUgA8LC0NYWFiN6xhj2LBhAxYvXozRo0cDAHbs2AFXV1fs2bMH77zzjl5ry87OxsWL3yI19TgyMv5AeXmh2npX125ct7
unZ29YWAj1Wo+hUKATQoh5MNlz8Onp6cjJyUFoaCi3TCQSoX///oiLi6s14MvKylBWVsY9l0qlAACZTAaZ7PmXbM3JycGAAQOQmpqqtlwsdoaf32D4+YXAzy8ENjauz7zSdC8Hy+fL1P6tSigEHByUYe7oqLw6XNVAZ0z5MCUKhUzt38bO3NoDmF+b5HJlOzT5DFHRZltC9MFkAz4nJwcA4OqqHqSurq7IyMio6SUAgNWrV2P58uXVlkdHR0MsFj/3fZXXfX8ECwsLtGvXDt26dUO3bt3g6+sLPl81OC5B84aYkMDAmDrXFxUpH41FTk7d7WlszK09gPm1KSZG8/YUFxc/fyNC9MhkA17l2cuxMsbqvETrwoULMWfOHO65VCqFl5cXQkNDYafhxct//fVX3LlzB//88woUCgHy84H8/PrVb2wCAeDkJIOHRwzE4hDY2wsafZe7QiFDTk4M3NxCwOc3/usGmFt7APNrk5WVDKmpMQgJCYFAoFl7VL2HhBiLyQa8m5sbAOWRvEQi4ZY/ePCg2lF9VSKRCCKRqNpygUCg8S/miy++iNzcXCgUgkZ3e1VloKtfKY4x5UV7mjcXmMWHrQqfT+0xdebSJgsL5b/afI5ouh0h+mKyAe/r6ws3NzfExMSgW7duAIDy8nKcOnUKa9asMXJ1pqOmQH/2CN3UzqETQgjRP6MGfGFhIW7dusU9T09PR1JSEhwcHNCyZUvMnj0bq1atQuvWrdG6dWusWrUKYrEYEydONGLVxqVJoBNCCCFGDfiLFy9iwIAB3HPVufMpU6YgKioK8+fPR0lJCaZPn85d6CY6Otqgc+CNTSBQjm53dqZAJ4QQojmjBnxwcDBYHf3HPB4PkZGRiIyMNFxRRkaBTgghRBdM9hx8U0GBTgghRB8o4A2MAp0Q0yUQACJR9QefD9y8aezqCNEOBbyeVQ101d3WKNAJMQ4er+YAFworg7wmdFE60hhRwOsYBTohxmVhUXOIq4KckKaCAr6BKNAJMbzautJFIsCSPtUIAUABrzVVoKvmoVOgE6J7tXWlq47Ca+tKJ4RUooB/Dgp0QvSDutIJ0S8K+Dr06we0aEGBTkh9CQSAlRV1pRNiDPQrVgc6WiekbrV1pVtYKG9w1LGjMuQJIYZHAU8IqVN9utJpWhkhxkcBTwjh5oFXnRNOXemENG70q0tIE0Cj0glpeijgCTETNCqdEFIVBTwhjciz3efPDmwjhBAVCnhCTEhdXekiEc3qIIRojgKeEAN7titdNaWsQwfA2trY1RFCzAUFPCF6oE1XumpKGZ0nJ4ToEgU8IfVAXemEEFNHAU9ILSwtaz8Sp6NtQoipo4AnTRqNSieEmCsKeGLW+Py6Q5y60gkh5ooCnjR6lpY1d6FTVzohpCmjgCeNQtWjcNW0srZtldPKqCudEEKqo4AnJkGbrnTVtDIrKwp3QgipDQU8MZiautJVD7pnOCGE6BYFPNEpGpVOCCGmwaQDPjIyEsuXL1db5urqipycHCNVRGhUOiGENA4mHfAA0LFjRxw/fpx7bkGHgXpHXemEENL4mXzAW1paws3NTePty8rKUFZWxj2XSqUAAJlMBplqdNZzqLZTKDTb3tSp2lG1PUKh+lSyqtPL6vobSsMvoV6pvj+afj9Nnbm1BzC/NtWnPebSdtJ48RhjzNhF1CYyMhKfffYZ7O3tIRKJ0LNnT6xatQp+fn51vubZbn0A2LNnD8RisT7LJYQQTnFxMSZOnIgnT57Azs7O2OWQJsikA/73339HcXEx2rRpg/v37+Pjjz/GP//8g2vXrsHR0bHG19R0BO/l5YXc3FyNf8lkMhliYmLg5hYCPt90+6SfvVZ61SPyql3pqvaEhIRAYAZ97NQe02dubapPe6RSKZycnCjgidGYdBd9WFgY9/9OnTqhV69e8Pf3x44dOzBnzpwaXyMSiSASiaotFwgEWn/Q8PkCowY8j6cMal2NSq/P18CUUXtMn7m1SZv2mF
O7SeNk0gH/LGtra3Tq1AkpKSnGLkVn+PzaA1wopFHphBBC6qdRBXxZWRmuX7+Ol156ydilaIVGpRNCCDE0kw74efPmYcSIEWjZsiUePHiAjz/+GFKpFFOmTDF2aWp03ZVOCCGENJRJB3xWVhZef/115ObmwtnZGUFBQTh//jy8vb0NXgt1pRNCCGlMTDrg9+7da9T3b9lSebcy6konhBDS2Jh0wBubgwMFOyGEkMaJb+wCCCGEEKJ7FPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwBNCCCFmyOwvdKO63b1UKtX4NTKZDMXFxZBKpWZxy0dqj2kzt/YA5tem+rRH9Zmj+gwixNDMPuALCgoAAF5eXkauhBDSFBUUFMDe3t7YZZAmiMfM/M9LhUKBe/fuwdbWFjwN7wgjlUrh5eWFzMxM2NnZ6blC/aP2mDZzaw9gfm2qT3sYYygoKIC7uzv4fDobSgzP7I/g+Xw+PD096/VaOzs7s/hwUqH2mDZzaw9gfm3Stj105E6Mif6sJIQQQswQBTwhhBBihijgayASibBs2TKIRCJjl6IT1B7TZm7tAcyvTebWHtI0mP0gO0IIIaQpoiN4QgghxAxRwBNCCCFmiAKeEEIIMUMU8IQQQogZarIBv2nTJvj6+qJZs2bo3r07zpw5U+f2p06dQvfu3dGsWTP4+fnh22+/NVClmtGmPf/73/8QEhICZ2dn2NnZoVevXjh27JgBq30+bb8/KufOnYOlpSW6du2q3wK1pG17ysrKsHjxYnh7e0MkEsHf3x/btm0zULXPp217du/ejS5dukAsFkMikWDq1KnIy8szULV1O336NEaMGAF3d3fweDz83//933NfY+qfB4QAAFgTtHfvXiYQCNh///tflpyczGbNmsWsra1ZRkZGjdunpaUxsVjMZs2axZKTk9l///tfJhAI2M8//2zgymumbXtmzZrF1qxZw/766y928+ZNtnDhQiYQCNilS5cMXHnNtG2PyuPHj5mfnx8LDQ1lXbp0MUyxGqhPe0aOHMl69uzJYmJiWHp6Ovvzzz/ZuXPnDFh17bRtz5kzZxifz2dffvklS0tLY2fOnGEdO3Zko0aNMnDlNTty5AhbvHgxO3DgAAPADh48WOf2pv55QIhKkwz4F198kf373/9WW9auXTu2YMGCGrefP38+a9eundqyd955hwUFBemtRm1o256adOjQgS1fvlzXpdVLfdszYcIE9tFHH7Fly5aZVMBr257ff/+d2dvbs7y8PEOUpzVt2/PZZ58xPz8/tWVfffUV8/T01FuN9aVJwJv65wEhKk2ui768vBwJCQkIDQ1VWx4aGoq4uLgaXxMfH19t+yFDhuDixYuQyWR6q1UT9WnPsxQKBQoKCuDg4KCPErVS3/Zs374dqampWLZsmb5L1Ep92nPo0CH06NEDa9euhYeHB9q0aYN58+ahpKTEECXXqT7t6d27N7KysnDkyBEwxnD//n38/PPPePnllw1Rss6Z8ucBIVWZ/c1mnpWbmwu5XA5XV1e15a6ursjJyanxNTk5OTVuX1FRgdzcXEgkEr3V+zz1ac+z1q1bh6KiIowfP14fJWqlPu1JSUnBggULcObMGVhamtaPdH3ak5aWhrNnz6JZs2Y4ePAgcnNzMX36dDx69Mjo5+Hr057evXtj9+7dmDBhAkpLS1FRUYGRI0fi66+/NkTJOmfKnweEVNXkjuBVnr11LGOsztvJ1rR9TcuNRdv2qPz444+IjIzEvn374OLioq/ytKZpe+RyOSZOnIjly5ejTZs2hipPa9p8fxQKBXg8Hnbv3o0XX3wRw4YNw/r16xEVFWUSR/GAdu1JTk7G+++/j6VLlyIhIQFHjx5Feno6/v3vfxuiVL0w9c8DQoAmeATv5OQECwuLakcbDx48qPZXuYqbm1uN21taWsLR0VFvtWqiPu1R2bdvHyIiIrB//34MHjxYn2VqTN
v2FBQU4OLFi0hMTMR7770HQBmQjDFYWloiOjoaAwcONEjtNanP90cikcDDw0PtVqPt27cHYwxZWVlo3bq1XmuuS33as3r1avTp0wcffPABAKBz586wtrbGSy+9hI8//rjRHfGa8ucBIVU1uSN4oVCI7t27IyYmRm15TEwMevfuXeNrevXqVW376Oho9OjRAwKBQG+1aqI+7QGUR+7h4eHYs2ePSZ0L1bY9dnZ2uHLlCpKSkrjHv//9b7Rt2xZJSUno2bOnoUqvUX2+P3369MG9e/dQWFjILbt58yb4fD48PT31Wu/z1Kc9xcXF4PPVP2osLCwAVB75Niam/HlAiBojDe4zKtU0n61bt7Lk5GQ2e/ZsZm1tzW7fvs0YY2zBggVs0qRJ3PaqaTH/+c9/WHJyMtu6datJTYvRtj179uxhlpaWbOPGjSw7O5t7PH782FhNUKNte55laqPotW1PQUEB8/T0ZGPHjmXXrl1jp06dYq1bt2Zvv/22sZqgRtv2bN++nVlaWrJNmzax1NRUdvbsWdajRw/24osvGqsJagoKClhiYiJLTExkANj69etZYmIiN+2vsX0eEKLSJAOeMcY2btzIvL29mVAoZIGBgezUqVPcuilTprD+/furbR8bG8u6devGhEIh8/HxYZs3bzZwxXXTpj39+/dnAKo9pkyZYvjCa6Ht96cqUwt4xrRvz/Xr19ngwYOZlZUV8/T0ZHPmzGHFxcUGrrp22rbnq6++Yh06dGBWVlZMIpGwN954g2VlZRm46pqdPHmyzt+Hxvh5QAhjjNHtYgkhhBAz1OTOwRNCCCFNAQU8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ6QGt2/fBo/HQ1JSkrFLIYSQeqGAJ41WeHg4Ro0aVW15bGwseDweHj9+XO99e3l5ITs7GwEBAfUvkBBCjKjJ3S6WkOcpLy+HUCiEm5ubsUshhJB6oyN4YvYOHDiAjh07QiQSwcfHB+vWrVNb7+Pjg48//hjh4eGwt7fHtGnTqnXRh4eHg8fjVXvExsYCAPLz8zF58mS0aNECYrEYYWFhSElJ4d4jKioKzZs3x7Fjx9C+fXvY2Nhg6NChyM7ONtSXgRDSxFDAE7OWkJCA8ePH47XXXsOVK1cQGRmJJUuWICoqSm27zz77DAEBAUhISMCSJUuq7efLL79EdnY295g1axZcXFzQrl07AMo/AC5evIhDhw4hPj4ejDEMGzYMMpmM20dxcTE+//xz/PDDDzh9+jTu3LmDefPm6bX9hJAmzMh3syOk3qZMmcIsLCyYtbW12qNZs2YMAMvPz2cTJ05kISEhaq/74IMPWIcOHbjn3t7ebNSoUWrbpKenMwAsMTGx2vseOHCAiUQidubMGcYYYzdv3mQA2Llz57htcnNzmZWVFfvpp58YY8p7ogNgt27d4rbZuHEjc3V1bfDXgRBCakJH8KRRGzBgAJKSktQe33//Pbf++vXr6NOnj9pr+vTpg5SUFMjlcm5Zjx49NHq/xMRETJ48GRs3bkTfvn2597C0tETPnj257RwdHdG2bVtcv36dWyYWi+Hv7889l0gkePDggXYNJoQQDdEgO9KoWVtbo1WrVmrLsrKyuP8zxsDj8dTWM8Zq3M/z5OTkYOTIkYiIiEBERESd+6vpvQUCgdp6Ho9X62sJIaSh6AiemLUOHTrg7Nmzasvi4uLQpk0bWFhYaLyf0tJSvPLKK2jXrh3Wr19f7T0qKirw559/csvy8vJw8+ZNtG/fvmENIISQeqIjeGLW5s6dixdeeAErV67EhAkTEB8fj2+++QabNm3Saj/vvPMOMjMzceLECTx8+JBb7uDggNatW+OVV17BtGnT8N1338HW1hYLFiyAh4cHXnnlFV03iRBCNEJH8MSsBQYG4qeffsLevXsREBCApUuXYsWKFQgPD9dqP6dOnUJ2djY6dOgAiUTCPeLi4gAA27
dvR/fu3TF8+HD06tULjDEcOXKkWrc8IYQYCo/RSUBCCCHE7NARPCGEEGKGKOAJIYQQM0QBTwghhJghCnhCCCHEDFHAE0IIIWaIAp4QQggxQxTwhBBCiBmigCeEEELMEAU8IYQQYoYo4AkhhBAzRAFPCCGEmKH/B4YHxYxK7zDVAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| hide\n", - "# Create single mixture and broadcast to N,H,K\n", - "weights = torch.ones((1,3))[None, :, :]\n", + "# Create single mixture and broadcast to N, H, K\n", + "weights = torch.ones((2,3))[None, :, :]\n", "lambdas = torch.Tensor([[5,10,15], [10,20,30]])[None, :, :]\n", "\n", "# Create repetitions for the batch dimension N.\n", @@ -1987,7 +3393,7 @@ "print('lambdas.shape (N,H,K) \\t', lambdas.shape)\n", "\n", "distr = PMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (lambdas,)\n", + "distr_args = (lambdas, weights)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -2092,38 +3498,44 @@ " if self.return_params:\n", " mu_names = [f\"-mu-{i}\" for i in range(1, n_components + 1)]\n", " std_names = [f\"-std-{i}\" for i in range(1, n_components + 1)]\n", - " mu_std_names = [i for j in zip(mu_names, std_names) for i in j]\n", - " self.output_names = self.output_names + mu_std_names\n", + " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", + " self.param_names = [i for j in zip(mu_names, std_names, weight_names) for i in j]\n", + " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 2 * n_components\n", + " self.outputsize_multiplier = 3 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " means, stds = torch.tensor_split(output, 2, dim=-1)\n", - " return (means, stds)\n", + " means, stds, weights = output.chunk(3, dim=-1)\n", + "\n", + " return (means, stds, weights)\n", "\n", - " def scale_decouple(self, \n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " eps: float=0.2):\n", - " \"\"\" 
Scale Decouple\n", + " def scale_decouple(\n", + " self,\n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float = 0.2,\n", + " ):\n", + " \"\"\"Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " means, stds = output\n", + " means, stds, weights = output\n", " stds = F.softplus(stds)\n", + " weights = F.softmax(weights, dim=-1)\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(means.size(dim=0), 1, -1)\n", - " scale = scale.view(means.size(dim=0), 1, -1) \n", + " scale = scale.view(means.size(dim=0), 1, -1)\n", " means = (means * scale) + loc\n", " stds = (stds + eps) * scale\n", - " return (means, stds)\n", + "\n", + " return (means, stds, weights)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -2145,17 +3557,11 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " means, stds = distr_args\n", + " means, stds, weights = distr_args\n", " B, H, K = means.size()\n", " Q = len(self.quantiles)\n", " assert means.shape == stds.shape\n", "\n", - " # Sample K ~ Mult(weights)\n", - " # shared across B, H\n", - " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", - " \n", - " weights = (1/K) * torch.ones_like(means, device=means.device)\n", - " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " means = means.flatten()\n", @@ -2195,17 +3601,15 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " means, stds = distr_args\n", + " means, 
stds, weights = distr_args\n", " B, H, K = means.size()\n", - " \n", - " weights = (1/K) * torch.ones_like(means, device=means.device)\n", - " \n", + " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", " \n", @@ -2228,7 +3632,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -2278,7 +3682,17 @@ "execution_count": null, "id": "8ebe4250", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -2302,7 +3716,40 @@ "execution_count": null, "id": "684d2382", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "means.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "stds.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", + "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", + "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfcAAAEyCAYAAADnUJkgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABDOklEQVR4nO3de1wU5f4H8M8gy3KHALkpIApoZt4yFVHBDEzTND12CjU085IXMjrpUTu5GAeMCq0sSy3g1CHL0jIzlUrJfqiB5rHMMBPQVCJvgKCwwvP7w5hclsvusrDL8nm/XvuSeWbmme9+WffLzDwzIwkhBIiIiMhiWJk6ACIiIjIuFnciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgW9xZ08OBBPPjgg/D394dSqYSXlxdCQ0Px9NNPaywXERGBiIiIFo9HkiSoVCqj9delSxeMHTvWaP01Zu/evZAkCXv37m2V7ekrIiICkiRBkiRYWVnByckJQUFBmDx5Mj766CPU1NRordOlSxdMnz5dr+1kZ2dDpVLhypUreq1Xd1u1+fzoo4/06qcxFRUVUKlU9f6O0tLSIEkSCgoKjLY9ImqYtakDsFSff/45HnjgAURERCA5ORk+Pj44f/48cnNzsWnTJrz88svysm+88YYJI20b+vfvj/3796Nnz56mDqVBXbt2xX//+18AQHl5OfLz8/HJJ59g8uTJGDZsGD777DO4uLjIy2/duhXOzs56bSM7Oxvx8fGYPn06XF1ddV7PkG3pq6KiAvHx8QCg9cfq/fffj/3798PHx6dFYyCim1jcW0hycjICAwOxa9cuWFv/leaHH34YycnJGsuac8EyNbVaDUmS4OzsjMGDB5s6nEbZ2dlpxfj4448jNTUVjz32GGbPno0PPvhAntevX78Wj+natWuws7NrlW01pmPHjujYsaNJYyBqT3hYvoVcvHgRHh4eGoW9lpWVZtrrHpYvKCiAJEl46aWXkJKSgsDAQDg6OiI0NBQHDhzQ6m/Dhg0ICQmBUqlEz549kZGRgenTp6NLly5NxllUVIQ5c+agc+fOsLGxQWBgIOLj43Hjxg2d3+vOnTvRv39/2NnZoUePHnjnnXe0lvnxxx8xfvx43HbbbbC1tUXfvn2Rnp6usUztoeJ3330XTz/9NDp16gSlUomTJ09qHZavzVFDr1u988476NOnD2xtbeHm5oYHH3wQx48f11hm+vTpcHR0xMmTJzFmzBg4OjrCz88PTz/9NCorK3XORX1mzJiBMWPGYPPmzSgsLJTb6x4qr6mpQUJCArp37w47Ozu4urqid+/eeOWVVwAAKpUKzzzzDAAgMDBQfq+1Oak9TbJlyxb069cPtra28p50Q6cArl+/jri4OHh7e8POzg7h4eH4/vvvNZZp6LTRrZ+xgoICuXjHx8fLsdVus6HD8sb+3axbtw59+vSBo6MjnJyc0KNHDyxbtkwrdiJLxz33FhIaGoqNGzciNjYWU6ZMQf/+/aFQKPTq4/XXX0ePHj2wZs0aAMC//vUvjBkzBvn5+fLh3fXr12POnDmYNGkSVq9ejZKSEsTHx+tUkIqKijBw4EBYWVnhueeeQ7du3bB//34kJCSgoKAAqampTfbxv//9D08//TT++c9/wsvLCxs3bsTMmTMRFBSE4cOHAwDy8vIwZMgQeHp64tVXX4W7uzvee+89TJ8+Hb///jsWL16s0efSpUsRGhqKN998E1ZWVvD09ERRUZHGMj4+Pti/f79G2x9//IGpU6eiU6dOcltSUhKWLVuGRx55BElJSbh48SJUKhVCQ0ORk5OD4OBgeVm1Wo0HHngAM2fOxNNPP41vvvkGzz//PFxcXPDcc881mYvGPPDAA9ixYwf27duHgICAepdJTk6GSqXCs88+i+HDh0OtVuPnn3+Wz68//vjjuHTpEl577TVs2bJFPsR965Gfw4cP4/jx43j22Wc
RGBgIBweHRuNatmwZ+vfvj40bN6KkpAQqlQoRERH4/vvv0bVrV53fn4+PD3bu3In77rsPM2fOxOOPPw4Aje6tG/t3s2nTJsybNw8LFy7ESy+9BCsrK5w8eRI//fSTzu+DyGIIahEXLlwQQ4cOFQAEAKFQKMSQIUNEUlKSKCsr01g2PDxchIeHy9P5+fkCgLjzzjvFjRs35PbvvvtOABDvv/++EEKI6upq4e3tLQYNGqTRX2FhoVAoFCIgIECjHYBYsWKFPD1nzhzh6OgoCgsLNZZ76aWXBABx7NixRt9jQECAsLW11Vj/2rVrws3NTcyZM0due/jhh4VSqRSnT5/WWH/06NHC3t5eXLlyRQghxJ49ewQAMXz4cK1t1c7bs2dPvbGUl5eLgQMHCh8fH1FQUCCEEOLy5cvCzs5OjBkzRmPZ06dPC6VSKaKjo+W2mJgYAUB8+OGHGsuOGTNGdO/evdE8CHHzd3jHHXc0OP+LL74QAMQLL7wgtwUEBIiYmBh5euzYsaJv376NbufFF18UAER+fr7WvICAANGhQweRl5dX77xbt1Wbz/79+4uamhq5vaCgQCgUCvH4449rvLdbP5+1YmJiND5jf/zxh9ZnrFZqaqpG3C3xu1mwYIFwdXXV2jZRe8TD8i3E3d0d+/btQ05ODlatWoXx48fjxIkTWLp0Ke68805cuHChyT7uv/9+dOjQQZ7u3bs3AMiHdvPy8lBUVISHHnpIYz1/f3+EhYU12f/27dsxYsQI+Pr64saNG/Jr9OjRAICsrKwm++jbty/8/f3laVtbW4SEhGgcfv76668xcuRI+Pn5aaw7ffp0VFRUaO2BT5o0qcnt3qq6uhp///vfcfz4cezYsUPeM96/fz+uXbumdTjaz88P99xzD7766iuNdkmSMG7cOI223r17a7wXQwkhmlxm4MCB+N///od58+Zh165dKC0t1Xs7vXv3RkhIiM7LR0dHa5zGCAgIwJAhQ7Bnzx69t62PlvjdDBw4EFeuXMEjjzyCTz/9VKf/Y0SWisW9hQ0YMABLlizB5s2bce7cOTz11FMoKCjQGlRXH3d3d41ppVIJ4OYgKeDmeX0A8PLy0lq3vra6fv/9d3z22WdQKBQarzvuuAMAdPpyrBtjbZy1MdbGWd8oaV9fX433UUvfEdVz587Fzp078dFHH6Fv374a222oP19fX63t2tvbw9bWVuu9XL9+Xa946lNbhGrfc32WLl2Kl156CQcOHMDo0aPh7u6OkSNHIjc3V+ft6Js7b2/vetvq5sbYWuJ3M23aNLzzzjsoLCzEpEmT4OnpiUGDBiEzM7MF3gGReWNxb0UKhQIrVqwAcHOAWXPVFtbff/9da17dc9T18fDwQFRUFHJycup9zZw5s9kx1sZ5/vx5rfZz587Jcdyq7oC4xqhUKmzcuBEbNmxAVFSU1nYBNLjtutttSdu2bYMkSfI4hPpYW1sjLi4Ohw8fxqVLl/D+++/jzJkzGDVqFCoqKnTajj65A+r/nBQVFWn80WZra1vvGI7m7Bm31O9mxowZyM7ORklJCT7//HMIITB27FijHH0haktY3FtIfV9aAOSRwI3twemqe/fu8Pb2xocffqjRfvr0aWRnZze5/tixY/Hjjz+iW7duGDBggNbLGDECwMiRI/H111/LxbzWf/7zH9jb2xt8idvbb7+N+Ph4rFy5st6R4KGhobCzs8N7772n0f7bb7/JpwpaQ2pqKr744gs88sgjGqcwGuPq6oq//e1vmD9/Pi5duiSPMq979Ka53n//fY1TBoWFhcjOztYYHd+lSxecOHFCo8BfvHhR6zOmT2wt/btxcHDA6NGjsXz5clRVVeHYsWPN6o+oreFo+RYyatQodO7cGePGjUOPHj1QU1ODI0eO4OWXX4ajoyOefPLJZm/DysoK8fHxmDNnDv72t7/hsccew5UrVxAfHw8fHx+tS+7qWrlyJTIzMzFkyBDExsaie/fuuH79OgoKCrBjxw6
8+eab6Ny5c7PjXLFihXx+/7nnnoObmxv++9//4vPPP0dycrLGjV10tX//fsydOxdhYWGIjIzUukRw8ODBcHV1xb/+9S8sW7YMjz76KB555BFcvHgR8fHxsLW1lY+iGMu1a9fkOK5du4ZTp07hk08+wfbt2xEeHo4333yz0fXHjRuHXr16YcCAAejYsSMKCwuxZs0aBAQEyCPH77zzTgDAK6+8gpiYGCgUCnTv3h1OTk4GxVxcXIwHH3wQs2bNQklJCVasWAFbW1ssXbpUXmbatGl46623MHXqVMyaNQsXL15EcnKy1k1xnJycEBAQgE8//RQjR46Em5sbPDw86r0ksyV+N7NmzYKdnR3CwsLg4+ODoqIiJCUlwcXFBXfffbfe/RG1aSYe0GexPvjgAxEdHS2Cg4OFo6OjUCgUwt/fX0ybNk389NNPGss2NFr+xRdf1OoX9YxGXr9+vQgKChI2NjYiJCREvPPOO2L8+PGiX79+Ta77xx9/iNjYWBEYGCgUCoVwc3MTd911l1i+fLm4evVqo+8xICBA3H///Vrt9Y2u/uGHH8S4ceOEi4uLsLGxEX369BGpqakay9SO4N68ebNWn3VHy9eOvm7odauNGzeK3r17CxsbG+Hi4iLGjx+vdSVATEyMcHBw0NruihUrtPqrT3h4uMb2HRwcRNeuXcXf/vY3sXnzZlFdXa21Tt0R7C+//LIYMmSI8PDwEDY2NsLf31/MnDlTHv1fa+nSpcLX11dYWVlp5KSh30d926rN57vvvitiY2NFx44dhVKpFMOGDRO5ubla66enp4vbb79d2Nraip49e4oPPvhAa7S8EEJ8+eWXol+/fkKpVAoA8jbrjpavZczfTXp6uhgxYoTw8vISNjY2wtfXVzz00EPi6NGj9eaEyJJJQugwjJfalCtXriAkJAQTJkzA+vXrTR0OERG1Mh6Wb+OKiorw73//GyNGjIC7uzsKCwuxevVqlJWVGeXQPxERtT0s7m2cUqlEQUEB5s2bh0uXLskD1N588035kjYiImpfeFieiIjIwvBSOCIiIgvD4k5ERGRhLP6ce01NDc6dOwcnJye9795FRGQoIQTKysrg6+vb5D0niIzN4ov7uXPntB5YQkTUWs6cOWOUm0ER6cPii3vtnbvOnDmjdUctapparcbu3bsRFRWl9/PoqWHMa8swp7yWlpbCz8/P4LsHEjWHxRf32kPxzs7OLO4GUKvVsLe3h7Ozs8m/LC0J89oyzDGvPB1IpsATQURERBaGxZ2IiMjCsLgTERFZGIs/505EZM6qq6uhVqtNHQaZOYVCgQ4dOui8PIs7EZEJCCFQVFSEK1eumDoUaiNcXV3h7e2t0yBNFnciIhOoLeyenp6wt7fnqHpqkBACFRUVKC4uBgD4+Pg0uQ6LOxFRK6uurpYLu7u7u6nDoTbAzs4OAFBcXAxPT88mD9FzQB0RUSurPcdub29v4kioLan9vOgyRoN77tSwPUmAsALQA9iXAtyzxNQRtW17kv76uTav1K7xUDzpQ5/PC/fciYiILAyLOxERkYVhcSciIrIwPOdORGQmVmeeaNXtPRUZ0qrb00VERAT69u2LNWvWmDqUNo177kREpLPp06djwoQJWu179+6FJEnNvinPli1b8Pzzzzerj7bgm2++wbhx4+Dr6wtJkvDJJ58YtX8WdyIiMrmqqioAgJubG5ycnEwcjeEiIiKQlpbW5HLl5eXo06cP1q5d2yJxsLgTEZHRVVZWIjY2Fp6enrC1tcXQoUORk5Mjz4+IiMCCBQsQFxcHDw8PREZGyu2LFi0CABQUFECSJK1XRESETtuo7S82NhaLFy+Gm5sbvL29oVKpGo39gQceqHe7kiRh27ZtRsnP6NGjkZCQgIkTJxqlv7pY3ImIyOgWL16Mjz/+GOnp6Th8+DCCgoIwatQoXLp0SV4mPT0d1tbW+L//+z+89dZbWn34+fnh/Pnz8uv777+Hu7s7hg8frvM2arfj4OC
AgwcPIjk5GStXrkRmZmaDsaempuL8+fP45ZdfAAA7duyQYxgzZowx0tPiOKCOiIj0sn37djg6Omq0VVdXyz+Xl5dj3bp1SEtLw+jRowEAGzZsQGZmJt5++20888wzAICgoCAkJyc3uJ0OHTrA29sbAHD9+nVMmDABoaGhUKlUOm8DAHr37o0VK1YAAIKDg7F27Vp89dVX8tGCumpvCbx//35IkoShQ4e2uVMF3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PjID2FpzNGjR9GlS5dGC3tiYiIcHR3l1759+zB37lytttZm0uLepUuXes9pzJ8/H8DNJ+GoVCr4+vrCzs4OEREROHbsmClDJiJq9xwcHBAUFKTx6tSpkzxfCAFA+3apQgiNNgcHB522l5CQgJ07d2Lbtm1yodV1G8DNon8rSZJQU1PT5HaPHj2q9YdBXXPnztX4I2fAgAFYuXKlVltrM2lxz8nJ0TifUnsOZPLkyQCA5ORkpKSkYO3atcjJyYG3tzciIyNRVlZmyrCJiKgRQUFBsLGxwbfffiu3qdVq5Obm4vbbb9err48//hgrV67Ehx9+iG7durXINhpSUFCA7t27N7qMm5ubxh85dnZ28PT01GprbSY9596xY0eN6VWrVqFbt24IDw+HEAJr1qzB8uXL5dGE6enp8PLyQkZGBubMmVNvn5WVlaisrJSnS0tLAdz8pevyJB26hbCCWtz8+08trADmr3nEX39Ly3llTo2qNp/mkFdziMFUHBwc8MQTT+CZZ56Bm5sb/P39kZycjIqKCsycOVPnfn788Uc8+uijWLJkCe644w4UFRUBAGxsbODm5maUbTSmpqYGhYWF+O2339CpUyejPujn6tWrOHnypDydn5+PI0eOyO+lucxmQF1VVRXee+89xMXFQZIknDp1CkVFRYiKipKXUSqVCA8PR3Z2doPFPSkpCfHx8Vrtu3fv5uMV9fbXU8syr4YAO3aYMBZLoP0UuMZG7JLhzCGvFRUVeq9jjneMM9SqVatQU1ODadOmoaysDAMGDMCuXbtw22236dxHbm4uKioqkJCQgISEBLk9PDwce/fuNco2GhMbG4vZs2ejR48eKC0tNWpxz83NxYgRI+TpuLg4AEBMTIxO18k3RRK1Jy5M7MMPP0R0dDROnz4NX19fZGdnIywsDGfPnoWvr6+83OzZs1FYWIhdu3bV2099e+5+fn64cOECnJ2dW/x9WJR9KVALK2ReDUGk4wkohi8ydURt274U+Uc5r5GRWucDyXBqtRqZmZlmkdfS0lJ4eHigpKRE67vn+vXryM/PR2BgIGxtbU0UIbU1+nxuzGbP/e2338bo0aM1Cjmg22CJWymVSiiVSq12hUJh8v/sbY7014AThVTD/DWXpD2Ah5/LlmEOeTX19ql9M4tL4QoLC/Hll1/i8ccfl9tqr22sPcdSq7i4GF5eXq0aHxERUVtiFsU9NTUVnp6euP/+++W2wMBAeHt7a5w7q6qqQlZWFoYMGWKKMImIiNoEkx+Wr6mpQWpqKmJiYmBt/Vc4kiRh0aJFSExMRHBwMIKDg5GYmAh7e3tER0ebMGIiIiLzZvLi/uWXX+L06dN47LHHtOYtXrwY165dw7x583D58mUMGjQIu3fvbnO3ASQiImpNJi/uUVFRaGjAviRJUKlUTT7Bh4iIiP5iFufciYiIyHhY3ImIiCwMizsREZGFYXEnIiKyMCYfUEdERH/ak9S62xuxtHW3p4OIiAj07dsXa9asMXUobRr33ImISGfTp0/HhAkTtNr37t0LSZJw5cqVZvW/ZcsWPP/8883qoy1ISkrC3XffDScnJ3h6emLChAnIy8szWv8s7kREZHJVVVUAbj4fvS3fyyQiIkKnp7plZWVh/vz5OHDgADIzM3Hjxg1ERUWhvLzcKHGwuBMRkdFVVlY
iNjYWnp6esLW1xdChQ5GTkyPPj4iIwIIFCxAXFwcPDw9ERkbK7YsWLQIAFBQUQJIkrVdERIRO26jtLzY2FosXL4abmxu8vb2bvHfKAw88UO92JUnCtm3bjJKfnTt3Yvr06bjjjjvQp08fpKam4vTp0zh06JBR+mdxJyIio1u8eDE+/vhjpKen4/DhwwgKCsKoUaNw6dIleZn09HRYW1vj//7v//DWW29p9eHn54fz58/Lr++//x7u7u4YPny4ztuo3Y6DgwMOHjyI5ORkrFy5UuO5JXWlpqbi/Pnz+OWXXwAAO3bskGMYM2aMMdKjpaSkBMDNIxfGwAF1RESkl+3bt8PR0VGjrbq6Wv65vLwc69atQ1paGkaPHg0A2LBhAzIzM/H222/jmWeeAQAEBQUhOTm5we106NBBfkLo9evXMWHCBISGhkKlUum8DQDo3bs3VqxYAQAIDg7G2rVr8dVXX8lHC+pyd3cHAOzfvx+SJGHo0KEteqpACIG4uDgMHToUvXr1Mkqf3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PiguLi4yW0ePXoUXbp0abSwJyYmwtHRUX7t27cPc+fO1WprzIIFC3D06FG8//77TcakK+65ExGRXhwcHBAUFKTR9ttvv8k/1z4vRJIkjWWEEBptDg4OOm0vISEBO3fuxHfffScXWl23Adws+reSJAk1NTVNbvfo0aNafxjUNXfuXDz00EPy9JQpUzBp0iRMnDhRbuvUqVOD6y9cuBDbtm3DN998g86dOzcZk664505EbdeeJM0XmYWgoCDY2Njg22+/ldvUajVyc3Nx++2369XXxx9/jJUrV+LDDz9Et27dWmQbDSkoKED37t0bXcbNzQ1BQUHyy87ODp6enlptdQkhsGDBAmzZsgVff/01AgMDjRJzLe65ExGRUTk4OOCJJ57AM888Azc3N/j7+yM5ORkVFRWYOXOmzv38+OOPePTRR7FkyRLccccdKCoqAgDY2NjAzc3NKNtoTE1NDQoLC/Hbb7+hU6dOWkcEmmP+/PnIyMjAp59+CicnJ/m9ubi41PvHgL5Y3ImIzIUZ3jHOUKtWrUJNTQ2mTZuGsrIyDBgwALt27cJtt92mcx+5ubmoqKhAQkICEhIS5Pbw8HDs3bvXKNtoTGxsLGbPno0ePXqgtLTUqMV93bp1ACBf1lcrNTUV06dPb3b/kmjoYeoWorS0FC4uLigpKYGzs7Opw2lb9iRBLaywo6wHxjj9DMU9S0wdUdvSyGFiOa9jxmidDyQ91Mmxeug/sGPHDrPIa2PfPdevX0d+fj4CAwNha2trogiprdHnc8Nz7kRERBbG5MX97NmzmDp1Ktzd3WFvb4++fftq3KFHCAGVSgVfX1/Y2dkhIiICx44dM2HERERE5s2kxf3y5csICwuDQqHAF198gZ9++gkvv/wyXF1d5WWSk5ORkpKCtWvXIicnB97e3oiMjERZWZnpAiciIjJjJh1Q98ILL8DPzw+pqalyW5cuXeSfhRBYs2YNli9fLl8zmJ6eDi8vL2RkZGDOnDmtHTIREZHZM2lx37ZtG0aNGoXJkycjKysLnTp1wrx58zBr1iwAQH5+PoqKihAVFSWvo1QqER4ejuzs7HqLe2VlJSorK+Xp0tJSADevf1Sr1S38jiyMsIJa3Dy4oxZWAPOnH9HwgTE5r8xp89TJcW0+zSGvusSgy41UiGrp83kxaXE/deoU1q1bh7i4OCxbtgzfffcdYmNjoVQq8eijj8rX/Xl5eWms5+XlhcLCwnr7TEpKQnx8vFb77t27YW9vb/w3YdF6yD9lXg0BduwwYSxtUY8ml2js4RWkizo5/jOf5pDXioqKBufZ2NjAysoK586dQ8eOHWFjY2PUy6zIsgghUFVVhT/++ANWVlawsbFpch2TXgpnY2ODAQMGIDs7W26
LjY1FTk4O9u/fj+zsbISFheHcuXPw8fGRl5k1axbOnDmDnTt3avVZ3567n58fLly4wEvh9LUvBWphhcyrIYh0PAHF8EWmjqht2ZfS4Cw5r5GRJr9kq02rk2P14IXIzMw0i7yWlpbCw8Ojwctwq6qqcP78+Ub/CCC6lb29PXx8fHQq7ibdc/fx8UHPnj012m6//XZ8/PHHACA/DaioqEijuBcXF2vtzddSKpVQKpVa7QqFwuT/2dsc6a9DQAqphvnTl9T0ITTFgdeguHU5C7qJSauom+M/P6Pm8P+9qe3b2NjA398fN27c0HiiGlF9OnToAGtra52P8Ji0uIeFhSEvL0+j7cSJEwgICAAABAYGwtvbG5mZmejXrx+Am3/tZmVl4YUXXmj1eImIjEmSJLP4Q4Qsj0mL+1NPPYUhQ4YgMTERDz30EL777jusX78e69evB3Dzg79o0SIkJiYiODgYwcHBSExMhL29PaKjo00ZOhERkdkyaXG/++67sXXrVixduhQrV65EYGAg1qxZgylTpsjLLF68GNeuXcO8efNw+fJlDBo0CLt37270+bpERETtmckfHDN27FiMHTu2wfmSJEGlUkGlUrVeUERERG2YyW8/S0RERMZl8j13IqIWVffpfLwigdoB7rkTERFZGBZ3IiIiC8PiTkREZGFY3ImIiCwMizsREZGF4Wh5IrIc+1IA9Lj5rw739ieyVNxzJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZGA6oo5vq3qJTl2V4G08iIrPEPXciIiILw+JORERkYXhYngxX36F8HqonIjI57rkTERFZGJMWd5VKBUmSNF7e3t7yfCEEVCoVfH19YWdnh4iICBw7dsyEERMREZk/gw7Lp6Wl4aGHHoK9vX2zA7jjjjvw5ZdfytMdOnSQf05OTkZKSgrS0tIQEhKChIQEREZGIi8vD05OTs3etimszjyh1fZUZIgJIiEiIktl0J770qVL4e3tjZkzZyI7O7tZAVhbW8Pb21t+dezYEcDNvfY1a9Zg+fLlmDhxInr16oX09HRUVFQgIyOjWdskIiKyZAbtuf/222/4/PPPkZaWhhEjRiAwMBAzZsxATEyMxmF1Xfzyyy/w9fWFUqnEoEGDkJiYiK5duyI/Px9FRUWIioqSl1UqlQgPD0d2djbmzJlTb3+VlZWorKyUp0tLSwEAarUaarXagHdrXJKo1mozh7gg6v87T/1nu7qB+dormMF7MReN5KzBvDJ/+qmTP50+r62UY7P4f03tliSEEM3poLi4GO+99x7S0tLw888/47777sPMmTMxbtw4WFk1XhC++OILVFRUICQkBL///jsSEhLw888/49ixY8jLy0NYWBjOnj0LX19feZ3Zs2ejsLAQu3btqrdPlUqF+Ph4rfaMjAyjnEYgItJFRUUFoqOjUVJSAmdnZ1OHQ+1Ms4s7ABw8eBDvvPMO0tPT4ePjgytXrsDV1RWpqamIiIjQuZ/y8nJ069YNixcvxuDBgxEWFoZz587Bx8dHXmbWrFk4c+YMdu7cWW8f9e25+/n54cKFC2bxH+z1PSe12uaPCDJBJHXsS6m3WS2skHk1BJGOJ6DQ5RGaw+KMHFgb1kBOgUbyyvzpp06Odfq8tlKOS0tL4eHhweJOJmHwde6///473n33XaSmpuLUqVOYMGECtm/fjnvvvRfXrl3Ds88+i5iYGBQWFurcp4ODA+6880788ssvmDBhAgCgqKhIo7gXFxfDy8urwT6USiWUSqVWu0KhgEKh0P0NthAhddBqM4e4mnr2tUKq0a24m8N7MRc65Esrr8yffhrIcaOf11bKsVn8v6Z2y6ABdePGjYOfnx/S0tIwa9YsnD17Fu+//z7uvfdeAICdnR2efvppnDlzRq9+Kysrcfz4cfj4+CAwMBDe3t7IzMyU51dVVSErKwtDhgwxJGwiIqJ2waA9d09PT2RlZSE0NLTBZXx8fJCfn99oP//4xz8wbtw4+Pv7o7i
4GAkJCSgtLUVMTAwkScKiRYuQmJiI4OBgBAcHIzExEfb29oiOjjYkbCIionbBoOIeHh6O/v37a7VXVVVh06ZNePTRRyFJEgICAhrt57fffsMjjzyCCxcuoGPHjhg8eDAOHDggr7d48WJcu3YN8+bNw+XLlzFo0CDs3r27zV7jTkRE1BoMKu4zZszAfffdB09PT432srIyzJgxA48++qhO/WzatKnR+ZIkQaVSQaVSGRImERFRu2TQOXchBCRJ0mr/7bff4OLi0uygiIiIyHB67bn369dPvgf8yJEjYW391+rV1dXIz8/HfffdZ/QgiYiISHd6Fffay9OOHDmCUaNGwdHRUZ5nY2ODLl26YNKkSUYNkIiIiPSjV3FfsWIFAKBLly74+9//Dltb2xYJioiIiAxn0IC6mJgYY8dBRERERqJzcXdzc8OJEyfg4eGB2267rd4BdbUuXbpklOCIiIhIfzoX99WrV8vXl69evbrR4k5ERESmo3Nxv/VQ/PTp01siFiKixu1JMnUERG2CzsW99rnouuATkIiIiExH5+Lu6ura5KH42pvbVFdXNzswIiIiMozOxX3Pnj0tGQeZqe8KLgHuN/8NC3Q1dTiWr77DziOWtn4cRNSm6Vzcw8PDWzIOIiIiMhKdi/vRo0fRq1cvWFlZ4ejRo40u27t372YHRkRERIbRubj37dsXRUVF8PT0RN++fSFJEoQQWsvxnDsREZFp6Vzc8/Pz0bFjR/lnIiIiMk86F/eAgIB6fyYiIiLzYtC95QEgLy8Pr732Go4fPw5JktCjRw8sXLgQ3bt3N2Z87cLqzBPyz09FhpgwEiIisgRWhqz00UcfoVevXjh06BD69OmD3r174/Dhw+jVqxc2b95s7BiJiIhIDwYV98WLF2Pp0qXYv38/UlJSkJKSguzsbCxbtgxLliwxKJCkpCRIkoRFixbJbUIIqFQq+Pr6ws7ODhERETh27JhB/VPz7T91EftPXTR1GDpZnXlCfhERtTcGFfeioiI8+uijWu1Tp05FUVGR3v3l5ORg/fr1WpfQJScnIyUlBWvXrkVOTg68vb0RGRmJsrIyQ8ImIiJqFww65x4REYF9+/YhKChIo/3bb7/FsGHD9Orr6tWrmDJlCjZs2ICEhAS5XQiBNWvWYPny5Zg4cSIAID09HV5eXsjIyMCcOXPq7a+yshKVlZXydO098dVqNdRqtV6xtQRJNH6ZoMliFPX/nVcjddD4FwDUDSx7c6bpcwxo5tnccgr8lcNGcykvbB45NQtN5EunvLZSPs3h+4baL0nUd7F6PbZt2yb/fO7cOTz33HN46KGHMHjwYADAgQMHsHnzZsTHx2Pu3Lk6BxATEwM3NzesXr0aERER6Nu3L9asWYNTp06hW7duOHz4MPr16ycvP378eLi6uiI9Pb3e/lQqFeLj47XaMzIyYG9vr3NcRETNUVFRgejoaJSUlPBhWtTqdC7uVla6HcHX5yY2mzZtwr///W/k5OTA1tZWo7hnZ2cjLCwMZ8+eha+vr7zO7NmzUVhYiF27dtXbZ3177n5+frhw4YJZ/Ad7fc/JRufPHxHU6PwWsy+l3uYDhSW45DYIbpcOwurPveGBXdwa7mdYXEtEp7db82xuOQVu7llmXg1BpOMJKKSaxvsxk5yahUZyCuiY11bKZ2lpKTw8PFjcySR0PixfU9PEF5Cezpw5gyeffBK7d++Gra1tg8vVfRJd7ZPnGqJUKqFUKrXaFQoFFAqF4QEbibjl8HZ9TBZjA1+EtQXdSlTLPzdajMwgx4Bmns0tp7dSSDVNF3czyalZ0CGnQBN5baV8msP3DbVfBg2oM4ZDhw6huLgYd911F6ytrWFtbY2srCy8+uqrsLa2hpeXFwBoDdArLi6W5xEREZE2g29iU15ejqysLJw+fRpVVVUa82JjY5tcf+TIkfjhhx802mbMmIEePXpgyZIl6Nq1K7y9vZGZmSmfc6+qqkJWVhZeeOEFQ8MmIiKyeAY
V9++//x5jxoxBRUUFysvL4ebmhgsXLsDe3h6enp46FXcnJyf06tVLo83BwQHu7u5y+6JFi5CYmIjg4GAEBwcjMTER9vb2iI6ONiRsIiKidsGg4v7UU09h3LhxWLduHVxdXXHgwAEoFApMnToVTz75pNGCW7x4Ma5du4Z58+bh8uXLGDRoEHbv3g0nJyejbYMsH2/vS0TtjUHF/ciRI3jrrbfQoUMHdOjQAZWVlejatSuSk5MRExMjX5eur71792pMS5IElUoFlUplUH9ERETtkUED6hQKhTxi3cvLC6dPnwYAuLi4yD8TERGRaRi0596vXz/k5uYiJCQEI0aMwHPPPYcLFy7g3XffxZ133mnsGNsVHkLWH3NGRKTJoD33xMRE+Pj4AACef/55uLu744knnkBxcTHWr19v1ACJiIhIPwbtuQ8YMED+uWPHjtixY4fRAiIiIqLmMfg6d+DmDWXy8vIgSRK6d++Ojh07GisuMmO3PvY1tKu7SWIwu0e57kkydQSkq/p+VyOWtn4cRC3IoMPypaWlmDZtGjp16oTw8HAMHz4cvr6+mDp1KkpKSowdIxEREenBoOL++OOP4+DBg9i+fTuuXLmCkpISbN++Hbm5uZg1a5axYyQiIiI9GHRY/vPPP8euXbswdOhQuW3UqFHYsGED7rvvPqMFR0RERPozaM/d3d0dLi4uWu0uLi647bbbmh0UERERGc6gPfdnn30WcXFx+M9//iNfEldUVIRnnnkG//rXv4waoCUwu8FfgNagInMYJEdERMahc3Hv16+fxnPUf/nlFwQEBMDf3x8AcPr0aSiVSvzxxx+YM2eO8SMlIiIinehc3CdMmNCCYRAREZGx6FzcV6xY0ZJxEBmFWZ4CISJqZc26ic2hQ4dw/PhxSJKEnj17ol+/fsaKi4iIiAxkUHEvLi7Gww8/jL1798LV1RVCCJSUlGDEiBHYtGkT71RHRERkQgZdCrdw4UKUlpbi2LFjuHTpEi5fvowff/wRpaWliI2NNXaMbcrqzBNt/tDw/lMX5RcREbU9Bu2579y5E19++SVuv/12ua1nz554/fXXERUVZbTgiIiISH8G7bnX1NRAoVBotSsUCtTU1Ojcz7p169C7d284OzvD2dkZoaGh+OKLL+T5QgioVCr4+vrCzs4OEREROHbsmCEhExERtRsGFfd77rkHTz75JM6dOye3nT17Fk899RRGjhypcz+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCTk5ORkpKCtWvXIicnB97e3oiMjERZWZkhYRMREbULBhX3tWvXoqysDF26dEG3bt0QFBSEwMBAlJWV4bXXXtO5n3HjxmHMmDEICQlBSEgI/v3vf8PR0REHDhyAEAJr1qzB8uXLMXHiRPTq1Qvp6emoqKhARkaGIWETERG1Cwadc/fz88Phw4eRmZmJn3/+GUII9OzZE/fee6/BgVRXV2Pz5s0oLy9HaGgo8vPzUVRUpHEOX6lUIjw8HNnZ2Q3eBa+yshKVlZXydGlpKQBArVZDrVYbHJ+uJFEtb69um75aNF6h+XddjdSh3sVq2xuar67TD1ohx4AZ5rRuHpqK48/ltfJX78Ktk9M2oYl86ZVXjRWNn+PW+L4haogkhBD6rHDjxg3Y2triyJEj6NWrV7MD+OGHHxAaGorr16/D0dERGRkZGDNmDLKzsxEWFoazZ8/C19dXXn727NkoLCzErl276u1PpVIhPj5eqz0jIwP29vbNjpeISBcVFRWIjo5GSUkJnJ2dTR0OtTN677lbW1sjICAA1dWG7TnV1b17dxw5cgRXrlzBxx9/jJiYGGRlZcnzb72fPXBzkF3dtlstXboUcXFx8nRpaSn8/PwQFRXVKv/BXt9zskX6nT8iyLgd7kvRmPyu4FK9i9VIHXDJbRDcLh2EVT17ywO7uGk2DIvTWqYlGJpno+exVp18NkUtrJB5NQSRjiegkJoYhNpKOW0TmsizXnm9VQvkuPaoIZE
pGPxUuKVLl+K9996Dm5tb0ys0wsbGBkFBN79wBwwYgJycHLzyyitYsmQJgJtPm6t98hxw8wY6Xl5eDfanVCqhVCq12hUKRb0j/I1NNHD4urmMHnudL776Cnfd+fUto/UF2go5BgzPc4t9BvQpJLdQSDVNF6FWymmboGOedcqrxgrGz3FrfN8QNcSg4v7qq6/i5MmT8PX1RUBAABwcHDTmHz582OCAhBCorKxEYGAgvL29kZmZKd/WtqqqCllZWXjhhRcM7p+IiMjSGVTcJ0yYAEmSoOfpei3Lli3D6NGj4efnh7KyMmzatAl79+7Fzp07IUkSFi1ahMTERAQHByM4OBiJiYmwt7dHdHR0s7ZLRERkyfQq7hUVFXjmmWfwySefQK1WY+TIkXjttdfg4eFh0MZ///13TJs2DefPn4eLiwt69+6NnTt3IjIyEgCwePFiXLt2DfPmzcPly5cxaNAg7N69G05OTgZtj4iIqD3Qq7ivWLECaWlpmDJlCuzs7JCRkYEnnngCmzdvNmjjb7/9dqPzJUmCSqWCSqUyqH8iIqL2SK/ivmXLFrz99tt4+OGHAQBTpkxBWFgYqqur0aFDywwkIyIiIv3odaeHM2fOYNiwYfL0wIEDYW1trXEbWiIiIjItvYp7dXU1bGxsNNqsra1x48YNowZFREREhtPrsLwQAtOnT9e4jvz69euYO3euxuVwW7ZsMV6EREREpBe9intMTIxW29SpU40WDFFjVmeeMHUI1Jr2JJk6AqI2S6/inpqa2lJxEBERkZEY9MhXIiIiMl8G3aGOLMf+UxeNtn5oV/fmhkNEREbAPXciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5kNPtPXeSNZoiIzACLOxERkYVhcSciIrIwLO5EREQWxqTFPSkpCXfffTecnJzg6emJCRMmIC8vT2MZIQRUKhV8fX1hZ2eHiIgIHDt2zEQRExERmT+T3n42KysL8+fPx913340bN25g+fLliIqKwk8//SQ/QjY5ORkpKSlIS0tDSEgIEhISEBkZiby8PDg5OZkyfKrH4NPrgT233IZ2xFLTBWMp6j4djTkloiaYtLjv3LlTYzo1NRWenp44dOgQhg8fDiEE1qxZg+XLl2PixIkAgPT0dHh5eSEjIwNz5swxRdhERERmzaweHFNSUgIAcHNzAwDk5+ejqKgIUVFR8jJKpRLh4eHIzs6ut7hXVlaisrJSni4tLQUAqNVqqNXqlgwfACCJ6hbp1+ixi5tnZGqkDo0uVju/qeVupRa3nO0xYtzGyG2LfQaEfme4anOk1nO9myu1/OfYLBiQG4Pz2gI5bY3vG6KGSEIIYeoggJvn1sePH4/Lly9j3759AIDs7GyEhYXh7Nmz8PX1lZedPXs2CgsLsWvXLq1+VCoV4uPjtdozMjJgb2/fcm+AiOgWFRUViI6ORklJCZydnU0dDrUzZrPnvmDBAhw9ehTffvut1jxJkjSmhRBabbWWLl2KuLg4ebq0tBR+fn6Iiopqlf9gr+852SL9zh8RZPjK+1IanPVdwaVGV62ROuCS2yC4XToIKx33nAd2cftrYlhcwwvqyRi5vTWPtf01K7e1GslxfdTCCplXQxDpeAIKqUa/bRkxp2ZNz5wCzchrC+S09qghkSmYRXFfuHAhtm3bhm+++QadO3eW2729vQEARUVF8PHxkduLi4vh5eVVb19KpRJKpVKrXaFQQKFQGDlybUKPw9f6aFbsjXzJ6VqwrUS1zstqfKkaMefGyO2teaztzyifC30LdG08Uo3+xb0VPsdmwcCcAgbktQVy2hrfN0QNMWlxF0Jg4cKF2Lp1K/bu3YvAwECN+YGBgfD29kZmZib69esHAKiqqkJWVhZeeOEFU4RMJmCWt7StO4Kd2jZekUAWxqTFff78+cjIyMCnn34KJycnFBUVAQBcXFxgZ2cHSZKwaNEiJCYmIjg4GMHBwUhMTIS9vT2io6NNGToREZH
ZMmlxX7duHQAgIiJCoz01NRXTp08HACxevBjXrl3DvHnzcPnyZQwaNAi7d+/mNe5EREQNMPlh+aZIkgSVSgWVStXyAREREVkA3lueiIjIwrC4ExERWRizuBSurTPL0dxERNRucc+diIjIwnDPnaitqe8ae16XTUS34J47ERGRhWFxJyIisjA8LN9O7D910dQhmAUOfiSi9oB77kRERBaGxZ2IiMjCsLgTERFZGBZ3IiIiC8PiTkREZGE4Wp7MEke1ExEZjnvuREREFobFnYiIyMKwuBMREVkYFnciIiILY9Li/s0332DcuHHw9fWFJEn45JNPNOYLIaBSqeDr6ws7OztERETg2LFjpgmWiIiojTDpaPny8nL06dMHM2bMwKRJk7TmJycnIyUlBWlpaQgJCUFCQgIiIyORl5cHJycnE0RsOrWjx5+KDDFxJHpqA48nvXVkfpvLLxFRPUxa3EePHo3Ro0fXO08IgTVr1mD58uWYOHEiACA9PR1eXl7IyMjAnDlz6l2vsrISlZWV8nRpaSkAQK1WQ61WG/kd3CSJ6hbptz4GvQdhhRqpg0Hbq11Pn/XVookDQjq8h9bM6a10zm9T77Gp7fy5fpO50rnDlvlsm5QBuTFaXo2Qz5b6viHShSSEEKYOAgAkScLWrVsxYcIEAMCpU6fQrVs3HD58GP369ZOXGz9+PFxdXZGenl5vPyqVCvHx8VrtGRkZsLe3b5HYiYjqqqioQHR0NEpKSuDs7GzqcKidMdub2BQVFQEAvLy8NNq9vLxQWFjY4HpLly5FXFycPF1aWgo/Pz9ERUW12H+w1/ecbJF+6zN/RJD+K+1LwXcFlwzaXo3UAZfcBsHt0kFY6bg3PbCLW+MLDItrfD5aN6e30jm/+1KatR21sELm1RBEOp6AQqppVl8AdMppm2NAjo2WVyPks/aoIZEpmG1xryVJksa0EEKr7VZKpRJKpVKrXaFQQKFQGD0+ABAGHvI2hEHvQarRuTA3xEpU69xHk1+qOryH1szprXTOrzEKMm7myijFvYU+2ybVjLw0O69GyGdLfd8Q6cJsi7u3tzeAm3vwPj4+cntxcbHW3nx7xYFgZFHqG3xpKm1gIChRY8z2OvfAwEB4e3sjMzNTbquqqkJWVhaGDBliwsiIiIjMm0n33K9evYqTJ/86t5qfn48jR47Azc0N/v7+WLRoERITExEcHIzg4GAkJibC3t4e0dHRJoyaiIjIvJm0uOfm5mLEiBHydO1AuJiYGKSlpWHx4sW4du0a5s2bh8uXL2PQoEHYvXu3WVzjzqeWNWz/qYvyz6Fd3U0YCRFR+2TS4h4REYHGrsSTJAkqlQoqlar1giIiImrjzPacOxERERnGbEfLm6s2cTjenEYdtzG8AoGILAH33ImIiCwMizsREZGF4WF5C3brqHWycHVPxfCGK0TtGvfciYiILAyLOxERkYXhYfk2pqHR+hqjvM3ot8ob2hARtT7uuRMREVkYM9rHMy9t4np2IiKienDPnYiIyMKwuBMREVkYHpa3QG3h+nZzPu0x+PT6mz/suTkAkIMCiait4Z47ERGRhWFxJyIisjA8LE+tb08SBp9u/NTBAf/ZrRRMw9rC6Y02g08qJGpV3HMnIiKyMG2iuL/xxhsIDAyEra0t7rrrLuzbt8/UIREREZktsz8s/8EHH2DRokV44403EBYWhrfeegujR4/GTz/9BH9/f1OH1+rkkdxkEm125Hx9h8X55Dgii2X2e+4pKSmYOXMmHn/8cdx+++1Ys2YN/Pz8sG7dOlOHRkREZJbMes+9qqoKhw4dwj//+U+N9qioKGRnZ9e7TmVlJSorK+XpkpISAMClS5egVqt13nbl1RIDIm55ZddvtOr2aiSBiooKlF2/AStR3ay+Ll6tkn9u6n2YMv+65vjW96MvtbBCRUUFLkpVUEg1BvfToIt1BgPWF2vdZVpSM3KljxbNq575KisrAwAIIYwbB5EOzLq4X7hwAdXV1fD
y8tJo9/LyQlFRUb3rJCUlIT4+Xqs9MDCwRWKklvKKqQNo41RGWob+ojJorbKyMri4uBg3FKImmHVxryVJksa0EEKrrdbSpUsRFxcnT9fU1ODSpUtwd3dvcB1qWGlpKfz8/HDmzBk4OzubOhyLwby2DHPKqxACZWVl8PX1NWkc1D6ZdXH38PBAhw4dtPbSi4uLtfbmaymVSiiVSo02V1fXlgqx3XB2djb5l6UlYl5bhrnklXvsZCpmPaDOxsYGd911FzIzMzXaMzMzMWTIEBNFRUREZN7Mes8dAOLi4jBt2jQMGDAAoaGhWL9+PU6fPo25c+eaOjQiIiKzZPbF/e9//zsuXryIlStX4vz58+jVqxd27NiBgIAAU4fWLiiVSqxYsULrVAc1D/PaMphXopskwes0iIiILIpZn3MnIiIi/bG4ExERWRgWdyIiIgvD4k5ERGRhWNwJAPDNN99g3Lhx8PX1hSRJ+OSTTzTmCyGgUqng6+sLOzs7RERE4NixY6YJto1ISkrC3XffDScnJ3h6emLChAnIy8vTWIZ51d+6devQu3dv+UY1oaGh+OKLL+T5zCkRizv9qby8HH369MHatWvrnZ+cnIyUlBSsXbsWOTk58Pb2RmRkpPxwDNKWlZWF+fPn48CBA8jMzMSNGzcQFRWF8vJyeRnmVX+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCZUyIAgqgOAGLr1q3ydE1NjfD29harVq2S265fvy5cXFzEm2++aYII26bi4mIBQGRlZQkhmFdjuu2228TGjRuZU6I/cc+dmpSfn4+ioiJERUXJbUqlEuHh4Q0+epe01T5+2M3NDQDzagzV1dXYtGkTysvLERoaypwS/YnFnZpU++AefR69S5qEEIiLi8PQoUPRq1cvAMxrc/zwww9wdHSEUqnE3LlzsXXrVvTs2ZM5JfqT2d9+lsyHPo/eJU0LFizA0aNH8e2332rNY1711717dxw5cgRXrlzBxx9/jJiYGGRlZcnzmVNq77jnTk3y9vYGAL0evUt/WbhwIbZt24Y9e/agc+fOcjvzajgbGxsEBQVhwIABSEpKQp8+ffDKK68wp0R/YnGnJgUGBsLb21vj0btVVVXIysrio3cbIYTAggULsGXLFnz99dcIDAzUmM+8Go8QApWVlcwp0Z94WJ4AAFevXsXJkyfl6fz8fBw5cgRubm7w9/fHokWLkJiYiODgYAQHByMxMRH29vaIjo42YdTmbf78+cjIyMCnn34KJycneW/SxcUFdnZ2kCSJeTXAsmXLMHr0aPj5+aGsrAybNm3C3r17sXPnTuaUqJYph+qT+dizZ48AoPWKiYkRQty8bGvFihXC29tbKJVKMXz4cPHDDz+YNmgzV18+AYjU1FR5GeZVf4899pgICAgQNjY2omPHjmLkyJFi9+7d8nzmlEgIPvKViIjIwvCcOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7URMKCgogSRKOHDli6lCIiHTC4k5thhAC9957L0aNGqU174033oCLiwtOnz5tgsiIiMwLizu1GZIkITU1FQcPHsRbb70lt+fn52PJkiV45ZVX4O/vb8IIiYjMA4s7tSl+fn545ZVX8I9//AP5+fkQQmDmzJkYOXIkpk+frrX8I488gocfflijTa1Ww8PDA6mpqQCAnTt3YujQoXB1dYW7uzvGjh2LX3/9tcEY0tLS4OrqqtH2ySefQJIkjbbPPvsMd911F2xtbdG1a1fEx8fjxo0b8nyVSgV/f38olUr4+voiNjZWz2wQEdWPj3ylNicmJgZbt27FjBkzMGnSJPz444/48ccf6112ypQpeOihh3D16lU4OjoCAHbt2oXy8nJMmjQJAFBeXo64uDjceeedKC8vx3PPPYcHH3wQR44cgZWVYX//7tq1C1OnTsWrr76KYcOG4ddff8Xs2bMBACtWrMBHH32E1at
XY9OmTbjjjjtQVFSE//3vfwZti4hIi2kfSkdkmN9//1107NhRWFlZiS1btjS4XFVVlfDw8BD/+c9/5LZHHnlETJ48ucF1iouLBQD5MaH5+fkCgPj++++FEEKkpqYKFxcXjXW2bt0qbv3vNGzYMJGYmKixzLvvvit8fHyEEEK8/PLLIiQkRFRVVen0fomI9MHD8tQmeXp6Yvbs2bj99tvx4IMPNricQqHA5MmT8d///hfAzb30Tz/9FFOmTJGX+fXXXxEdHY2uXbvC2dkZgYGBANCswXmHDh3CypUr4ejoKL9mzZqF8+fPo6KiApMnT8a1a9fQtWtXzJo1C1u3btU4ZE9E1Bw8LE9tlrW1Naytm/4IT5kyBeHh4SguLkZmZiZsbW0xevRoef64cePg5+eHDRs2wNfXFzU1NejVqxeqqqrq7c/KygpCCI02tVqtMV1TU4P4+HhMnDhRa31bW1v4+fkhLy8PmZmZ+PLLLzFv3jy8+OKLyMrKgkKh0OXtExE1iMWdLN6QIUPg5+eHDz74AF988QUmT54MGxsbAMDFixdx/PhxvPXWWxg2bBgA4Ntvv220v44dO6KsrAzl5eVwcHAAAK1r4Pv374+8vDwEBQU12I+dnR0eeOABPPDAA5g/fz569OiBH374Af3792/GuyUiYnGndkCSJERHR+PNN9/EiRMnsGfPHnnebbfdBnd3d6xfvx4+Pj44ffo0/vnPfzba36BBg2Bvb49ly5Zh4cKF+O6775CWlqaxzHPPPYexY8fCz88PkydPhpWVFY4ePYoffvgBCQkJSEtLQ3V1tdzXu+++Czs7OwQEBLRECoioneE5d2oXpkyZgp9++gmdOnVCWFiY3G5lZYVNmzbh0KFD6NWrF5566im8+OKLjfbl5uaG9957Dzt27MCdd96J999/HyqVSmOZUaNGYfv27cjMzMTdd9+NwYMHIyUlRS7erq6u2LBhA8LCwtC7d2989dVX+Oyzz+Du7m70905E7Y8k6p48JCIiojaNe+5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7ERGRhWFxJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZmP8HzQms27H+BcMAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABcFElEQVR4nO3dd1iTV/sH8G8CCSQs2UO2RK2guCp1FRyoqDi6bG2rtNYurVqrba1txVF81WqHVrsc1Mmvtb6v1klVUOuoUqy4QVBR9pA9EvL8/njMA4EASQgkhPtzXbk0z8o5EHLnnOec+/AYhmFACCGEEKPC13cBCCGEEKJ7FOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOB14MqVK5gxYwa6dOkCkUgEkUgEiUSCt956C5cuXVI6NjIyEjweD3w+H6mpqQ2uVVZWBmtra/B4PERERHDb7969Cx6PBx6Ph8jISJXleP3117ljmqMoh+IhFArh4+ODuXPn4tGjR5pUv0mKcn/55Zc6u2ZcXBx4PB5+++23Zo9V1LOukJAQhISEKG1r6ufamEOHDjV6jre3t9LvryVCQkKUflcikQiBgYH4+uuvIZfLdfIaTVH8vOPi4rhtERER8Pb21vhaGzduxLZt2xpsV7xPVO0jhGiHAnwL/fDDD+jXrx8uXLiAuXPn4o8//sDBgwcxb948XLt2DU8++STu3LnT4DxLS0ts3bq1wfZff/0VUqkUAoFA5etZWVlh27ZtDT7YS0tL8euvv8La2lqj8h85cgTnzp3DwYMHMWnSJKxfvx5hYWEwlgzGb7zxBs6dO9fscefOncMbb7yh0bUPHTqEpUuXqty3b98+fPbZZxpdrym+vr44d+4czp07h5iYGHTu3Bnvv/8+Fi1apLPX0MRnn32Gffv2aXxeYwHe1dUV586dw7hx43RQOkIIAJjquwDt2V9//YV3330X48aNw2+//QahUMjtGz58OGbNmoVff/0VIpGowblTpkxBdHQ0li5dCj6/9nvW5s2bMXnyZOzfv1/la06ZMgU///wzjh8/jtDQUG57TEwMampqMGnSJOzYsUPtOvTr1w8ODg4AgNDQUOTn52P79u04e/YsBg8erPKc8vJyiMVitV9Dn9zd3eHu7t7scU899ZROX7dPnz46vZ5IJFIqY1hYGLp3744NGzZgxYoVKr8QMgyDyspKle+/lurSpYtOr2dmZqbz3wEhHR214FsgKioKJiYm+OGHH5SCe13PP/883NzcGmx//fXXkZ6ejtjYWG7b7du3cebMGbz++uuNvma3bt0waNAgbNmyRWn7li1b8Mwzz8DGxkbL2rAUH7L37t0DwHYPBwQE4NSpUxg0aBDEYjFXvvv37+OVV16Bk5MTzMzM8MQTT2Dt2rUqu43lcjm++OILeHp6wtzcHP3798fx48eVjklJScFrr70GiUQCsViMzp07Izw8HElJSSrLWllZifnz58PFxQUikQjBwcFITExUOkZVF70q9bvoy8vLsWDBAvj4+MDc3Bx2dnbo378/du/eDYDtov7uu++4cxWPu3fvAlDdRf/o0SN88MEH8PX1hZmZGZycnDB27FjcvHmz2fLVJxAI0K9fP5SXlyM3N5crx+zZs/H999/jiSeegJmZGaKjowEAycnJmDp1qtLvSlH+um7evIkxY8ZALBbDwcEBb7/9NkpKShocp6qLXi6XY/369ejduzdEIhE6deqEp556ivuy6u3tjWvXriE+Pp77eSmu0VgX/ZkzZzBixAhYWVlBLBZj0KBBOHjwoNIx27ZtA4/Hw8mTJ/HOO+/AwcEB9vb2eOaZZ5CRkaF07IkTJxASEgJ7e3uIRCJ4en
ri2WefRXl5udo/e0LaC2rBa6mmpgYnT55E//794erqqvH5EokEQ4cOxZYtWzB69GgAbJD29vbGiBEjmjx3xowZmDVrFgoLC2Fra4tbt27h7NmzWLFiBfbu3atVfRRSUlIAAI6Ojty2zMxMvPLKK/jwww8RFRUFPp+P3NxcDBo0CNXV1Vi+fDm8vb3xxx9/YMGCBbhz5w42btyodN0NGzbAy8uLu2+8evVqhIWFIT4+HgMHDgQAZGRkwN7eHv/5z3/g6OiIgoICREdHIygoCImJiejWrZvSNT/55BP07dsXP//8M4qKihAZGYmQkBAkJibC19e3RT+H+fPnY/v27VixYgX69OmDsrIyXL16Ffn5+QDYLuqysjL89ttvSrcAGnsvlJSUYMiQIbh79y4++ugjBAUFobS0FKdOnUJmZia6d++ucRnv3LkDU1NT2Nractv++9//4vTp0/j888/h4uICJycnXL9+HYMGDYKnpyfWrl0LFxcXHD16FHPmzEFeXh6WLFkCAMjOzkZwcDAEAgE2btwIZ2dn7Ny5E7Nnz1arPBEREdixYwdmzJiBZcuWQSgU4p9//uG+9Ozbtw/PPfccbGxsuPeHmZlZo9eLj49HaGgoevXqhc2bN8PMzAwbN25EeHg4du/ejSlTpigd/8Ybb2DcuHHYtWsX0tPTsXDhQrzyyis4ceIEAPZLxLhx47i/u06dOuHhw4c4cuQIqqur202vFCFqY4hWsrKyGADMiy++2GCfTCZjpFIp95DL5dy+JUuWMACY3NxcZuvWrYyZmRmTn5/PyGQyxtXVlYmMjGQYhmEsLCyY6dOnc+elpaUxAJg1a9YwJSUljKWlJbNhwwaGYRhm4cKFjI+PDyOXy5lZs2Yx6vxaFeXIyspipFIpU1hYyOzYsYMRiUSMh4cHU1FRwTAMwwQHBzMAmOPHjyud//HHHzMAmAsXLihtf+eddxgej8fcunVLqdxubm7cNRmGYYqLixk7Oztm5MiRjZZRJpMx1dXVjEQiYd5//31u+8mTJxkATN++fZV+tnfv3mUEAgHzxhtvNKhnXcHBwUxwcLDSNgDMkiVLuOcBAQHMpEmTGi0bwzBN/qy9vLyUfn/Lli1jADCxsbFNXlOV4OBgxt/fn3s/ZWRkcD//559/XqkONjY2TEFBgdL5o0ePZtzd3ZmioiKl7bNnz2bMzc254z/66COGx+Mxly9fVjouNDSUAcCcPHmS2zZ9+nTGy8uLe37q1CkGALN48eIm6+Lv79/gZ88wte+TrVu3ctueeuopxsnJiSkpKeG2yWQyJiAggHF3d+d+91u3bmUAMO+++67SNVevXs0AYDIzMxmGYZjffvuNAdCgfoQYK+qibwX9+vWDQCDgHmvXrlV53PPPPw+hUIidO3fi0KFDyMrKUmvktaWlJZ5//nls2bIFMpkMv/zyC1577TW1uqLrc3FxgUAggK2tLV555RX07dsXR44cgbm5OXeMra0thg8frnTeiRMn0KNHDwwYMEBpe0REBBiG4VpNCs8884zSNa2srBAeHo5Tp06hpqYGACCTyRAVFYUePXpAKBTC1NQUQqEQycnJuHHjRoOyT506VanOXl5eGDRoEE6ePKnxz6G+AQMG4PDhw/j4448RFxeHioqKFl3v8OHD6Nq1K0aOHKnV+deuXePeT25ubli7di1efvll/PTTT0rHDR8+XKlFX1lZiePHj2Py5MkQi8WQyWTcY+zYsaisrMT58+cBACdPnoS/vz8CAwOVrjl16lS16gcAs2bN0qp+9ZWVleHChQt47rnnYGlpyW03MTHBq6++igcPHuDWrVtK50yYMEHpea9evQDU3m7q3bs3hEIh3nzzTURHR6ucxUKIMaEuei05ODhAJBJxHx517dq1C+Xl5cjMzGzwoVOXhYUFpkyZgi1btsDLywsjR46El5eXWq8/Y8YMDBkyBF988QVyc3O1npL1559/wsbGBgKBAO7u7rC3t29wjKpu5/z8fJ
XTpBTjDRRd2QouLi4NjnVxcUF1dTVKS0thY2OD+fPn47vvvsNHH32E4OBg2Nrags/n44033lAZYBu75r///ttofdX17bffwt3dHTExMVi1ahXMzc0xevRorFmzBhKJROPr5ebmwtPTU+vydOnSBXv27AGPx4O5uTl8fHxUdinX/13l5+dDJpNh/fr1WL9+vcpr5+Xlccf6+Pg02K/q51xfbm4uTExM1DpWHYWFhWAYRuV7r7H3WP33rqL7X/He6dKlC/7880+sXr0as2bNQllZGXx9fTFnzhzMnTtXJ+UmxJBQgNeSiYkJhg8fjmPHjiEzM1Ppg6hHjx4AwN17bMrrr7+On3/+GVeuXMHOnTvVfv3BgwejW7duWLZsGUJDQ+Hh4aFxHQAgMDCQG0XfGFU9A/b29sjMzGywXTGoqf41s7KyGhyblZUFoVDItdB27NiBadOmISoqSum4vLw8dOrUSeX5qrap+pKiKQsLCyxduhRLly5FdnY215oPDw/XalCco6MjHjx4oHV5FAMTm1P/d2Vra8u1ehtrXSuCur29faM/0+Y4OjqipqYGWVlZWo1JqU/x5U6T95g6hg4diqFDh6KmpgaXLl3C+vXrMW/ePDg7O+PFF19scbkJMSTURd8CixYtQk1NDd5++21IpVKtrjFw4EC8/vrrmDx5MiZPnqzRuZ9++inCw8PxwQcfaPXaLTFixAhcv34d//zzj9L2X375BTweD8OGDVPa/vvvv6OyspJ7XlJSggMHDmDo0KEwMTEBwAan+oOuDh48iIcPH6osw+7du5Xm69+7dw9nz55tkMSmpZydnREREYGXXnoJt27d4kZc128hNiUsLAy3b99ucOuitYnFYgwbNgyJiYno1asX+vfv3+Ch+EI0bNgwXLt2rUEPyK5du5p9nbCwMADApk2bmjzOzMxMrZ+XhYUFgoKC8PvvvysdL5fLsWPHDri7u6Nr167NXqcxJiYmCAoK4mYS1H8fE2IMqAXfAoMHD8Z3332H9957D3379sWbb74Jf39/ruWhGNHeXPKZzZs3a/X6r7zyCl555RWtzm2p999/H7/88gvGjRuHZcuWwcvLCwcPHsTGjRvxzjvvNPjwNTExQWhoKObPnw+5XI5Vq1ahuLhYKVHM+PHjsW3bNnTv3h29evVCQkIC1qxZ0+g89pycHEyePBkzZ85EUVERlixZAnNzc50kfwkKCsL48ePRq1cv2Nra4saNG9i+fTsGDhzIdY337NkTALBq1SqEhYXBxMQEvXr1Ujllct68eYiJicHEiRPx8ccfY8CAAaioqEB8fDzGjx/f4AuRLn3zzTcYMmQIhg4dinfeeQfe3t4oKSlBSkoKDhw4wH3pmDdvHrZs2YJx48ZhxYoV3Ch6dXoshg4dildffRUrVqxAdnY2xo8fDzMzMyQmJkIsFuO9994DwP7M9uzZg5iYGPj6+sLc3Jz7Oda3cuVKhIaGYtiwYViwYAGEQiE2btyIq1evYvfu3RqPOfn+++9x4sQJjBs3Dp6enqisrOSmm2o7NoIQQ0YBvoXefvttDBw4EN988w2++uorZGRkgMfjwd3dHYMGDcLx48cbDFAzBo6Ojjh79iwWLVqERYsWobi4GL6+vli9ejXmz5/f4PjZs2ejsrISc+bMQU5ODvz9/XHw4EGlZDrffPMNBAIBVq5cidLSUvTt2xe///47Pv30U5VliIqKwsWLF/Haa6+huLgYAwYMwJ49e3SShGX48OHYv38/vvrqK5SXl6Nz586YNm0aFi9ezB0zdepU/PXXX9i4cSOWLVsGhmGQlpamcmyClZUVzpw5g8jISPz4449YunQpbG1t8eSTT+LNN99scXmb0qNHD/zzzz9Yvnw5Pv30U+Tk5KBTp06QSCQYO3Ysd5yLiwvi4+Mxd+5cvPPOOxCLxZg8eTI2bNiAiRMnNvs627ZtQ9++fbF582Zs27YNIpEIPXr0wCeffMIds3TpUmRmZmLmzJkoKSmBl5
dXo7eygoODceLECSxZsgQRERGQy+UIDAzE/v37MX78eI1/Dr1798axY8ewZMkSZGVlwdLSEgEBAdi/fz9GjRql8fUIMXQ8hjGSnKSEEEII4dA9eEIIIcQIUYAnhBBCjBAFeEIIIcQIUYAnhBBCjBAFeEIIIcQIGf00OblcjoyMDFhZWWmVq50QQrTBMAxKSkrg5uYGPp/aUqTtGX2Az8jI0DqNKyGEtFR6enqjyZoIaU1GH+CtrKwAsH9kzWWUU5BKpTh27BhGjRoFgUDQmsVrE1Qfw2Zs9QGMr07a1Ke4uBgeHh7cZxAhbc3oA7yiW97a2lqjAC8Wi2FtbW00H05UH8NlbPUBjK9OLakP3Rok+kI3hgghhBAjRAGeEEIIMUIU4AkhhBAjZPT34AkhxJDV1NRAKpXquxiknRAIBDAxMVHrWArwhBCiBwzDICsrC48ePdJ3UUg706lTJ7i4uDQ7gJMCPCGE6IEiuDs5OUEsFtNoe9IshmFQXl6OnJwcAICrq2uTx1OAJ4SQNlZTU8MFd3t7e30Xh7QjIpEIAJCTkwMnJ6cmu+tpkB0hhDSjokK311PccxeLxbq9MOkQFO+b5sZuUAueEEJUkMuBvDwgJwdQc0yTxqhbnmhD3feNXlvwmzZtQq9evbgscwMHDsThw4e5/QzDIDIyEm5ubhCJRAgJCcG1a9f0WGJCiLGTSoGHD4ErV4D0dKCqSt8lIkQ7eg3w7u7u+M9//oNLly7h0qVLGD58OCZOnMgF8dWrV2PdunXYsGEDLl68CBcXF4SGhqKkpESfxSaEGKHyciAtDUhKArKygJoafZeIkJbRaxd9eHi40vMvvvgCmzZtwvnz59GjRw98/fXXWLx4MZ555hkAQHR0NJydnbFr1y689dZbKq9ZVVWFqjpfuYuLiwGw9yrUnWuqOM5Y5qZSfQybsdUHaF91Ki5mu+FLSxs/pqZG8/q0h7obspCQEPTu3Rtff/01AMDb2xvz5s3DvHnzWu014+LiMGzYMADAxIkT8d///rfVXqsxiu53GxubFk+hNJh78DU1Nfj1119RVlaGgQMHIi0tDVlZWRg1ahR3jJmZGYKDg3H27NlGA/zKlSuxdOnSBtuPHTum8YCW2NhYzSph4Kg+hs3Y6gMYX500qU95eXkrlqTjuXjxIiwsLNrktW7dugUnJyeNzomIiEB0dLTStqCgIJw/f557XlVVhQULFmD37t2oqKjAiBEjsHHjRqXlhDMzMxETE4MlS5a0rBIwgACflJSEgQMHorKyEpaWlti3bx969OiBs2fPAgCcnZ2Vjnd2dsa9e/cavd6iRYswf/587rliycZRo0ZptJpcbGwsQkNDjWYlLKqP4TK2+gCGWyeZjB04l5fH/l9dIpEUd+5oVh9F7yHRDUdHxzZ7LScnJ3Tq1Enj88aMGYOtW7dyz4VCodL+efPm4cCBA9izZw/s7e3xwQcfYPz48UhISOCmu7m4uMDGxqZF5VfQe4Dv1q0bLl++jEePHmHv3r2YPn064uPjuf31RwsyDNPkCEIzMzOYmZk12C4QCDT+oNHmHENG9TFsxlYfwHDqVFkJZGcDBQXs6HgA4GswAkkxil6T+mhSb0UCE33QJMlOSEgIevbsCRMTE0RHR0MoFGL58uV4+eWXMXv2bPz2229wcnLChg0bEBYWxp13/fp1LFiwAKdOnYKFhQVGjRqFr776Cg4ODgCAsrIyvPPOO/j9999hZWWFBQsWNHjt+l3069atw9atW5Gamgo7OzuEh4dj9erVsLS0BABs27YN8+bNQ0xMDObNm4f09HQMGTIEW7dubTZBTH2qynfgwAGlWwgAG39cXFxUXqOoqAibN2/G9u3bMXLkSADAjh074OHhgT///BOjR4/WqEzq0Ps8eKFQCD8/P/Tv3x8rV65EYGAgvvnmG+6HlJWVpXR8Tk5Og1Y9IYSoUlICpKQA166xrXZFcDc05eXlsLS01MtD0y8W0dHRcHBwwN9//4
333nsP77zzDp5//nkMGjQI//zzD0aPHo1XX32Vu25mZiaCg4PRu3dvXLp0CUeOHEF2djZeeOEF7poLFy7EyZMnsW/fPhw7dgxxcXFISEhoshx8Ph/ffvstrl69iujoaJw4cQIffvhhg5/rl19+ie3bt+PUqVO4f/++yi8PzVG3fHFxcXByckLXrl0xc+ZMLuMcACQkJEAqlSrddnZzc0NAQADXY61reg/w9TEMg6qqKvj4+MDFxUXpnld1dTXi4+MxaNAgPZaQEGLIGAbIzwdu3ABu3waKivRdIuMSGBiITz/9FBKJBIsWLYJIJIKDgwNmzpwJiUSCzz//HPn5+bhy5QoAdjp03759ERUVhe7du6NPnz7YsmULTp48idu3b6O0tBSbN2/Gl19+idDQUPTs2RPR0dGoaWYaw7x58zBs2DD4+Phg+PDhWL58Of7v//5P6RipVIrvv/8e/fv3R9++fTF79mwcP35co/qqW76wsDDs3LkTJ06cwNq1a3Hx4kUMHz6cG/SdlZUFoVAIW1tbpfOcnZ0bNGR1Ra9d9J988gnCwsLg4eGBkpIS7NmzB3FxcThy5Ah4PB7mzZuHqKgoSCQSSCQSREVFQSwWY+rUqfosNiHEANXUALm57Ij49jaAXSwWo7SpYfyt/Nqa6NWrF/d/ExMT2Nvbo2fPntw2RQ+rovWakJCAkydPcl3ndd25cwcVFRWorq7GwIEDue12dnbo1q1bk+U4efIkoqKicP36dRQXF0Mmk6GyshJlZWXcYDyxWIwuXbpw57i6uiq1qtVx584dtco3ZcoU7v8BAQHo378/vLy8cPDgQW4mmCrN3XZuCb0G+OzsbLz66qvIzMyEjY0NevXqhSNHjiA0NBQA8OGHH6KiogLvvvsuCgsLERQUhGPHjsHKykqfxSaEGJCqKjaoG3IXfHN4PF6bjRBvqfpjC3g8ntI2RbCSP/5lyOVyhIeHY9WqVQ2u5erqiuTkZI3LcO/ePYwdOxZvv/02li9fDjs7O5w5cwYzZsxQmp6oqqwMw2j0Wpoer+Dq6govLy+ufi4uLqiurkZhYaFSKz4nJ6fVeqX1GuA3b97c5H4ej4fIyEhERka2TYEIIe1GaSk7cI5WWzVsffv2xd69e+Ht7Q1T04Yhx8/PDwKBAOfPn4enpycAoLCwELdv30ZwcLDKa166dAkymQxr164F//Foyfrd87qiTfkAID8/H+np6dyAvn79+kEgECA2NpYbf5CZmYmrV69i9erVrVJ2g7sHTwghjWEYoLAQuHkTuHWLgnt7MGvWLBQUFOCll17C33//jdTUVBw7dgyvv/46ampqYGlpiRkzZmDhwoU4fvw4rl69ioiICC5wq9KlSxfIZDKsX78eqamp2L59O77//vtWKb865SstLcWCBQtw7tw53L17F3FxcQgPD4eDgwMmT54MgE1cM2PGDHzwwQc4fvw4EhMT8corr6Bnz57cqHpd0/s0OUIIaU7dhV8oN3z74ubmhr/++gsfffQRRo8ejaqqKnh5eWHMmDFckFyzZg1KS0sxYcIEWFlZ4YMPPkBRE6Mje/fujXXr1mHVqlVYtGgRnn76aaxcuRLTpk1rlTo0Vz4TExMkJSXhl19+waNHj+Dq6ophw4YhJiZG6ZbyV199BVNTU7zwwgtcoptt27Y1ueRrS1CAJ4QYrOrq2vvrlBte/+Li4hpsu3v3boNt9e9bSyQS/P77741e19LSEtu3b8f27du5bQsXLmzydd5//328//77StteffVV7v8RERGIiIhQ2j9p0iSt7qmrKt/Bgwe5/4tEIhw9erTZ65ibm2P9+vVYv369xmXQBgV4QojBKS9n768XFrLd8oS0JXd3d4SHh2P37t1t/tqWlpaQyWQwNzdv8bUowBNCDEZRERvYacFIog9BQUHcqHdV0/rawuXLlwFAJ932FOAJIXoll7OJaXJy2JSyhOiLSCSCn5+fWsequl2hC+q+vjoowBNC9EIqZRPT5O
ZqtvALIUQ9FOAJIW2qooJtrefn0/11QloTBXhCSJsoLmbvr9MqqoS0DQrwhJBWo1j4JTubbbkTQtoOBXhCiM4p7qlfv07z1wnRFwrwhBCdqapiW+u5uexzqRRoIuMoIaQVUYAnhLRY/YVf2uuqbvr2449t+3pvvqnZ8SEhIYiPjwcAJCYmonfv3rovlIFSrJJnY2ODR+1kEQT6bk0I0Qot/NIxzZw5E5mZmQgICFDr+Li4OEycOBGurq6wsLBA7969sXPnzgbH8Hi8Bo+bN2+2uLyqrsvj8bBmzRrumJCQkAb7X3zxRaXrZGZm4uuvv25xedoSteAJIRqpqald+KW6Wt+lIW1NLBbDxcVF7ePPnj2LXr164aOPPoKzszMOHjyIadOmwdraGuHh4UrH3rp1C9bW1txzR0fHFpc3MzNT6fnhw4cxY8YMPPvss0rbZ86ciWXLlnHPRSKR0n4XFxfY2Ni0uDxtiQI8IUQttPALqS8uLg7Dhg3DH3/8gU8++QS3bt1CYGAgfv75Z/Ts2RMA8MknnyidM2fOHBw9ehT79u1rEOCdnJzQqVMntV8/JCSE60nYsWMHTExM8M4772D58uVcl3r9LyP/+9//MGzYMPj6+ipt1/SLS3tAXfSEkCaVlwNpacDVq+x9dgrupL6FCxfiyy+/xMWLF+Hk5IQJEyZAKpU2enxRURHs7OwabO/Tpw9cXV0xYsQInDx5Uq3Xjo6OhqmpKS5cuIBvv/0WX331FX7++WeVx2ZnZ+PgwYOYMWNGg307d+6Eg4MD/P39sWDBApQYwYII1IInhKj06BEb0EtL9V0SYuiWLFmC0NBQAGzAdXd3x759+/DCCy80OPa3337DxYsX8cMPP3DbXF1d8eOPP6Jfv36oqqrC9u3bMWLECMTFxeHpp59u8rU9PDzw1VdfgcfjoVu3bkhKSsJXX32FmTNnNjg2OjoaVlZWeOaZZ5S2v/zyy/Dx8YGLiwuuXr2KRYsW4d9//0VsbKw2Pw6DQQGeEMJRLPySnc1OeSNEHQMHDuT+b2dnh27duuHGjRsNjouLi0NERAR++ukn+Pv7c9u7deuGbt26KV0vPT0dX375JZ5++mmcPn0aYWFh3P4ffvgBL7/8MgDgqaee4rrjFeeuXbsWNTU1DVZk27JlC15++eUGS7HW/TIQEBAAiUSC/v37459//kHfvn01/XEYDArwhBBIpbX312nhF6ILdYMuAMTHxyM8PBzr1q3DtGnTmj3/qaeewo4dOwAA/fv355ZRBQBnZ2eNy3P69GncunULMTExzR7bt29fCAQCJCcnU4AnhLRPFRVsa72ggBZ+Ido7f/48PD09AQCFhYW4ffs2unfvzu2Pi4vD+PHjsWrVKryp5uT7xMREuLq6Amh6Gdfz5883eC6RSBq03jdv3ox+/fohMDCw2de+du0apFIp9/rtFQV4QjogWviF6NKyZctgb28PZ2dnLF68GA4ODpg0aRIANriPGzcOc+fOxbPPPousrCwAgFAo5Abaff311/D29oa/vz+qq6uxY8cO7N27F3v37m32tdPT0zF//ny89dZb+Oeff7B+/XqsXbtW6Zji4mL8+uuvDbYDwJ07d7Bz506MHTsWDg4OuH79Oj744AP06dMHgwcPbuFPRr8owBPSQSgWfsnJoYVfDJWmmeUMxX/+8x/MnTsXycnJCAwMxP79+yEUCgEA27ZtQ3l5OVauXImVK1dy5wQHByMuLg4AUF1djQULFuDhw4cQiUTw9/fHwYMHMXbs2GZfe9q0aaioqMCAAQNgYmKC9957r0EvwZ49e8AwDF566aUG5wuFQhw/fhzffPMNSktL4eHhgXHjxmHJkiUNegHaGwrwhBg5mYzNDZ+by95rJ0TXhgwZgqtXr6rct23bNmzbtq3J8z/88EN8+OGHWr22QCDA119/jU2bNjV6zJtvvtnorQEPDw8u/a6xoQBPiJGqrGRb6/n5lBue6M7GjRvx888/49y5c/ouSp
uytLSETCZrMALfkFGAJ8TIlJSw99eLivRdEmJsdu7ciYrH93c8PT1x9uxZPZeo7ShG8benbnsK8IQYAcXCL9nZbOY5QlpD586dlZ6HhISA0eP0C8U9/LbQ2Ch+Q0YBnpB2jBZ+IYQ0Rq+56FeuXIknn3wSVlZWcHJywqRJk3Dr1i2lYyIiIhos4/fUU0/pqcSEGIbqaiA9HUhKAh48oOBOCGlIrwE+Pj4es2bNwvnz5xEbGwuZTIZRo0ahrKxM6bgxY8YgMzOTexw6dEhPJSZEv8rKgNRUduGXnBxa+IUQ0ji9dtEfOXJE6fnWrVvh5OSEhIQEpQUGzMzM1F7Gr6qqClV1kmgXP87kIZVKm1zdqC7Fceoeb+ioPoZNnfoUFbEBvd53X4Mll0uV/m3vamo0f88Zy/uTtF8GdQ++6PGw3/rLCMbFxXHrBAcHB+OLL76Ak5OTymusXLkSS5cubbD92LFjEIvFGpWnva8kVB/Vx7AZW30AICvLuOqkye+onEY7Ej3jMfocAlkHwzCYOHEiCgsLcfr0aW57TEwMLC0t4eXlhbS0NHz22WeQyWRISEiAmZlZg+uoasF7eHggLy8P1tbWapVFKpUiNjYWoaGhEAgELa+cnlF9DFv9+kil7MC5vLz22wUvl0uRlRULF5dQ8Pnt/3ckEklx545m77ni4mI4ODigqKiowWdPZWUl0tLS4OPj067mVRPDoO77x2Ba8LNnz8aVK1dw5swZpe1Tpkzh/h8QEID+/fvDy8sLBw8ebLCmL8B256sK/AKBQONgoM05hozqY9hqagTIyREoLfzC1+somZbj8wVGEeAVU581ec9p895MSND4lBbp10+z40NCQrisb4mJiejdu7fuC6UH3t7euHfvHgB2sZxOnTrpt0A6YhAfH++99x7279+PkydPwt3dvcljXV1d4eXlheTk5DYqHSGtS7Hgy82bbNY5w+hTI0S1mTNnIjMzEwEBAWodX1lZiYiICPTs2ROmpqbcIjS6UFJSgnnz5sHLywsikQiDBg3CxYsXlY7Jzs5GREQE3NzcIBaLMWbMmAbx4+LFi2otbNPe6DXAMwyD2bNn4/fff8eJEyfg4+PT7Dn5+flIT09v98v4kY6NYdgu+GvX2FHxhLQXYrEYLi4uMDVVrwO4pqYGIpEIc+bMwciRI3ValjfeeAOxsbHYvn07kpKSMGrUKIwcORIPHz4EwMaYSZMmITU1Ff/73/+QmJgILy8vjBw5Umm2lqOjY4OxX8ZArwF+1qxZ2LFjB3bt2gUrKytkZWUhKyuLS4VYWlqKBQsW4Ny5c7h79y7i4uIQHh4OBwcHTJ48WZ9FJ0QrMhmQmQlcuQLcu8fmiyekvYqLiwOPx8PBgwcRGBgIc3NzBAUFISkpiTvGwsICmzZtwsyZM9WeDQWwOVAmTZqEpUuXwsnJCdbW1njrrbdQ/TjpQ0VFBfbu3YvVq1fj6aefhp+fHyIjI+Hj48MtPJOcnIzz589j06ZNePLJJ9GtWzds3LgRpaWl2L17t25/GAZIrwF+06ZNKCoqQkhICFxdXblHTEwMADbnb1JSEiZOnIiuXbti+vTp6Nq1K86dOwcrKyt9Fp0QjVRWsgE9KQnIyGADPSHGYuHChfjyyy9x8eJFODk5YcKECTqZJnj8+HHcuHEDJ0+exO7du7Fv3z5ulpRMJkNNTU2DQWYikYgby6UYcF33GBMTEwiFwgbjvYyRXgfZNTeAXyQS4ejRo21UGkJ0jxZ+IR3BkiVLEBoaCgCIjo6Gu7s79u3bhxdeeKFF1xUKhdiyZQvEYjH8/f2xbNkyLFy4EMuXL4eVlRUGDhyI5cuX44knnoCzszN2796NCxcuQCKRAAC6d+8OLy8vLFq0CD/88AMsLCywbt06ZGVlITMzs8X1NnQGMciOEGPCMEBBAXDjBnD7NgV3YvwGDhzI/d/Ozg7dunXDjRs31Dr3/v37sLS05B5RUVHcvsDAQKX8JQMHDkRpaSnS09
MBANu3bwfDMOjcuTPMzMzw7bffYurUqdyKbwKBAHv37sXt27dhZ2cHsViMuLg4hIWFtatV4bRlMNPkCGnvamqA3Fw24xwlMSMdHY/HU+s4Nzc3bilWoGGis6au3aVLF8THx6OsrAzFxcVwdXXFlClTlAZs9+vXD5cvX0ZRURGqq6vh6OiIoKAg9O/fX7MKtUMU4AlpoaoqNqjn5QFyub5LQ0jbO3/+PDw9PQGw88hv376N7t27q3Wuqalpo0ux/vvvv6ioqIBIJOJex9LSssF0agsLC1hYWKCwsBBHjx7F6tWrG1zLxsYGADvw7tKlS1i+fLna9WuvKMAToqWyMvb++qNHNHeddGzLli2Dvb09nJ2dsXjxYjg4OCjNd79+/Tqqq6tRUFCAkpISrsXeXKKc6upqzJgxA59++inu3buHJUuWYPbs2eA/zgB19OhRMAyDbt26ISUlBQsXLkS3bt3w2muvcdf49ddf4ejoCE9PTyQlJWHu3LmYNGkSRo0apesfg8GhAE+IBhiGDejZ2e1n4RfSfmiaWc5Q/Oc//8HcuXORnJyMwMBA7N+/H0KhkNs/duxYLlMcAPTp0wdA8wOtR4wYAYlEgqeffhpVVVV48cUXERkZye0vKirCokWL8ODBA9jZ2eHZZ5/FF198oZRFMDMzE/Pnz0d2djZcXV0xbdo0fPbZZzqquWGjAE+IGuRytgs+J4ftkieE1BoyZAiuXr3a6P67d+9qfe2lS5eqXEAMAF544YVmR+rPmTMHc+bM0fr12zMaRU9IE6RS4OFDNjFNejoFd0I2btwIS0tLpWQ27Z2/vz/CwsL0XQydoxY8ISqUl7Pd8IWFdH+dEIWdO3dymUY9PT1x9uxZPZdINw4dOsQl5lF31dH2gAI8IXUUFbGBvaRE3yUhxPB07txZ6XlISEiz99G1tW3btla5ripeXl5t9lptiQI86fDkcnYVt5wcyg1PCDEeFOBJhyWTsUE9N5dywxP9kFPiBKIFdd83FOBJh1NZyXbDFxRQYhqiH0KhEHw+HxkZGXB0dIRQKFQ78xvpuBiGQXV1NXJzc8Hn85WmIqpCAZ50GMXFbIudcsMTfePz+fDx8UFmZiYyMjL0XRzSzojFYnh6enIJfxpDAZ4YNcXCL9nZwOPBv4QYBKFQCE9PT27ZU0LUYWJiAlNTU7V6fCjAE6NEC7+Q9oDH40EgEChlXiNEVyjAE6NSVcW21vPz6f46IaRjowBPjEJpae3CL4QQQijAk3aMYdhMc7TwCyGENEQBnrQ7ivFIN27Q/HVCCGkMBXjSblRXs4PmcnJqnzczS4QQQjosCvDE4NVf+IUGzxFCSPMowBOD9egRG9hLS/VdEkIIaX8owBODQgu/EEKIbrQ4wNfU1CApKQleXl6wtbXVRZlIBySVsolpaOEXQgjRDY2HKM2bNw+bN28GwAb34OBg9O3bFx4eHoiLi9N1+YiRq6gA7t4FkpKAzEwK7oQQoisaB/jffvsNgYGBAIADBw4gLS0NN2/exLx587B48WKdF5AYp+JiIDkZuH6d7ZJnGH2XiJCG5HK2VyktTd8lIURzGnfR5+XlwcXFBQBw6NAhPP/88+jatStmzJiBb7/9VucFJMaDFn4h7UF1Nfsezcxk/5VKAS8vwNlZ3yUjRDMat+CdnZ1x/fp11NTU4MiRIxg5ciQAoLy8HCYmJhpda+XKlXjyySdhZWUFJycnTJo0Cbdu3VI6hmEYREZGws3NDSKRCCEhIbh27ZqmxSZ6JJOxH5ZJSWx3PAV3YmhKS9kepVOngEOHgIsXgQcPaKEi0r5pHOBfe+01vPDCCwgICACPx0NoaCgA4MKFC+jevbtG14qPj8esWbNw/vx5xMbGQiaTYdSoUSirk3d09erVWLduHTZs2ICLFy/CxcUFoaGhKCkp0bTopI1VVQH377OBPSODPiyJ4WAYIC+PfW8eO8Y+kpLYbZRngRgLjbvoIyMjERAQgPT0dDz//PMwMzMDwK
5R+/HHH2t0rSNHjig937p1K5ycnJCQkICnn34aDMPg66+/xuLFi/HMM88AAKKjo+Hs7Ixdu3bhrbfe0rT4pA3Qwi/EEEmlyl3v1dX6LhEhrUuraXLPPfccAKCyzkTl6dOnt7gwRUVFAAA7OzsAQFpaGrKysjBq1CjuGDMzMwQHB+Ps2bMqA3xVVRWqqqq458XFxQAAqVQKqZpNSMVx6h5v6NqiPgzDBvScnNbvgpfLpUr/tnfGVh/AcOpUVsYG86wsdvxH3da5ummO5fIapKaeRX7+da7HUh3G8vlB2i+NA3xNTQ2ioqLw/fffIzs7G7dv34avry8+++wzeHt7Y8aMGVoVhGEYzJ8/H0OGDEFAQAAAICsrCwB7378uZ2dn3Lt3T+V1Vq5ciaVLlzbYfuzYMYjFYo3KFBsbq9Hxhs7Y6pOVRfUxdIZQJ3NzwNubfairvLwcly9fxsWLF5GQkIDi4mL07NkTPXr00OgahOiTxgH+iy++QHR0NFavXo2ZM2dy23v27ImvvvpK6wA/e/ZsXLlyBWfOnGmwj8fjKT1nGKbBNoVFixZh/vz53PPi4mJ4eHhg1KhRsLa2VqssUqkUsbGxCA0NhUAg0KAWhqk16lNdzd6vzM+vXd2trcjlUmRlxcLFJRR8fvv//RhbfYC2rZNMxvYcZWe3rOu9sDANyckHkZx8CPfuxSv1PohENrC3t8fIkSMhFArVup6i95AQfdE4wP/yyy/48ccfMWLECLz99tvc9l69euHmzZtaFeK9997D/v37cerUKbi7u3PbFdPxsrKy4Orqym3Pyclp0KpXMDMz48YF1CUQCDQObtqcY8h0UR9Fl+ejR7Vz1/W1ohufLzCagAgYX32A1qtTeTl7Lz0zU/uBcXJ5DR4+PIeUlD+QnHwAeXnXlfbb2Ung5xcOiSQcgwcPgJtbLIRCodp/Q8b02UHaJ40D/MOHD+Hn59dgu1wu1/ieE8MweO+997Bv3z7ExcXBx8dHab+Pjw9cXFwQGxuLPn36AACqq6sRHx+PVatWaVp00gK08AvRt4KC2qCubeO4srIIqalHkZLyB+7cOYSKinxuH49nAg+PoZBIxsPPLxz29l25fSYmdD+dtD8aB3h/f3+cPn0aXl5eStt//fVXLgira9asWdi1axf+97//wcrKirvnbmNjA5FIBB6Ph3nz5iEqKgoSiQQSiQRRUVEQi8WYOnWqpkUnGlIs/JKdzU55I6QtKbreMzPZQXLavgcLC+8gOfkAUlL+wP378ZDLa/Mhm5vbokuXMPj5jYev7xiIRLSeBjEeGgf4JUuW4NVXX8XDhw8hl8vx+++/49atW/jll1/wxx9/aHStTZs2AQBCQkKUtm/duhUREREAgA8//BAVFRV49913UVhYiKCgIBw7dgxWVlaaFp2oSSplP1jz8ig3PGlbFRW1AT0nR9uudxkePDiHlJQDSE7+A/n5N5T229l1g0QSDj+/8fDwGAw+nxbVJMZJ43d2eHg4YmJiEBUVBR6Ph88//xx9+/bFgQMHNJpCArBd9M3h8XiIjIxEZGSkpkUlGqqoYFvrBQWUG560ncLC2qCube4Etuv9CJKTDyA19TAqKgq4fXy+6eOudzao29lJNLo2jwdYWmpXLkL0SauvrqNHj8bo0aN1XRaiJ0VFbGuJBv2StiCTsb1DivvpddJpaKSgIOVxK/0A0tNPq+h6HwuJJBy+vqNhbt5Jo2vz+YCTE+DmBri4AHZ2wO3b2pWTEH2hvqkOimFq769r+wFLiLoqK9kWemYm+2VSm6mVcrkM6el/caPeCwqU162wt+/+uJUeDnf3gRp3vQsE7IIyiqBuSp+OpJ3T+C3M5/MbnYMOsIlwiOGSydjlL3NzKTc8aX23b7NBvbBQu/MrKgqRmnrk8aj3w6isrL0Qn28KT89g+PmNf9z13nB2T3PMzQFXV/bh5KS/KZ+EtAaNA/y+ffuUnkulUiQmJiI6OlplBjliOB48YO9x0mIapD
XU1LBfHBUD5Hr0AG7e1Pz9lp9/mxsgl55+GgxT22gQiezRpcvYx6PeR8Pc3Ebjclpasq10V1fA3l7j0wlpNzQO8BMnTmyw7bnnnoO/vz9iYmK0zmRHWkdJCbuSG8De96QWCtElRde7IqgrZl1o8j6rqZHiwYO/uKlsBQXKN7sdHHpwXe+dOz8FPl+zZakBwNaWDehuboCaCS0Jafd0dpcpKChIKXUt0R+GYbtEs7PZjF/UYie6VFRUez+9sFC7GRcVFQV1Rr0fQWXlI24fny+Ap2cwN+rd1tZX4+vz+WzrXNFS13AZCkKMgk4CfEVFBdavX6+UZpa0PUUXaW4uLYVJdEcur+16z8xkvzRqimEY5Off4gbIPXjwV72udwf4+Y2Fn184fH1HwcxM82a2iQk7SE5xT13NlPGEGC2NA7ytra3SIDuGYVBSUgKxWIwdO3botHBEPdXVbGtd25zchNRXVVW7dnpOjnYDMmUyGdLSTiI5mW2pFxamKO13dAyAn994SCThcHML0qrrXShkR7y7ubGD5GjkOyG1NP5z+Oqrr5QCPJ/Ph6OjI4KCgmBrS2ke25KqhV8I0VZJSe3cdG2THZWX5yM19TBSUvZj3bqDSkum8vkCeHkNexzUx6NTJ58mrtQ4sbj2frqDA5uIhhDSkMYBXpFCluiP4v56WZm+S0LaM8VaA4qgrs37ie16v4nkZDbhzMOHZ8Ewtd1IYrEj/PzGwc8vHD4+oTAz0y7FtLU1G9Dd3IBOnbS6BCEdjloB/sqVK2pfsFevXloXhjROLme74HNyaOEXoj2ptPZeena2dl3vNTXVuH//NJdF7tGjVKX9jo490bXrWEyYYIeiojkAzDV+DR6PzR6nGCRHqWIJ0ZxaAb53797g8XjN5o7n8XiU6EbHFAu/5OZql/2LkNLS2lzv+fnajdMoL8/DnTuHkZx8AGlpR1FVVZvX2MRE+LjrPRwSyXjY2HiBz5eiW7dDuHTJRO3X4/MBR0egc2f2vrq55t8LCCF1qBXg09LSWrscpJ7ycraFpe00JNJxKdIQK1rqJSXaXINBXt51btT7w4fn6nW9O8HPbxwkErbrXSjUrolN6WEJaT1q/TnVX/udtJ6iIjawa/OhTDouqZR93yiSzmgzTZLteo9HcvIfSEk5gEePlL/YOzkF1hn1/iR4PO2yJlF6WELahtbfl69fv4779++jut4nyYQJE1pcqI5GLmdHLdPCL0QTZWW1Xe/aTpEsK8vFnTuHkJLyB1JTj6K6uvabpYmJGby9h8PPLxx+fuNgY+OpdVktLNiud1dX9t46jXwnpPVpHOBTU1MxefJkJCUlKd2XV0ydo3vw6pPJau+vy2TNH086NoZhvwgqgro2y/syDIPc3GvcALmHD88DqL0HZGHhzLXSvb1HQii0aFGZu3en9LCE6IvGAX7u3Lnw8fHBn3/+CV9fX/z999/Iz8/HBx98gC+//LI1ymh0KivZ1npBASWmIU2TyWoTzmjb9S6TVeH+/bjHXe9/oKjortJ+Z+c+XFB3de2ndde7Ij2sovu9sBDo2pW64AnRF40D/Llz53DixAk4OjqCz+eDz+djyJAhWLlyJebMmYPExMTWKKdRKClhP6yLivRdEmLIystrB8jl5mrb9Z6DlJSDSEn5A2lpx1BdXcrtMzU1h7f3CG6ZVWtr7VNMN5YeVi7XfolYQohuaBzga2pqYPl4UqqDgwMyMjLQrVs3eHl54datWzovYHun6FbNydEuhzfpOG7eZIO6Nl8A2a73JG5FtocPL6Bu17ulpSsX0L29R7So653SwxLSPmj8pxkQEIArV67A19cXQUFBWL16NYRCIX788Uf4+mq+6pOxUiz8om0eb2LcZDL2/aHI9R4QANy+rVlrXSarxL17cVxQLy6+r7TfxaUvNzfdxaWv1l3vgHJ6WHt76nYnpD3QOMB/+umnKHuc03LFihUYP348hg4dCnt7e8TExOi8gO1NVRX7gU0Lv5D6Ki
tr08LWTVykSbAsLc3GnTsHHyeciYVUWptflu16H/l4mdVxsLLq3KLyUnpYQto3jQP86NGjuf/7+vri+vXrKCgoaLDKXEdTWlq78AshCo8e1QZ1bd4bDMMgJ+dfbm56RsbfSvstLd3qjHofDoGgZQufK9LDurlRelhC2juNA3x0dDSee+45WFjU3sOzs7PTaaHaC4ZhP7Rp4ReioLg1oxj1XlGh+TVkskrcvXsCKSl/PO56T1fa7+ranwvqzs59WvTFWpEeVpHzndLDEmI8NA7wCxYswLvvvovw8HC88sorGDNmDEw72Cibmho2FWh2tnbTlohxqayszSCXna3dmgEFBQVITNyC5OTDj7vea0dkmpqK4OMT+niQ3DhYWbm1qLymprWD5Jyd2XSxhBDjo3FkzszMxJEjR7B79268+OKLEIlEeP755/HKK69g0KBBrVFGg1FdXXt/nfL5dGxFRbWt9IICzc9nGAbZ2ZeRnHwAd+4cQEbGJaX9VladuQFyXl7DIRCIWlReM7PaQXKUHpaQjkHjAG9qaorx48dj/PjxKC8vx759+7Br1y4MGzYM7u7uuHPnTmuUU69o4Rcilyt3vWsz5VEqrcC9eye4Ue8lJQ+V9ru59Yef3wT4+Y2Hs3PvFo9psbCo7Xq3t6f0sIR0NC3qWxeLxRg9ejQKCwtx79493LhxQ1flMgjFxWxXPC380jFVVdUmnMnJ0S6dcElJxuOEMweQlvYnZLLam/ICgRg+PqGQSMbiueeEuHPnZcjlLesv79SpNqjb2LToUoSQdk6rAK9oue/cuRN//vknPDw88NJLL+HXX3/Vdfn0KjWVujI7muLi2lHv2vTYMAyDrKx/uGVWs7ISlPZbW3twA+S8vIbB1NQcfL4UtraHtCovjwc4ONR2v4tbNoieEGJENA7wL730Eg4cOACxWIznn38ecXFxWt97P3XqFNasWYOEhARkZmZi3759mDRpErc/IiIC0dHRSucEBQXh/PnzWr0eIfXJ5eyYCkXXuzazIaTScty9e/xx1/tBlJZm1NnLg5vbgMdz08fDyalXi7ve66aHdXFh768TQkh9Ggd4Ho+HmJgYjB49usWj58vKyhAYGIjXXnsNzz77rMpjxowZg61bt3LPhYpk14RoqbqaHVORkaF9psGSkofc3PS7d49DJqtd51cgsICPzyhIJOHo0mUsLC2dW1xmSg9LCNGUxh8Tu3bt0tmLh4WFISwsrMljzMzM4OLiorPXJB1TSUnt/fT8fG263uXIyvoHycnsMqvZ2cqLKllbe0IiCYdEEg5Pz2CYmrZ8QrlYzAb1zp0pPSwhRHMG3w6Ii4uDk5MTOnXqhODgYHzxxRdwcnJq9PiqqipUVVVxz4sfL5otlUohVbOppjhOLjeOJPKKenSk+ihWM1PMTS+tXUwNPJ56I8qrq8tw9+4J3L59ECkph1FamllnLw+dOw+ARDIOXbuOg6NjQL2ud/V/1ny+lPvX2rq2+71+etj2lPrY2N5zNTVsPdT9DNH0WEJaA49hDGPiF4/Ha3APPiYmBpaWlvDy8kJaWho+++wzyGQyJCQkwKyRG4+RkZFYunRpg+27du2CmEYgkWbk5uYiISEBFy9eRFJSEqrrZDIyNzdH79698eSTT6Jfv37oRAnaSRPKy8sxdepUFBUVwdraWt/FIR2Q2gH+wYMHcHfXft3oZguiIsDXl5mZCS8vL+zZswfPPPOMymNUteA9PDyQl5en9h+ZVCpFbGwsXFxCwee3/zRfcrkUWVnGWZ+KCgGys2sTzmjaymUYOTIyEpCcfBDJyQeRnf2v0n4bGy9IJOMgkYyDl9fTMDVt+Yg2Pp/tcldkkhMKjev3Axjfe04kkuLOnViEhoZCoGbqv+LiYjg4OFCAJ3qjdhd9QEAA1q9fj1dffbU1y9MkV1dXeHl5ITk5udFjzMzMVLbuBQKB2n+YCny+wCg+nBSMoT4Mw3a9A0B8vABFRZrXp7q6DGlpsUhJYU
e9l5Vl19nLg7v7QPj5saPeHR39lbrete0mNzVlg7mbG3tfve5bUXFNY/j91GcsdTIxYf/V5HNE088bQnRN7QAfFRWFWbNm4b///S9+/PFH2Nvbt2a5VMrPz0d6ejpcXV3b/LWJ/shktbnes7LY5/37a5aAqKjoPjc3/d69k6ipqe3lEQqt4Os7GhJJOHx9w2Bh4aiTctdND+voWBskCCGkLagd4N99912EhYVhxowZ8Pf3x48//ogJEya06MVLS0uRkpLCPU9LS8Ply5dhZ2cHOzs7REZG4tlnn4Wrqyvu3r2LTz75BA4ODpg8eXKLXpcYvvJydhpbVhY7T71uy1md0eRs1/vFx3PTDyAn54rS/k6dfB7neg+Hp+fTMDHRzfRLSg9LCDEUGo2i9/HxwYkTJ7BhwwY8++yzeOKJJxrMhf/nn3/Uvt6lS5cwbNgw7vn8+fMBANOnT8emTZuQlJSEX375BY8ePYKrqyuGDRuGmJgYWFlZaVJs0g4out4VWeQeT37QSHV1KVJTjz1eZvUgystzuH08Hh+dOw+CRDIefn7hcHB4osUJZxQoPSwhxBBpPE3u3r172Lt3L+zs7DBx4sQWJbsJCQlBU2P8jh49qvW1ieGTydhEM4oscnXGRqqtqOget3gL2/VeO+rdzMwavr5j4Oc3Hl26hEEsdtBJuSk9LCGkPdAoOv/000/44IMPMHLkSFy9ehWOjrq5V0k6joqK2lZ6bq7mg9bk8hpkZJzHjRvbcfr0p8jJuaq039a2CzdAztNzqM663k1M2AxyikFylB6WEGLo1A7wY8aMwd9//40NGzZg2rRprVkmYmTqdr0XFWl+flVVCdLSjj1eO/0QystzuX08Hh/u7oO5XO/29t111vWuSA/r6sqOgKf0sISQ9kTtj6yamhpcuXKlVefCE+MgkymvnV5Z2fw59T16lMbler93L04pI5qZmQ2efLIn7O3fgK9vOEQiO52VXSSq7Xp3cKD0sISQ9kvtAB8bG9ua5SDtXGWl8trpNTWanS+X1+Dhw/PcVLa8vGtK+21t/R630sPh5RWEoKBYXLo0tsXrpwOAlRUb0N3cAFvbFl+OEEIMAnU6Eq09elQb1BXJZzRRVVWM1NSjj0e9H0JFRR63j8czgYfHEG7tdHv7btw+Re72lrCzY1vqnTsDlpYtvhwhhBgcCvBEbTU1bNe7IuFMebnm1ygsTOVGvd+/H6/U9W5u3gm+vmGQSMbD13eMTrve+Xy2y13RUjdv+WJvhBBi0CjAkyYput4Vq7Jp1/V+jgvqeXnXlfbb2XXlBsi5uw+GiYnu0ns2lR6WEEKMHQV40kBRUe0AucJCzddOr6wsetz1zo56r6go4PaxXe9D64x676rTslN6WEIIYVGAJ5DLa7veMzO163ovKEjhBsilp5+CXC7j9pmb26JLlzD4+YWjS5cxMDfvpLvCg9LDEkKIKhTgO6iqKuWud5ms+XPqkstlePDgLBfU8/NvKu23t+9ep+t9EPh83b/Vunen9LCEENIYCvAdSGlpbSu9oECbrvdHuHPnyOOu98OorKwdOs/nm8LD42kuqNvZ+em07Dxe7RrqLi7sCP6uXWmeOiGENIYCvBGTy4H8fDag29kBJ05onhq2oCAZyckHHne9nwbD1I6yE4ns0KXLWPj5hcPXd5TOu975fHaQnKsr+1Ckh5XL2QBPCCGkcRTgjUx1NdvlnpnJ/iuVsoHSTs0ZZ3K5DOnpfyElhQ3qBQW3lfbb2z8BiYRdZrVz56d03vUuENQGdEoPSwgh2qOPTyNQWlqb672gQPNWekVFIVJTjyA5+QBSUw+jsvIRt4/PN4WnZ/DjtdPHw9a2i24LD0oPSwghrYECfDvEMLVd71lZQEmJ5tfIz7/FzU1PTz9Tr+vdHl26jIVEEg4fn1EwN9f9KDYrq9qgrm7vAiGEEPVRgG8npFLlrvfq6ubPqUsmk+Hu3Tjcvn0YKSl/oKAgWWm/g4M/N0CO7XrX/Q
RyRXpYNzc2wBNCCGk9FOANmGLUe1YWkJenTdd7Ae7cOYyUlP9h3bqDKK8zwZ3PF8DLK+Tx2unjYGvrq+PSU3pYQgjRJwrwBoRh2Hvoiq734mJNz2eQn3+LGyD34MFfYJjabwVisQO6dBn3uOs9FGZm1jquAaWHJYQQQ0EBXs9kstqu96wszbvea2qkSE8//fh++gEUFt5R2u/o2BMSSRgmTLBDcfFcALpvRguFta10Sg9LCCGGgQK8HpSX145616brvbw8H6mphx+Pej+Cqqrapr6JiRBeXsPg5zcefn7j0amTN/h8Kbp3P4RLl0w0fq3GiMW1QZ3SwxJCiOGhAN9GFF3vmZnadb3n5d143PX+Bx4+PFuv690Jfn7j4Oc3/nHXe+uMYLOxqc353qlTq7wEIYQQHaEA30pkMiAnp3aQXGWlZufX1FTj/v1T3FS2R49SlfY7OfXi5qa7uQ0Aj6f7yeN108O6urKLuhBCCGkfKMDrUEVFba73nBzNu97LynLrdL0fRXV17QR3tut9ODeVzcbGU8elZ/H5gJNTbVBXpIclhBDSvlCAb6FHj2q73jXNj84wDHJzryEl5Q+kpBzAgwfnANSuAGNh4fy46z0cPj4jIRRa6rLoHIGAHfHu6sr+S+lhCSGk/aOPcg3V1LBrpytGvVdUaHa+TFaF+/fjuWVWi4ruKu13du4NP7/xkEjC4erav1W63gF2Trqile7oSOlhCSHE2FCAV0NlpXLXe01N8+fUVVaWgzt3DiE5+Q+kpR1FdXUpt8/ExAze3iMeB/XxsLb20HHpa1lZ1c5Rp/SwhBBi3CjAN+H2bTawFxRodh7b9X6VGyD38OF5KHe9u0AiYaexeXuPhFDYeqPXFOlhAWDYMGqpE0JIR0EBvgk3b6o/UI7teo/jgnpR0T2l/c7OfbgBcq6u/Vqt671uelhXV3alNrkcyMholZcjhBBioPQa4E+dOoU1a9YgISEBmZmZ2LdvHyZNmsTtZxgGS5cuxY8//ojCwkIEBQXhu+++g7+/v/4KXUdpafbjrvcDSEs7Bqm0jNtnamr+uOudzfVube3eauUwMWG73jt3pvSwhBBCWHoN8GVlZQgMDMRrr72GZ599tsH+1atXY926ddi2bRu6du2KFStWIDQ0FLdu3YKVHpYjYxgGOTlXuAFyGRl/o27Xu6WlKzdAztt7BAQCcauVRSisXZnNyYnSwxJCCFGm1wAfFhaGsLAwlfsYhsHXX3+NxYsX45lnngEAREdHw9nZGbt27cJbb72l8ryqqipUVVVxz4sfp42TSqWQSqVqlUtxHJ8vhUxWibt345CcfAjJyYdQXHxf6VhX176QSMZBIhkHF5c+4CnlbFXv9dQlFtdOZ7OzU04P29StBLlcqvRve0f1MXzGVqeaGrYe6n6GaHosIa3BYO/Bp6WlISsrC6NGjeK2mZmZITg4GGfPnm00wK9cuRJLly5tsP3YsWMQi9VrURcWFuLSpUu4eDEK//77r9IXBqFQiMDAQDz55JPo378/7Ljh6FkADqtdv5aoqmJH9GsqKytW94XRI6qP4TO2OsXGql+fusszE6IPBhvgs7KyAADOzs5K252dnXHv3j1VpwAAFi1ahPnz53PPi4uL4eHhgVGjRsHauvnlUTMyMuDt7a20zcqqMySSsZBIxsHbexgEAhEAIDWVfegSjwfY2tYut6rmd5ImyeVSZGXFwsUlFHx++79BT/UxfMZWJ5FIijt3YhEaGgqBmoNcijVddIIQHTPYAK/Aq7dMGcMwDbbVZWZmBjMV+VUFAoFaf5heXl7w9/dHVVUV3Nymws9vEpydeyu9pq5WZFNoq/SwfL7AKD5sFag+hs9Y6qQY46Lu54jiWEL0yWADvIuLCwC2Je+qmMgNICcnp0GrXtfOnj2LkydP4tKlsZDLW+ePlNLDEmL4TE3ZrI+00BJpjww2rPj4+MDFxQWxsbHo06cPAKC6uhrx8fFYtWpVq762SCRqletSel
hCDA+Px/aamZs3fCha7jRejrRHeg3wpaWlSElJ4Z6npaXh8uXLsLOzg6enJ+bNm4eoqChIJBJIJBJERUVBLBZj6tSpeiy1ZqysaqezUXpYQvRHIFAdyIVC5RkphBgLvQb4S5cuYdiwYdxzxeC46dOnY9u2bfjwww9RUVGBd999l0t0c+zYMb3MgdeEYpCcmxsb4AkhbYPHqw3c9YM55YogHY1eA3xISAgYhml0P4/HQ2RkJCIjI9uuUFpQpIdVtNRbqYefEPKYQKA6kLfWAFVC2iODvQdv6BTpYRX31GnALCG6xec3bIUrnlNrnJDmUYDXAKWHJUT36rbG6wZyao0T0jIU4JshFtfeT7e3p8E4hGhDVWtcEcjpizIhrYMCfBOCg9kBc4QQ9Sha4wIBu0Rxly6ApSXb+0UIaVsU4JtgY6PvEhBiePh81aPUzc1rcztIpcCVK+wsEhqfQoh+UIAnhKgkFKoO5NQaJ6R9oABPSAemaI2rCuSUaZGQ9o0CPCEdgKI1Xj+YU2ucEONFAZ4QI1G3NV4/kFNrnJCOhwI8Ie1M/da44kGD2QghdVGAJ8QAmZg0Pm+cWuOEEHVQgCdEj8zM2DXHAcDdnZ0zTq1xQoguUIAnpJWZmKgepa5ojUulwK1b7IJFFNgJIbpCAZ4QHVEE8PqBnII2IUQfKMATogFFa1zV3HFap4AQYkgowBNSD4/X+Lxxao0TQtoLCvCkw6rfGq8bzKk1Tghp7yjAE6OmqjWueJjSu58QYsToI44YBVPTxueNU2ucENIRUYAn7QaPVztvPCMD8PConTdOrXFCCFFGH4vE4JiaNj5vnMdj543fvAnY29OgN0IIaQwFeKIXita4qulm1BonhJCWo49S0qoUrfH6gZzujRNCSOuiAE9arH5rvG4wp9Y4IYToB338ErXVb40rHkIhtcYJIcTQUIAnShprjZubs4lhCCGEtA8U4DsogUB1IKfWOCGEGAcK8EaMx6vNn56RAXh61s4bp9Y4IYQYN76+C9CUyMhI8Hg8pYeLi4u+i2VwBALAygpwdATc3QE/PyAgAOjbF+jRA/D2Zo+zswMsLCi4E0JIR2DwLXh/f3/8+eef3HOTDhqd+PzG54130B8JIYSQJhh8gDc1NdWo1V5VVYWqqirueXFxMQBAKpVCKpWqdQ3FcXK5esfrUt1743X/FQpVHy+Xs4+mKOqjbv0NHdXH8BlbnbSpj7HUnbRfPIZhGH0XojGRkZFYs2YNbGxsYGZmhqCgIERFRcHX17fJc5YuXdpg+65duyAWi1uzuIQQwikvL8fUqVNRVFQEa2trfReHdEAGHeAPHz6M8vJydO3aFdnZ2VixYgVu3ryJa9euwd7eXuU5qlrwHh4eyMvLU/uPTCqVIjY2Fi4uoeDztU92rmlrvLUo6hMaGgqBESRvp/oYPmOrkzb1KS4uhoODAwV4ojcG3UUfFhbG/b9nz54YOHAgunTpgujoaMyfP1/lOWZmZjAzM2uwXSAQaPxBw+cLmg3w9e+N133wDWwIozY/A0NG9TF8xlYnTepjTPUm7ZNBB/j6LCws0LNnTyQnJ7f5awuFqlc4a+vWOCGEEKKOdhXgq6qqcOPGDQwdOrRNXs/Lq3beuKG1xgkhhJCmGHTYWrBgAeLj45GWloYLFy7gueeeQ3FxMaZPn94mr29rC4jFFNwJIYS0Pwbdgn/w4AFeeukl5OXlwdHREU899RTOnz8PLy8vfReNEEIIMWgGHeD37Nmj7yIQQggh7RJ1PhNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQMepqcLijW0lEsG6sOqVSK8vJyFBcXG0U+aaqPYTO2+gDGVydt6qP4zDHg9byIkTP6AF9SUgIA8PDw0HNJCCEdUUlJCWxsbPRdDNIBGfRysbogl8uRkZEBKysr8Hg8tc5RLDGbnp5uFMs8Un0Mm7HVBzC+OmlTH4ZhUFJSAjc3N/Ap3z
XRA6NvwfP5fLi7u2t1rrW1tVF8OClQfQybsdUHML46aVofarkTfaKvlYQQQogRogBPCCGEGCEK8CqYmZlhyZIlMDMz03dRdILqY9iMrT6A8dXJ2OpDOgajH2RHCCGEdETUgieEEEKMEAV4QgghxAhRgCeEEEKMEAV4QgghxAh12AC/ceNG+Pj4wNzcHP369cPp06ebPD4+Ph79+vWDubk5fH198f3337dRSdWjSX1+//13hIaGwtHREdbW1hg4cCCOHj3ahqVtnqa/H4W//voLpqam6N27d+sWUEOa1qeqqgqLFy+Gl5cXzMzM0KVLF2zZsqWNSts8Teuzc+dOBAYGQiwWw9XVFa+99hry8/PbqLRNO3XqFMLDw+Hm5gYej4f//ve/zZ5j6J8HhAAAmA5oz549jEAgYH766Sfm+vXrzNy5cxkLCwvm3r17Ko9PTU1lxGIxM3fuXOb69evMTz/9xAgEAua3335r45Krpml95s6dy6xatYr5+++/mdu3bzOLFi1iBAIB888//7RxyVXTtD4Kjx49Ynx9fZlRo0YxgYGBbVNYNWhTnwkTJjBBQUFMbGwsk5aWxly4cIH566+/2rDUjdO0PqdPn2b4fD7zzTffMKmpqczp06cZf39/ZtKkSW1cctUOHTrELF68mNm7dy8DgNm3b1+Txxv65wEhCh0ywA8YMIB5++23lbZ1796d+fjjj1Ue/+GHHzLdu3dX2vbWW28xTz31VKuVUROa1keVHj16MEuXLtV10bSibX2mTJnCfPrpp8ySJUsMKsBrWp/Dhw8zNjY2TH5+flsUT2Oa1mfNmjWMr6+v0rZvv/2WcXd3b7UyakudAG/onweEKHS4Lvrq6mokJCRg1KhRSttHjRqFs2fPqjzn3LlzDY4fPXo0Ll26BKlU2mplVYc29alPLpejpKQEdnZ2rVFEjWhbn61bt+LOnTtYsmRJaxdRI9rUZ//+/ejfvz9Wr16Nzp07o2vXrliwYAEqKiraoshN0qY+gwYNwoMHD3Do0CEwDIPs7Gz89ttvGDduXFsUWecM+fOAkLqMfrGZ+vLy8lBTUwNnZ2el7c7OzsjKylJ5TlZWlsrjZTIZ8vLy4Orq2mrlbY429alv7dq1KCsrwwsvvNAaRdSINvVJTk7Gxx9/jNOnT8PU1LDe0trUJzU1FWfOnIG5uTn27duHvLw8vPvuuygoKND7fXht6jNo0CDs3LkTU6ZMQWVlJWQyGSZMmID169e3RZF1zpA/Dwipq8O14BXqLx3LMEyTy8mqOl7Vdn3RtD4Ku3fvRmRkJGJiYuDk5NRaxdOYuvWpqanB1KlTsXTpUnTt2rWtiqcxTX4/crkcPB4PO3fuxIABAzB27FisW7cO27ZtM4hWPKBZfa5fv445c+bg888/R0JCAo4cOYK0tDS8/fbbbVHUVmHonweEAB2wBe/g4AATE5MGrY2cnJwG38oVXFxcVB5vamoKe3v7ViurOrSpj0JMTAxmzJiBX3/9FSNHjmzNYqpN0/qUlJTg0qVLSExMxOzZswGwAZJhGJiamuLYsWMYPnx4m5RdFW1+P66urujcubPSUqNPPPEEGIbBgwcPIJFIWrXMTdGmPitXrsTgwYOxcOFCAECvXr1gYWGBoUOHYsWKFe2uxWvInweE1NXhWvBCoRD9+vVDbGys0vbY2FgMGjRI5TkDBw5scPyxY8fQv39/CASCViurOrSpD8C23CMiIrBr1y6DuheqaX2sra2RlJSEy5cvc4+3334b3bp1w+XLlxEUFNRWRVdJm9/P4MGDkZGRgdLSUm7b7du3wefz4e7u3qrlbY429SkvLwefr/xRY2JiAqC25dueGPLnASFK9DS4T68U03w2b97MXL9+nZk3bx5jYWHB3L17l2EYhvn444+ZV199lTteMS3m/fffZ65fv85s3rzZoKbFaFqfXbt2Maampsx3333HZGZmco9Hjx7pqwpKNK1PfYY2il7T+pSUlDDu7u7Mc889x1
y7do2Jj49nJBIJ88Ybb+irCko0rc/WrVsZU1NTZuPGjcydO3eYM2fOMP3792cGDBigryooKSkpYRITE5nExEQGALNu3TomMTGRm/bX3j4PCFHokAGeYRjmu+++Y7y8vBihUMj07duXiY+P5/ZNnz6dCQ4OVjo+Li6O6dOnDyMUChlvb29m06ZNbVzipmlSn+DgYAZAg8f06dPbvuCN0PT3U5ehBXiG0bw+N27cYEaOHMmIRCLG3d2dmT9/PlNeXt7GpW6cpvX59ttvmR49ejAikYhxdXVlXn75ZebBgwdtXGrVTp482eTfQ3v8PCCEYRiGloslhBBCjFCHuwdPCCGEdAQU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEBXu3r0LHo+Hy5cv67sohBCiFQrwpN2KiIjApEmTGmyPi4sDj8fDo0ePtL62h4cHMjMzERAQoH0BCSFEjzrccrGENKe6uhpCoRAuLi76LgohhGiNWvDE6O3duxf+/v4wMzODt7c31q5dq7Tf29sbK1asQEREBGxsbDBz5swGXfQRERHg8XgNHnFxcQCAwsJCTJs2Dba2thCLxQgLC0NycjL3Gtu2bUOnTp1w9OhRPPHEE7C0tMSYMWOQmZnZVj8GQkgHQwGeGLWEhAS88MILePHFF5GUlITIyEh89tln2LZtm9Jxa9asQUBAABISEvDZZ581uM4333yDzMxM7jF37lw4OTmhe/fuANgvAJcuXcL+/ftx7tw5MAyDsWPHQiqVctcoLy/Hl19+ie3bt+PUqVO4f/8+FixY0Kr1J4R0YHpezY4QrU2fPp0xMTFhLCwslB7m5uYMAKawsJCZOnUqExoaqnTewoULmR49enDPvby8mEmTJikdk5aWxgBgEhMTG7zu3r17GTMzM+b06dMMwzDM7du3GQDMX3/9xR2Tl5fHiEQi5v/+7/8YhmHXRAfApKSkcMd89913jLOzc4t/DoQQogq14Em7NmzYMFy+fFnp8fPPP3P7b9y4gcGDByudM3jwYCQnJ6Ompobb1r9/f7VeLzExEdOmTcN3332HIUOGcK9hamqKoKAg7jh7e3t069YNN27c4LaJxWJ06dKFe+7q6oqcnBzNKkwIIWqiQXakXbOwsICfn5/StgcPHnD/ZxgGPB5PaT/DMCqv05ysrCxMmDABM2bMwIwZM5q8nqrXFggESvt5PF6j5xJCSEtRC54YtR49euDMmTNK286ePYuuXbvCxMRE7etUVlZi4sSJ6N69O9atW9fgNWQyGS5cuMBty8/Px+3bt/HEE0+0rAKEEKIlasETo/bBBx/gySefxPLlyzFlyhScO3cOGzZswMaNGzW6zltvvYX09HQcP34cubm53HY7OztIJBJMnDgRM2fOxA8//AArKyt8/PHH6Ny5MyZOnKjrKhFCiFqoBU+MWt++ffF///d/2LNnDwICAvD5559j2bJliIiI0Og68fHxyMzMRI8ePeDq6so9zp49CwDYunUr+vXrh/Hjx2PgwIFgGAaHDh1q0C1PCCFthcfQTUBCCCHE6FALnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFC/w9tmPyLlQOVXwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -2319,7 +3766,7 @@ "print('stds.shape (N,H,K) \\t', stds.shape)\n", "\n", "distr = GMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (means, stds)\n", + "distr_args = (means, stds, weights)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -2419,47 +3866,55 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " total_count_names = [f\"-total_count-{i}\" for i in range(1, n_components + 1)]\n", + " total_count_names = [\n", + " f\"-total_count-{i}\" for i in range(1, n_components + 1)\n", + " ]\n", " probs_names = [f\"-probs-{i}\" for i in range(1, n_components + 1)]\n", - " param_names = [i for j in zip(total_count_names, probs_names) for i in j]\n", - " self.output_names = self.output_names + param_names\n", + " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", + " self.param_names = [i for j in zip(total_count_names, probs_names, weight_names) for i in j]\n", + " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", - " self.output_names.insert(0, \"\") \n", + " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 2 * n_components\n", + " self.outputsize_multiplier = 3 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " mu, alpha = torch.tensor_split(output, 2, dim=-1)\n", - " return (mu, alpha)\n", + " mu, alpha, weights = output.chunk(3, dim=-1)\n", "\n", - " def scale_decouple(self, \n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " eps: float=0.2):\n", - " \"\"\" Scale Decouple\n", + " 
return mu, alpha, weights\n", + "\n", + " def scale_decouple(\n", + " self,\n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float = 1e-6,\n", + " ):\n", + " \"\"\"Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", " # Efficient NBinomial parametrization\n", - " mu, alpha = output\n", - " mu = F.softplus(mu) + 1e-8\n", - " alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts\n", + " mu, alpha, weights = output\n", + " mu = F.softplus(mu) + eps\n", + " alpha = F.softplus(alpha) + eps # alpha = 1/total_counts\n", + " weights = F.softmax(weights, dim=-1)\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(mu.size(dim=0), 1, -1)\n", " mu *= loc\n", - " alpha /= (loc + 1.)\n", + " alpha /= loc + 1.0\n", "\n", " # mu = total_count * (probs/(1-probs))\n", " # => probs = mu / (total_count + mu)\n", " # => probs = mu / [total_count * (1 + mu * (1/total_count))]\n", " total_count = 1.0 / alpha\n", - " probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 \n", - " return (total_count, probs)\n", + " probs = (mu * alpha / (1.0 + mu * alpha))\n", + " probs = torch.clamp(probs, eps, 1 - eps)\n", + " return (total_count, probs, weights)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -2481,16 +3936,10 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " total_count, probs = distr_args\n", + " total_count, probs, weights = distr_args\n", " B, H, K = total_count.size()\n", " Q = len(self.quantiles)\n", " assert total_count.shape == probs.shape\n", - "\n", - " # Sample K ~ Mult(weights)\n", - " # shared across B, H\n", - " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", - " \n", - " weights = (1/K) * 
torch.ones_like(probs, device=probs.device)\n", " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", @@ -2533,17 +3982,15 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " total_count, probs = distr_args\n", + " total_count, probs, weights = distr_args\n", " B, H, K = total_count.size()\n", " \n", - " weights = (1/K) * torch.ones_like(probs, device=probs.device)\n", - " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", "\n", @@ -2567,7 +4014,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -2608,7 +4055,40 @@ "execution_count": null, "id": "b67e2931", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "counts.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "probs.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "samples.shape (N,H,num_samples) torch.Size([2, 2, 2000])\n", + "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", + "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABHt0lEQVR4nO3deVxU1f8/8NcFhmEVBGRTQEzUFHfTxAX8KJi7aZo7+DHTXJAwNbMSzKSw1FzSNAM/mdmn0jIzlUpNv5gLSi750SxATYkUBRSFEc7vD3/cHIdlGAZmez0fj3nonHvuOe8zA8x7zj33XkkIIUBEREQWxcrQARAREVHdYwJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJgYEeOHMHTTz8Nf39/KJVKeHl5oWvXrpg9e7ZavbCwMISFhdV6PJIkIS4uTm/tNW7cGAMHDtRbe5XZv38/JEnC/v3766S/6goLC4MkSZAkCVZWVnB2dkbTpk0xYsQIfPHFFygtLdXYp3HjxoiKiqpWP6mpqYiLi8OtW7eqtd+jfZW9nl988UW12qlMYWEh4uLiyn2PkpOTIUkSMjMz9dYfEVXMxtABWLJvv/0WgwcPRlhYGBITE+Hj44Nr167h+PHj2Lp1K95991257vvvv2/ASE1Dhw4dcPjwYbRs2dLQoVSoSZMm+OSTTwAAd+7cQUZGBr766iuMGDECPXr0wDfffAMXFxe5/vbt21GvXr1q9ZGamor4+HhERUXB1dVV6/106au6CgsLER8fDwAaCe2AAQNw+PBh+Pj41GoMRPQAEwADSkxMRGBgIPbs2QMbm3/eilGjRiExMVGtrjF/qBmaSqWCJEmoV68ennzySUOHUyl7e3uNGJ977jkkJSXh3//+N55//nl89tln8rb27dvXekx3796Fvb19nfRVmQYNGqBBgwYGjYHIkvAQgAHduHEDHh4eah/+Zays1N+aRw8BZGZmQpIkvPPOO1i2bBkCAwPh5OSErl274ueff9Zob8OGDWjWrBmUSiVatmyJLVu2ICoqCo0bN64yzuzsbEyZMgWNGjWCra0tAgMDER8fj/v372s91t27d6NDhw6wt7dHixYt8NFHH2nUOXPmDIYMGYL69evDzs4O7dq1w6ZNm9TqlE1Lf/zxx5g9ezYaNmwIpVKJixcvahwCKHuNKno87KOPPkLbtm1hZ2cHNzc3PP300zh37pxanaioKDg5OeHixYvo378/nJyc4Ofnh9mzZ6OoqEjr16I8EydORP/+/fH5558jKytLLn90Wr60tBSLFy9G8+bNYW9vD1dXV7Rp0wbvvfceACAuLg5z5swBAAQGBspjLXtNyg7JbNu2De3bt4ednZ38jbyiww337t1DbGwsvL29YW9vj9DQUJw8eVKtTkWHqB7+GcvMzJQ/4OPj4+XYyvqs6BCAvt+btWvXom3btnBycoKzszNatGiBV155RSN2InPHGQAD6tq1Kz788ENER0dj7Nix6NChAxQKRbXaWLNmDVq0aIEVK1YAAF577TX0798fGRkZ8lTy+vXrMWXKFAwfPhzLly9HXl4e4uPjtfrQys7ORufOnWFlZYXXX38djz32GA4fPozFixcjMzMTSUlJVbbxyy+/YPbs2Xj55Zfh5eWFDz/8EJMmTULTpk3Rs2dPAMD58+cREhICT09PrFy5Eu7u7ti8eTOioqLw119/Ye7cuWptzp8/H127dsW6detgZWUFT09PZGdnq9Xx8fHB4cOH1cr+/vtvjBs3Dg0bNpTLEhIS8Morr2D06NFISEjAjRs3EBcXh65du+LYsWMICgqS66pUKgwePBiTJk3C7Nmz8dNPP+GNN96Ai4sLXn/99Spfi8oMHjwYu3btwsGDBxEQEFBuncTERMTFxeHVV19Fz549oVKp8L///U8+3v/cc88hNzcXq1atwrZt2+Tp9IdnkE6cOIFz587h1Vd
fRWBgIBwdHSuN65VXXkGHDh3w4YcfIi8vD3FxcQgLC8PJkyfRpEkTrcfn4+OD3bt346mnnsKkSZPw3HPPAUCl3/r1/d5s3boV06ZNw8yZM/HOO+/AysoKFy9exK+//qr1OIjMhiCDuX79uujevbsAIAAIhUIhQkJCREJCgigoKFCrGxoaKkJDQ+XnGRkZAoBo3bq1uH//vlx+9OhRAUB8+umnQgghSkpKhLe3t+jSpYtae1lZWUKhUIiAgAC1cgBi4cKF8vMpU6YIJycnkZWVpVbvnXfeEQDE2bNnKx1jQECAsLOzU9v/7t27ws3NTUyZMkUuGzVqlFAqleLSpUtq+/fr1084ODiIW7duCSGE2LdvnwAgevbsqdFX2bZ9+/aVG8udO3dE586dhY+Pj8jMzBRCCHHz5k1hb28v+vfvr1b30qVLQqlUijFjxshlkZGRAoD473//q1a3f//+onnz5pW+DkI8eA9btWpV4fbvvvtOABBvv/22XBYQECAiIyPl5wMHDhTt2rWrtJ+lS5cKACIjI0NjW0BAgLC2thbnz58vd9vDfZW9nh06dBClpaVyeWZmplAoFOK5555TG9vDP59lIiMj1X7G/v77b42fsTJJSUlqcdfGezNjxgzh6uqq0TeRJeIhAANyd3fHwYMHcezYMbz11lsYMmQILly4gPnz56N169a4fv16lW0MGDAA1tbW8vM2bdoAgDyNfP78eWRnZ2PkyJFq+/n7+6Nbt25Vtr9z50706tULvr6+uH//vvzo168fAODAgQNVttGuXTv4+/vLz+3s7NCsWTO1qe4ff/wRvXv3hp+fn9q+UVFRKCws1PgmP3z48Cr7fVhJSQmeffZZnDt3Drt27ZK/YR8+fBh3797VmPr28/PDv/71L/zwww9q5ZIkYdCgQWplbdq0URuLroQQVdbp3LkzfvnlF0ybNg179uxBfn5+tftp06YNmjVrpnX9MWPGqB0yCQgIQEhICPbt21ftvqujNt6bzp0749atWxg9ejS+/vprrX7HiMwVEwAj0KlTJ8ybNw+ff/45rl69ihdffBGZmZkaCwHL4+7urvZcqVQCeLCwC3iwzgAAvLy8NPYtr+xRf/31F7755hsoFAq1R6tWrQBAqz+gj8ZYFmdZjGVxlrf629fXV20cZaq7Unzq1KnYvXs3vvjiC7Rr106t34ra8/X11ejXwcEBdnZ2GmO5d+9eteIpT9kHVdmYyzN//ny88847+Pnnn9GvXz+4u7ujd+/eOH78uNb9VPe18/b2Lrfs0ddG32rjvRk/fjw++ugjZGVlYfjw4fD09ESXLl2QkpJSCyMgMm5MAIyMQqHAwoULATxYFFdTZR++f/31l8a2R4+Zl8fDwwMRERE4duxYuY9JkybVOMayOK9du6ZRfvXqVTmOhz26iK8ycXFx+PDDD7FhwwZERERo9Augwr4f7bc27dixA5IkyesiymNjY4PY2FicOHECubm5+PTTT3H58mX07dsXhYWFWvVTndcOKP/nJDs7Wy2xs7OzK3dNSU2+YdfWezNx4kSkpqYiLy8P3377LYQQGDhwoF5mcYhMCRMAAyrvDxsAeYVzZd8EtdW8eXN4e3vjv//9r1r5pUuXkJqaWuX+AwcOxJkzZ/DYY4+hU6dOGg99xAgAvXv3xo8//ih/4Jf5z3/+AwcHB51P79u4cSPi4+OxaNGicle4d+3aFfb29ti8ebNa+ZUrV+TDEnUhKSkJ3333HUaPHq12uKQyrq6ueOaZZzB9+nTk5ubKq+cfnQWqqU8//VTt8ERWVhZSU1PVVv03btwYFy5cUEsCbty4ofEzVp3Yavu9cXR0RL9+/bBgwQIUFxfj7NmzNWqPyNTwLAAD6tu3Lxo1aoRBgwahRYsWKC0tRXp6Ot599104OTlh1qxZNe7DysoK8fHxmDJlCp555hn8+9//xq1btxAfHw8fHx+N0w0ftWjRIqSkpCAkJATR0dFo3rw57t27h8zMTOzatQv
r1q1Do0aNahznwoUL5fUGr7/+Otzc3PDJJ5/g22+/RWJiotrFcbR1+PBhTJ06Fd26dUN4eLjG6ZFPPvkkXF1d8dprr+GVV17BhAkTMHr0aNy4cQPx8fGws7OTZ2P05e7du3Icd+/exR9//IGvvvoKO3fuRGhoKNatW1fp/oMGDUJwcDA6deqEBg0aICsrCytWrEBAQIC8Ir5169YAgPfeew+RkZFQKBRo3rw5nJ2ddYo5JycHTz/9NCZPnoy8vDwsXLgQdnZ2mD9/vlxn/Pjx+OCDDzBu3DhMnjwZN27cQGJiosaFhZydnREQEICvv/4avXv3hpubGzw8PMo9HbU23pvJkyfD3t4e3bp1g4+PD7Kzs5GQkAAXFxc88cQT1W6PyKQZeBGiRfvss8/EmDFjRFBQkHBychIKhUL4+/uL8ePHi19//VWtbkVnASxdulSjXZSzynr9+vWiadOmwtbWVjRr1kx89NFHYsiQIaJ9+/ZV7vv333+L6OhoERgYKBQKhXBzcxMdO3YUCxYsELdv3650jAEBAWLAgAEa5eWtGj99+rQYNGiQcHFxEba2tqJt27YiKSlJrU7ZyvTPP/9co81HzwIoW1Ve0eNhH374oWjTpo2wtbUVLi4uYsiQIRpnOERGRgpHR0eNfhcuXKjRXnlCQ0PV+nd0dBRNmjQRzzzzjPj8889FSUmJxj6Prsx/9913RUhIiPDw8BC2trbC399fTJo0ST6rocz8+fOFr6+vsLKyUntNKno/yuur7PX8+OOPRXR0tGjQoIFQKpWiR48e4vjx4xr7b9q0STz++OPCzs5OtGzZUnz22WcaZwEIIcT3338v2rdvL5RKpQAg9/noWQBl9PnebNq0SfTq1Ut4eXkJW1tb4evrK0aOHClOnTpV7mtCZM4kIbRYekxm59atW2jWrBmGDh2K9evXGzocIiKqYzwEYAGys7Px5ptvolevXnB3d0dWVhaWL1+OgoICvRxmICIi08MEwAIolUpkZmZi2rRpyM3NlRfVrVu3Tj6dj4iILAsPARAREVkgngZIRERkgZgAEBERWSCuAcCDW6xevXoVzs7O1b5KGhGRroQQKCgogK+vb5XX5CDSNyYAeHBZ0UdvQkNEVFcuX76slwtqEVUHEwBAvkLa5cuXNa5cVhGVSoW9e/ciIiICCoWiNsOrcxybaTLXsZnruAAgPz8ffn5+Ol+lkagmmADgn5uj1KtXr1oJgIODA+rVq2d2f5Q4NtNkrmMz13E9jIceyRB40ImIiMgCMQEgIiKyQEwAiIiILBDXABARGbHS0lIUFxcbOgwyAQqFAtbW1lrXZwJARGSkiouLkZGRgdLSUkOHQibC1dUV3t7eWi0sZQJARGSEhBC4du0arK2t4efnxwsFUaWEECgsLEROTg4AwMfHp8p9mAAQERmh+/fvo7CwEL6+vnBwcDB0OGQC7O3tAQA5OTnw9PSs8nAAU0oiIiNUUlICALC1tTVwJGRKypJFlUpVZV3OAJiA5SkXyi1/MbxZHUdCRHWNFwmi6qjOzwtnAIiIiCwQEwAiIiILxASAiIjIAnENgAkrb20A1wUQmbeK1gTVFmP8mxIWFoZ27dphxYoVhg7FpHEGgIiI9CoqKgpDhw7VKN+/fz8kScKtW7dq1P62bdvwxhtv1KgNU/DTTz9h0KBB8PX1hSRJ+Oqrr/TaPmcAjExdZ/dERKaiuLgYtra2cHNzM3QoNRIWFoaoqChERUVVWu/OnTto27YtJk6ciOHDh+s9Ds4AEBGRQRQVFSE6Ohqenp6ws7ND9+7dcezYMXl7WFgYZsyYgdjYWHh4eCA8PFwuj4mJAQBkZmZCkiSNR1hYmFZ9lLUXHR2NuXPnws3NDd7e3oiLi6s09sGDB5fbryRJ2LFjh15en379+mHx4sUYNmyYXtp7FBMAIiIyiLlz5+LLL7/Epk2bcOLECTRt2hR9+/ZFbm6uXGfTpk2wsbHB//3f/+GDDz7QaMPPzw/Xrl2
THydPnoS7uzt69uypdR9l/Tg6OuLIkSNITEzEokWLkJKSUmHsSUlJuHbtGn777TcAwK5du+QY+vfvr4+Xp9YZNAGozvGNKVOmQJIkjUUfRUVFmDlzJjw8PODo6IjBgwfjypUrtRs4ERFVaufOnXByclJ79OvXT95+584drF27FkuXLkW/fv3QsmVLbNiwAfb29ti4caNcr2nTpkhMTETz5s3RokULjX6sra3h7e0Nb29vuLq6YurUqejatSvi4uK07gMA2rRpg4ULFyIoKAgTJkxAp06d8MMPP1Q4Pnd3d3h7e+Pvv/+GJEno3r27HIeNjWkcXTdoAlB2fGP16tWV1vvqq69w5MgR+Pr6amyLiYnB9u3bsXXrVhw6dAi3b9/GwIED5ctoEhFR3evVqxfS09PVHh9++KG8/ffff4dKpUK3bt3kMoVCgc6dO+PcuXNyWadOnbTuc9KkSSgoKMCWLVtgZWWldR/AgwTgYT4+PvKNdSpz6tQpNG7cGM7OzhXWWbJkiVoidPDgQUydOlWjrK4ZNE3p16+fWkZYnj///BMzZszAnj17MGDAALVteXl52LhxIz7++GP06dMHALB582b4+fnh+++/R9++fWstdnO1POUCJFGCQABr9l2EkKyN8jQgIjJujo6OaNq0qVrZw7OzQggAmpeuFUKolTk6OmrV3+LFi7F7924cPXpU/jDWtg/gQWLwMEmStLoN86lTpzSSh0dNnToVI0eOlJ+PHTsWw4cPVzu237Bhwyr70jejnqcoLS3F+PHjMWfOHLRq1Upje1paGlQqFSIiIuQyX19fBAcHIzU1tcIEoKioCEVFRfLz/Px8AA9unqDNDRTK6j78r75IomYzFzWNRxIlcgxl/+p7jIZUW++bMTDXsZnruADzHJO2mjZtCltbWxw6dAhjxowB8OD1OH78uLzAT1tffvklFi1ahO+++w6PPfZYrfRRkczMTAQHB1dax83NTe3MBXt7e3h6emokSHXNqBOAt99+GzY2NoiOji53e3Z2NmxtbVG/fn21ci8vL2RnZ1fYbkJCAuLj4zXK9+7dW+3bbla2SEQXgTXcf9eump1G+HD/je/9rpc2jZG+3zdjYq5jM8dxFRYWGjoEg3F0dMQLL7yAOXPmwM3NDf7+/khMTERhYSEmTZqkdTtnzpzBhAkTMG/ePLRq1Ur+2192uqA++qhMaWkpsrKycOXKFTRs2FCvN2+6ffs2Ll68KD/PyMhAenq6PJaaMtoEIC0tDe+99x5OnDhR7Re0vOmdh82fPx+xsbHy8/z8fPj5+SEiIgL16tXTqg+VSoWUlBSEh4drTB3VxJp9F6uuVInpvWqWUa7ZdxGSKEHje78j0+4xCMm6xm0ak9p634yBuY7NXMcF/DP7WB3mdEjurbfekmd6CwoK0KlTJ+zZs0fjS11ljh8/jsLCQixevBiLFy+Wy0NDQ7F//3699FGZ6OhoPP/882jRogXy8/P1mgAcP34cvXr1kp+XfW5FRkYiOTm5xu0bbQJw8OBB5OTkqGU5JSUlmD17NlasWIHMzEx4e3ujuLgYN2/eVHszc3JyEBISUmHbSqUSSqVSo1yhUFT7D4wu+1RGSNY12r+msTzcv5CsISRrs/ujC+j/fTMm5jo2cxyXuY2nTEUfTmFhYfJxeQCws7PDypUrsXLlynLr79+/v8ryqi6oU1UfFfWj7VX3+vXrh8uXL2tVt7L+yvPo66VvRnsdgPHjx+PUqVNqK0h9fX0xZ84c7NmzBwDQsWNHKBQKtanBa9eu4cyZM5UmAERERJbOoDMAVR3fcHd3V6uvUCjg7e2N5s2bAwBcXFwwadIkzJ49G+7u7nBzc8NLL72E1q1by2cFEBERkSaDJgD6OL6xfPly2NjYYOTIkbh79y569+6N5ORkWFvXbCqdiIjInBk0Aaju8Y3MzEyNMjs7O6xatQqrVq3SY2RERETmzWjXABAREVHtYQJARERkgZgAEBERWSCjvQ4A6WZ
5iuZV+8zpwiFERKQfnAEgIiKyQEwAiIiILBAPARARmZJ9CXXbX6/5ddufFsLCwtCuXTusWLHC0KGYNM4AEBGRXkVFRWHo0KEa5fv374ckSbh161aN2t+2bRveeOONGrVhChISEvDEE0/A2dkZnp6eGDp0KM6fP6+39pkAEBGRSSguLgYAuLm5wdnZ2cDR6C4sLEyrq90eOHAA06dPx88//4yUlBTcv38fERERuHPnjl7iYAJAREQGUVRUhOjoaHh6esLOzg7du3fHsWPH5O1hYWGYMWMGYmNj4eHhgfDwcLk8JiYGwIMrxEqSpPEICwvTqo+y9qKjozF37ly4ubnB29sbcXFxlcY+ePDgcvuVJAk7duzQy+uze/duREVFoVWrVmjbti2SkpJw6dIlpKWl6aV9JgBERGQQc+fOxZdffolNmzbhxIkTaNq0Kfr27Yvc3Fy5zqZNm2BjY4P/+7//wwcffKDRhp+fH65duyY/Tp48CXd3d/Ts2VPrPsr6cXR0xJEjR5CYmIhFixap3Wn2UUlJSbh27Rp+++03AMCuXbvkGPr376+Pl0dDXl4egAczIPrARYAWrOyaAU9eWi+XPQmgVLLGdfcQPHElGVaiBNjnXu5CIF5zgIgqsnPnTjg5OamVlZSUyP+/c+cO1q5di+TkZPTr1w8AsGHDBqSkpGDjxo2YM2cOAKBp06ZITEyssB9ra2t4e3sDAO7du4ehQ4eia9euiIuL07oPAGjTpg0WLlwIAAgKCsLq1avxww8/yLMOjyq7W+3hw4chSRK6d+9eq4clhBCIjY1F9+7dERwcrJc2OQNARER616tXL6Snp6s9PvzwQ3n777//DpVKhW7dusllCoUCnTt3xrlz5+SyTp06ad3npEmTUFBQgC1btsDKykrrPoAHCcDDfHx8kJOTU2Wfp06dQuPGjSv98F+yZAmcnJzkx8GDBzF16lSNssrMmDEDp06dwqefflplTNriDAAREemdo6MjmjZtqlZ25coV+f9ld4KVJEmtjhBCrczR0VGr/hYvXozdu3fj6NGj8oextn0ADxKDh0mShNLS0ir7PXXqlEby8KipU6di5MiR8vOxY8di+PDhGDZsmFzWsGHDCvefOXMmduzYgZ9++gmNGjWqMiZtcQaAiIjqXNOmTWFra4tDhw7JZSqVCsePH8fjjz9erba+/PJLLFq0CP/973/x2GOP1UofFcnMzETz5s0rrePm5oamTZvKD3t7e3h6emqUPUoIgRkzZmDbtm348ccfERgYqJeYy3AGgIiI6pyjoyNeeOEFzJkzB25ubvD390diYiIKCwsxadIkrds5c+YMJkyYgHnz5qFVq1bIzs4GANja2sLNzU0vfVSmtLQUWVlZuHLlCho2bKgxs1AT06dPx5YtW/D111/D2dlZHpuLi0u5CUN1MQEgIjIlRnhlPl299dZbKC0txfjx41FQUIBOnTphz549qF+/vtZtHD9+HIWFhVi8eDEWL14sl4eGhmL//v166aMy0dHReP7559GiRQvk5+frNQFYu3YtAMinNJZJSkpCVFRUjdtnAkBERHpV0UVuwsLC5OPyAGBnZ4eVK1di5cqV5dbfv39/leVRUVGVfhhW1UdF/Xz11VcV1n9Yv379cPnyZa3qVtZfeR5+rWqDQdcA/PTTTxg0aBB8fX0hSZLaC65SqTBv3jy0bt0ajo6O8PX1xYQJE3D16lW1NoqKijBz5kx4eHjA0dERgwcPVltoQkRERJoMmgDcuXMHbdu2xerVqzW2FRYW4sSJE3jttddw4sQJbNu2DRcuXMDgwYPV6sXExGD79u3YunUrDh06hNu3b2PgwIFq55sSERGROoMeAujXr598cYZHubi4aFyFadWqVejcuTMuXboEf39/5OXlYePGjfj444/Rp08fAMDmzZvh5+eH77//Hn379i237aKiIhQVFcnP8/PzATyYdVCpVFrFXlZP2/rakoT+E5eKYizrq1SyVisve172r0p
YAeW0UV6sK/ae0yib3qupRpmh1Nb7ZgzMdWzmOi7APMdEpsOk1gDk5eVBkiS4uroCANLS0qBSqRARESHX8fX1RXBwMFJTUytMABISEhAfH69RvnfvXjg4OFQrpsouFakL/Z7k8cCuXZpX7Hu4r+vuIeVuz3Xr8mD/AgC7dlW4v679G5K+3zdjYq5jM8dxFRYWGjoEsmAmkwDcu3cPL7/8MsaMGYN69eoBALKzs2Fra6uxmtPLy0s+XaI88+fPR2xsrPw8Pz8ffn5+iIiIkNuuikqlQkpKCsLDwzUuIFETa/Zd1FtbZSr6Bl7W1xNXktXKSyVr5Lp1gVvuEViJEnRu7Ab0iK1wf137N4Taet+MgbmOzVzHBfwz+1iZ2l4IRuZFm4sXlTGJBEClUmHUqFEoLS3F+++/X2X98q7y9DClUgmlUqlRrlAoqv0HRpd9KiMemY7Xh4riK+vLqoLDDlaiBFaiBMcz/sbPJRmaFbSM1Rj/aOv7fTMm5jo2cxxXZeNRKBSQJAl///03GjRooNfTy8j8CCFQXFyMv//+G1ZWVrC1ta1yH6NPAFQqFUaOHImMjAz8+OOPat/Qvb29UVxcjJs3b6rNAuTk5CAkpPxpbSIiU2BtbY1GjRrhypUryMzMNHQ4ZCIcHBzg7+8PK6uq1/gbdQJQ9uH/22+/Yd++ffLdl8p07NgRCoUCKSkp8nWWr127hjNnzlR69ygiIlPg5OSEoKAgLhYkrVhbW8PGxkbr2SKDJgC3b9/GxYv/HEfOyMhAeno63Nzc4Ovri2eeeQYnTpzAzp07UVJSIh/Xd3Nzg62tLVxcXDBp0iTMnj0b7u7ucHNzw0svvYTWrVvLZwUQ1bl9CVXXMaOruVHtsra2hrW1/g8NEhk0ATh+/Dh69eolPy9bmBcZGYm4uDjs2LEDANCuXTu1/fbt2ydfGnH58uWwsbHByJEjcffuXfTu3RvJyckW8wvz5KX1WtR6p9bjICIi02LQBODRy0I+SpvVr3Z2dli1ahVWrVqlz9CIiIjMGm8HTEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGM+kJApCcVnJf+5KUbWjdR1emGP/s/X62QylXV+fM8d56ISG84A0BERGSBOANgAQ7/of03fSIisgycASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJABr0S4E8//YSlS5ciLS0N165dw/bt2zF06FB5uxAC8fHxWL9+PW7evIkuXbpgzZo1aNWqlVynqKgIL730Ej799FPcvXsXvXv3xvvvv49GjRoZYETVszzlgqFDMD9V3U+g+0t1EwcRkZEzaAJw584dtG3bFhMnTsTw4cM1ticmJmLZsmVITk5Gs2bNsHjxYoSHh+P8+fNwdnYGAMTExOCbb77B1q1b4e7ujtmzZ2PgwIFIS0uDtbV1XQ+JalNVH+7mRJux8uZIRFQDOh0CSE5ORmFhYY0779evHxYvXoxhw4ZpbBNCYMWKFViwYAGGDRuG4OBgbNq0CYWFhdiyZQsAIC8vDxs3bsS7776LPn36oH379ti8eTNOnz6N77//vsbxERERmSudZgDmz5+P6OhojBgxApMmTUJISIi+40JGRgays7MREREhlymVSoSGhiI1NRVTpkxBWloaVCqVWh1fX18EBwcjNTUVffv2LbftoqIiFBUVyc/z8/MBACqVCiqVSqv4yuppW788kijRed8ypZL+ZznK2qxO25WNRevXSNT+khR9vG9V0mYcVfWvQxt1MjYDMNdxAeY5JjIdOiUAV65cwbfffovk5GT06tULgYGBmDhxIiIjI+Ht7a2XwLKzswEAXl5eauVeXl7IysqS69ja2qJ+/foadcr2L09CQgLi4+M1yvfu3QsHB4dqxZmSklKt+g8L1HnPf1x313/yVSbXrYvWdQPvVryeYdcubdc6tNC6P539//erJu9b1bQ
Yx65dtdZG7Y7NcMxxXPqYSSXSlU4JgLW1NQYPHozBgwcjJycHmzdvRnJyMl577TU89dRTmDRpEgYNGgQrq5p/o5MkSe25EEKj7FFV1Zk/fz5iY2Pl5/n5+fDz80NERATq1aunVVwqlQopKSkIDw+HQqHQap9Hrdl3Uaf9HvbEleQat/GoUskauW5d4JZ7BFZazlIcaxRV4bbpvZpq1/HBZdrVqwHVkzNr/L5VSZtx9IitfLsObejjZ9IYmeu4gH9mH4kMocaLAD09PdGtWzecP38eFy5cwOnTpxEVFQVXV1ckJSUhLCxMp3bLZhKys7Ph4+Mjl+fk5MizAt7e3iguLsbNmzfVZgFycnIqPSyhVCqhVCo1yhUKRbX/wOiyTxmhh+l7bT+gdW1b2/YrG4vWr49Uql29mvj/sdTkfauSNuOoqu8atFGrYzMgcxyXuY2HTIvOX9H/+usvvPPOO2jVqhXCwsKQn5+PnTt3IiMjA1evXsWwYcMQGRmpc2CBgYHw9vZWm/YrLi7GgQMH5A/3jh07QqFQqNW5du0azpw5UyvrEoiIiMyFTjMAgwYNwp49e9CsWTNMnjwZEyZMgJubm7zd3t4es2fPxvLlyytt5/bt27h48Z9p8IyMDKSnp8PNzQ3+/v6IiYnBkiVLEBQUhKCgICxZsgQODg4YM2YMAMDFxQWTJk3C7Nmz4e7uDjc3N7z00kto3bo1+vTpo8vQiIiILIJOCYCnpycOHDiArl27VljHx8cHGRkZlbZz/Phx9OrVS35edlw+MjISycnJmDt3Lu7evYtp06bJFwLau3evfA0AAFi+fDlsbGwwcuRI+UJAycnJvAYAERFRJXRKAEJDQ9GhQweN8uLiYmzduhUTJkyAJEkICAiotJ2wsDAIISrcLkkS4uLiEBcXV2EdOzs7rFq1CqtWrdI6fiIiIkun0xqAiRMnIi8vT6O8oKAAEydOrHFQREREVLt0SgAqOs3uypUrcHFxqXFQREREVLuqdQigffv2kCQJkiShd+/esLH5Z/eSkhJkZGTgqaee0nuQREREpF/VSgDK7tSXnp6Ovn37wsnJSd5ma2uLxo0bl3tTHyKjcXAZgBYP/q3oXHtTucnOozcMElZQG5upjIOIDKJaCcDChQsBAI0bN8azzz4LOzu7WgmKiIiIapdOZwHU5AI/REREZHhaJwBubm64cOECPDw8UL9+/UqvtZ+bm6uX4IiIiKh2aJ0ALF++XL4Az/Lly6u8IQ+RmkePVwM4/McNjbKuTdzrIhoiIoundQLw8LR/VFRUbcRCREREdUTrBKA6t63U9pa6REREZBhaJwCurq5VTvuXXSCopKT2blFLxunJS+sr3riP0/pERMZG6wRg3759tRkHERER1SGtE4DQ0NDajIOIiIjqkNYJwKlTpxAcHAwrKyucOnWq0rpt2rSpcWBEBlPOGQtqeIU9IjIDWicA7dq1Q3Z2Njw9PdGuXTtIklTurXy5BoCIiMj4aZ0AZGRkoEGDBvL/iYiIyHRpnQAEBASU+38ii1PVIQJjwUMZRFQJne4FAADnz5/HqlWrcO7cOUiShBYtWmDmzJlo3ry5PuMjIiKiWmCly05ffPEFgoODkZaWhrZt26JNmzY4ceIEgoOD8fnnn+stuPv37+PVV19FYGAg7O3t0aRJEyxatAilpf/cxlUIgbi4OPj6+sLe3h5hYWE4e/as3mIgIiIyRzrNAMydOxfz58/HokWL1MoXLlyIefPmYcSIEXoJ7u2338a6deuwadMmtGrVCsePH8fEiRPh4uKCWbNmAQASExOxbNkyJCcno1mzZli8eDHCw8Nx/vx5+d4FREREpE6nBCA7OxsTJkzQKB83bhyWLl1a46DKHD58GEOGDMGAAQMAAI0bN8ann36K48ePA3jw7X/FihVYsGABhg0bBgDYtGkTvLy8sGXLFkyZMkVvsVDd4A2CiIjqhk4JQFhYGA4ePIimTZuqlR86dAg9evTQS2AA0L1
7d6xbtw4XLlxAs2bN8Msvv+DQoUNYsWIFgAdnI2RnZyMiIkLeR6lUIjQ0FKmpqRUmAEVFRSgqKpKfl93nQKVSQaVSaRVbWT1t65dHEjU/XbJUsq5xGxW1qa+2VaL8I03atl/R/jWJRZ9t6hZIFT83OsRX7bHV4Ge3Lunjd81YmeOYyHRonQDs2LFD/v/gwYMxb948pKWl4cknnwQA/Pzzz/j8888RHx+vt+DmzZuHvLw8tGjRAtbW1igpKcGbb76J0aNHA3gwEwEAXl5eavt5eXkhKyurwnYTEhLKjXPv3r1wcHCoVowpKSnVqv+wQJ33/Md19xA9tFK+XLcuemlnV0EFG7T8Yl/h/jWQcruZ/hutjl27qqjQQuemtR5blTEYl5r8rhmrwsJCQ4dAFkwS5V3NpxxWVtp9q9DnhYC2bt2KOXPmYOnSpWjVqhXS09MRExODZcuWITIyEqmpqejWrRuuXr0KHx8feb/Jkyfj8uXL2L17d7ntljcD4Ofnh+vXr2t9J0OVSoWUlBSEh4dDoVDoNL41+y7qtN/DnriSXOM2HlUqWSPXrQvcco/ASg+zFJ0bu5VbfjQzt0b760IlrJByuxnCnS5AIZVWvUNt6RFb+faDy6rdZLXHVlUMRkIfv2vGKj8/Hx4eHsjLy+NdVKnOaT0D8PDK+7oyZ84cvPzyyxg1ahQAoHXr1sjKykJCQgIiIyPh7e0N4MFMwMMJQE5OjsaswMOUSiWUSqVGuUKhqPYfGF32KSP0MMWujw/oytrWR/sVfRhp23ZtfFArpFLDJgBV/czUIDatx2ZiH6Y1+V0zVuY2HjItBj4QWrnCwkKNmQdra2s5GQkMDIS3t7fa1GBxcTEOHDiAkJDamxonIiIydTpfCOjOnTs4cOAALl26hOLiYrVt0dHRNQ4MAAYNGoQ333wT/v7+aNWqFU6ePIlly5bh3//+N4AHhxtiYmKwZMkSBAUFISgoCEuWLIGDgwPGjBmjlxiIiIjMkU4JwMmTJ9G/f38UFhbizp07cHNzw/Xr1+Hg4ABPT0+9JQCrVq3Ca6+9hmnTpiEnJwe+vr6YMmUKXn/9dbnO3LlzcffuXUybNg03b95Ely5dsHfvXl4DgIiIqBI6JQAvvvgiBg0ahLVr18LV1RU///wzFAoFxo0bJ1+gRx+cnZ2xYsUK+bS/8kiShLi4OMTFxemtXyIiInOn0xqA9PR0zJ49G9bW1rC2tkZRURH8/PyQmJiIV155Rd8xEhERkZ7plAAoFApIkgTgwTn3ly5dAgC4uLjI/yciIiLjpdMhgPbt2+P48eNo1qwZevXqhddffx3Xr1/Hxx9/jNatW+s7RiIiItIznRKAJUuWoKDgweXZ3njjDURGRuKFF15A06ZNkZSUpNcAiXh/ACIi/dMpAejUqZP8/wYNGmCXiV1SlIgA7Euouk6v+bUfBxEZhM7XAQAeXHHv/PnzkCQJzZs3R4MGDfQVF5F50+bDl4ioFum0CDA/Px/jx49Hw4YNERoaip49e8LX1xfjxo1DXl6evmMkIiIiPdMpAXjuuedw5MgR7Ny5E7du3UJeXh527tyJ48ePY/LkyfqOkYiIiPRMp0MA3377Lfbs2YPu3bvLZX379sWGDRvw1FNP6S04IiIiqh06zQC4u7vDxcVFo9zFxQX169evcVBERERUu3RKAF599VXExsbi2rVrcll2djbmzJmD1157TW/BERERUe3Q+hBA+/bt5av/AcBvv/2GgIAA+Pv7AwAuXboEpVKJv//+G1OmTNF/pERERKQ3WicAQ4cOrcUwiIiIqC5pnQAsXLiwNuMgIiKiOlSjCwGlpaXh3LlzkCQJLVu2RPv27fUVFxEREdUinRKAnJwcjBo1Cvv374erqyuEEMjLy0OvXr2wdetWXhGQiIjIyOl0FsDMmTORn5+Ps2fPIjc3Fzdv3sSZM2eQn5+P6OhofcdIREREeqZTArB7926sXbsWjz/+uFzWsmVLrFmzBt99953
eggOAP//8E+PGjYO7uzscHBzQrl07pKWlyduFEIiLi4Ovry/s7e0RFhaGs2fP6jUGIiIic6NTAlBaWgqFQqFRrlAoUFpaWuOgyty8eRPdunWDQqHAd999h19//RXvvvsuXF1d5TqJiYlYtmwZVq9ejWPHjsHb2xvh4eHy7YqJiIhIk04JwL/+9S/MmjULV69elcv+/PNPvPjii+jdu7fegnv77bfh5+eHpKQkdO7cGY0bN0bv3r3x2GOPAXjw7X/FihVYsGABhg0bhuDgYGzatAmFhYXYsmWL3uIgIiIyNzotAly9ejWGDBmCxo0bw8/PD5Ik4dKlS2jdujU2b96st+B27NiBvn37YsSIEThw4AAaNmyIadOmyTccysjIQHZ2NiIiIuR9lEolQkNDkZqaWuEFiYqKilBUVCQ/z8/PBwCoVCqoVCqtYiurp239NfsuapRJ5dSrrlLJWg+tlN+mvtpWifLzzJq0X1Gb2u6n6/7GrFbGpuXPd22q7u+aKTHHMZHpkIQQQtedU1JS8L///Q9CCLRs2RJ9+vTRZ2yws7MDAMTGxmLEiBE4evQoYmJi8MEHH2DChAlITU1Ft27d8Oeff8LX11fe7/nnn0dWVhb27NlTbrtxcXGIj4/XKN+yZQscHBz0OgYioooUFhZizJgxyMvLQ7169QwdDlmYaicA9+/fh52dHdLT0xEcHFxbcQEAbG1t0alTJ6Smpspl0dHROHbsGA4fPiwnAFevXoWPj49cZ/Lkybh8+TJ2795dbrvlzQD4+fnh+vXrWv8SqlQqpKSkIDw8vNz1EI8qbwZAH564kqz3Nksla+S6dYFb7hFYiZIat9e5sVu55Uczc/XeZlVUwgopt5sh3OkCFJL+1qsYg1oZW49Y/bRTA9X9XTMl+fn58PDwYAJABlHtQwA2NjYICAhASUnNPxiq4uPjg5YtW6qVPf744/jyyy8BAN7e3gAe3Ijo4QQgJycHXl5eFbarVCqhVCo1yhUKRbX/wGi7j6iFqXoAevmArqxtfbRf0YdRTdqu6QecQio1uwSgjF7HZkQfuLr8fho7cxsPmRad7wY4f/585Obq/g1OG926dcP58+fVyi5cuICAgAAAQGBgILy9vZGSkiJvLy4uxoEDBxASElKrsREREZkynRYBrly5EhcvXoSvry8CAgLg6Oiotv3EiRN6Ce7FF19ESEgIlixZgpEjR+Lo0aNYv3491q9fDwCQJAkxMTFYsmQJgoKCEBQUhCVLlsDBwQFjxozRSwxERETmSKcEYOjQoZAkCTVYP6iVJ554Atu3b8f8+fOxaNEiBAYGYsWKFRg7dqxcZ+7cubh79y6mTZuGmzdvokuXLti7dy+cnZ1rNTYiIiJTVq0EoLCwEHPmzMFXX30FlUqF3r17Y9WqVfDw8Kit+DBw4EAMHDiwwu2SJCEuLg5xcXG1FgMREZG5qdYagIULFyI5ORkDBgzA6NGj8f333+OFF16ordiIiIiollRrBmDbtm3YuHEjRo0aBQAYO3YsunXrhpKSElhb184qdyIiItK/as0AXL58GT169JCfd+7cGTY2NmqXBCYiIiLjV60EoKSkBLa2tmplNjY2uH//vl6DIiIiotpVrUMAQghERUWpXUTn3r17mDp1qtqpgNu2bdNfhERERKR31UoAIiMjNcrGjRunt2CIiIioblQrAUhKSqqtOIiq5fAfN8ot79rEvY4jISIyTeZ3T1QiIiKqEhMAIiIiC8QEgIiIyALpdC8Aouqo6Hg9mYB9CZVv7zW/buIgIr3jDAAREZEF4gyAkXvy0npDh0BERGaIMwBEREQWiAkAERGRBWICQEREZIG4BoDM3sNnIZRK1gAvFkhExBkAIiIiS2RSCUBCQgIkSUJMTIxcJoRAXFwcfH19YW9vj7CwMJw9e9ZwQRIREZkAk0kAjh07hvXr16NNmzZq5YmJiVi2bBlWr16NY8eOwdvbG+Hh4SgoKDBQpERERMbPJBKA27dvY+z
YsdiwYQPq168vlwshsGLFCixYsADDhg1DcHAwNm3ahMLCQmzZssWAERMRERk3k1gEOH36dAwYMAB9+vTB4sWL5fKMjAxkZ2cjIiJCLlMqlQgNDUVqaiqmTJlSbntFRUUoKiqSn+fn5wMAVCoVVCqVVjGV1dO2viRKtKr3qFLJWqf9aqKsT0P0XVMqoZnTPjyOsv+XV8/UlY2pTsem5c9/zbqo3u+aKTHHMZHpMPoEYOvWrThx4gSOHTumsS07OxsA4OXlpVbu5eWFrKysCttMSEhAfHy8RvnevXvh4OBQrfhSUlK0qhdYrVb/cd09RMc9ay7XrYvB+tbVrvKO/JSz6j/ldrNaj8VQ6nRsu3bVWVfa/q6ZksLCQkOHQBbMqBOAy5cvY9asWdi7dy/s7OwqrCdJktpzIYRG2cPmz5+P2NhY+Xl+fj78/PwQERGBevXqaRWbSqVCSkoKwsPDoVAoqqy/Zt9Frdp91BNXknXaryZKJWvkunWBW+4RWOk4c2EonRu7aZQdzcyV/182tnCnC1BIpXUZWq1TCSuk3G5mfGPrEVt1nUpU93fNlJTNPhIZglEnAGlpacjJyUHHjh3lspKSEvz0009YvXo1zp8/D+DBTICPj49cJycnR2NW4GFKpRJKpVKjXKFQVPsPjLb7CB2n0w35AWwlSkwuATie8bdGWXkT4gqp1Lg+JPXI6Mampw9tXX4/jZ25jYdMi1EfCO3duzdOnz6N9PR0+dGpUyeMHTsW6enpaNKkCby9vdWmBouLi3HgwAGEhBhu6pyIiMjYGfUMgLOzM4KDg9XKHB0d4e7uLpfHxMRgyZIlCAoKQlBQEJYsWQIHBweMGTPGECETERGZBKNOALQxd+5c3L17F9OmTcPNmzfRpUsX7N27F87OzoYOjYiIyGiZXAKwf/9+teeSJCEuLg5xcXEGiYeIiMgUGfUaACIiIqodTACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgskMldCtjYLU+5YOgQiIiIqsQZACIiIgvEBICIiMgC8RAAERnWvoTKtwsrAC3qJBQiS8IZACIiIgvEGQAiql1VfcMnIoPgDAAREZEFYgJARERkgYz6EEBCQgK2bduG//3vf7C3t0dISAjefvttNG/eXK4jhEB8fDzWr1+PmzdvokuXLlizZg1atWplwMi19+Sl9YYOgYiILJBRzwAcOHAA06dPx88//4yUlBTcv38fERERuHPnjlwnMTERy5Ytw+rVq3Hs2DF4e3sjPDwcBQUFBoyciIjIuBn1DMDu3bvVniclJcHT0xNpaWno2bMnhBBYsWIFFixYgGHDhgEANm3aBC8vL2zZsgVTpkwpt92ioiIUFRXJz/Pz8wEAKpUKKpVKq9jK6j1aXxIl2g3u/yuVrKtVvy6UxWSMsdVU2ZhUwqhzX52UjcncxiaPS8vfTVNijmMi0yEJIYShg9DWxYsXERQUhNOnTyM4OBh//PEHHnvsMZw4cQLt27eX6w0ZMgSurq7YtGlTue3ExcUhPj5eo3zLli1wcHCotfiJiB5WWFiIMWPGIC8vD/Xq1TN0OGRhTCYBEEJgyJAhuHnzJg4ePAgASE1NRbdu3fDnn3/C19dXrvv8888jKysLe/bsKbet8mYA/Pz8cP36da1/CVUqFVJSUhAeHg6FQiGXr9l3sVrjeuJKcrXq14VSyRq5bl3glnsEVtWc0TB2ZWMLd7oAhVRq6HD0SiWskHK7mdmNTR7XI79r5iA/Px8eHh5MAMggjPoQwMNmzJiBU6dO4dChQxrbJElSey6E0Ch7mFKphFKp1ChXKBTV/gPz6D6imtPmxvwBayVKjDq+mlBIpWb1Ifkwcx2bLr+fxs7cxkOmxSQSgJkzZ2LHjh346aef0KhRI7nc29sbAJCdnQ0fHx+5PCcnB15eXnUeJ5mOo5m55SY3XZu4GyAaIqK6Z9SrhYQQmDF
jBrZt24Yff/wRgYGBatsDAwPh7e2NlJQUuay4uBgHDhxASEhIXYdLRERkMox6BmD69OnYsmULvv76azg7OyM7OxsA4OLiAnt7e0iShJiYGCxZsgRBQUEICgrCkiVL4ODggDFjxhg4eiIiIuNl1AnA2rVrAQBhYWFq5UlJSYiKigIAzJ07F3fv3sW0adPkCwHt3bsXzs7OdRwtERGR6TDqBECbExQkSUJcXBzi4uJqPyAiIiIzYdRrAIiIiKh2MAEgIiKyQEZ9CICISHZwGVDR9Q16za/bWIjMAGcAiIiILBBnAIgecviPGxplvDgQEZkjJgBEVWBSQETmiIcAiIiILBATACIiIgvEBICIiMgCcQ0AEZm+fQmVb+dpgkQaOANARERkgZgAEBERWSAeAiDSQXmnBlaEpwwSkTHiDAAREZEF4gwAUS3jhYTMRFULDQEuNiSTwgSghtbsuwghWZe77clL6+s4GiIiIu0wASAi88dv70QazGYNwPvvv4/AwEDY2dmhY8eOOHjwoKFDIiIiMlpmkQB89tlniImJwYIFC3Dy5En06NED/fr1w6VLlwwdGhERkVEyi0MAy5Ytw6RJk/Dcc88BAFasWIE9e/Zg7dq1SEjQYuqPqI5V5zTC8nARIRHVlMknAMXFxUhLS8PLL7+sVh4REYHU1NRy9ykqKkJRUZH8PC8vDwCQm5sLlUqlVb8qlQqFhYUoKsmrcBFgwb37WrVlbEolgcLCQhTcuw8rUWLocPTKXMZ243axRplKWKGwsBA3pGIopFIDRFU76mxcN6pIysp5zavdxiMKCgoAAEKIau1HpA8mnwBcv34dJSUl8PLyUiv38vJCdnZ2ufskJCQgPj5eozwwMLBWYiQiUxBnsDYKCgrg4uKih/6JtGfyCUAZSZLUngshNMrKzJ8/H7GxsfLz0tJS5Obmwt3dvcJ9HpWfnw8/Pz9cvnwZ9erV0z1wI8SxmSZzHZu5jgt48HeqoKAAvr6+hg6FLJDJJwAeHh6wtrbW+Lafk5OjMStQRqlUQqlUqpW5urrq1H+9evXM7o9SGY7NNJnr2Mx1XPzmT4Zi8mcB2NraomPHjkhJSVErT0lJQUhIiIGiIiIiMm4mPwMAALGxsRg/fjw6deqErl27Yv369bh06RKmTp1q6NCIiIiMklkkAM8++yxu3LiBRYsW4dq1awgODsauXbsQEBBQa30qlUosXLhQ41CCOeDYTJO5js1cx0VkaJLg+SdEREQWx+TXABAREVH1MQEgIiKyQEwAiIiILBATACIiIgvEBEAH5nDr4YSEBDzxxBNwdnaGp6cnhg4divPnz6vVEUIgLi4Ovr6+sLe3R1hYGM6ePWugiHWXkJAASZIQExMjl5ny2P7880+MGzcO7u7ucHBwQLt27ZCWliZvN9Wx3b9/H6+++ioCAwNhb2+PJk2aYNGiRSgt/ef6/6Y6NiKjJKhatm7dKhQKhdiwYYP49ddfxaxZs4Sjo6PIysoydGjV0rdvX5GUlCTOnDkj0tPTxYABA4S/v7+4ffu2XOett94Szs7O4ssvvxSnT58Wzz77rPDx8RH5+fkGjLx6jh49Kho3bizatGkjZs2aJZeb6thyc3NFQECAiIqKEkeOHBEZGRni+++/FxcvXpTrmOrYFi9eLNzd3cXOnTtFRkaG+Pzzz4WTk5NYsWKFXMdUx0ZkjJgAVFPnzp3F1KlT1cpatGghXn75ZQNFpB85OTkCgDhw4IAQQojS0lLh7e0t3nrrLbnOvXv3hIuLi1i3bp2hwqyWgoICERQUJFJSUkRoaKicAJjy2ObNmye6d+9e4XZTHtuAAQPEv//9b7WyYcOGiXHjxgkhTHtsRMaIhwCqoezWwxEREWrlld162FSU3RLZzc0NAJCRkYHs7Gy1sSqVSoSGhprMWKdPn44BAwagT58+auWmPLYdO3agU6dOGDFiBDw9PdG+fXts2LBB3m7KY+vevTt++OEHXLhwAQDwyy+/4NChQ+j
fvz8A0x4bkTEyiysB1hVdbj1sCoQQiI2NRffu3REcHAwA8njKG2tWVladx1hdW7duxYkTJ3Ds2DGNbaY8tj/++ANr165FbGwsXnnlFRw9ehTR0dFQKpWYMGGCSY9t3rx5yMvLQ4sWLWBtbY2SkhK8+eabGD16NADTft+IjBETAB1U59bDpmDGjBk4deoUDh06pLHNFMd6+fJlzJo1C3v37oWdnV2F9UxxbKWlpejUqROWLFkCAGjfvj3Onj2LtWvXYsKECXI9UxzbZ599hs2bN2PLli1o1aoV0tPTERMTA19fX0RGRsr1THFsRMaIhwCqQZdbDxu7mTNnYseOHdi3bx8aNWokl3t7ewOASY41LS0NOTk56NixI2xsbGBjY4MDBw5g5cqVsLGxkeM3xbH5+PigZcuWamWPP/44Ll26BMC037c5c+bg5ZdfxqhRo9C6dWuMHz8eL774IhISEgCY9tiIjBETgGowp1sPCyEwY8YMbNu2DT/++CMCAwPVtgcGBsLb21ttrMXFxThw4IDRj7V37944ffo00tPT5UenTp0wduxYpKeno0mTJiY7tm7dummcrnnhwgX5xlem/L4VFhbCykr9T5K1tbV8GqApj43IKBlwAaJJKjsNcOPGjeLXX38VMTExwtHRUWRmZho6tGp54YUXhIuLi9i/f7+4du2a/CgsLJTrvPXWW8LFxUVs27ZNnD59WowePdpkT7l6+CwAIUx3bEePHhU2NjbizTffFL/99pv45JNPhIODg9i8ebNcx1THFhkZKRo2bCifBrht2zbh4eEh5s6dK9cx1bERGSMmADpYs2aNCAgIELa2tqJDhw7yqXOmBEC5j6SkJLlOaWmpWLhwofD29hZKpVL07NlTnD592nBB18CjCYApj+2bb74RwcHBQqlUihYtWoj169erbTfVseXn54tZs2YJf39/YWdnJ5o0aSIWLFggioqK5DqmOjYiY8TbARMREVkgrgEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEg0oPMzExIkoT09HRDh0JEpBUmAGRWhBDo06cP+vbtq7Ht/fffh4uLi3znPCIiS8YEgMyKJElISkrCkSNH8MEHH8jlGRkZmDdvHt577z34+/sbMEIiIuPABIDMjp+fH9577z289NJLyMjIgBACkyZNQu/evREVFaVRf/To0Rg1apRamUqlgoeHB5KSkgAAu3fvRvfu3eHq6gp3d3cMHDgQv//+e4UxJCcnw9XVVa3sq6++giRJamXffPMNOnbsCDs7OzRp0gTx8fG4f/++vD0uLg7+/v5QKpXw9fVFdHR0NV8NIqLy2Rg6AKLaEBkZie3bt2PixIkYPnw4zpw5gzNnzpRbd+zYsRg5ciRu374NJycnAMCePXtw584dDB8+HABw584dxMbGonXr1rhz5w5ef/11PP3000hPT9e4h7229uzZg3HjxmHlypXo0aMHfv/9dzz//PMAgIULF+KLL77A8uXLsXXrVrRq1QrZ2dn45ZdfdOqLiEiDYW9GSFR7/vrrL9GgQQNhZWUltm3bVmG94uJi4eHhIf7zn//IZaNHjxYjRoyocJ+cnBwBQL4VbUZGhgAgTp48KYQQIikpSbi4uKjts337dvHwr1yPHj3EkiVL1Op8/PHHwsfHRwghxLvvviuaNWsmiouLtRovEVF18BAAmS1PT088//zzePzxx/H0009XWE+hUGDEiBH45JNPADz4tv/1119j7Nixcp3ff/8dY8aMQZMmTVCvXj0EBgYCQI0WFKalpWHRokVwcnKSH5MnT8a1a9dQWFiIESNG4O7du2jSpAkmT56M7du3qx0eICKqCR4CILNmY2MDG5uqf8zHjh2L0NBQ5OTkICUlBXZ2dujXr5+8fdCgQfDz88OGDRvg6+uL0tJSBAcHo7i4uNz2rKysIIRQK1OpVGrPS0tLER8fj2HDhmnsb2dnBz8
/P5w/fx4pKSn4/vvvMW3aNCxduhQHDhyAQqHQZvhERBViAkAEICQkBH5+fvjss8/w3XffYcSIEbC1tQUA3LhxA+fOncMHH3yAHj16AAAOHTpUaXsNGjRAQUEB7ty5A0dHRwDQuEZAhw4dcP78eTRt2rTCduzt7TF48GAMHjwY06dPR4sWLXD69Gl06NChBqMlImICQATgwemDY8aMwbp163DhwgXs27dP3la/fn24u7tj/fr18PHxwaVLl/Dyyy9X2l6XLl3g4OCAV155BTNnzsTRo0eRnJysVuf111/HwIED4efnhxEjRsDKygqnTp3C6dOnsXjxYiQnJ6OkpERu6+OPP4a9vT0CAgJq4yUgIgvDNQBE/9/YsWPx66+/omHDhujWrZtcbmVlha1btyItLQ3BwcF48cUXsXTp0krbcnNzw+bNm7Fr1y60bt0an376KeLi4tTq9O3bFzt37kRKSgqeeOIJPPnkk1i2bJn8Ae/q6ooNGzagW7duaNOmDX744Qd88803cHd31/vYicjySOLRA5VERERk9jgDQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQW6P8BNNz4zl/QBbAAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABb6ElEQVR4nO3deVxU1fsH8M8MDMMMm7IPsiui4r7hmpiCS25tVlZKmlmWaVaWaYlLmJZmm5ZlYi5lZfb1Vy6gCWm4oGIumJosgjAgCLLDLOf3xzQXLusM2yw879drXjD33rnzHGaYZ85yzxEwxhgIIYQQYlaEhg6AEEIIIS2PEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEnwzRUVFQSAQwNraGmlpabX2h4SEoGfPnrxtvr6+EAgE3M3a2hpdunTB4sWLkZubyzs2IiICAoEAQqEQycnJtc5fUlICe3t7CAQChIeHNxpvzee2tbVFcHAwvvvuO/0K3ght3DXL0xzh4eGwtbXV6VhfX1/e3yM1NRUCgQBRUVHcNu1rl5qaqlcckZGR+PXXX2ttj42NhUAgQGxsrF7nq4s2Xu1NKBTCyckJEydOxKlTp5p9fl2Eh4fD19eXt00gECAiIkKv82RmZiIiIgIXL16stU/7PiGEtDxK8C2koqICy5cv1/n44cOH49SpUzh16hQOHTqEefPm4auvvsL48ePrPN7W1hbbt2+vtf2nn36CQqGASCRq0nNrk9ysWbOwZcsWnc9h7Pbv34933323wWMeeughnDp1CjKZTK9z15fg+/fvj1OnTqF///56na8hCxYswKlTp3DixAmsXbsWf//9N0aPHo3ExMQWew59nDp1Cs8//7xej8nMzMTKlSvrTPDPP/98m31hIaS9sTR0AOZi/Pjx2LNnD9544w306dOn0eM7dOiAIUOGcPdHjx6NoqIirF69Gjdu3EDXrl15xz/xxBPYsWMHVq5cCaGw6nvZtm3b8PDDD+PAgQM6x1rzuceOHQsfHx9s3LgRL730Up2PUalUUCqVEIvFOj+PIfXr16/RY1xcXODi4tJiz2lvb8/7u7YEb29v7pzDhw9Hly5dMGbMGGzevBlff/11nY8pKyuDtbV1q9SMW7p8np6e8PT0bNFzEkI0qAbfQpYsWQInJye89dZbTT6Hg4MDANRZG589ezbS09MRExPDbbtx4wZOnjyJ2bNnN/k5AU3CDwwM5LoYtM3D69evx5o1a+Dn5wexWIzjx48DAA4cOIChQ4dCKpXCzs4OoaGh9dbC0tPT8cgjj8De3h4ODg545plncPfuXd4xe/fuRVhYGGQyGSQSCbp37463334bJSUldZ7z6tWrGDNmDGxsbODi4oJXXnkFpaWlvGNqNtHXpa4m+sTEREyaNAmurq4Qi8Xw8PDAQw89hIyMDACaJuqSkhLs2LGDaz4PCQkBUH8T/ZkzZzB58mQ4OTnB2toanTt3xqJFixqMrT7aBKt9rbRliI6OxuzZs+Hi4gKpVIqKigoAmr/t0KFDYWNjA1tbW4wbN67O2n9UVBQCAwMhFovRvXv3erts6mqiv3PnDl544QV4eXnBysoKHh4eeOyxx5CdnY3Y2FgMGjQIAPDcc89xfzPtOepqoler1Vi/fj26desGsVgMV1dXzJw5k3sNtLTdXwkJCRg5ciSkUin8/f3xwQcfQK1W8863Zs0aBAYGQiKRoEOHDujduzc++eQTHf/qhJgmSvAtxM7ODsuXL8eRI0fwxx9/NHo8YwxKpRJKpRLFxcU4fvw4Nm3ahOHDh8PPz6/W8QEBARg5ciS+/fZbbtu3334LX19fjBkzplmxKxQKpKWl1arNfvrpp/
jjjz/w0Ucf4dChQ+jWrRv27NmDqVOnwt7eHt9//z22bduG/Px8hISE4OTJk7XO/fDDD6NLly74+eefERERgV9//RXjxo2DQqHgjrl58yYmTpyIbdu24fDhw1i0aBF+/PFHTJ48uc5YJ06ciDFjxuDXX3/FK6+8gq+++gpPPPFEs/4GgGY8Q2hoKLKzs/HFF18gJiYGmzZtgre3N4qKigBomqglEgnXF37q1Cls3ry53nMeOXIEI0eOxO3bt7Fx40YcOnQIy5cvR3Z2dpNi/PfffwGg1ms1e/ZsiEQi7Ny5Ez///DNEIhEiIyPx1FNPoUePHvjxxx+xc+dOFBUVYeTIkUhKSuIeGxUVheeeew7du3fHvn37sHz5cqxevVqn9/GdO3cwaNAg7N+/H4sXL8ahQ4ewadMmODg4ID8/H/379+e6lpYvX879zRpq5n/ppZfw1ltvITQ0FAcOHMDq1atx+PBhDBs2rNaYDrlcjqeffhrPPPMMDhw4gAkTJmDp0qXYtWsXd8z69esRERGBp556Cr///jv27t2LOXPmoKCgoNHyEWLSGGmW7du3MwAsISGBVVRUMH9/fzZw4ECmVqsZY4yNGjWKBQUF8R7j4+PDANS6DR48mGVlZfGOXbFiBQPA7t69y7Zv387EYjHLy8tjSqWSyWQyFhERwRhjzMbGhs2aNavReH18fNjEiROZQqFgCoWCpaSksFmzZjEA7M0332SMMZaSksIAsM6dO7PKykrusSqVinl4eLBevXoxlUrFbS8qKmKurq5s2LBhteJ+7bXXeM+/e/duBoDt2rWrzvjUajVTKBQsLi6OAWB///03t08b5yeffMJ7zPvvv88AsJMnT/LKWf3voS3T9u3buW3a1y4lJYUxxti5c+cYAPbrr782+Des7299/PhxBoAdP36c29a5c2fWuXNnVlZW1uA5a9LGu27dOqZQKFh5eTk7f/48GzRoEAPAfv/9d14ZZs6cyXv87du3maWlJVuwYAFve1FREXN3d2fTp09njFW9pv379+fes4wxlpqaykQiEfPx8eE9HgBbsWIFd3/27NlMJBKxpKSkesuSkJBQ62+vpX2faF27do0BYPPnz+cdd+bMGQaAvfPOO9y2UaNGMQDszJkzvGN79OjBxo0bx92fNGkS69u3b73xEWKuqAbfgqysrLBmzRqcO3cOP/74Y4PHjhgxAgkJCUhISMBff/2Fbdu24e7du3jwwQfrHXn++OOPw8rKCrt378bBgwchl8t1Gjlf08GDByESiSASieDn54cff/wRCxYswJo1a3jHTZkyhdddcP36dWRmZuLZZ5/ljQOwtbXFo48+itOnT9dqKn/66ad596dPnw5LS0uuuR8AkpOTMWPGDLi7u8PCwgIikQijRo0CAFy7dq1W/DXPOWPGDADgnbMpunTpgo4dO+Ktt97Cl19+yavlNsWNGzdw69YtzJkzB9bW1k06x1tvvQWRSARra2sMGDAAt2/fxldffYWJEyfyjnv00Ud5948cOQKlUomZM2dyLUVKpRLW1tYYNWoU142gfU1nzJjBayr38fHBsGHDGo3v0KFDGD16NLp3796k8tWkfQ1rvq8HDx6M7t2749ixY7zt7u7uGDx4MG9b7969eVe0DB48GH///Tfmz5+PI0eOoLCwsEViJcTY0SC7Fvbkk0/io48+wrJly/DII4/Ue5yDgwMGDhzI3R82bBh69OiBoUOHYsOGDVi7dm2tx9jY2OCJJ57At99+Cx8fH25wnL5GjBiBjz/+GAKBAFKpFJ07d4aVlVWt42qOLs/Ly6tzOwB4eHhArVYjPz8fUqmU2+7u7s47ztLSEk5OTty5iouLMXLkSFhbW2PNmjXo2rUrpFIp13dfVlZW5+Or0z6H9pxN5eDggLi4OLz//vt45513kJ+fD5lMhrlz52L58uV6XakAgBtr0JxBZAsXLsQzzzwDoVCIDh06wM/Pr87BczVfE20XgLb/uybtFzTt36zm66
Td1tglhHfv3m3RQXKNvcdqXopa870AAGKxmPe+Wbp0KWxsbLBr1y58+eWXsLCwwAMPPIB169bx/gcJMTeU4FuYQCDAunXrEBoaiq1bt+r12N69ewMA/v7773qPmT17Nr755htcunQJu3fvblKMNb9c1KdmItF+mGZlZdU6NjMzE0KhEB07duRtl8vl6NSpE3dfqVQiLy+PO9cff/yBzMxMxMbGcrV2APX2j9Z8vPY5qsfXHL169cIPP/wAxhguXbqEqKgorFq1ChKJBG+//bZe59L2k9ccHKYPT0/PJr1Wzs7OAICff/65wS+B2r+Z9m9YXV3banJxcWlW+eqLJysrq9YXh8zMTK5c+rC0tMTixYuxePFiFBQU4OjRo3jnnXcwbtw4pKen876QEmJOqIm+FYwdOxahoaFYtWoViouLdX6c9jphV1fXeo8ZOnQoZs+ejYcffhgPP/xwc0PVS2BgIDp16oQ9e/aAMcZtLykpwb59+7iR9dXV/BLy448/QqlUciPPtYmp5uV3X331Vb1x1Dznnj17AIA7Z0sQCATo06cPPv74Y3To0AEXLlzg9tWsIdana9eu6Ny5M7799ltuVHtbGTduHCwtLXHr1i0MHDiwzhugeU1lMhm+//573mualpaG+Pj4Rp9nwoQJOH78OK5fv17vMdrXVpe/2YMPPggAvEFyAJCQkIBr1641e0Bphw4d8Nhjj+Hll1/GvXv39J7kiBBTQjX4VrJu3ToMGDAAOTk5CAoKqrW/oKAAp0+fBqAZGX7t2jVERkZCLBbj5ZdfbvDc27Zta5WYGyMUCrF+/Xo8/fTTmDRpEubNm4eKigp8+OGHKCgowAcffFDrMb/88gssLS0RGhqKq1ev4t1330WfPn0wffp0AJquiY4dO+LFF1/EihUrIBKJsHv37npbMaysrLBhwwYUFxdj0KBBiI+Px5o1azBhwgSMGDGiWeX77bffsHnzZkybNg3+/v5gjOGXX35BQUEBQkNDueN69eqF2NhY/N///R9kMhns7OwQGBhY5zm/+OILTJ48GUOGDMFrr70Gb29v3L59G0eOHGlyC4wufH19sWrVKixbtgzJyckYP348OnbsiOzsbJw9exY2NjbcnAqrV6/G888/j4cffhhz585FQUEBIiIi6my2r2nVqlU4dOgQHnjgAbzzzjvo1asXCgoKcPjwYSxevBjdunVD586dIZFIsHv3bnTv3h22trbw8PCAh4dHrfMFBgbihRdewGeffQahUIgJEyYgNTUV7777Lry8vPDaa6/p/beYPHkyevbsiYEDB8LFxQVpaWnYtGkTfHx8EBAQoPf5CDEZhh3jZ/qqj6KvacaMGQxAo6PoLSwsmLe3N3vsscdYYmIi79jqo+gbos8o+oceeqjBY7QjuD/88MM69//6668sODiYWVtbMxsbGzZmzBj2119/1Rn3+fPn2eTJk5mtrS2zs7NjTz31FMvOzuYdGx8fz4YOHcqkUilzcXFhzz//PLtw4UKtkdezZs1iNjY27NKlSywkJIRJJBLm6OjIXnrpJVZcXFyrnPqOov/nn3/YU089xTp37swkEglzcHBggwcPZlFRUbxzX7x4kQ0fPpxJpVIGgI0aNYoxVvcoesYYO3XqFJswYQJzcHBgYrGYde7cudbVBTU19hrULENd7z/GNK/V6NGjmb29PROLxczHx4c99thj7OjRo7zjvvnmGxYQEMCsrKxY165d2bfffstmzZrV6Ch6xhhLT09ns2fPZu7u7kwkEjEPDw82ffp03uv8/fffs27dujGRSMQ7R81R9IxpRvavW7eOde3alYlEIubs7MyeeeYZlp6ezjuuritUGGO14t6wYQMbNmwYc3Z2ZlZWVszb25vNmTOHpaam1vk3I8RcCBir1i5HCCGEELNAffCEEEKIGaIETwghhJghSvCEEEKIGaIETwghhJghSvCEEEKIGTL76+DVajUyMzNhZ2fXKutjE0JIXRhjKCoqgoeHB2
/tBkLaitkn+MzMTHh5eRk6DEJIO5Went6i8/UToiuzT/B2dnYANP9k9vb2Oj1GoVAgOjoaYWFhei8wYoyoPMbN3MoDmF+ZmlKewsJCeHl5cZ9BhLQ1s0/w2mZ5e3t7vRK8VCqFvb292Xw4UXmMl7mVBzC/MjWnPNQ1SAyFOoYIIYQQM0QJnhBCCDFDlOAJIYQQM2T2ffCEEGLMVCoVFAqFocMgJkIkEsHCwkKnYynBE0KIATDGIJfLUVBQYOhQiInp0KED3N3dGx3ASQmeEEIMQJvcXV1dIZVKabQ9aRRjDKWlpcjJyQEAyGSyBo+nBE8IIW1MpVJxyd3JycnQ4RATIpFIAAA5OTlwdXVtsLmeBtkRQkgDGAPKylr2nNo+d6lU2rInJu2C9n3T2NgNqsETQkgdlEogNxe4exewbKVPSmqWJ02h6/uGEjwhhFRTVgbk5AD37gFqtWZbayV4QloTvW0JIe0eY8D9+5rEXlRk6GgIaRnUB08IabdUKkAuB65cAW7douRuTEJCQrBo0SLuvq+vLzZt2tSqzxkbGwuBQACBQIBp06a16nPVR/v8HTp0aPa5KMETQtqdsjIgLQ24dAm4cweorDR0RKQxCQkJeOGFF9rkua5fv46oqCi9HhMeHs4lZ+1tyJAhvGMqKiqwYMECODs7w8bGBlOmTEFGRgbvmKysrBb7IkMJnhDSbhQUADduAElJmgF02j52YvxcXFza7KoDV1fXJtWgx48fj6ysLO528OBB3v5FixZh//79+OGHH3Dy5EkUFxdj0qRJUKlU3DHu7u5wcHBobhEAUIInhJg5lQrIzgYuXzbeZnjGGEpKSgxyY4zpHGdISAgWLFiARYsWoWPHjnBzc8PWrVtRUlKC5557DnZ2dujcuTMOHTrEe1xSUhImTpwIW1tbuLm54dlnn0Vubi63v6SkBDNnzoStrS1kMhk2bNhQ67lrNtFv3LgRvXr1go2NDby8vDB//nwUFxdz+6OiotChQwccOXIE3bt3h62tLZeA9VVXfDW7EABALBbD3d2duzk6OnL77t+/j23btmHDhg0YO3Ys+vXrh127duHy5cs4evSo3jHpghI8IcQslZcDt29rmuEzMoy7Gb60tBS2trYGuZWWluoV644dO+Ds7IyzZ89iwYIFeOmll/D4449j2LBhuHDhAsaNG4dnn32WO29WVhZGjRqFvn374ty5czh8+DCys7Mxffp07pxvvvkmjh8/jv379yM6OhqxsbE4f/58g3EIhUJ8+umnuHLlCnbs2IE//vgDS5YsqfV3/eijj7Bz5078+eefuH37Nt544w29yqtPfLGxsXB1dUXXrl0xd+5cbsY5ADh//jwUCgXCwsK4bR4eHujZsyfi4+P1jkkXNIqeEGJWCgpoNHxr6tOnD5YvXw4AWLp0KT744AM4Oztj7ty5AID33nsPW7ZswaVLlzBkyBBs2bIF/fv3R2RkJHeOb7/9Fl5eXrhx4wY8PDywbds2fPfddwgNDQWg+RLh6enZYBzVa89+fn5YvXo1XnrpJWzevJnbrlAo8OWXX6Jz584AgFdeeQWrVq3Sq7zFxcU6xTdhwgQ8/vjj8PHxQUpKCt599108+OCDOH/+PMRiMeRyOaysrNCxY0fe49zc3CCXy/WKSVeU4AkhJk+lqpqUpqLC0NHoTyqV8pqX2/q59dG7d2/udwsLCzg5OaFXr17cNjc3NwDgaq/nz5/H8ePHYWtrW+tct27dQllZGSorKzF06FBuu6OjIwIDAxuM4/jx44iMjERSUhIKCwuhVCpRXl6OkpIS2NjYcGXTJndAM3d79Vq1Lm7duqVTfE888QT3e8+ePTFw4ED4+Pjg999/xyOPPFLv+RljrTbhESV4QojJKi/X1Nbz8kx7wJxAIOCSkrETiUS8+wKBgLdNm6zU/70garUakydPxrp162qdSyaT4ebNm3rHkJaWhokTJ+LFF1/E6tWr4ejoiJMnT2LOnDm86VvrilWfMQ
cA9D5eSyaTwcfHhyufu7s7KisrkZ+fz6vF5+TkYNiwYU16jsZQHzwhxOTcvw/cvAlcvaqptZtycjd3/fv3x9WrV+Hr64suXbrwbjY2NujSpQtEIhFOnz7NPSY/Px83btyo95znzp2DUqnEhg0bMGTIEHTt2hWZmZmtEn9T4gOAvLw8pKencyu+DRgwACKRCDExMdwxWVlZuHLlCiV4Qkj7plJpautXrgD//gsUFho6IqKLl19+Gffu3cNTTz2Fs2fPIjk5GdHR0Zg9ezZUKhVsbW0xZ84cvPnmmzh27BiuXLmC8PBwCIX1p6fOnTtDqVTis88+Q3JyMnbu3Ikvv/yyVeLXJb7i4mK88cYbOHXqFFJTUxEbG4vJkyfD2dkZDz/8MADAwcEBc+bMweuvv45jx44hMTERzzzzDHr16oWxY8e2SuzURE8IMWrm0gzfXnl4eOCvv/7CW2+9hXHjxqGiogI+Pj4YP348lyQ//PBDFBcXY8qUKbCzs8Prr7+O+/fv13vOvn37YuPGjVi3bh2WLl2KBx54AGvXrsXMmTNbpQyNxWdhYYHLly/ju+++Q0FBAWQyGUaPHo29e/fCzs6OO+7jjz+GpaUlpk+fjrKyMowZMwZRUVENLvnaHJTgCSFGSTs3PNXUjUdsbGytbampqbW21ey3DggIwC+//FLveW1tbbFz507s3LmT2/bmm282+DyvvfYaXnvtNd62Z599lvs9PDwc4eHhvP3Tpk1rUp96XfH9/vvv3O8SiQRHjhxp9DzW1tb47LPP8Nlnn+kdQ1NQgieEGA2VSlNTz8kxzdHwxDx4enpi8uTJ+P7779v8uW1tbaFUKmFtbd3sc1GCJ4QYXHm5ZrBcXp4myRNiCMHBwdyo97ou62sLFy9eBIAWabY3+CC7O3fu4JlnnoGTkxOkUin69u3LmyGIMYaIiAh4eHhAIpEgJCQEV69eNWDEhJCWUn00fE4OJXdiWBKJhBvh7+7u3uCxsbGxrbK6nfb5/fz8mn0ugyb4/Px8DB8+HCKRCIcOHUJSUhI2bNjAm+R//fr12LhxIz7//HMkJCTA3d0doaGhKKJpqggxSWq1JplfvUqj4QlpTQZtol+3bh28vLywfft2bpuvry/3O2MMmzZtwrJly7iZgHbs2AE3Nzfs2bMH8+bNa+uQCSFNVFFRNRqeauqEtD6DJvgDBw5g3LhxePzxxxEXF4dOnTph/vz53JzGKSkpkMvlvMn5xWIxRo0ahfj4+DoTfEVFBSqqjc4p/K96oFAoeDMcNUR7nK7HGzsqj3Ezt/IA/DIVFWn61025pq5S6f8amdPrSUyTQRN8cnIytmzZgsWLF+Odd97B2bNn8eqrr0IsFmPmzJncBPzauY213NzckJaWVuc5165di5UrV9baHh0drfecy9VnHDIHVB7jZm7lAcyvTPqUR99V2ghpaQZN8Gq1GgMHDuRWGerXrx+uXr2KLVu28CYsqDkRf0OT8y9duhSLFy/m7hcWFsLLywthYWGwt7fXKS6FQoGYmBiEhobWmsvYFFF5jJs5laeyUlNbz81V4M6dGLi7h0IoNO0yAYBEosCtW/q9RoWm3GRBzIJBE7xMJkOPHj1427p37459+/YBADeKUS6Xc/P5AprJ+WvW6rXEYjHEYnGt7SKRSO8Pz6Y8xphReYybKZensFDTv66d3Es7l4hQKDKLBK+9Ykmf18hUX0tiPgya4IcPH47r16/ztt24cQM+Pj4ANGv8uru7IyYmBv369QMAVFZWIi4urs6ViQghbUetrpqUprzc0NGYh61b2/b5XnhBv+NDQkIQFxcHAEhMTETfvn1bPigjpW01dnBwQEFBgWGD0ZFBL5N77bXXcPr0aURGRuLff//Fnj17sHXrVrz88ssANH/QRYsWITIyEvv37+cm+ZdKpZgxY4YhQyek3aqoADIygEuXgNu3Kbm3N3PnzkVWVhZ69uyp0/GxsbGYOnUqZDIZbGxs0LdvX+zevbvWMQ
KBoNbtn3/+aXa8dZ1XIBDgww8/5I4JCQmptf/JJ5/knScrK6tVrntvTQatwQ8aNAj79+/H0qVLsWrVKvj5+WHTpk14+umnuWOWLFmCsrIyzJ8/H/n5+QgODkZ0dDRvAn9CSOsrKtLU1k2k8kJaiVQqbXQSmOri4+PRu3dvvPXWW3Bzc8Pvv/+OmTNnwt7eHpMnT+Yde/36dd5YKRcXl2bHm5WVxbt/6NAhzJkzB48++ihv+9y5c7Fq1SruvkQi4e13d3eHg4NDs+NpSwafqnbSpEmYNGlSvfsFAgEiIiIQERHRdkERQgBUNcPfvQuUlRk6GmJsYmNjMXr0aPz222945513cP36dfTp0wfffPMNevXqBQB45513eI959dVXceTIEezfv79Wgnd1deVNdNaYkJAQriVh165dsLCwwEsvvYTVq1dzTeo1v4z873//w+jRo+Hv78/bru8XF1Ng8KlqCSHGp7KS3wxPyZ005M0338RHH32EhIQEuLq6YsqUKQ3OA3D//n04OjrW2t6vXz/IZDKMGTMGx48f1+m5d+zYAUtLS5w5cwaffvopPv74Y3zzzTd1HpudnY3ff/8dc+bMqbVv9+7dcHZ2RlBQEN544w2zmC3V4DV4Qojx0DbD379fNRKekMasWLECoaGhADQJ19PTE/v378f06dNrHfvzzz8jISEBX331FbdNJpNh69atGDBgACoqKrBz506MGTMGsbGxeOCBBxp8bi8vL3z88ccQCAQIDAzE5cuX8fHHH3MTplW3Y8cO2NnZcTOjaj399NPcoO4rV65g6dKl+Pvvv01+HgdK8IS0c2o1cO+eJrFTTZ00xdChQ7nfHR0dERgYiGvXrtU6LjY2FuHh4fj6668RFBTEbQ8MDERgYCDvfOnp6fjoo4/wwAMP4MSJE5gwYQK3/6uvvuLGag0ZMoQ3L8rQoUOxYcMGqFSqWiuyffvtt3j66adrLcVa/ctAz549ERAQgIEDB+LChQvo37+/vn8Oo0EJnpB2qmpSGkCpNHQ0xNzUnIwsLi4OkydPxsaNG3kTmdVnyJAh2LVrFwBg4MCB3DKqQO3ZTXVx4sQJXL9+HXv37m302P79+0MkEuHmzZuU4AkhpoOa4UlLO336NLy9vQFoVgm9ceMGunXrxu2PjY3FpEmTsG7dOryg48X3iYmJ3ARn2mVc63vumvcDAgJq1d63bduGAQMGoE+fPo0+99WrV6FQKHgTrJkiSvCEtAPUDE9a06pVq+Dk5AQ3NzcsW7YMzs7OmDZtGgBNcn/ooYewcOFCPProo9waI1ZWVtxAu02bNsHX1xdBQUGorKzErl27sG/fPm5W04akp6dj8eLFmDdvHi5cuIDPPvsMGzZs4B1TWFiIn376qdZ2ALh16xZ2796NiRMnwtnZGUlJSXj99dfRr18/DB8+vJl/GcOiBE+IGaNmeNOi78xyxuKDDz7AwoULcfPmTfTp0wcHDhyAlZUVACAqKgqlpaVYu3Yt1q5dyz1m1KhRiI2NBaCZofSNN97AnTt3IJFIEBQUhN9//x0TJ05s9LlnzpyJsrIyDB48GBYWFliwYEGtVoIffvgBjDE89dRTtR5vZWWFY8eO4ZNPPkFxcTG8vLzw0EMPYcWKFbVaAUwNJXhCzFBxcdWkNNQMT1rbiBEjcOXKlTr3RUVFISoqqsHHL1myBEuWLGnSc4tEImzatAlbtmyp95gXXnih3q4BLy8vbvpdc0MJnhAzwVhVMzytVEpay+bNm/HNN9/g1KlThg6lTdna2kKpVNYagW/MKMETYuIUCk1Sp2Z40tp2796Nsv8GcXh7eyM+Pt7AEbUd7Sh+U2q2pwRPiImiZnjS1jp16sS7HxISAmbAN5+2D78t1DeK35hRgifEhFAzPCFEV5TgCTEBCoVmNPzdu9QMTwjRDSV4QoxYSYmmtp6fT83whBD9UIInxMhom+Hv3tUkeEIIaQpK8IQYCW0zfG6u5ndCCGkOSvCEGJh2sFxSEl
BjfQ5CCGkySvCEGABjmn71nBzN4i/abZTg27fz59v2+QYM0O/4kJAQbta3xMRE9O3bt+WDMgBfX1+kpaUB0CyW06FDB8MG1EKEhg6AkPZEoQCysoDLl4GUFOpjJ6Zn7ty5yMrKQs+ePXU6vry8HOHh4ejVqxcsLS25RWhaQlFRERYtWgQfHx9IJBIMGzYMCQkJvGOys7MRHh4ODw8PSKVSjB8/Hjdv3uQdk5CQoNPCNqaGEjwhbaC0VJPQL18GMjOpj52YLqlUCnd3d1ha6tYArFKpIJFI8Oqrr2Ls2LEtGsvzzz+PmJgY7Ny5E5cvX0ZYWBjGjh2LO3fuAAAYY5g2bRqSk5Pxv//9D4mJifDx8cHYsWNRUu3btYuLC7eynTmhBE9IK9GOhv/nH+DaNc3vdKkbMSexsbEQCAT4/fff0adPH1hbWyM4OBiXL1/mjrGxscGWLVswd+5cuLu763zu8PBwTJs2DStXroSrqyvs7e0xb948VFZWAgDKysqwb98+rF+/Hg888AC6dOmCiIgI+Pn5cQvP3Lx5E6dPn8aWLVswaNAgBAYGYvPmzSguLsb333/fsn8MI0QJnpAWplRSMzxpX95880189NFHSEhIgKurK6ZMmQJFCzRTHTt2DNeuXcPx48fx/fffY//+/Vi5ciUAQKlUQqVS1Vr8RSKR4OTJkwCAiooKAOAdY2FhASsrK+4Yc0YJnpAWUloKpKYCly5RMzxpX1asWIHQ0FD06tULO3bsQHZ2Nvbv39/s81pZWeHbb79FUFAQHnroIaxatQqffvop1Go17OzsMHToUKxevRqZmZlQqVTYtWsXzpw5g6ysLABAt27d4OPjg6VLlyI/Px+VlZX44IMPIJfLuWPMGSV4QppBOxr++nVNM3xeHjXDk/Zn6NCh3O+Ojo4IDAzEtWvXdHrs7du3YWtry90iIyO5fX369IFUKuU9T3FxMdLT0wEAO3fuBGMMnTp1glgsxqeffooZM2ZwK76JRCLs27cPN27cgKOjI6RSKWJjYzFhwgSTWhWuqQya4CMiIiAQCHi36n00jDFERETAw8MDEokEISEhuHr1qgEjJkSjejN8crJmZTdCSBWBjtd8enh44OLFi9ztxRdf1PncnTt3RlxcHJf0z549C4VCAT8/P+7YAQMG4OLFiygoKEBWVhYOHz6MvLw83jHmyuA1+KCgIGRlZXG36oMz1q9fj40bN+Lzzz9HQkIC3N3dERoaiiLthcOEtDFtMzyNhiekyunTp7nf8/PzcePGDXTr1k2nx1paWqJLly7crfpo9r///ptbf177PLa2tvD09OSdw8bGBjKZDPn5+Thy5AimTp1a63kcHBzg4uKCmzdv4ty5c3UeY24MPtGNpaVlnSMrGWPYtGkTli1bhkceeQQAsGPHDri5uWHPnj2YN29eW4dK2inGNGuu5+RQTZ2QuqxatQpOTk5wc3PDsmXL4OzszLvePSkpCZWVlbh37x6Kiopw8eJFAGh0opzKykrMmTMHy5cvR1paGlasWIFXXnkFQqGmbnrkyBEwxhAYGIh///0Xb775JgIDA/Hcc89x5/jpp5/g4uICb29vXL58GQsXLsS0adMQFhbW0n8Go2PwBH/z5k14eHhALBYjODgYkZGR8Pf3R0pKCuRyOe9FEIvFGDVqFOLj4+tN8BUVFdzISQAoLCwEACgUCp1HdWqPa4lRoMaAytM0SqWmTz0vD/jvypxWoVYreD/NgbmVSaXS/z3XlPenvjPLGYsPPvgACxcuxM2bN9GnTx8cOHAAVlZW3P6JEydyM8UBQL9+/QBoKnINGTNmDAICAvDAAw+goqICTz75JCIiIrj99+/fx9KlS5GRkQFHR0c8+uijeP/99yESibhjsrKysHjxYmRnZ0Mmk2HmzJl49913W6jkxs2gCT44OBjfffcdunbtiuzsbKxZswbDhg3D1atXIZfLAQBubm68x7i5ufHeKDWtXbuWu4yiuujoaN5gDV3ExM
Todbyxo/IYN7ncvMoDmF+Z9HnPlWoXGWgHRowYgStXrtS7PzU1tcnnXrlyZZ2f6QAwffp0TJ8+vcHHv/rqq3j11Veb/PymzKAJfsKECdzvvXr1wtChQ9G5c2fs2LEDQ4YMAVB7oAZjrMHBG0uXLsXixYu5+4WFhfDy8kJYWBjs7e11ikuhUCAmJgahoaG8b4KmisrTOMaA+/cNs0SrWq2AXB4Dd/dQCIWm//oA5lcmiUSBW7f0e89pWw/NzebNm/HNN9/g1KlThg6lxQQFBSE5OdnQYbQ4gzfRV2djY4NevXrh5s2bXP+NXC6HTCbjjsnJyalVq69OLBZDLBbX2i4SifROBk15jDGj8tSmVGqWZ717t6oZXmigoadCocgskmF15lIm7RVV+rznzOl/TWv37t3coDdvb2/Ex8cbOKKWcfDgQa5LRdeKoCkwqgRfUVGBa9euYeTIkfDz84O7uztiYmK4/prKykrExcVh3bp1Bo6UmLqyMs2guXv3ALXa0NEQYho6derEux8SEtJoP3pTRUVFtcp56+Lj49Nmz9WWDJrg33jjDUyePBne3t7IycnBmjVrUFhYiFmzZkEgEGDRokWIjIxEQEAAAgICEBkZCalUihkzZhgybGKitM3w1ZdoJYQQc2XQBJ+RkYGnnnoKubm5cHFxwZAhQ3D69Gnu29SSJUtQVlaG+fPnIz8/H8HBwYiOjoadnZ0hwyYmRqXSNMFXb4YnxBioqfmINIGu7xuDJvgffvihwf0CgQARERG8yyII0RU1wxNjZWVlBaFQiMzMTLi4uMDKykrnmd9I+8UYQ2VlJe7evQuhUMi7FLEuRtUHT0hL0E5KQ83wxFgJhUL4+fkhKysLmZmZhg6HmBipVApvb29uwp/6UIInZkGlqhoNX22eI0KMlpWVFby9vbllTwnRhYWFBSwtLXVq8aEET0xaebmmtp6XR83wxPQIBAKzu3yVGA9K8MRk/fuvZvEXQgghtVGCJyZD2wyfna25X1xsuElpCCHE2FGCJ0avZjM8NcUTQkjjKMETo6WdlMZMp/QmhJBWRQmeGBWVSlNTz8mh0fCEENIclOCJUaDR8IQQ0rIowRODomZ4QghpHZTgSZujZnhiChjTvE+zsgCRCLC1NXREhOin2QlepVLh8uXL8PHxQceOHVsiJmKmKiqqmuFp4i5ijJRKzXs0MxOQy6sWJ/LxoQRPTI/eCX7RokXo1asX5syZA5VKhVGjRiE+Ph5SqRS//fYbQkJCWiFMYsoKCzUfmvfvGzoSQmorL9ck86wszfuUvnwSc6F3gv/555/xzDPPAAD+7//+DykpKfjnn3/w3XffYdmyZfjrr79aPEhietTqqrnhy8sNHQ0hfMXFmlp6VpZmtUHGDB0RIS1P7wSfm5sLd3d3AMDBgwfx+OOPo2vXrpgzZw4+/fTTFg+QmBZqhifGiDFNIs/K0txopUHSHuid4N3c3JCUlASZTIbDhw9j8+bNAIDS0lJYWFi0eIDENFAzPDE2KpWmBUlbU6cBnaS90TvBP/fcc5g+fTpkMhkEAgFCQ0MBAGfOnEG3bt1aPEBivNTqqtHw1AxPjEFFRVV/enY2tSKR9k3vBB8REYGePXsiPT0djz/+OMRiMQDNGrVvv/12iwdIjE9FhaZmlJtLH6DE8IqLq5re8/KoP50QrSZdJvfYY48BAMqrVdtmzZrVMhERo0XN8MRY5OdXNb3TJEmE1E3vBK9SqRAZGYkvv/wS2dnZuHHjBvz9/fHuu+/C19cXc+bMaY04iYFom+Hv3gXKygwdDWmv1GrNl0u5XJPYqUuIkMbpvZr2+++/j6ioKKxfvx5WVlbc9l69euGbb75p0eCI4VRWAhkZwKVLwO3blNxJ21MogPR04MwZ4LffgPh4IDmZkjshutK7Bv/dd99h69atGDNmDF588UVue+/evfHPP/+0aHCk7RUVaWpKBQWGjoS0R6WlVU3vtPAQIc2jd4K/c+cOunTpUmu7Wq
2GQqFokaBI21KrNdcI5+RQTZ20vYKCqqRO4zsIaTl6J/igoCCcOHECPj4+vO0//fQT+vXr12KBkdZXWVk1KY1SaehoSHuhVmvGdGRlafrUS0sNHREh5knvBL9ixQo8++yzuHPnDtRqNX755Rdcv34d3333HX777bcmB7J27Vq88847WLhwITZt2gQAYIxh5cqV2Lp1K/Lz8xEcHIwvvvgCQUFBTX4eUtUMf/8+XVJE2oZCobkuXZvUqbGPkNan9yC7yZMnY+/evTh48CAEAgHee+89XLt2Df/3f//HTXqjr4SEBGzduhW9e/fmbV+/fj02btyIzz//HAkJCXB3d0doaCiKaJ7JJsnLA5KSgBs3NM2ilNxJa0tNBU6eBH7/HTh7VjNojpI7IW1D7wQPAOPGjUNcXByKi4tRWlqKkydPIiwsrEkBFBcX4+mnn8bXX3/NW26WMYZNmzZh2bJleOSRR9CzZ0/s2LEDpaWl2LNnT5Oeqz2qrNTUmgDNhyv1sZPWdP8+8M8/wJ9/au5fuqRpLaLBcoS0vWavB99cL7/8Mh566CGMHTsWa9as4banpKRALpfzvjiIxWJuedp58+bVeb6KigpUVJt0uvC/WTAUCoXOgwC1x5nyoMGSEk0/5/37gEqlKYdabbrlqU5bDiqP4WkHaMrl/P50oVDB+2nqBAL9PxNM+fODmAe9E7xQKIRAIKh3v0qPuUt/+OEHXLhwAQkJCbX2yeVyAJrFbapzc3NDWlpavedcu3YtVq5cWWt7dHQ0pFKpzrEBQExMjF7HGzu5nMpjzEy5PI6OmltN/fubbpnqos9nQimNHiQGpneC379/P+++QqFAYmIiduzYUWdirU96ejoWLlyI6OhoWFtb13tczS8TjLEGv2AsXboUixcv5u4XFhbCy8sLYWFhsLe31yk2hUKBmJgYhIaGQiQS6fQYQ6qs1PSv1zcaXq1WQC6Pgbt7KIRC4y9PY6g8ba+8vGqQXG5u403uQqEC/fvH4MKFUKjVxlkmfXh6lkMq/RWPPvqozp8JhTSHLjEwvRP81KlTa2177LHHEBQUhL179+o8Ve358+eRk5ODAQMGcNtUKhX+/PNPfP7557h+/ToATU1eJpNxx+Tk5NSq1VcnFou5BXCqE4lEeifrpjymLRUXV01Kox0wJ2xgVIVQKDLaBNIUVJ7WVVhYtYjLvXtNO4daLTLJBM8YQ0FBMlJSYpCSEoP09D/Qt28PPPnkkzp/JhjzZwdpH1qsDz44OBhz587V+fgxY8bg8uXLvG3PPfccunXrhrfeegv+/v5wd3dHTEwMd319ZWUl4uLisG7dupYK2+QwVjUpDbUAkpbEmKYVKCtLM/FMSYmhI2pbpaV5SEv7g0vq9++n8vbfunULjC49ISakRRJ8WVkZPvvsM3h6eur8GDs7O/Ts2ZO3zcbGBk5OTtz2RYsWITIyEgEBAQgICEBkZCSkUilmzJjREmGbFIVCM2ju7l2alIa0HKVS82UxM1MzSK6y0tARtR2lsgIZGX9xCV0uvwCgKoELhSJ06jQUfn6hGDZsNAYNym6we5AQY6N3gu/YsSPvTc4YQ1FREaRSKXbt2tWiwS1ZsgRlZWWYP38+N9FNdHQ07OzsWvR5jFldzfCENEd5uSaZZ2Vp3lt6jIs1aYypkZNzuVqz+wkolfzrRp2dg+DnFwo/v1B4ez8AKytbAICPjwIWFgcNETYhTaZ3gv/44495CV4oFMLFxQXBwcG869ibIjY2lndfIBAgIiICERERzTqvqaFmeNLSioqqmt7z89vPl8XCwgwuoaemHkNpaQ5vv62tDL6+Y/9L6mNhayur50yEmB69E3x4eHgrhEEAaoYnLUf7JVE7SK69TP5YUVGItLRYLqnfu3edt18ksoG39yiulu7s3IOa3YnZ0inBX7p0SecT1pxuljSupERTW29PNSvS8lQqzZdD7cps1eZ7MlsqlQKZmWf/q6EfxZ07p8FYVZ+DQCCETDaIS+
idOg2BhYWVASMmpO3olOD79u0LgUDQ6AhSgUCg10Q37RljmoSek9P+RiuTllNRUdWfnp1t/v3pjDHk5V3/L6HHIC0tFpWV/OaJjh27VOtHD4FE0ryuQ0JMlU4JPiUlpbXjaDe0zfC5ubToBmma4uKqpve8PPNv9SkpyUFq6tH/mt2Poqgog7dfInGCr+8Y+PmFwtd3LDp08DVMoIQYGZ0SfM2134n+qBmeNEf1/nRznyBNoShFevoJrh89J4ffRWhhIYaX1wiulu7m1hcCQZPWzSLErDX5OvikpCTcvn0blTUunJ0yZUqzgzIX1AxPmkrbn65N6uXlho6o9ajVKmRnJ3I19IyMk1Cp+J8rbm59uYTu6TkCIpHEQNESYjr0TvDJycl4+OGHcfnyZV6/vHYkKvXBa5rec3M1H9DUDE90VVlZNUguO9u8r6QoKEjhEnpa2jGUlfHnwrW39/qvyT0Uvr4PwsbG1UCREmK69E7wCxcuhJ+fH44ePQp/f3+cPXsWeXl5eP311/HRRx+1Rowmo7RU88FMzfBEV6WlmoRubQ1ER5tvUi8ry+emgU1NPYr8/Fu8/WKxPXx8RsPXV3M9uqNjV7p8jZBm0jvBnzp1Cn/88QdcXFwgFAohFAoxYsQIrF27Fq+++ioSExNbI06jRc3wRF8FBVWXst2/r1kgaODAxldoMyVKZQUuX76M48dPISXlOLKyzoGxqgIKhZbw8BjCTTDj4TEYQmGLLY1BCEETErxKpYKtrWb6RmdnZ2RmZiIwMBA+Pj7cCnDtgVJZNSkNNcOThqjVVf3pcrl5zk7IGMPdu1e4Gvrt23FQKPgFdXLqziV0b+8QiMXtZ8ppQgxB7wTfs2dPXLp0Cf7+/ggODsb69ethZWWFrVu3wt/fvzViNCqlpZra+r171AxP6qdQaLprtP3p5vglsKgok0voKSlHUVIi5+3v0KEDPD0nwNc3DL6+Y2Fvr/tiVISQ5tM7wS9fvhwl/7VFr1mzBpMmTcLIkSPh5OSEvXv3tniAxoAxTbNqTo7mGmRC6lJWVtX0nptrXk3uAFBRUYTbt+O4a9Jzc5N4+y0tJdw0sP7+ozBxYjrOn3/IJNeDJ8Qc6J3gx40bx/3u7++PpKQk3Lt3r9Yqc+ZAqaTR8KRh9+9XLeJSUGDoaFqWWq1EZmYCl9Dv3DkFtbr6KEABZLKB3AQznp7DYGkpBgAIhQoIBBl1n5gQ0ib0TvA7duzAY489BhsbG26bo6NjiwZlLJKSGj+GtC9qtWb2OG1N3Zz60xljuHfvJpfQ09KOo6LiPu+YDh38uYTu6/sgJBLz/N8nxBzoneDfeOMNzJ8/H5MnT8YzzzyD8ePHw9LSPEe/qtWaEc6kfVMqNYPjtLcaczuZtJKSu0hNPcYl9cLC27z91tYd4es7hltStWNH8x9nQ4i50DszZ2Vl4fDhw/j+++/x5JNPQiKR4PHHH8czzzyDYcOGtUaMhLS58vKqpve7d82nP12hKENGxkmkpGgSenY2/7JWCwsreHoO5xK6u3t/CIUWBoqWENIceid4S0tLTJo0CZMmTUJpaSn279+PPXv2YPTo0fD09MStW7caPwkhRqiwUJPQ5XLNVRLmgDE1srMvcgk9I+MklEr+vLeurr25hO7lNRJWVjb1nI0QYkqa1bYulUoxbtw45OfnIy0tDdeuXWupuAhpdYxp+tO1NXVzmajo/v00LqGnph5DWVkub7+trQc3r7uv7xjY2robKFLjJRAA9vaAk5Pm5uUFZNCYQWJimpTgtTX33bt34+jRo/Dy8sJTTz2Fn376qaXjI6RFKZWayx21NXVz6E8vLy9AWlost0b6vXs3efutrGzh7R3CJXUnp25md8VLc4lEQMeOgLMz4OiouVUfWiSVGi42QppK7wT/1FNP4f/+7/8glUrx+OOPIzY2lvreiVErL9ck86wsTXI39fWQVKpK3LlzhkvomZlnedPACgQW6NQpmGt29/AIhoUFXYtenY
2Npmbu6Kj5aW+vqbUTYk70TvACgQB79+7FuHHjzHb0PDF9RUVVTe+mvvgPYwy5ude4hH77dhwqK/kzLjk6BsLPT5PQvb1DYG3tYKBojY9QCHToUNXc7uioWdyHEHOnd4bes2dPa8RBSLMwphkYp10/vajI0BE1T3GxnLt0LSXlKIqLM3n7JRJnLqH7+o6Fg4O3gSI1PlZW/Kb2jh0BC7oQgLRDVAUnJkul0szzrk3qFRWGjqjpysvL8e+/h5GcfBwpKTG4e/cKb7+lpTW8vEZy/eiurr0hENAkDYCmeV3b1O7kBPy3FhYh7R4leGJSKio0/ekWFsDhw6Y7hbBarYJcfv6/Gno0MjPjoVTyp4F1d+/3Xw09FF5ew2FpSe3KFhZVyVz7U0TDCwipk84JPiMjA56eLbsa1JYtW7BlyxakpqYCAIKCgvDee+9hwoQJADR9jytXrsTWrVuRn5+P4OBgfPHFFwgKCmrROIhxKy6uqqXn5WkGQw0caHqD5fLzb/2X0GOQlvYHyssLePsdHHy4hO7r+yCkUmfDBGpEpNKqpnZnZ01tnWaXJEQ3Oif4nj174rPPPsOzzz7bYk/u6emJDz74AF26dAGgmed+6tSpSExMRFBQENavX4+NGzciKioKXbt2xZo1axAaGorr16/Dzo7WkjZn1fvTCwv5+0xltHNpaR7S0v7gllQtKEjh7ReLHeDj8yA6d34QU6ZYIiNjNhizMlC0hicU8q89d3Sky9MIaQ6dE3xkZCRefvll/Prrr9i6dSucnJya/eSTJ0/m3X///fexZcsWnD59Gj169MCmTZuwbNkyPPLIIwA0XwDc3NywZ88ezJs3r9nPT4yHSqWZElab1MvLG3+MsVEqK5CR8RdXS5fLLwCoGr4vFIrQqdNQrh9dJhsAodASQqECMtlB3LkjMOnR/vqystIMgNMm9I4d+deeE0KaR+d/p/nz52PChAmYM2cOgoKCsHXrVkyZMqXFAlGpVPjpp59QUlKCoUOHIiUlBXK5HGFhYdwxYrEYo0aNQnx8fL0JvqKiAhXVRlsV/lf9UygUUOjYYas9Tq020Q7eGrTlMLbyVFZqrkuXyzU/q3dBN9QMKxQqeD8NhTE1cnIuIzn5GFJSjuH27ZNQKst4x7i49ICf31j4+4+Bt/dIWFlVHwHGACiMpjwtqa4y2dpWJfSOHYG6GuGMdc5/lUpTDl0/Q/Q9lpDWIGBM/zrD559/jtdeew3du3evdS38hQsX9DrX5cuXMXToUJSXl8PW1hZ79uzBxIkTER8fj+HDh+POnTvw8PDgjn/hhReQlpaGI0eO1Hm+iIgIrFy5stb2PXv2QErtfaSZcnNz8ffff+PixYu4dOkS7t/nL6fasWNH9OnTh7uZ61LKpHGlpaWYMWMG7t+/D3t7e0OHQ9ohvRvE0tLSsG/fPjg6OmLq1KnNnuwmMDAQFy9eREFBAfbt24dZs2YhLi6O219zSk3GWIPTbC5duhSLFy/m7hcWFsLLywthYWE6/5MpFArExMTA3T0UQqHpD9FVqxWQyw1Xnvv3Nc3u2dma35tLKFSgf/8YXLgQCrW6dctTUVGItLS4/2rpR5GXd4O3XySygY/PA/DzGwN//zFwdu7BvT+TkzW3xrRleVqLtbWmVq4dEGdnp0BOjvn8D0kkCty6FYPQ0FCIdBy2X1hz8AghbUyv7Pz111/j9ddfx9ixY3HlyhW4uLg0OwArKytukN3AgQORkJCATz75BG+99RYAQC6XQyaTccfn5OTAzc2t3vOJxWKIxeJa20Uikc7/mFpCocgsPpy02qo8anVVf7pcDpSWttbziFo8IapUCmRmnuUGxt25cxqMVQ3XFwiEkMkGcf3onToNgYVF1cA4xpo+a15rlKc1CASa5vXqk8nUvPZc29RuLv9D2oly9Pkc0ffzhpCWpnOCHz9+PM6ePYvPP/8cM2fObLWAGGOoqKiAn58f3N3dERMTg379+gEAKisrERcXh3Xr1r
Xa85OmUSg0NfTMTM1PU+l+ZIwhL+86Nw1sWlosKiv50+B17NiFS+je3iGQSDoaKFrDsLSsSuTa0e2UuwgxfjoneJVKhUuXLrXotfDvvPMOJkyYAC8vLxQVFeGHH35AbGwsDh8+DIFAgEWLFiEyMhIBAQEICAhAZGQkpFIpZsyY0WIxkKYrK9Mk9KwsIDfXeAdI1VRSksObBraoiL8OqETiBF/fMdw0sB06+BomUAORSvmXqjk4mM6liYSQKjon+JiYmBZ/8uzsbDz77LPIysqCg4MDevfujcOHDyM0NBQAsGTJEpSVlWH+/PncRDfR0dF0DbwBFRRomt0zMzW/mwKFohTp6Se4hJ6T8zdvv4WFGF5eI7iE7u7er91MA6tdiKX6ZDK0EAsh5sGgV51u27atwf0CgQARERGIiIhom4BILWq1ZvY4bU29tfrTW5JarUJ2diKX0DMyTkKl4i/87ubWl2t29/QcAZFIYqBo25aVFX/e9g4d6NpzQswV/WuTWpRKTS1de6usbPwxhlZQkMIl9LS0Yygru8fbb2/vxZsG1sbG1UCRti07O/687dT4RUj7QQmeANDMHKddP/3uXePvTy8uLsa1a78gOTkWqakxyM+/xdsvFtvDx2c0fH1D4ec3Fo6OXRu8vNIcWFhUXaqmHeFu1X5nviWk3aME344VFmoSulyumfvdmCmVFbhz5xQ32l0uPw91tW8hQqElPDyG/NfsPhYeHoMhFJr329vauqqp3clJMxiOFmIhhGiZ9ycg4WFMM9pdW1MvKTF0RPVjjOHu3Svc9ei3b8dBoeAPAHB27gZf3zD4+Y2Ft3cIxGLzbX8WCDQJvHr/OU3MSAhpCCV4M6dUaq5LB4AjR4x7EZeiokwuoaekHEVJiZy338bGDb6+Y+HvPxpTpwKpqTNNYmKYphCJNM3t1SeTocFwhBB90EeGGSovr7qULSdHs23gQOMbLFdRUYTbt+O4a9Jzc5N4+y0tJfD2HgU/v7Hw8wuFi0svCAQCCIUKODsfRGqqYeJuLV5eVcnc3p6uPSeENA8leDNRVFTV9J6fz58u1Vj6ZdVqJTIzE7iEfufOKajV1ZaQgwAy2UDuenRPz2GwtKw97bCp0157Xv1StXv3gH79jOe1IoSYPkrwJooxTVLQrp9eVNT4Y9oaYwz37t3kEnpa2nFUVPBXm+nQwZ9L6L6+D0IiMb/V18TiqkvVHB01Te/auc0B479igRBimijBmxCVStPkrk3q1Za9NxolJXeRlvbHf9ekx6Cw8DZvv7V1R/j4PMhNMtOxo7+BIm099vb8a89rLsRCCCFtgRK8kauo0PSna5dbVakaf0xbUijKkJHxF5fQs7MTefstLKzg6Tkcvr6afnR39/4QCi3qOZvpsbDgL8Li5EQLsRBCjAMleCNUXFzVn37vXtOXH20NjKmRnX0RKSmaZveMjJNQKvlD811de3MJ3ctrJKysbAwUbcuTSvnzttNCLIQQY0UJ3khU708vLDR0NHz376dxCT019RjKynJ5+21tPbgmd1/fMbC1dTdQpC1LKKxqbtfeJO1jynpCiBmgBG8gKpVmSlhtUjem69PLywuQlhbLzRp3795N3n4rK1t4e4dwSd3JqZtZTAOrXYhF29TesSNde04IMV308dWGKiv5/elKZeOPaQsqVSXu3DnDJfTMzLNgrGpot0BgAQ+PwVxC9/AIhoWF6Xc029ry5223tzd0RIQQ0nIowbeykpKqWnpennFcEsUYQ27uNS6h374dh8rKYt4xjo6B3AQz3t4hsLZ2MFC0LUMo1NTItU3tjo6ay9cIIcRcUYJvBQUFmgFymZnG05+en5+Py5d3Izn5OFJSjqK4OJO3XyJx5hK6r+9YODh4GyjSlmFtXXvdc5pEhhDSnlCCbwFqNb8/vazM0BEBlZUlSE//87/L16Jx9+5V3n5LS2t4eY3kmt1dXXtDIDDNDCgQaNY5rz5vO117Tghp7yjBN5FCoelHz8zU/F
QoDBuPWq2CXH6eux49IyMeanVVUAKBAG5ufeHnFwZf31B4eQ2HpaW1ASNuOu3At8DAqoRO154TQggfJXg9lJZW1dJzcw3fn56ff4tL6Glpf6C8vIC338HBB35+ofD3H41p01S4ceNJk1x9TSrl953b2Wleg8BAanYnhJD6UIJvREFB1cpsBQWGjaW0NI+bBjY19SgKClJ4+8VihxrTwHbmVl+ztz9ooKj1o12IpfpkMtY1GhoM/cWKEEJMASX4Bhw9qplVzlCUygreNLBy+QUAVdPaCYUidOo0lEvoMtkACIWm9ZJaWfGnee3Qga49J4SQlkAfpQ0oLW3b52NMjZycy1xCT08/AaWSP2LP2TmIS+je3g/Aysq0RpPZ2fETup2doSMihBDzRAnewAoLM7iEnpp6DKWlObz9trYybl53X98xsLPzMFCk+rOw0Fx7Xn0yGSsrQ0dFCCHtAyX4NlZRUchNA5uSEoN7967z9otENvD2HsXV0p2de5jMNLASCf/acwcHGgRHCCGGYtAEv3btWvzyyy/4559/IJFIMGzYMKxbtw6BgYHcMYwxrFy5Elu3bkV+fj6Cg4PxxRdfICgoyICR606lUiAz8yw3MO7OndNgrGrNV4FACJlsEJfQO3UaAgsL46/mCgSaBF49oUulho6KEEKIlkETfFxcHF5++WUMGjQISqUSy5YtQ1hYGJKSkmBjo1lidP369di4cSOioqLQtWtXrFmzBqGhobh+/TrsjLADlzGGvLzrXEJPSzuOysoi3jEdO3ap1o8eAomko4Gi1Z1IpGlurz6ZDA2GI4QQ42XQj+jDhw/z7m/fvh2urq44f/48HnjgATDGsGnTJixbtgyPPPIIAGDHjh1wc3PDnj17MG/ePEOEXUtJSQ5SU4/+1+x+FEVFGbz9EokTfH3HcNPAdujga5hA9WBjw7/23N6e1j0nhBBTYlR1sPv37wMAHB0dAQApKSmQy+UICwvjjhGLxRg1ahTi4+PrTPAVFRWoqKjg7hf+Nxm8QqGAQsfp5rTHCYV1H69QlOL27ZNISfkDKSlHkZ19ibffwkIML6/h8PcfAz+/MXB371tjGti2nfZOW476yiMUVjW3Ozpqauo1rz1nTHMzBtoZ+qrP1GfKzK08gPmVSaXSlEPXzxB9jyWkNRhNgmeMYfHixRgxYgR69uwJAJDL5QAANzc33rFubm5IS0ur8zxr167FypUra22Pjo6GVM9O4v79YwAAKpUKKSkpuHjxIv7++29cu3YNyhprvfr5+aFPnz7o27cvunfvDjG3VJkcAL+lwlC05WkIY8C9e20QTAuQyxsvjykxt/IA5lemmBjdy1Pa1tfZElKD0ST4V155BZcuXcLJkydr7as5ipwxVu/I8qVLl2Lx4sXc/cLCQnh5eSEsLAz2Oi74rVAosGvXLhw4UInk5Fikph5HWRk/69nbe8HPbwz8/B6En9+DsLFxBaBZ4/3yZZ2eps106KBAly4xUKlC4egown/DG0yWWq2AXB4Dd/dQCIWmN/VuTeZWHsD8yiSRKHDrVgxCQ0Mh0nHhg0JjWUqStFtGkeAXLFiAAwcO4M8//4Snpye33d3dHYCmJi+TybjtOTk5tWr1WmKxuFrtuYpIJNLpH1Mul2PEiBG4detWjfPaw8dnNHx9Q+HnNxaOjl15XzKMZfpUC4uqke3anxYWmql2vbxEZvFhqyUUUnmMnbmUycJC81PXzxHtsYQYkkETPGMMCxYswP79+xEbGws/Pz/efj8/P7i7uyMmJgb9+vUDAFRWViIuLg7r1q1rlZjc3NxQUlICCwsLeHgMga9vGPz8xsLDY7BRTgMrlfLnbXdwqD0Yzli+fBBCCGk7Bs1YL7/8Mvbs2YP//e9/sLOz4/rcHRwcIJFIIBAIsGjRIkRGRiIgIAABAQGIjIyEVCrFjBkzWiUmgUCA//3vf0hOTsbVq48a1epr1QfDaUe4SySGjooQQogxMmiC37JlCwAgJCSEt3
379u0IDw8HACxZsgRlZWWYP38+N9FNdHR0q14D369fP2RlZbXa+XVlZVVVO3dy0oxup2vPCSGE6MLgTfSNEQgEiIiIQEREROsHZGC2tvx523UcE0gIIYTUQvVBAxEKNTXy6pPJ1DE2kBBCCGkSSvBtxNqa33feoQMtxEIIIaT1UIJvBQKBZp1zbVO7kxNM/tpzQgghpoUSfAuwtORfe+7oqFmchRBCCDEUSvBNIJXy+87ruvacEEIIMSRK8I0QCjX95dX7z2suxEIIIYQYG0rwDRg+XJPYtdNUEkLaD0tLzVwUIhFNKEVMEyX4Bjg50Uh3QsyNQKBJ2iJRVQKv62f1bjda+ZWYIkrwhBCzIRQ2nLS1iZ2Q9oASPCHEJFhaNl7rpu40QqpQgieEGJRAwO/vru8ndZcRoh9K8ISQViMUaqZgbih5W1rSZaaEtAZK8ISQJrGwaLi2nZkJ9O5Nfd6EGAoleEJILY31dTfWZE6jzgkxPErwhLQjAoFuo8ypyZwQ00cJnhAzYWHReM3bkv7jCWk36N+dEBOgy8QsNMqcEFIdJXhCDEjbZA5o1jyQSOpO4NRkTgjRFyV4QlqJrk3mCgWQmgr4+tKIc0JIy6EET0gT6DIxC82qRggxJErwhFSjXYikrpHl9S1EQgghxogSPGk3aCESQkh7QgmemAVaiIQQQvgowROjVtfa3dppUAMCqkad0yVihBDCZ9CPxT///BOTJ0+Gh4cHBAIBfv31V95+xhgiIiLg4eEBiUSCkJAQXL161TDBkhanXYjEzg5wdATc3QEvL6BzZ6BbN8085v37A716ae77+2v2u7pqHm9jo3k8JXdCCKnNoDX4kpIS9OnTB8899xweffTRWvvXr1+PjRs3IioqCl27dsWaNWsQGhqK69evw87OzgARE11RkzkhhBiWQRP8hAkTMGHChDr3McawadMmLFu2DI888ggAYMeOHXBzc8OePXswb968tgyVVNPchUgIIYS0PqPtg09JSYFcLkdYWBi3TSwWY9SoUYiPj683wVdUVKCiooK7X1hYCABQKBRQ6LjElfY4tdo8lsTSlqOx8giFmuRc/Rrv6v3f2lp5Y5eIqVSaW2vRvj66vp7GztzKA5hfmZpSHnMpOzFdRpvg5XI5AMDNzY233c3NDWlpafU+bu3atVi5cmWt7dHR0ZBKpXrGEKPX8cbO3MoTE0PlMXbmViZ9ylNaWtqKkRDSOKNN8FqCGtVFxlitbdUtXboUixcv5u4XFhbCy8sLYWFhsLe31+k5FQoFYmJi4O4eCqHQ+C+MbqjWralxK3D0aAxCQ0MhMoMLvbWvD5XHeJlbmZpSHm3rISGGYrQJ3t3dHYCmJi+TybjtOTk5tWr11YnFYojF4lrbRSKR3h80QqHIoAm+pdbu1rYUNuVvYMyoPMbP3MqkT3nMqdzENBltgvfz84O7uztiYmLQr18/AEBlZSXi4uKwbt06A0fXfLR2NyGEkNZk0BRSXFyMf//9l7ufkpKCixcvwtHREd7e3li0aBEiIyMREBCAgIAAREZGQiqVYsaMGQaMunG0EAkhhBBDM2iCP3fuHEaPHs3d1/adz5o1C1FRUViyZAnKysowf/585OfnIzg4GNHR0Qa7Br6+hUho7W5CCCHGxqAJPiQkBIyxevcLBAJEREQgIiKi7YKqxtNTMxUqLURCCCHE1FAvbwOcnSmpE0IIMU003xghhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWbI7Ce60c6Up8/SjQqFAqWlpSgsLDSLFaGoPMbN3MoDmF+ZmlIe7WdOQ7N1EtKazD7BFxUVAQC8vLwMHAkhpD0qKiqCg4ODocMg7ZCAmfnXS7VajczMTNjZ2UGg4yowhYWF8PLyQnp6Ouzt7Vs5wtZH5TFu5lYewPzK1JTyMMZQVFQEDw8PCIXUG0rantnX4IVCITw9PZv0WHt7e7
P4cNKi8hg3cysPYH5l0rc8VHMnhkRfKwkhhBAzRAmeEEIIMUOU4OsgFouxYsUKiMViQ4fSIqg8xs3cygOYX5nMrTykfTD7QXaEEEJIe0Q1eEIIIcQMUYInhBBCzBAleEIIIcQMUYInhBBCzFC7TfCbN2+Gn58frK2tMWDAAJw4caLB4+Pi4jBgwABYW1vD398fX375ZRtFqht9yvPLL78gNDQULi4usLe3x9ChQ3HkyJE2jLZx+r4+Wn/99RcsLS3Rt2/f1g1QT/qWp6KiAsuWLYOPjw/EYjE6d+6Mb7/9to2ibZy+5dm9ezf69OkDqVQKmUyG5557Dnl5eW0UbcP+/PNPTJ48GR4eHhAIBPj1118bfYyxfx4QAgBg7dAPP/zARCIR+/rrr1lSUhJbuHAhs7GxYWlpaXUen5yczKRSKVu4cCFLSkpiX3/9NROJROznn39u48jrpm95Fi5cyNatW8fOnj3Lbty4wZYuXcpEIhG7cOFCG0deN33Lo1VQUMD8/f1ZWFgY69OnT9sEq4OmlGfKlCksODiYxcTEsJSUFHbmzBn2119/tWHU9dO3PCdOnGBCoZB98sknLDk5mZ04cYIFBQWxadOmtXHkdTt48CBbtmwZ27dvHwPA9u/f3+Dxxv55QIhWu0zwgwcPZi+++CJvW7du3djbb79d5/FLlixh3bp1422bN28eGzJkSKvFqA99y1OXHj16sJUrV7Z0aE3S1PI88cQTbPny5WzFihVGleD1Lc+hQ4eYg4MDy8vLa4vw9KZveT788EPm7+/P2/bpp58yT0/PVouxqXRJ8Mb+eUCIVrtroq+srMT58+cRFhbG2x4WFob4+Pg6H3Pq1Klax48bNw7nzp2DQqFotVh10ZTy1KRWq1FUVARHR8fWCFEvTS3P9u3bcevWLaxYsaK1Q9RLU8pz4MABDBw4EOvXr0enTp3QtWtXvPHGGygrK2uLkBvUlPIMGzYMGRkZOHjwIBhjyM7Oxs8//4yHHnqoLUJuccb8eUBIdWa/2ExNubm5UKlUcHNz4213c3ODXC6v8zFyubzO45VKJXJzcyGTyVot3sY0pTw1bdiwASUlJZg+fXprhKiXppTn5s2bePvtt3HixAlYWhrXW7op5UlOTsbJkydhbW2N/fv3Izc3F/Pnz8e9e/cM3g/flPIMGzYMu3fvxhNPPIHy8nIolUpMmTIFn332WVuE3OKM+fOAkOraXQ1eq+bSsYyxBpeTrev4urYbir7l0fr+++8RERGBvXv3wtXVtbXC05uu5VGpVJgxYwZWrlyJrl27tlV4etPn9VGr1RAIBNi9ezcGDx6MiRMnYuPGjYiKijKKWjygX3mSkpLw6quv4r333sP58+dx+PBhpKSk4MUXX2yLUFuFsX8eEAK0wxq8s7MzLCwsatU2cnJyan0r13J3d6/zeEtLSzg5ObVarLpoSnm09u7dizlz5uCnn37C2LFjWzNMnelbnqKiIpw7dw6JiYl45ZVXAGgSJGMMlpaWiI6OxoMPPtgmsdelKa+PTCZDp06deEuNdu/eHYwxZGRkICAgoFVjbkhTyrN27VoMHz4cb775JgCgd+/esLGxwciRI7FmzRqTq/Ea8+cBIdW1uxq8lZUVBgwYgJiYGN72mJgYDBs2rM7HDB06tNbx0dHRGDhwIEQiUavFqoumlAfQ1NzDw8OxZ88eo+oL1bc89vb2uHz5Mi5evMjdXnzxRQQGBuLixYsIDg5uq9Dr1JTXZ/jw4cjMzERxcTG37caNGxAKhfD09GzVeBvTlPKUlpZCKOR/1FhYWACoqvmaEmP+PCCEx0CD+wxKe5nPtm3bWFJSElu0aBGzsbFhqampjDHG3n77bfbss89yx2svi3nttddYUlIS27Ztm1FdFqNvefbs2cMsLS3ZF198wbKysrhbQUGBoYrAo295ajK2UfT6lqeoqIh5enqyxx57jF29epXFxcWxgIAA9vzzzxuqCDz6lmf79u3M0tKSbd68md
26dYudPHmSDRw4kA0ePNhQReApKipiiYmJLDExkQFgGzduZImJidxlf6b2eUCIVrtM8Iwx9sUXXzAfHx9mZWXF+vfvz+Li4rh9s2bNYqNGjeIdHxsby/r168esrKyYr68v27JlSxtH3DB9yjNq1CgGoNZt1qxZbR94PfR9faoztgTPmP7luXbtGhs7diyTSCTM09OTLV68mJWWlrZx1PXTtzyffvop69GjB5NIJEwmk7Gnn36aZWRktHHUdTt+/HiD/w+m+HlACGOM0XKxhBBCiBlqd33whBBCSHtACZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCalDamoqBAIBLl68aOhQCCGkSSjBE5MVHh6OadOm1doeGxsLgUCAgoKCJp/by8sLWVlZ6NmzZ9MDJIQQA2p3y8US0pjKykpYWVnB3d3d0KEQQkiTUQ2emL19+/YhKCgIYrEYvr6+2LBhA2+/r68v1qxZg/DwcDg4OGDu3Lm1mujDw8MhEAhq3WJjYwEA+fn5mDlzJjp27AipVIoJEybg5s2b3HNERUWhQ4cOOHLkCLp37w5bW1uMHz8eWVlZbfVnIIS0M5TgiVk7f/48pk+fjieffBKXL19GREQE3n33XURFRfGO+/DDD9GzZ0+cP38e7777bq3zfPLJJ8jKyuJuCxcuhKurK7p16wZA8wXg3LlzOHDgAE6dOgXGGCZOnAiFQsGdo7S0FB999BF27tyJP//8E7dv38Ybb7zRquUnhLRjBl7NjpAmmzVrFrOwsGA2Nja8m7W1NQPA8vPz2YwZM1hoaCjvcW+++Sbr0aMHd9/Hx4dNmzaNd0xKSgoDwBITE2s97759+5hYLGYnTpxgjDF248YNBoD99ddf3DG5ublMIpGwH3/8kTGmWRMdAPv333+5Y7744gvm5ubW7L8DIYTUhWrwxKSNHj0aFy9e5N2++eYbbv+1a9cwfPhw3mOGDx+OmzdvQqVScdsGDhyo0/MlJiZi5syZ+OKLLzBixAjuOSwtLREcHMwd5+TkhMDAQFy7do3bJpVK0blzZ+6+TCZDTk6OfgUmhBAd0SA7YtJsbGzQpUsX3raMjAzud8YYBAIBbz9jrM7zNEYul2PKlCmYM2cO5syZ0+D56npukUjE2y8QCOp9LCGENBfV4IlZ69GjB06ePMnbFh8fj65du8LCwkLn85SXl2Pq1Kno1q0bNm7cWOs5lEolzpw5w23Ly8vDjRs30L179+YVgBBCmohq8MSsvf766xg0aBBWr16NJ554AqdOncLnn3+OzZs363WeefPmIT09HceOHcPdu3e57Y6OjggICMDUqVMxd+5cfPXVV7Czs8Pbb7+NTp06YerUqS1dJEII0QnV4IlZ69+/P3788Uf88MMP6NmzJ9577z2sWrUK4eHhep0nLi4OWVlZ6NGjB2QyGXeLj48HAGzfvh0DBgzApEmTMHToUDDGcPDgwVrN8oQQ0lYEjDoBCSGEELNDNXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDP0/abufTubPN7wAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -2625,7 +4105,7 @@ "print('probs.shape (N,H,K) \\t', probs.shape)\n", "\n", "model = NBMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (counts, probs)\n", + "distr_args = (counts, probs, weights)\n", "samples, sample_mean, quants = model.sample(distr_args, num_samples=2000)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb new file mode 100644 index 000000000..6bafac332 --- /dev/null +++ b/nbs/models.deepnpts.ipynb @@ -0,0 +1,1137 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.deepnpts" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# DeepNPTS" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting.\n", + "\n", + "\n", + "**References**
\n", + "[Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n",
+    "\n",
+    "\n",
+    ":::{.callout-warning collapse=\"false\"}\n",
+    "#### Exogenous Variables, Losses, and Parameters Availability\n",
+    "\n",
+    "Given the sampling procedure during inference, DeepNPTS only supports `GMM`, `PMM`, and `NBMM` as training losses.\n",
+    "\n",
+    "Note that DeepNPTS generates a non-parametric forecast distribution using Monte Carlo. We use this sampling procedure also during validation to make it closer to the inference procedure. Therefore, only the `MQLoss` is available for validation.\n",
+    "\n",
+    "Additionally, Monte Carlo implies that historic exogenous variables are not available for the model.\n",
+    ":::"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#| export\n",
+    "import numpy as np\n",
+    "\n",
+    "import torch\n",
+    "import torch.nn as nn\n",
+    "import neuralforecast.losses.pytorch as losses\n",
+    "from typing import Optional\n",
+    "from functools import partial\n",
+    "\n",
+    "\n",
+    "from neuralforecast.common._base_windows import BaseWindows\n",
+    "from neuralforecast.losses.pytorch import MQLoss, GMM, PMM, NBMM\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#| hide\n",
+    "import logging\n",
+    "import warnings\n",
+    "\n",
+    "from fastcore.test import test_eq\n",
+    "from nbdev.showdoc import show_doc"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#| hide\n",
+    "logging.getLogger(\"pytorch_lightning\").setLevel(logging.ERROR)\n",
+    "warnings.filterwarnings(\"ignore\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 2. 
DeepNPTS" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class DeepNPTS(BaseWindows):\n", + " \"\"\" DeepNPTS\n", + "\n", + " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + "\n", + " **Parameters:**
\n", + " `h`: int, Forecast horizon.
\n",
+    " `input_size`: int, autoregressive input size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + " `hidden_size`: int=32, hidden size of dense layers.
\n", + " `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + " `dropout`: float=0.1, dropout.
\n",
+    " `dropout`: float=0.1, dropout rate applied after each dense layer.<br>
\n", + " `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", + " `stat_exog_list`: str list, static exogenous columns.
\n", + " `hist_exog_list`: str list, historic exogenous columns.
\n", + " `futr_exog_list`: str list, future exogenous columns.
\n", + " `exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + " `loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + " `valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n",
+    " `valid_loss`: PyTorch module=`MQLoss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).<br>
\n", + " `learning_rate`: float=1e-3, Learning rate between (0, 1).
\n",
+    " `learning_rate`: float=1e-5, Learning rate between (0, 1).<br>
\n",
+    " `num_lr_decays`: int=3, Number of learning rate decays, evenly distributed across max_steps.<br>
\n", + " `val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + " `batch_size`: int=32, number of different series in each batch.
\n", + " `valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + " `windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + " `inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + " `step_size`: int=1, step size between each window of temporal data.
\n",
+    " `scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br>
\n", + " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `alias`: str, optional, Custom name of the model.
\n", + " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", + "\n", + " **References**
\n", + " - [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", + "\n", + " \"\"\"\n", + " # Class attributes\n", + " SAMPLING_TYPE = 'windows'\n", + " \n", + " def __init__(self,\n", + " h,\n", + " input_size: int = -1,\n", + " hidden_size: int = 32,\n", + " batch_norm: bool = True,\n", + " dropout: float = 0.1,\n", + " n_layers: int = 2,\n", + " trajectory_samples: int = 100,\n", + " futr_exog_list = None,\n", + " hist_exog_list = None,\n", + " stat_exog_list = None,\n", + " exclude_insample_y = False,\n", + " loss = GMM(),\n", + " valid_loss = MQLoss(level=[80, 90]),\n", + " max_steps: int = 1000,\n", + " learning_rate: float = 1e-5,\n", + " num_lr_decays: int = 3,\n", + " early_stop_patience_steps: int =-1,\n", + " val_check_steps: int = 100,\n", + " batch_size: int = 32,\n", + " valid_batch_size: Optional[int] = None,\n", + " windows_batch_size: int = 1024,\n", + " inference_windows_batch_size: int = -1,\n", + " start_padding_enabled = False,\n", + " step_size: int = 1,\n", + " scaler_type: str = 'standard',\n", + " random_seed: int = 1,\n", + " num_workers_loader = 0,\n", + " drop_last_loader = False,\n", + " optimizer = None,\n", + " optimizer_kwargs = None,\n", + " **trainer_kwargs):\n", + "\n", + " if hist_exog_list is not None:\n", + " raise Exception('DeepNPTS does not support historical exogenous variables.')\n", + "\n", + " if exclude_insample_y:\n", + " raise Exception('DeepNPTS has no possibility for excluding y.')\n", + " \n", + " supported_losses = (losses.GMM,\n", + " losses.PMM,\n", + " losses.NBMM)\n", + "\n", + " if not isinstance(loss, supported_losses):\n", + " raise Exception('DeepNPTS only supports GMM, PMM or NBMM as loss function.') \n", + " \n", + " if not isinstance(valid_loss, losses.MQLoss):\n", + " raise Exception('DeepNPTS only supports MQLoss as validation loss.')\n", + " \n", + " # Overwrite n_components, it has to be the input_size in DeepNPTS\n", + " loss.n_components = input_size\n", + " \n", + " # Inherit BaseWindows class\n", + " super(DeepNPTS, self).__init__(h=h,\n", + " 
input_size=input_size,\n", + " futr_exog_list=futr_exog_list,\n", + " hist_exog_list=hist_exog_list,\n", + " stat_exog_list=stat_exog_list,\n", + " exclude_insample_y = exclude_insample_y,\n", + " loss=loss,\n", + " valid_loss=valid_loss,\n", + " max_steps=max_steps,\n", + " learning_rate=learning_rate,\n", + " num_lr_decays=num_lr_decays,\n", + " early_stop_patience_steps=early_stop_patience_steps,\n", + " val_check_steps=val_check_steps,\n", + " batch_size=batch_size,\n", + " windows_batch_size=windows_batch_size,\n", + " valid_batch_size=valid_batch_size,\n", + " inference_windows_batch_size=inference_windows_batch_size,\n", + " start_padding_enabled=start_padding_enabled,\n", + " step_size=step_size,\n", + " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", + " drop_last_loader=drop_last_loader,\n", + " random_seed=random_seed,\n", + " optimizer=optimizer,\n", + " optimizer_kwargs=optimizer_kwargs,\n", + " **trainer_kwargs)\n", + "\n", + " self.h = h\n", + " self.h_backup = self.h # Used because h=1 during training\n", + " self.use_softmax = True\n", + " self.hidden_size = hidden_size\n", + " self.dropout = dropout\n", + " self.trajectory_samples = trajectory_samples\n", + "\n", + " self.futr_exog_size = len(self.futr_exog_list)\n", + " self.stat_exog_size = len(self.stat_exog_list)\n", + "\n", + " input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size\n", + " # Create DeepNPTSNetwork\n", + " modules = [] \n", + " for i in range(n_layers):\n", + " modules.append(nn.Linear(input_dim if i == 0 else hidden_size, hidden_size))\n", + " modules.append(nn.ReLU())\n", + " if batch_norm:\n", + " modules.append(nn.BatchNorm1d(hidden_size))\n", + " if dropout > 0.0:\n", + " modules.append(nn.Dropout(dropout))\n", + "\n", + " self.deepnptsnetwork = nn.Sequential(*modules)\n", + " self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07))\n", + "\n", + " # Add output layers for Mixture distribution \n", + " output_modules 
= []\n", + " if dropout > 0.0:\n", + " output_modules.append(nn.Dropout(self.dropout))\n", + " \n", + " if isinstance(loss, GMM):\n", + " output_modules.append(nn.Linear(hidden_size, input_size + 1))\n", + " elif isinstance(loss, PMM):\n", + " output_modules.append(nn.Linear(hidden_size, input_size))\n", + " elif isinstance(loss, NBMM):\n", + " output_modules.append(nn.Linear(hidden_size, input_size))\n", + "\n", + " self.output_layer = nn.Sequential(*output_modules)\n", + " self.output_layer.apply(self._init_weights)\n", + "\n", + "\n", + " @staticmethod\n", + " def _init_weights(module, scale=1.0):\n", + " if type(module) == nn.Linear:\n", + " nn.init.uniform_(module.weight, -scale, scale)\n", + " nn.init.zeros_(module.bias)\n", + "\n", + " def _domain_map(self, o_t, insample_y):\n", + " if isinstance(self.loss, GMM):\n", + " weights = o_t[:, :-1] # [B, L + 1] -> [B, L]\n", + " kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1]\n", + " kernel_width = torch.repeat_interleave(input=kernel_width,\n", + " repeats=weights.shape[1],\n", + " dim=-1) # [B, 1] -> [B, L]\n", + " output = torch.cat([insample_y, kernel_width, weights], dim=-1) # [B, L] + [B, L] + [B, L] = [B, 3 * L]\n", + " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L]\n", + " elif isinstance(self.loss, PMM):\n", + " weights = o_t # [B, L] -> [B, L]\n", + " output = torch.cat([insample_y, weights], dim=-1) # [B, L] + [B, L] = [B, 2 * L]\n", + " output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] \n", + " elif isinstance(self.loss, NBMM):\n", + " weights = torch.ones_like(o_t) # [B, L] -> [B, L]\n", + " output = torch.cat([insample_y, o_t, weights], dim=-1) # [B, L] + [B, L] + [B, L] = [B, 3 * L]\n", + " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * \n", + "\n", + " else:\n", + " raise NotImplementedError\n", + " \n", + " return output\n", + "\n", + " # Override BaseWindows method\n", + " def training_step(self, batch, batch_idx):\n", + " \n", + " # Only train one-step ahead\n", + " 
self.h = 1\n", + " self.quantiles = self.loss.quantiles\n", + "\n", + " # Create and normalize windows [Ws, L+H, C]\n", + " y_idx = batch[\"y_idx\"]\n", + " windows = self._create_windows(batch, step=\"train\")\n", + " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h :, y_idx])\n", + " windows = self._normalization(windows=windows, y_idx=y_idx)\n", + "\n", + " # Parse windows\n", + " (\n", + " insample_y,\n", + " insample_mask,\n", + " outsample_y,\n", + " outsample_mask,\n", + " _,\n", + " futr_exog,\n", + " stat_exog,\n", + " ) = self._parse_windows(batch, windows)\n", + "\n", + " windows_batch = dict(\n", + " insample_y=insample_y, # [Ws, L]\n", + " insample_mask=insample_mask, # [Ws, L]\n", + " futr_exog=futr_exog, # [Ws, L+H]\n", + " hist_exog=None, \n", + " stat_exog=stat_exog, # [Ws, 1]\n", + " y_idx=y_idx # [Ws, 1]\n", + " ) \n", + "\n", + " # Model Predictions\n", + " output = self.train_forward(windows_batch)\n", + "\n", + " _, y_loc, y_scale = self._inv_normalization(\n", + " y_hat=outsample_y, \n", + " temporal_cols=batch[\"temporal_cols\"], \n", + " y_idx=y_idx\n", + " )\n", + " # outsample_y = original_insample_y\n", + " outsample_y = original_outsample_y\n", + " distr_args = self.loss.scale_decouple(\n", + " output=output, loc=y_loc, scale=y_scale\n", + " )\n", + " loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask)\n", + "\n", + " if torch.isnan(loss):\n", + " print(\"Model Parameters\", self.hparams)\n", + " print(\"insample_y\", torch.isnan(insample_y).sum())\n", + " print(\"outsample_y\", torch.isnan(outsample_y).sum())\n", + " print(\"output\", torch.isnan(output).sum())\n", + " raise Exception(\"Loss is NaN, training stopped.\")\n", + "\n", + " self.log(\"train_loss\", loss, prog_bar=True, on_epoch=True)\n", + " self.train_trajectories.append((self.global_step, float(loss)))\n", + "\n", + " self.h = self.h_backup \n", + " \n", + " return loss\n", + "\n", + " # Override BaseWindows method\n", + " def 
validation_step(self, batch, batch_idx):\n", + "\n", + " self.h = self.h_backup\n", + " self.quantiles = self.valid_loss.quantiles\n", + "\n", + " if self.val_size == 0:\n", + " return np.nan\n", + "\n", + " # TODO: Hack to compute number of windows\n", + " windows = self._create_windows(batch, step=\"val\")\n", + " n_windows = len(windows[\"temporal\"])\n", + " y_idx = batch[\"y_idx\"]\n", + "\n", + " # Number of windows in batch\n", + " windows_batch_size = self.inference_windows_batch_size\n", + " if windows_batch_size < 0:\n", + " windows_batch_size = n_windows\n", + " n_batches = int(np.ceil(n_windows / windows_batch_size))\n", + "\n", + " valid_losses = []\n", + " batch_sizes = []\n", + " for i in range(n_batches):\n", + " # Create and normalize windows [Ws, L+H, C]\n", + " w_idxs = np.arange(\n", + " i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows)\n", + " )\n", + " windows = self._create_windows(batch, step=\"val\", w_idxs=w_idxs)\n", + " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h:, 0])\n", + " windows = self._normalization(windows=windows, y_idx=y_idx)\n", + "\n", + " # Parse windows\n", + " (\n", + " insample_y,\n", + " insample_mask,\n", + " _,\n", + " outsample_mask,\n", + " _,\n", + " futr_exog,\n", + " stat_exog,\n", + " ) = self._parse_windows(batch, windows)\n", + " \n", + " windows_batch = dict(\n", + " insample_y=insample_y, # [Ws, L]\n", + " insample_mask=insample_mask, # [Ws, L]\n", + " futr_exog=futr_exog, # [Ws, L+H]\n", + " hist_exog=None, # [Ws, L]\n", + " stat_exog=stat_exog,\n", + " y_idx=y_idx,\n", + " ) # [Ws, 1]\n", + "\n", + " # Model Predictions\n", + " output_batch = self(windows_batch)\n", + " # Monte Carlo already returns y_hat with mean and quantiles\n", + " output_batch = output_batch[:,:, 1:] # Remove mean\n", + " valid_loss_batch = self.valid_loss(y=original_outsample_y, y_hat=output_batch, mask=outsample_mask)\n", + " valid_losses.append(valid_loss_batch)\n", + " 
batch_sizes.append(len(output_batch))\n",
+    "\n",
+    "        valid_loss = torch.stack(valid_losses)\n",
+    "        batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device)\n",
+    "        valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes)\n",
+    "\n",
+    "        if torch.isnan(valid_loss):\n",
+    "            raise Exception(\"Loss is NaN, training stopped.\")\n",
+    "\n",
+    "        self.log(\"valid_loss\", valid_loss, prog_bar=True, on_epoch=True)\n",
+    "        self.validation_step_outputs.append(valid_loss)\n",
+    "        return valid_loss\n",
+    "\n",
+    "    # Override BaseWindows method\n",
+    "    def predict_step(self, batch, batch_idx):\n",
+    "\n",
+    "        self.h = self.h_backup\n",
+    "        self.quantiles = self.loss.quantiles\n",
+    "\n",
+    "        # TODO: Hack to compute number of windows\n",
+    "        windows = self._create_windows(batch, step='predict')\n",
+    "        n_windows = len(windows['temporal'])\n",
+    "        y_idx = batch['y_idx']\n",
+    "\n",
+    "        # Number of windows in batch\n",
+    "        windows_batch_size = self.inference_windows_batch_size\n",
+    "        if windows_batch_size < 0:\n",
+    "            windows_batch_size = n_windows\n",
+    "        n_batches = int(np.ceil(n_windows/windows_batch_size))\n",
+    "\n",
+    "        y_hats = []\n",
+    "        for i in range(n_batches):\n",
+    "            # Create and normalize windows [Ws, L+H, C]\n",
+    "            w_idxs = np.arange(i*windows_batch_size, \n",
+    "                               min((i+1)*windows_batch_size, n_windows))\n",
+    "            windows = self._create_windows(batch, step='predict', w_idxs=w_idxs)\n",
+    "            windows = self._normalization(windows=windows, y_idx=y_idx)\n",
+    "\n",
+    "            # Parse windows\n",
+    "            insample_y, insample_mask, _, _, _, futr_exog, stat_exog = self._parse_windows(batch, windows)\n",
+    "            windows_batch = dict(insample_y=insample_y, # [Ws, L]\n",
+    "                                 insample_mask=insample_mask, # [Ws, L]\n",
+    "                                 futr_exog=futr_exog, # [Ws, L+H]\n",
+    "                                 stat_exog=stat_exog,\n",
+    "                                 y_idx=y_idx)\n",
+    "            \n",
+    "            # Model Predictions\n",
+    "            y_hat = self(windows_batch)\n",
+    "            # Monte Carlo already returns y_hat with mean and quantiles\n",
+    "            
y_hats.append(y_hat)\n", + " y_hat = torch.cat(y_hats, dim=0)\n", + " return y_hat\n", + "\n", + " def train_forward(self, windows_batch):\n", + " # Parse windows_batch\n", + " x_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", + " stat_exog = windows_batch['stat_exog'] # [B, S]\n", + "\n", + " batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len\n", + "\n", + " # Concatenate x_t with future exogenous\n", + " if self.futr_exog_size > 0: \n", + " futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F]\n", + " x_t = torch.cat((x_t, futr_exog_t), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", + " \n", + " x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)]\n", + "\n", + " # Concatenate x_t with static exogenous\n", + " if self.stat_exog_size > 0:\n", + " x_t = torch.cat((x_t, stat_exog), dim=1) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S]\n", + "\n", + " # Run through DeepNPTSNetwork\n", + " h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size]\n", + " o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1]\n", + "\n", + " output = self._domain_map(o_t, windows_batch['insample_y']) # [B, L + 1], [B, L] -> [B, 3 * L]\n", + " output = self.loss.domain_map(output) # [B, 3 * L] -> ([B, L], [B, L], [B, L])\n", + "\n", + " return output\n", + "\n", + " def forward(self, windows_batch):\n", + " # Parse windows_batch\n", + " insample_y_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", + " stat_exog = windows_batch['stat_exog'] # [B, S]\n", + " y_idx = windows_batch['y_idx']\n", + "\n", + " batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len\n", + " device = insample_y_t.device\n", + " dtype = insample_y_t.dtype\n", + "\n", + " # Repeat insample_y for trajectory samples\n", + " insample_y_t = 
torch.repeat_interleave(input=insample_y_t, \n", + " repeats=self.trajectory_samples, \n", + " dim=0) # [B, L, 1] -> [B * n_samples, L, 1]\n", + " \n", + " # Input x_t is insample_y at time t\n", + " x_t = insample_y_t\n", + "\n", + " # Repeat futr_exog if available for trajectory samples and add to x_t \n", + " if self.futr_exog_size > 0: \n", + " futr_exog = torch.repeat_interleave(input=futr_exog, \n", + " repeats=self.trajectory_samples, \n", + " dim=0) # [B, L + h, F] -> [B * n_samples, L + h, F] \n", + " x_t = torch.cat((x_t, futr_exog[:, :seq_len]), dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", + " \n", + " x_t = x_t.reshape(batch_size * self.trajectory_samples, -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + "\n", + " # Repeat stat_exog if available for trajectory samples and add to x_t\n", + " if self.stat_exog_size > 0:\n", + " stat_exog = torch.repeat_interleave(\n", + " input=stat_exog, \n", + " repeats=self.trajectory_samples, \n", + " dim=0) # [B, S] -> [B * n_samples, S] \n", + " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", + "\n", + " # Scales for inverse normalization\n", + " y_scale = self.scaler.x_scale[:, :, y_idx]\n", + " y_loc = self.scaler.x_shift[:, :, y_idx]\n", + " y_scale = torch.repeat_interleave(input=y_scale, \n", + " repeats=self.trajectory_samples, \n", + " dim=0)\n", + " y_loc = torch.repeat_interleave(input=y_loc, \n", + " repeats=self.trajectory_samples, \n", + " dim=0)\n", + " # Create forecasts tensor\n", + " forecasts = torch.zeros((batch_size, \n", + " self.h,\n", + " len(self.quantiles) + 1), \n", + " device=device, \n", + " dtype=dtype)\n", + " \n", + " # Recursive predictions\n", + " for t in range(self.h):\n", + " # Run input throught DeepNPTSNetwork\n", + " h_t = self.deepnptsnetwork(x_t) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size]\n", + " o_t = 
self.output_layer(h_t) # [B * n_samples, hidden_size] -> [B * n_samples, L (+ 1)]\n", + " output = self._domain_map(o_t, insample_y_t.squeeze(-1)) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L]\n", + " output = self.loss.domain_map(output) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L])\n", + "\n", + " # Inverse normalization\n", + " distr_args = self.loss.scale_decouple(output=output, \n", + " loc=y_loc, \n", + " scale=y_scale)\n", + "\n", + " # Sample and create probabilistic outputs\n", + " samples_t_flat, _, _ = self.loss.sample(distr_args=distr_args, \n", + " num_samples=1)\n", + "\n", + " samples_t_flat = samples_t_flat.squeeze()\n", + " samples_t = samples_t_flat.reshape(batch_size, \n", + " self.trajectory_samples) # [B * n_samples] -> [B, n_samples] \n", + " \n", + " samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] \n", + " quantiles_t = torch.quantile(input=samples_t, \n", + " q=self.quantiles, \n", + " dim=-1) # [B, n_samples] -> [Q, B]\n", + " forecasts[:, t, 0] = samples_t_mean\n", + " forecasts[:, t, 1:] = quantiles_t.permute(1, 0)\n", + "\n", + " insample_y_t_next = self.scaler.scaler(samples_t_flat, \n", + " y_loc.squeeze(), \n", + " y_scale.squeeze()) # [B * n_samples] -> [B * n_samples]\n", + " insample_y_t_next = insample_y_t_next.unsqueeze(-1)\\\n", + " .unsqueeze(-1) # [B * n_samples] -> [B * n_samples, 1, 1]\n", + "\n", + " # Update insample_y_t \n", + " insample_y_t = torch.cat([insample_y_t[:, 1:], \n", + " insample_y_t_next], \n", + " dim=1) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1]\n", + " \n", + " # Update input\n", + " x_t = insample_y_t\n", + " # Concatenate x_t with future exogenous\n", + " if self.futr_exog_size > 0: \n", + " x_t = torch.cat((x_t, \n", + " futr_exog[:, t:seq_len + t]), \n", + " dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", + " \n", + " x_t = 
x_t.reshape(batch_size * self.trajectory_samples\n", + " , -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + "\n", + " # Concatenate x_t with static exogenous\n", + " if self.stat_exog_size > 0:\n", + " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", + " \n", + " return forecasts\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", + "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=-1,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n",
+ "`input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n",
+ "`dropout`: float=0.5, dropout rate.<br>
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n",
+ "`scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br>
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
+ "`num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.<br>
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
+ "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", + "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=-1,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n",
+ "`input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n",
+ "`dropout`: float=0.5, dropout rate.<br>
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n",
+ "`scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br>
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
+ "`num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.<br>
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
+ "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "show_doc(DeepNPTS, title_level=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Usage Example" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from neuralforecast import NeuralForecast\n", + "from neuralforecast.losses.pytorch import MQLoss, DistributionLoss, GMM\n", + "from neuralforecast.tsdataset import TimeSeriesDataset\n", + "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Seed set to 1\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b74158f17d254e4884139ee5c48e5706", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Sanity Checking: | | 0/? 
[00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#| eval: false\n", + "import pandas as pd\n", + "import pytorch_lightning as pl\n", + "import matplotlib.pyplot as plt\n", + "\n", + "from neuralforecast import NeuralForecast\n", + "#from neuralforecast.models import DeepAR\n", + "from neuralforecast.losses.pytorch import DistributionLoss, HuberMQLoss\n", + "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic\n", + "\n", + "#AirPassengersPanel['y'] = AirPassengersPanel['y'] + 10\n", + "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds=AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 12 test\n", + "\n", + "nf = NeuralForecast(\n", + " models=[DeepNPTS(h=12,\n", + " input_size=12,\n", + " trajectory_samples=100,\n", + " loss=GMM(),\n", + " # learning_rate=1e-5,\n", + " n_layers = 2,\n", + " dropout=0.0,\n", + " stat_exog_list=['airline1'],\n", + " futr_exog_list=['trend'],\n", + " max_steps=1000,\n", + " val_check_steps=10,\n", + " early_stop_patience_steps=3,\n", + " scaler_type='robust',\n", + " enable_progress_bar=True),\n", + " ],\n", + " freq='M'\n", + ")\n", + "nf.fit(df=Y_train_df, static_df=AirPassengersStatic, val_size=12)\n", + "Y_hat_df = nf.predict(futr_df=Y_test_df)\n", + "\n", + "# Plot quantile predictions\n", + "Y_hat_df = Y_hat_df.reset_index(drop=False).drop(columns=['unique_id','ds'])\n", + "plot_df = pd.concat([Y_test_df, Y_hat_df], axis=1)\n", + "plot_df = pd.concat([Y_train_df, plot_df])\n", + "\n", + "plot_df = plot_df[plot_df.unique_id=='Airline1'].drop('unique_id', axis=1)\n", + "plt.plot(plot_df['ds'], plot_df['y'], c='black', label='True')\n", + "plt.plot(plot_df['ds'], plot_df['DeepNPTS'], c='red', label='mean')\n", + "plt.plot(plot_df['ds'], plot_df['DeepNPTS-median'], c='blue', label='median')\n", + "plt.fill_between(x=plot_df['ds'][-12:], \n", + " y1=plot_df['DeepNPTS-lo-90'][-12:].values, \n", + " 
y2=plot_df['DeepNPTS-hi-90'][-12:].values,\n", + " alpha=0.4, label='level 90')\n", + "plt.legend()\n", + "plt.grid()\n", + "plt.plot()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/neuralforecast/_modidx.py b/neuralforecast/_modidx.py index 208162bd1..3efc57b96 100644 --- a/neuralforecast/_modidx.py +++ b/neuralforecast/_modidx.py @@ -508,6 +508,24 @@ 'neuralforecast/models/deepar.py'), 'neuralforecast.models.deepar.DeepAR.validation_step': ( 'models.deepar.html#deepar.validation_step', 'neuralforecast/models/deepar.py')}, + 'neuralforecast.models.deepnpts': { 'neuralforecast.models.deepnpts.DeepNPTS': ( 'models.deepnpts.html#deepnpts', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.__init__': ( 'models.deepnpts.html#deepnpts.__init__', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS._domain_map': ( 'models.deepnpts.html#deepnpts._domain_map', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS._init_weights': ( 'models.deepnpts.html#deepnpts._init_weights', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.forward': ( 'models.deepnpts.html#deepnpts.forward', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.predict_step': ( 'models.deepnpts.html#deepnpts.predict_step', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.train_forward': ( 'models.deepnpts.html#deepnpts.train_forward', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.training_step': ( 
'models.deepnpts.html#deepnpts.training_step', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.validation_step': ( 'models.deepnpts.html#deepnpts.validation_step', + 'neuralforecast/models/deepnpts.py')}, 'neuralforecast.models.dilated_rnn': { 'neuralforecast.models.dilated_rnn.AttentiveLSTMLayer': ( 'models.dilated_rnn.html#attentivelstmlayer', 'neuralforecast/models/dilated_rnn.py'), 'neuralforecast.models.dilated_rnn.AttentiveLSTMLayer.__init__': ( 'models.dilated_rnn.html#attentivelstmlayer.__init__', diff --git a/neuralforecast/common/_scalers.py b/neuralforecast/common/_scalers.py index 15ddb3bd4..bef76f7e9 100644 --- a/neuralforecast/common/_scalers.py +++ b/neuralforecast/common/_scalers.py @@ -313,8 +313,8 @@ def identity_statistics(x, mask, dim=-1, eps=1e-6): shape = list(x.shape) shape[dim] = 1 - x_shift = torch.zeros(shape) - x_scale = torch.ones(shape) + x_shift = torch.zeros(shape, device=x.device) + x_scale = torch.ones(shape, device=x.device) return x_shift, x_scale diff --git a/neuralforecast/core.py b/neuralforecast/core.py index f0a52224b..3c2ce1942 100644 --- a/neuralforecast/core.py +++ b/neuralforecast/core.py @@ -56,6 +56,7 @@ MLPMultivariate, iTransformer, BiTCN, + DeepNPTS, ) # %% ../nbs/core.ipynb 5 @@ -164,6 +165,8 @@ def _insample_times( "autoitransformer": iTransformer, "bitcn": BiTCN, "autobitcn": BiTCN, + "deepnpts": DeepNPTS, + "autodeepnpts": DeepNPTS, } # %% ../nbs/core.ipynb 8 diff --git a/neuralforecast/losses/pytorch.py b/neuralforecast/losses/pytorch.py index d7f29c83b..2e5ede2f5 100644 --- a/neuralforecast/losses/pytorch.py +++ b/neuralforecast/losses/pytorch.py @@ -1166,17 +1166,20 @@ def __init__( # If True, predict_step will return Distribution's parameters self.return_params = return_params if self.return_params: - self.param_names = [f"-lambda-{i}" for i in range(1, n_components + 1)] + lambda_names = [f"-lambda-{i}" for i in range(1, n_components + 1)] + weight_names = [f"-weight-{i}" 
for i in range(1, n_components + 1)] + self.param_names = [i for j in zip(lambda_names, weight_names) for i in j] self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = n_components + self.outputsize_multiplier = 2 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - return (output,) # , weights + lambdas, weights = output.chunk(2, dim=-1) + return (lambdas, weights) def scale_decouple( self, @@ -1190,13 +1193,15 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. """ - lambdas = output[0] + lambdas, weights = output + weights = F.softmax(weights, dim=-1) + if (loc is not None) and (scale is not None): loc = loc.view(lambdas.size(dim=0), 1, -1) scale = scale.view(lambdas.size(dim=0), 1, -1) lambdas = (lambdas * scale) + loc lambdas = F.softplus(lambdas) - return (lambdas,) + return (lambdas, weights) def sample(self, distr_args, num_samples=None): """ @@ -1218,15 +1223,10 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - lambdas = distr_args[0] + lambdas, weights = distr_args B, H, K = lambdas.size() Q = len(self.quantiles) - # Sample K ~ Mult(weights) - # shared across B, H - # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) - weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) - # Avoid loop, vectorize weights = weights.reshape(-1, K) lambdas = lambdas.flatten() @@ -1267,7 +1267,7 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: @@ -1276,11 +1276,9 @@ def neglog_likelihood( mask = mask * ((y > 0) * 1) eps = 1e-10 - lambdas = distr_args[0] 
+ lambdas, weights = distr_args B, H, K = lambdas.size() - weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) - y = y[:, :, None] mask = mask[:, :, None] @@ -1307,7 +1305,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1369,18 +1367,22 @@ def __init__( if self.return_params: mu_names = [f"-mu-{i}" for i in range(1, n_components + 1)] std_names = [f"-std-{i}" for i in range(1, n_components + 1)] - mu_std_names = [i for j in zip(mu_names, std_names) for i in j] - self.output_names = self.output_names + mu_std_names + weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] + self.param_names = [ + i for j in zip(mu_names, std_names, weight_names) for i in j + ] + self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 2 * n_components + self.outputsize_multiplier = 3 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - means, stds = torch.tensor_split(output, 2, dim=-1) - return (means, stds) + means, stds, weights = output.chunk(3, dim=-1) + + return (means, stds, weights) def scale_decouple( self, @@ -1395,14 +1397,16 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. 
""" - means, stds = output + means, stds, weights = output stds = F.softplus(stds) + weights = F.softmax(weights, dim=-1) if (loc is not None) and (scale is not None): loc = loc.view(means.size(dim=0), 1, -1) scale = scale.view(means.size(dim=0), 1, -1) means = (means * scale) + loc stds = (stds + eps) * scale - return (means, stds) + + return (means, stds, weights) def sample(self, distr_args, num_samples=None): """ @@ -1424,17 +1428,11 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - means, stds = distr_args + means, stds, weights = distr_args B, H, K = means.size() Q = len(self.quantiles) assert means.shape == stds.shape - # Sample K ~ Mult(weights) - # shared across B, H - # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) - - weights = (1 / K) * torch.ones_like(means, device=means.device) - # Avoid loop, vectorize weights = weights.reshape(-1, K) means = means.flatten() @@ -1475,18 +1473,16 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - means, stds = distr_args + means, stds, weights = distr_args B, H, K = means.size() - weights = (1 / K) * torch.ones_like(means, device=means.device) - y = y[:, :, None] mask = mask[:, :, None] @@ -1514,7 +1510,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1572,25 +1568,29 @@ def __init__( f"-total_count-{i}" for i in range(1, n_components + 1) ] probs_names = [f"-probs-{i}" for i in range(1, n_components + 1)] - param_names = [i for j in zip(total_count_names, probs_names) for i in j] - self.output_names = self.output_names + 
param_names + weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] + self.param_names = [ + i for j in zip(total_count_names, probs_names, weight_names) for i in j + ] + self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 2 * n_components + self.outputsize_multiplier = 3 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - mu, alpha = torch.tensor_split(output, 2, dim=-1) - return (mu, alpha) + mu, alpha, weights = output.chunk(3, dim=-1) + + return mu, alpha, weights def scale_decouple( self, output, loc: Optional[torch.Tensor] = None, scale: Optional[torch.Tensor] = None, - eps: float = 0.2, + eps: float = 1e-6, ): """Scale Decouple @@ -1599,9 +1599,10 @@ def scale_decouple( Also adds domain protection to the distribution parameters. """ # Efficient NBinomial parametrization - mu, alpha = output - mu = F.softplus(mu) + 1e-8 - alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts + mu, alpha, weights = output + mu = F.softplus(mu) + eps + alpha = F.softplus(alpha) + eps # alpha = 1/total_counts + weights = F.softmax(weights, dim=-1) if (loc is not None) and (scale is not None): loc = loc.view(mu.size(dim=0), 1, -1) mu *= loc @@ -1611,8 +1612,9 @@ def scale_decouple( # => probs = mu / (total_count + mu) # => probs = mu / [total_count * (1 + mu * (1/total_count))] total_count = 1.0 / alpha - probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 - return (total_count, probs) + probs = mu * alpha / (1.0 + mu * alpha) + probs = torch.clamp(probs, eps, 1 - eps) + return (total_count, probs, weights) def sample(self, distr_args, num_samples=None): """ @@ -1634,17 +1636,11 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - total_count, probs = distr_args + total_count, probs, weights = distr_args B, H, K = total_count.size() Q = len(self.quantiles) 
assert total_count.shape == probs.shape - # Sample K ~ Mult(weights) - # shared across B, H - # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) - - weights = (1 / K) * torch.ones_like(probs, device=probs.device) - # Avoid loop, vectorize weights = weights.reshape(-1, K) total_count = total_count.flatten() @@ -1686,18 +1682,16 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - total_count, probs = distr_args + total_count, probs, weights = distr_args B, H, K = total_count.size() - weights = (1 / K) * torch.ones_like(probs, device=probs.device) - y = y[:, :, None] mask = mask[:, :, None] @@ -1728,7 +1722,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): diff --git a/neuralforecast/models/__init__.py b/neuralforecast/models/__init__.py index d4a6ead9d..b4e7153f1 100644 --- a/neuralforecast/models/__init__.py +++ b/neuralforecast/models/__init__.py @@ -3,6 +3,7 @@ 'TFT', 'VanillaTransformer', 'Informer', 'Autoformer', 'PatchTST', 'FEDformer', 'StemGNN', 'HINT', 'TimesNet', 'TimeLLM', 'TSMixer', 'TSMixerx', 'MLPMultivariate', 'iTransformer', 'BiTCN', + 'DeepNPTS' ] from .rnn import RNN @@ -32,4 +33,4 @@ from .mlpmultivariate import MLPMultivariate from .itransformer import iTransformer from .bitcn import BiTCN - +from .deepnpts import DeepNPTS diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py new file mode 100644 index 000000000..d4da85974 --- /dev/null +++ b/neuralforecast/models/deepnpts.py @@ -0,0 +1,557 @@ +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.deepnpts.ipynb. 
+ +# %% auto 0 +__all__ = ['DeepNPTS'] + +# %% ../../nbs/models.deepnpts.ipynb 3 +import numpy as np + +import torch +import torch.nn as nn +import neuralforecast.losses.pytorch as losses +from typing import Optional +from functools import partial + + +from ..common._base_windows import BaseWindows +from ..losses.pytorch import MQLoss, GMM, PMM, NBMM + +# %% ../../nbs/models.deepnpts.ipynb 7 +class DeepNPTS(BaseWindows): + """DeepNPTS + + Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. + + **Parameters:**
+ `h`: int, Forecast horizon.
+ `input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
+ `hidden_size`: int=32, hidden size of dense layers.
+ `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
+ `dropout`: float=0.1, dropout.
+ `n_layers`: int=2, number of dense layers.
+ `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
+ `stat_exog_list`: str list, static exogenous columns.
+ `hist_exog_list`: str list, historic exogenous columns.
+ `futr_exog_list`: str list, future exogenous columns.
+ `exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
+ `loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
+ `valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
+ `max_steps`: int=1000, maximum number of training steps.
+ `learning_rate`: float=1e-3, Learning rate between (0, 1).
+ `num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
+ `early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
+ `val_check_steps`: int=100, Number of training steps between every validation loss check.
+ `batch_size`: int=32, number of different series in each batch.
+ `valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
+ `windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
+ `inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
+ `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
+ `step_size`: int=1, step size between each window of temporal data.
+ `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
+ `random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `alias`: str, optional, Custom name of the model.
+ `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
+ `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
+ `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
+ + **References**
+ - [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). "Deep Non-Parametric Time Series Forecaster". arXiv.](https://arxiv.org/abs/2312.14657)
+ + """ + + # Class attributes + SAMPLING_TYPE = "windows" + + def __init__( + self, + h, + input_size: int = -1, + hidden_size: int = 32, + batch_norm: bool = True, + dropout: float = 0.1, + n_layers: int = 2, + trajectory_samples: int = 100, + futr_exog_list=None, + hist_exog_list=None, + stat_exog_list=None, + exclude_insample_y=False, + loss=GMM(), + valid_loss=MQLoss(level=[80, 90]), + max_steps: int = 1000, + learning_rate: float = 1e-5, + num_lr_decays: int = 3, + early_stop_patience_steps: int = -1, + val_check_steps: int = 100, + batch_size: int = 32, + valid_batch_size: Optional[int] = None, + windows_batch_size: int = 1024, + inference_windows_batch_size: int = -1, + start_padding_enabled=False, + step_size: int = 1, + scaler_type: str = "standard", + random_seed: int = 1, + num_workers_loader=0, + drop_last_loader=False, + optimizer=None, + optimizer_kwargs=None, + **trainer_kwargs + ): + + if hist_exog_list is not None: + raise Exception("DeepNPTS does not support historical exogenous variables.") + + if exclude_insample_y: + raise Exception("DeepNPTS has no possibility for excluding y.") + + supported_losses = (losses.GMM, losses.PMM, losses.NBMM) + + if not isinstance(loss, supported_losses): + raise Exception("DeepNPTS only supports GMM, PMM or NBMM as loss function.") + + if not isinstance(valid_loss, losses.MQLoss): + raise Exception("DeepNPTS only supports MQLoss as validation loss.") + + # Overwrite n_components, it has to be the input_size in DeepNPTS + loss.n_components = input_size + + # Inherit BaseWindows class + super(DeepNPTS, self).__init__( + h=h, + input_size=input_size, + futr_exog_list=futr_exog_list, + hist_exog_list=hist_exog_list, + stat_exog_list=stat_exog_list, + exclude_insample_y=exclude_insample_y, + loss=loss, + valid_loss=valid_loss, + max_steps=max_steps, + learning_rate=learning_rate, + num_lr_decays=num_lr_decays, + early_stop_patience_steps=early_stop_patience_steps, + val_check_steps=val_check_steps, + 
batch_size=batch_size, + windows_batch_size=windows_batch_size, + valid_batch_size=valid_batch_size, + inference_windows_batch_size=inference_windows_batch_size, + start_padding_enabled=start_padding_enabled, + step_size=step_size, + scaler_type=scaler_type, + num_workers_loader=num_workers_loader, + drop_last_loader=drop_last_loader, + random_seed=random_seed, + optimizer=optimizer, + optimizer_kwargs=optimizer_kwargs, + **trainer_kwargs + ) + + self.h = h + self.h_backup = self.h # Used because h=1 during training + self.use_softmax = True + self.hidden_size = hidden_size + self.dropout = dropout + self.trajectory_samples = trajectory_samples + + self.futr_exog_size = len(self.futr_exog_list) + self.stat_exog_size = len(self.stat_exog_list) + + input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size + # Create DeepNPTSNetwork + modules = [] + for i in range(n_layers): + modules.append(nn.Linear(input_dim if i == 0 else hidden_size, hidden_size)) + modules.append(nn.ReLU()) + if batch_norm: + modules.append(nn.BatchNorm1d(hidden_size)) + if dropout > 0.0: + modules.append(nn.Dropout(dropout)) + + self.deepnptsnetwork = nn.Sequential(*modules) + self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07)) + + # Add output layers for Mixture distribution + output_modules = [] + if dropout > 0.0: + output_modules.append(nn.Dropout(self.dropout)) + + if isinstance(loss, GMM): + output_modules.append(nn.Linear(hidden_size, input_size + 1)) + elif isinstance(loss, PMM): + output_modules.append(nn.Linear(hidden_size, input_size)) + elif isinstance(loss, NBMM): + output_modules.append(nn.Linear(hidden_size, input_size)) + + self.output_layer = nn.Sequential(*output_modules) + self.output_layer.apply(self._init_weights) + + @staticmethod + def _init_weights(module, scale=1.0): + if type(module) == nn.Linear: + nn.init.uniform_(module.weight, -scale, scale) + nn.init.zeros_(module.bias) + + def _domain_map(self, o_t, insample_y): + if 
isinstance(self.loss, GMM): + weights = o_t[:, :-1] # [B, L + 1] -> [B, L] + kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1] + kernel_width = torch.repeat_interleave( + input=kernel_width, repeats=weights.shape[1], dim=-1 + ) # [B, 1] -> [B, L] + output = torch.cat( + [insample_y, kernel_width, weights], dim=-1 + ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] + output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L] + elif isinstance(self.loss, PMM): + weights = o_t # [B, L] -> [B, L] + output = torch.cat( + [insample_y, weights], dim=-1 + ) # [B, L] + [B, L] = [B, 2 * L] + output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] + elif isinstance(self.loss, NBMM): + weights = torch.ones_like(o_t) # [B, L] -> [B, L] + output = torch.cat( + [insample_y, o_t, weights], dim=-1 + ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] + output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * + + else: + raise NotImplementedError + + return output + + # Override BaseWindows method + def training_step(self, batch, batch_idx): + + # Only train one-step ahead + self.h = 1 + self.quantiles = self.loss.quantiles + + # Create and normalize windows [Ws, L+H, C] + y_idx = batch["y_idx"] + windows = self._create_windows(batch, step="train") + original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, y_idx]) + windows = self._normalization(windows=windows, y_idx=y_idx) + + # Parse windows + ( + insample_y, + insample_mask, + outsample_y, + outsample_mask, + _, + futr_exog, + stat_exog, + ) = self._parse_windows(batch, windows) + + windows_batch = dict( + insample_y=insample_y, # [Ws, L] + insample_mask=insample_mask, # [Ws, L] + futr_exog=futr_exog, # [Ws, L+H] + hist_exog=None, + stat_exog=stat_exog, # [Ws, 1] + y_idx=y_idx, # [Ws, 1] + ) + + # Model Predictions + output = self.train_forward(windows_batch) + + _, y_loc, y_scale = self._inv_normalization( + y_hat=outsample_y, temporal_cols=batch["temporal_cols"], y_idx=y_idx + ) + # outsample_y = original_insample_y + outsample_y = 
original_outsample_y + distr_args = self.loss.scale_decouple(output=output, loc=y_loc, scale=y_scale) + loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask) + + if torch.isnan(loss): + print("Model Parameters", self.hparams) + print("insample_y", torch.isnan(insample_y).sum()) + print("outsample_y", torch.isnan(outsample_y).sum()) + print("output", torch.isnan(output).sum()) + raise Exception("Loss is NaN, training stopped.") + + self.log("train_loss", loss, prog_bar=True, on_epoch=True) + self.train_trajectories.append((self.global_step, float(loss))) + + self.h = self.h_backup + + return loss + + # Override BaseWindows method + def validation_step(self, batch, batch_idx): + + self.h = self.h_backup + self.quantiles = self.valid_loss.quantiles + + if self.val_size == 0: + return np.nan + + # TODO: Hack to compute number of windows + windows = self._create_windows(batch, step="val") + n_windows = len(windows["temporal"]) + y_idx = batch["y_idx"] + + # Number of windows in batch + windows_batch_size = self.inference_windows_batch_size + if windows_batch_size < 0: + windows_batch_size = n_windows + n_batches = int(np.ceil(n_windows / windows_batch_size)) + + valid_losses = [] + batch_sizes = [] + for i in range(n_batches): + # Create and normalize windows [Ws, L+H, C] + w_idxs = np.arange( + i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) + ) + windows = self._create_windows(batch, step="val", w_idxs=w_idxs) + original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, 0]) + windows = self._normalization(windows=windows, y_idx=y_idx) + + # Parse windows + ( + insample_y, + insample_mask, + _, + outsample_mask, + _, + futr_exog, + stat_exog, + ) = self._parse_windows(batch, windows) + + windows_batch = dict( + insample_y=insample_y, # [Ws, L] + insample_mask=insample_mask, # [Ws, L] + futr_exog=futr_exog, # [Ws, L+H] + hist_exog=None, # [Ws, L] + stat_exog=stat_exog, + y_idx=y_idx, + ) # [Ws, 1] + + # Model 
Predictions + output_batch = self(windows_batch) + # Monte Carlo already returns y_hat with mean and quantiles + output_batch = output_batch[:, :, 1:] # Remove mean + valid_loss_batch = self.valid_loss( + y=original_outsample_y, y_hat=output_batch, mask=outsample_mask + ) + valid_losses.append(valid_loss_batch) + batch_sizes.append(len(output_batch)) + + valid_loss = torch.stack(valid_losses) + batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device) + valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes) + + if torch.isnan(valid_loss): + raise Exception("Loss is NaN, training stopped.") + + self.log("valid_loss", valid_loss, prog_bar=True, on_epoch=True) + self.validation_step_outputs.append(valid_loss) + return valid_loss + + # Override BaseWindows method + def predict_step(self, batch, batch_idx): + + self.h == self.h_backup + self.quantiles = self.loss.quantiles + + # TODO: Hack to compute number of windows + windows = self._create_windows(batch, step="predict") + n_windows = len(windows["temporal"]) + y_idx = batch["y_idx"] + + # Number of windows in batch + windows_batch_size = self.inference_windows_batch_size + if windows_batch_size < 0: + windows_batch_size = n_windows + n_batches = int(np.ceil(n_windows / windows_batch_size)) + + y_hats = [] + for i in range(n_batches): + # Create and normalize windows [Ws, L+H, C] + w_idxs = np.arange( + i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) + ) + windows = self._create_windows(batch, step="predict", w_idxs=w_idxs) + windows = self._normalization(windows=windows, y_idx=y_idx) + + # Parse windows + insample_y, insample_mask, _, _, _, futr_exog, stat_exog = ( + self._parse_windows(batch, windows) + ) + windows_batch = dict( + insample_y=insample_y, # [Ws, L] + insample_mask=insample_mask, # [Ws, L] + futr_exog=futr_exog, # [Ws, L+H] + stat_exog=stat_exog, + y_idx=y_idx, + ) + + # Model Predictions + y_hat = self(windows_batch) + # Monte Carlo already returns y_hat 
with mean and quantiles + y_hats.append(y_hat) + y_hat = torch.cat(y_hats, dim=0) + return y_hat + + def train_forward(self, windows_batch): + # Parse windows_batch + x_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + futr_exog = windows_batch["futr_exog"] # [B, L + h, F] + stat_exog = windows_batch["stat_exog"] # [B, S] + + batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len + + # Concatenate x_t with future exogenous + if self.futr_exog_size > 0: + futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F] + x_t = torch.cat( + (x_t, futr_exog_t), dim=2 + ) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] + + x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)] + + # Concatenate x_t with static exogenous + if self.stat_exog_size > 0: + x_t = torch.cat( + (x_t, stat_exog), dim=1 + ) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S] + + # Run through DeepNPTSNetwork + h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size] + o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1] + + output = self._domain_map( + o_t, windows_batch["insample_y"] + ) # [B, L + 1], [B, L] -> [B, 3 * L] + output = self.loss.domain_map( + output + ) # [B, 3 * L] -> ([B, L], [B, L], [B, L]) + + return output + + def forward(self, windows_batch): + # Parse windows_batch + insample_y_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + futr_exog = windows_batch["futr_exog"] # [B, L + h, F] + stat_exog = windows_batch["stat_exog"] # [B, S] + y_idx = windows_batch["y_idx"] + + batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len + device = insample_y_t.device + dtype = insample_y_t.dtype + + # Repeat insample_y for trajectory samples + insample_y_t = torch.repeat_interleave( + input=insample_y_t, repeats=self.trajectory_samples, dim=0 + ) # [B, L, 1] -> [B * n_samples, L, 1] + + # Input x_t is insample_y at time t + x_t = insample_y_t + + # Repeat futr_exog if available for trajectory samples and 
add to x_t + if self.futr_exog_size > 0: + futr_exog = torch.repeat_interleave( + input=futr_exog, repeats=self.trajectory_samples, dim=0 + ) # [B, L + h, F] -> [B * n_samples, L + h, F] + x_t = torch.cat( + (x_t, futr_exog[:, :seq_len]), dim=2 + ) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] + + x_t = x_t.reshape( + batch_size * self.trajectory_samples, -1 + ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] + + # Repeat stat_exog if available for trajectory samples and add to x_t + if self.stat_exog_size > 0: + stat_exog = torch.repeat_interleave( + input=stat_exog, repeats=self.trajectory_samples, dim=0 + ) # [B, S] -> [B * n_samples, S] + x_t = torch.cat( + (x_t, stat_exog), dim=1 + ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] + + # Scales for inverse normalization + y_scale = self.scaler.x_scale[:, :, y_idx] + y_loc = self.scaler.x_shift[:, :, y_idx] + y_scale = torch.repeat_interleave( + input=y_scale, repeats=self.trajectory_samples, dim=0 + ) + y_loc = torch.repeat_interleave( + input=y_loc, repeats=self.trajectory_samples, dim=0 + ) + # Create forecasts tensor + forecasts = torch.zeros( + (batch_size, self.h, len(self.quantiles) + 1), device=device, dtype=dtype + ) + + # Recursive predictions + for t in range(self.h): + # Run input throught DeepNPTSNetwork + h_t = self.deepnptsnetwork( + x_t + ) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size] + o_t = self.output_layer( + h_t + ) # [B * n_samples, hidden_size] -> [B * n_samples, L (+ 1)] + output = self._domain_map( + o_t, insample_y_t.squeeze(-1) + ) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L] + output = self.loss.domain_map( + output + ) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L]) + + # Inverse normalization + distr_args = self.loss.scale_decouple( + output=output, loc=y_loc, scale=y_scale + ) + + # Sample and create probabilistic outputs + 
samples_t_flat, _, _ = self.loss.sample( + distr_args=distr_args, num_samples=1 + ) + + samples_t_flat = samples_t_flat.squeeze() + samples_t = samples_t_flat.reshape( + batch_size, self.trajectory_samples + ) # [B * n_samples] -> [B, n_samples] + + samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] + quantiles_t = torch.quantile( + input=samples_t, q=self.quantiles, dim=-1 + ) # [B, n_samples] -> [Q, B] + forecasts[:, t, 0] = samples_t_mean + forecasts[:, t, 1:] = quantiles_t.permute(1, 0) + + insample_y_t_next = self.scaler.scaler( + samples_t_flat, y_loc.squeeze(), y_scale.squeeze() + ) # [B * n_samples] -> [B * n_samples] + insample_y_t_next = insample_y_t_next.unsqueeze(-1).unsqueeze( + -1 + ) # [B * n_samples] -> [B * n_samples, 1, 1] + + # Update insample_y_t + insample_y_t = torch.cat( + [insample_y_t[:, 1:], insample_y_t_next], dim=1 + ) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1] + + # Update input + x_t = insample_y_t + # Concatenate x_t with future exogenous + if self.futr_exog_size > 0: + x_t = torch.cat( + (x_t, futr_exog[:, t : seq_len + t]), dim=2 + ) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] + + x_t = x_t.reshape( + batch_size * self.trajectory_samples, -1 + ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] + + # Concatenate x_t with static exogenous + if self.stat_exog_size > 0: + x_t = torch.cat( + (x_t, stat_exog), dim=1 + ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] + + return forecasts From 17d1ef0007c4e8a00127b7c6ea937b74f23bbdb4 Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Mon, 22 Apr 2024 23:01:57 +0200 Subject: [PATCH 02/11] deepnpts_simple --- nbs/core.ipynb | 3 +- nbs/losses.pytorch.ipynb | 1714 ++--------------------------- nbs/models.deepnpts.ipynb | 869 +-------------- nbs/models.ipynb | 147 +-- neuralforecast/_modidx.py | 14 +- neuralforecast/losses/pytorch.py | 118 +- 
neuralforecast/models/__init__.py | 3 +- neuralforecast/models/deepnpts.py | 454 +------- 8 files changed, 297 insertions(+), 3025 deletions(-) diff --git a/nbs/core.ipynb b/nbs/core.ipynb index 710fcd0b4..42c5321f1 100644 --- a/nbs/core.ipynb +++ b/nbs/core.ipynb @@ -90,7 +90,7 @@ " Informer, Autoformer, FEDformer,\n", " StemGNN, PatchTST, TimesNet, TimeLLM, TSMixer, TSMixerx,\n", " MLPMultivariate, iTransformer,\n", - " BiTCN, DeepNPTS\n", + " BiTCN, DeepNPTS,\n", ")" ] }, @@ -234,6 +234,7 @@ " 'itransformer': iTransformer, 'autoitransformer': iTransformer,\n", " 'bitcn': BiTCN, 'autobitcn': BiTCN,\n", " 'deepnpts': DeepNPTS, 'autodeepnpts': DeepNPTS,\n", + "\n", "}" ] }, diff --git a/nbs/losses.pytorch.ipynb b/nbs/losses.pytorch.ipynb index 36adfaabd..387da910d 100644 --- a/nbs/losses.pytorch.ipynb +++ b/nbs/losses.pytorch.ipynb @@ -67,7 +67,7 @@ " Normal, \n", " StudentT, \n", " Poisson,\n", - " NegativeBinomial\n", + " NegativeBinomial,\n", ")\n", "\n", "from torch.distributions import constraints" @@ -244,61 +244,7 @@ "execution_count": null, "id": "1d004cd0", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__init__\n", - "\n", - "> MAE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Error\n", - "\n", - "Calculates Mean Absolute Error between\n", - "`y` and `y_hat`. MAE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the\n", - "deviation of the prediction and the true\n", - "value at a given time and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__init__\n", - "\n", - "> MAE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Error\n", - "\n", - "Calculates Mean Absolute Error between\n", - "`y` and `y_hat`. MAE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the\n", - "deviation of the prediction and the true\n", - "value at a given time and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAE, name='MAE.__init__', title_level=3)" ] @@ -308,51 +254,7 @@ "execution_count": null, "id": "0a20a273", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__call__\n", - "\n", - "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mae`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__call__\n", - "\n", - "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mae`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAE.__call__, name='MAE.__call__', title_level=3)" ] @@ -426,61 +328,7 @@ "execution_count": null, "id": "e8c65b82", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__init__\n", - "\n", - "> MSE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Squared Error\n", - "\n", - "Calculates Mean Squared Error between\n", - "`y` and `y_hat`. MSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the \n", - "squared deviation of the prediction and the true\n", - "value at a given time, and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__init__\n", - "\n", - "> MSE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Squared Error\n", - "\n", - "Calculates Mean Squared Error between\n", - "`y` and `y_hat`. MSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the \n", - "squared deviation of the prediction and the true\n", - "value at a given time, and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MSE, name='MSE.__init__', title_level=3)" ] @@ -490,51 +338,7 @@ "execution_count": null, "id": "b0126a7f", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__call__\n", - "\n", - "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mse`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__call__\n", - "\n", - "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mse`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MSE.__call__, name='MSE.__call__', title_level=3)" ] @@ -612,67 +416,7 @@ "execution_count": null, "id": "d961d383", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__init__\n", - "\n", - "> RMSE.__init__ (horizon_weight=None)\n", - "\n", - "Root Mean Squared Error\n", - "\n", - "Calculates Root Mean Squared Error between\n", - "`y` and `y_hat`. RMSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the squared deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "Finally the RMSE will be in the same scale\n", - "as the original time series so its comparison with other\n", - "series is possible only if they share a common scale. \n", - "RMSE has a direct connection to the L2 norm.\n", - "\n", - "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__init__\n", - "\n", - "> RMSE.__init__ (horizon_weight=None)\n", - "\n", - "Root Mean Squared Error\n", - "\n", - "Calculates Root Mean Squared Error between\n", - "`y` and `y_hat`. RMSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the squared deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "Finally the RMSE will be in the same scale\n", - "as the original time series so its comparison with other\n", - "series is possible only if they share a common scale. \n", - "RMSE has a direct connection to the L2 norm.\n", - "\n", - "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(RMSE, name='RMSE.__init__', title_level=3)" ] @@ -682,51 +426,7 @@ "execution_count": null, "id": "d398d3e3", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__call__\n", - "\n", - "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`rmse`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__call__\n", - "\n", - "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`rmse`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(RMSE.__call__, name='RMSE.__call__', title_level=3)" ] @@ -817,69 +517,7 @@ "execution_count": null, "id": "174e8042", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__init__\n", - "\n", - "> MAPE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Percentage Error\n", - "\n", - "Calculates Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. MAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the percentual deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "The closer to zero an observed value is, the higher penalty MAPE loss\n", - "assigns to the corresponding error.\n", - "\n", - "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__init__\n", - "\n", - "> MAPE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Percentage Error\n", - "\n", - "Calculates Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. MAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the percentual deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "The closer to zero an observed value is, the higher penalty MAPE loss\n", - "assigns to the corresponding error.\n", - "\n", - "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAPE, name='MAPE.__init__', title_level=3)" ] @@ -889,51 +527,7 @@ "execution_count": null, "id": "da63f136", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__call__\n", - "\n", - "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mape`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__call__\n", - "\n", - "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mape`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAPE.__call__, name='MAPE.__call__', title_level=3)" ] @@ -1015,73 +609,7 @@ "execution_count": null, "id": "dee99fb8", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__init__\n", - "\n", - "> SMAPE.__init__ (horizon_weight=None)\n", - "\n", - "Symmetric Mean Absolute Percentage Error\n", - "\n", - "Calculates Symmetric Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. SMAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the relative deviation\n", - "of the prediction and the observed value scaled by the sum of the\n", - "absolute values for the prediction and observed value at a\n", - "given time, then averages these devations over the length\n", - "of the series. This allows the SMAPE to have bounds between\n", - "0% and 200% which is desireble compared to normal MAPE that\n", - "may be undetermined when the target is zero.\n", - "\n", - "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__init__\n", - "\n", - "> SMAPE.__init__ (horizon_weight=None)\n", - "\n", - "Symmetric Mean Absolute Percentage Error\n", - "\n", - "Calculates Symmetric Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. SMAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the relative deviation\n", - "of the prediction and the observed value scaled by the sum of the\n", - "absolute values for the prediction and observed value at a\n", - "given time, then averages these devations over the length\n", - "of the series. This allows the SMAPE to have bounds between\n", - "0% and 200% which is desireble compared to normal MAPE that\n", - "may be undetermined when the target is zero.\n", - "\n", - "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(SMAPE, name='SMAPE.__init__', title_level=3)" ] @@ -1091,51 +619,7 @@ "execution_count": null, "id": "db62a845", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__call__\n", - "\n", - "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`smape`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__call__\n", - "\n", - "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`smape`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(SMAPE.__call__, name='SMAPE.__call__', title_level=3)" ] @@ -1222,71 +706,7 @@ "execution_count": null, "id": "b6a4cf21", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__init__\n", - "\n", - "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", - "\n", - "Mean Absolute Scaled Error \n", - "Calculates the Mean Absolute Scaled Error between\n", - "`y` and `y_hat`. MASE measures the relative prediction\n", - "accuracy of a forecasting method by comparinng the mean absolute errors\n", - "of the prediction and the observed value against the mean\n", - "absolute errors of the seasonal naive model.\n", - "The MASE partially composed the Overall Weighted Average (OWA), \n", - "used in the M4 Competition.\n", - "\n", - "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", - "\n", - "**Parameters:**
\n", - "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__init__\n", - "\n", - "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", - "\n", - "Mean Absolute Scaled Error \n", - "Calculates the Mean Absolute Scaled Error between\n", - "`y` and `y_hat`. MASE measures the relative prediction\n", - "accuracy of a forecasting method by comparinng the mean absolute errors\n", - "of the prediction and the observed value against the mean\n", - "absolute errors of the seasonal naive model.\n", - "The MASE partially composed the Overall Weighted Average (OWA), \n", - "used in the M4 Competition.\n", - "\n", - "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", - "\n", - "**Parameters:**
\n", - "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MASE, name='MASE.__init__', title_level=3)" ] @@ -1296,53 +716,7 @@ "execution_count": null, "id": "32a2c11b", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__call__\n", - "\n", - "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mase`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__call__\n", - "\n", - "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mase`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MASE.__call__, name='MASE.__call__', title_level=3)" ] @@ -1429,69 +803,7 @@ "execution_count": null, "id": "edeb6f9a", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__init__\n", - "\n", - "> relMSE.__init__ (y_train, horizon_weight=None)\n", - "\n", - "Relative Mean Squared Error\n", - "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", - "as an alternative to percentage errors, to avoid measure unstability.\n", - "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", - "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", - "\n", - "**Parameters:**
\n", - "`y_train`: numpy array, Training values.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "- [Hyndman, R. J and Koehler, A. B. (2006).\n", - " \"Another look at measures of forecast accuracy\",\n", - " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", - " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__init__\n", - "\n", - "> relMSE.__init__ (y_train, horizon_weight=None)\n", - "\n", - "Relative Mean Squared Error\n", - "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", - "as an alternative to percentage errors, to avoid measure unstability.\n", - "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", - "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", - "\n", - "**Parameters:**
\n", - "`y_train`: numpy array, Training values.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "- [Hyndman, R. J and Koehler, A. B. (2006).\n", - " \"Another look at measures of forecast accuracy\",\n", - " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", - " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(relMSE, name='relMSE.__init__', title_level=3)" ] @@ -1501,53 +813,7 @@ "execution_count": null, "id": "a317b5c5", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__call__\n", - "\n", - "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`relMSE`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__call__\n", - "\n", - "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`relMSE`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(relMSE.__call__, name='relMSE.__call__', title_level=3)" ] @@ -1632,67 +898,7 @@ "execution_count": null, "id": "70bd46d9", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__init__\n", - "\n", - "> QuantileLoss.__init__ (q, horizon_weight=None)\n", - "\n", - "Quantile Loss\n", - "\n", - "Computes the quantile loss between `y` and `y_hat`.\n", - "QL measures the deviation of a quantile forecast.\n", - "By weighting the absolute deviation in a non symmetric way, the\n", - "loss pays more attention to under or over estimation.\n", - "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", - "\n", - "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", - "\n", - "**Parameters:**
\n", - "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__init__\n", - "\n", - "> QuantileLoss.__init__ (q, horizon_weight=None)\n", - "\n", - "Quantile Loss\n", - "\n", - "Computes the quantile loss between `y` and `y_hat`.\n", - "QL measures the deviation of a quantile forecast.\n", - "By weighting the absolute deviation in a non symmetric way, the\n", - "loss pays more attention to under or over estimation.\n", - "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", - "\n", - "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", - "\n", - "**Parameters:**
\n", - "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(QuantileLoss, name='QuantileLoss.__init__', title_level=3)" ] @@ -1702,51 +908,7 @@ "execution_count": null, "id": "0b1588e9", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__call__\n", - "\n", - "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`quantile_loss`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__call__\n", - "\n", - "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`quantile_loss`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(QuantileLoss.__call__, name='QuantileLoss.__call__', title_level=3)" ] @@ -1918,87 +1080,7 @@ "execution_count": null, "id": "8f42ec82", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__init__\n", - "\n", - "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", - "\n", - "Multi-Quantile loss\n", - "\n", - "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", - "MQL calculates the average multi-quantile Loss for\n", - "a given set of quantiles, based on the absolute \n", - "difference between predicted quantiles and observed values.\n", - "\n", - "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", - "\n", - "The limit behavior of MQL allows to measure the accuracy \n", - "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", - "the continuous ranked probability score (CRPS). This can be achieved \n", - "through a numerical integration technique, that discretizes the quantiles \n", - "and treats the CRPS integral with a left Riemann approximation, averaging over \n", - "uniformly distanced quantiles. \n", - "\n", - "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", - "\n", - "**Parameters:**
\n", - "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", - "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", - "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__init__\n", - "\n", - "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", - "\n", - "Multi-Quantile loss\n", - "\n", - "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", - "MQL calculates the average multi-quantile Loss for\n", - "a given set of quantiles, based on the absolute \n", - "difference between predicted quantiles and observed values.\n", - "\n", - "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", - "\n", - "The limit behavior of MQL allows to measure the accuracy \n", - "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", - "the continuous ranked probability score (CRPS). This can be achieved \n", - "through a numerical integration technique, that discretizes the quantiles \n", - "and treats the CRPS integral with a left Riemann approximation, averaging over \n", - "uniformly distanced quantiles. \n", - "\n", - "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", - "\n", - "**Parameters:**
\n", - "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", - "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", - "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MQLoss, name='MQLoss.__init__', title_level=3)" ] @@ -2008,51 +1090,7 @@ "execution_count": null, "id": "bac2237a", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__call__\n", - "\n", - "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mqloss`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__call__\n", - "\n", - "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mqloss`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MQLoss.__call__, name='MQLoss.__call__', title_level=3)" ] @@ -2071,17 +1109,7 @@ "execution_count": null, "id": "da37f2ef", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check MQLoss' stored quantiles\n", @@ -2626,99 +1654,7 @@ "execution_count": null, "id": "a462101b", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__init__\n", - "\n", - "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> **distribution_kwargs)\n", - "\n", - "DistributionLoss\n", - "\n", - "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", - "interact with NeuralForecast models modularly. It shares the negative \n", - "log-likelihood as the optimization objective and a sample method to \n", - "generate empirically the quantiles defined by the `level` list.\n", - "\n", - "Additionally, it implements a distribution transformation that factorizes the\n", - "scale-dependent likelihood parameters into a base scale and a multiplier \n", - "efficiently learnable within the network's non-linearities operating ranges.\n", - "\n", - "Available distributions:
\n", - "- Poisson
\n", - "- Normal
\n", - "- StudentT
\n", - "- NegativeBinomial
\n", - "- Tweedie
\n", - "- Bernoulli (Temporal Classifiers)\n", - "\n", - "**Parameters:**
\n", - "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", - "\n", - "**References:**
\n", - "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", - "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", - " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__init__\n", - "\n", - "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> **distribution_kwargs)\n", - "\n", - "DistributionLoss\n", - "\n", - "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", - "interact with NeuralForecast models modularly. It shares the negative \n", - "log-likelihood as the optimization objective and a sample method to \n", - "generate empirically the quantiles defined by the `level` list.\n", - "\n", - "Additionally, it implements a distribution transformation that factorizes the\n", - "scale-dependent likelihood parameters into a base scale and a multiplier \n", - "efficiently learnable within the network's non-linearities operating ranges.\n", - "\n", - "Available distributions:
\n", - "- Poisson
\n", - "- Normal
\n", - "- StudentT
\n", - "- NegativeBinomial
\n", - "- Tweedie
\n", - "- Bernoulli (Temporal Classifiers)\n", - "\n", - "**Parameters:**
\n", - "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", - "\n", - "**References:**
\n", - "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", - "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", - " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DistributionLoss, name='DistributionLoss.__init__', title_level=3)" ] @@ -2728,65 +1664,7 @@ "execution_count": null, "id": "d8c367f8", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.sample\n", - "\n", - "> DistributionLoss.sample (distr_args:torch.Tensor,\n", - "> num_samples:Optional[int]=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.sample\n", - "\n", - "> DistributionLoss.sample (distr_args:torch.Tensor,\n", - "> num_samples:Optional[int]=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DistributionLoss.sample, name='DistributionLoss.sample', title_level=3)" ] @@ -2796,75 +1674,7 @@ "execution_count": null, "id": "04e32679", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__call__\n", - "\n", - "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Computes the negative log-likelihood objective function. \n", - "To estimate the following predictive distribution:\n", - "\n", - "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", - "\n", - "where $\\theta$ represents the distributions parameters. It aditionally \n", - "summarizes the objective signal using a weighted average using the `mask` tensor. \n", - "\n", - "**Parameters**
\n", - "`y`: tensor, Actual values.
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns**
\n", - "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__call__\n", - "\n", - "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Computes the negative log-likelihood objective function. \n", - "To estimate the following predictive distribution:\n", - "\n", - "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", - "\n", - "where $\\theta$ represents the distributions parameters. It aditionally \n", - "summarizes the objective signal using a weighted average using the `mask` tensor. \n", - "\n", - "**Parameters**
\n", - "`y`: tensor, Actual values.
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns**
\n", - "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DistributionLoss.__call__, name='DistributionLoss.__call__', title_level=3)" ] @@ -2874,17 +1684,7 @@ "execution_count": null, "id": "14a7e381", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check DistributionLoss' stored quantiles\n", @@ -2964,42 +1764,35 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " lambda_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", - " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", - " self.param_names = [i for j in zip(lambda_names, weight_names) for i in j]\n", + " self.param_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 2 * n_components\n", + " self.outputsize_multiplier = n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " lambdas, weights = output.chunk(2, dim=-1)\n", - " return (lambdas, weights)\n", - "\n", - " def scale_decouple(\n", - " self,\n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " ):\n", - " \"\"\"Scale Decouple\n", + " return (output,)#, weights\n", + " \n", + " def scale_decouple(self, \n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None):\n", + " \"\"\" Scale Decouple\n", 
"\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " lambdas, weights = output\n", - " weights = F.softmax(weights, dim=-1)\n", - "\n", + " lambdas = output[0]\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(lambdas.size(dim=0), 1, -1)\n", " scale = scale.view(lambdas.size(dim=0), 1, -1)\n", " lambdas = (lambdas * scale) + loc\n", " lambdas = F.softplus(lambdas)\n", - " return (lambdas, weights)\n", + " return (lambdas,)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -3021,10 +1814,15 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", "\n", - " lambdas, weights = distr_args\n", + " lambdas = distr_args[0]\n", " B, H, K = lambdas.size()\n", " Q = len(self.quantiles)\n", "\n", + " # Sample K ~ Mult(weights)\n", + " # shared across B, H\n", + " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", + " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", + "\n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " lambdas = lambdas.flatten() \n", @@ -3062,7 +1860,7 @@ " \n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", " if mask is None: \n", " mask = (y > 0) * 1\n", @@ -3070,9 +1868,11 @@ " mask = mask * ((y > 0) * 1)\n", "\n", " eps = 1e-10\n", - " lambdas, weights = distr_args\n", + " lambdas = distr_args[0]\n", " B, H, K = lambdas.size()\n", "\n", + " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", + "\n", " y = y[:,:,None]\n", " mask = mask[:,:,None]\n", "\n", @@ -3097,7 +1897,7 @@ " return loss\n", "\n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, 
torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)\n" @@ -3108,83 +1908,7 @@ "execution_count": null, "id": "62d7daba", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__init__\n", - "\n", - "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> batch_correlation=False, horizon_correlation=False)\n", - "\n", - "Poisson Mixture Mesh\n", - "\n", - "This Poisson Mixture statistical model assumes independence across groups of \n", - "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", - "\n", - "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", - "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", - "\\prod_{\\beta\\in[g_{i}]} \n", - "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", - "\n", - "**Parameters:**
\n", - "`n_components`: int=10, the number of mixture components.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", - "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", - "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", - "\n", - "**References:**
\n", - "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", - "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__init__\n", - "\n", - "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> batch_correlation=False, horizon_correlation=False)\n", - "\n", - "Poisson Mixture Mesh\n", - "\n", - "This Poisson Mixture statistical model assumes independence across groups of \n", - "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", - "\n", - "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", - "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", - "\\prod_{\\beta\\in[g_{i}]} \n", - "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", - "\n", - "**Parameters:**
\n", - "`n_components`: int=10, the number of mixture components.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", - "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", - "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", - "\n", - "**References:**
\n", - "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", - "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(PMM, name='PMM.__init__', title_level=3)" ] @@ -3194,63 +1918,7 @@ "execution_count": null, "id": "fa8da65c", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.sample\n", - "\n", - "> PMM.sample (distr_args, num_samples=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.sample\n", - "\n", - "> PMM.sample (distr_args, num_samples=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(PMM.sample, name='PMM.sample', title_level=3)" ] @@ -3260,39 +1928,7 @@ "execution_count": null, "id": "ba75717c", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__call__\n", - "\n", - "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Call self as a function." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__call__\n", - "\n", - "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Call self as a function." 
- ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(PMM.__call__, name='PMM.__call__', title_level=3)" ] @@ -3311,17 +1947,7 @@ "execution_count": null, "id": "e4a20e21", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -3345,43 +1971,11 @@ "execution_count": null, "id": "a56a2fbe", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "lambdas.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", - "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", - "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9FUlEQVR4nO3de1hU1foH8O8GhuGm4IDcFBHF+/2SijcgA8PUTI+WWoGHvOQtw7TQU4BxpEOllpblJTDL7KamZiqWoh21BOV4ySwLUE8QKQoICiOs3x/+2MdxuM0wwzDM9/M88zzM2muv9a4ZYN5Ze++1JSGEABEREVkUK1MHQERERA2PCQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICYCJ/fDDD3jsscfQpk0bKJVKeHh4ICAgAAsXLtSoFxQUhKCgIKPHI0kSYmNjDdZe27ZtMXr0aIO1V5NDhw5BkiQcOnSoQfrTVVBQECRJgiRJsLKyQrNmzeDv74+JEyfiiy++QEVFhdY+bdu2RUREhE79HD16FLGxsbhx44ZO+93fV+Xr+cUXX+jUTk1KSkoQGxtb5XuUnJwMSZKQlZVlsP6IqHo2pg7Akn399dcYO3YsgoKCkJiYCC8vL+Tk5CAtLQ1bt27Fm2++Kdd99913TRipeejbty+OHTuGrl27mjqUarVr1w4ff/wxAKC4uBiZmZnYsWMHJk6ciGHDhmHXrl1wdnaW62/fvh3NmzfXqY+jR48iLi4OERERcHFxqfN++vSlq5KSEsTFxQGAVkL7yCOP4NixY/Dy8jJqDER0FxMAE0pMTISfnx/27dsHG5v/vRVPPPEEEhMTNeo25g81U1Or1ZAkCc2bN8egQYNMHU6N7O3ttWJ85plnkJSUhL///e+YMWMGPv30U3lbnz59jB7TrVu3YG9v3yB91aRly5Zo2bKlSWMgsiQ8BGBC165dg5ubm8aHfyUrK8235v5DAFlZWZAkCW+88QZWrFgBPz8/ODk5ISAgAMePH9dqb/369ejYsSOUSiW6du2KLVu2ICIiAm3btq01ztzcXMycOROtW7eGra0t/Pz8EBcXhzt37tR5rHv37kXfvn1hb2+Pzp0744MPPtCqc/bsWTz66KNo0aIF7Ozs0Lt3b2zatEmjTuW09ObNm7Fw4UK0atUKSqUSFy9e1DoEUPkaVfe41wcffIBevXrBzs4OKpUKjz32GM6fP69RJyIiAk5OTrh48SJGjRoFJycn+Pj4YOHChSgtLa3za1GVadOmYdSoUfj888+RnZ0tl98/LV9RUYH4+Hh06tQJ9vb2cHFxQc+ePfHWW28BAGJjY7Fo0SIAgJ+fnzzWytek8pDMtm3b0KdPH9jZ2cnfyKs73HD79m1ERUXB09MT9vb2CAwMxKlTpzTqVHeI6t7fsaysLPkDPi4uTo6tss/qDgEY+r1Zu3YtevXqBScnJzRr1gydO3fGkiVLtGInauo4A2BCAQEB2LBhA+bPn4+pU6eib9++UCgUOrXxzjvvoHPnzli1ahUA4OWXX8aoUaOQmZkpTyWvW7cOM2fOxIQJE7By5UoUFBQgLi6uTh9aubm5GDBgAKysrPDKK6+gffv2OHbsGOLj45GVlYWkpKRa2/jPf/6DhQsX4qWXXoKHhwc2bNiAyMhI+Pv7Y/jw4QCACxcuYPDgwXB3d8fbb78NV1dXfPTRR4iIiMCff/6JxYsXa7QZHR2NgIAAvPfee7CysoK7uztyc3M16nh5eeHYsWMaZX/99ReefPJJtGrVSi5LSEjAkiVLMHnyZCQkJODatWuIjY1FQEAATpw4gQ4dOsh11Wo1xo4di8jISCxcuBCHDx/Gq6++CmdnZ7zyyiu1vhY1GTt2LPbs2YMjR47A19e3yjqJiYmIjY3FP/7xDwwfPhxqtRo///yzfLz/mWeeQX5+PlavXo1t27bJ0+n3ziCdPHkS58+fxz/+8Q/
4+fnB0dGxxriWLFmCvn37YsOGDSgoKEBsbCyCgoJw6tQptGvXrs7j8/Lywt69e/Hwww8jMjISzzzzDADU+K3f0O/N1q1bMXv2bMybNw9vvPEGrKyscPHiRfz00091HgdRkyHIZK5evSqGDh0qAAgAQqFQiMGDB4uEhARRVFSkUTcwMFAEBgbKzzMzMwUA0aNHD3Hnzh25/McffxQAxCeffCKEEKK8vFx4enqKgQMHarSXnZ0tFAqF8PX11SgHIGJiYuTnM2fOFE5OTiI7O1uj3htvvCEAiHPnztU4Rl9fX2FnZ6ex/61bt4RKpRIzZ86Uy5544gmhVCrFpUuXNPYPCwsTDg4O4saNG0IIIQ4ePCgAiOHDh2v1Vbnt4MGDVcZSXFwsBgwYILy8vERWVpYQQojr168Le3t7MWrUKI26ly5dEkqlUkyZMkUuCw8PFwDEZ599plF31KhRolOnTjW+DkLcfQ+7detW7fZvvvlGABD/+te/5DJfX18RHh4uPx89erTo3bt3jf28/vrrAoDIzMzU2ubr6yusra3FhQsXqtx2b1+Vr2ffvn1FRUWFXJ6VlSUUCoV45plnNMZ27+9npfDwcI3fsb/++kvrd6xSUlKSRtzGeG/mzp0rXFxctPomskQ8BGBCrq6uOHLkCE6cOIHXXnsNjz76KH755RdER0ejR48euHr1aq1tPPLII7C2tpaf9+zZEwDkaeQLFy4gNzcXkyZN0tivTZs2GDJkSK3t7969G8HBwfD29sadO3fkR1hYGAAgNTW11jZ69+6NNm3ayM/t7OzQsWNHjanu7777DiNGjICPj4/GvhERESgpKdH6Jj9hwoRa+71XeXk5Hn/8cZw/fx579uyRv2EfO3YMt27d0pr69vHxwYMPPohvv/1Wo1ySJIwZM0ajrGfPnhpj0ZcQotY6AwYMwH/+8x/Mnj0b+/btQ2Fhoc799OzZEx07dqxz/SlTpmgcMvH19cXgwYNx8OBBnfvWhTHemwEDBuDGjRuYPHkyvvrqqzr9jRE1VUwAGoH+/fvjxRdfxOeff44//vgDzz//PLKysrROBKyKq6urxnOlUgng7oldwN3zDADAw8NDa9+qyu73559/YteuXVAoFBqPbt26AUCd/oHeH2NlnJUxVsZZ1dnf3t7eGuOopOuZ4rNmzcLevXvxxRdfoHfv3hr9Vteet7e3Vr8ODg6ws7PTGsvt27d1iqcqlR9UlWOuSnR0NN544w0cP34cYWFhcHV1xYgRI5CWllbnfnR97Tw9Passu/+1MTRjvDdPPfUUPvjgA2RnZ2PChAlwd3fHwIEDkZKSYoQREDVuTAAaGYVCgZiYGAB3T4qrr8oP3z///FNr2/3HzKvi5uaG0NBQnDhxospHZGRkvWOsjDMnJ0er/I8//pDjuNf9J/HVJDY2Fhs2bMD69esRGhqq1S+Aavu+v19j2rlzJyRJks+LqIqNjQ2ioqJw8uRJ5Ofn45NPPsHly5cxcuRIlJSU1KkfXV47oOrfk9zcXI3Ezs7OrspzSurzDdtY7820adNw9OhRFBQU4Ouvv4YQAqNHjzbILA6ROWECYEJV/WMDIJ/hXNM3wbrq1KkTPD098dlnn2mUX7p0CUePHq11/9GjR+Ps2bNo3749+vfvr/UwRIwAMGLECHz33XfyB36lDz/8EA4ODnpf3rdx40bExcVh2bJlVZ7hHhAQAHt7e3z00Uca5VeuXJEPSzSEpKQkfPPNN5g8ebLG4ZKauLi44G9/+xvmzJmD/Px8+ez5+2eB6uuTTz7RODyRnZ2No0ePapz137ZtW/zyyy8aScC1a9e0fsd0ic3Y742joyPCwsKwdOlSlJWV4dy5c/Vqj8jc8CoAExo5ciRat26NMWPGoHPnzqioqEBGRgbefPNNODk54bnnnqt3H1ZWVoiLi8PMmTPxt7/9DX//+99x48YNxMXFwcvLS+tyw/stW7YMKSkpGDx4MObPn49OnTrh9u3byMrKwp49e/D
ee++hdevW9Y4zJiZGPt/glVdegUqlwscff4yvv/4aiYmJGovj1NWxY8cwa9YsDBkyBCEhIVqXRw4aNAguLi54+eWXsWTJEjz99NOYPHkyrl27hri4ONjZ2cmzMYZy69YtOY5bt27h999/x44dO7B7924EBgbivffeq3H/MWPGoHv37ujfvz9atmyJ7OxsrFq1Cr6+vvIZ8T169AAAvPXWWwgPD4dCoUCnTp3QrFkzvWLOy8vDY489hunTp6OgoAAxMTGws7NDdHS0XOepp57C+++/jyeffBLTp0/HtWvXkJiYqLWwULNmzeDr64uvvvoKI0aMgEqlgpubW5WXoxrjvZk+fTrs7e0xZMgQeHl5ITc3FwkJCXB2dsYDDzygc3tEZs3EJyFatE8//VRMmTJFdOjQQTg5OQmFQiHatGkjnnrqKfHTTz9p1K3uKoDXX39dq11UcZb1unXrhL+/v7C1tRUdO3YUH3zwgXj00UdFnz59at33r7/+EvPnzxd+fn5CoVAIlUol+vXrJ5YuXSpu3rxZ4xh9fX3FI488olVe1VnjZ86cEWPGjBHOzs7C1tZW9OrVSyQlJWnUqTwz/fPPP9dq8/6rACrPKq/uca8NGzaInj17CltbW+Hs7CweffRRrSscwsPDhaOjo1a/MTExWu1VJTAwUKN/R0dH0a5dO/G3v/1NfP7556K8vFxrn/vPzH/zzTfF4MGDhZubm7C1tRVt2rQRkZGR8lUNlaKjo4W3t7ewsrLSeE2qez+q6qvy9dy8ebOYP3++aNmypVAqlWLYsGEiLS1Na/9NmzaJLl26CDs7O9G1a1fx6aefal0FIIQQBw4cEH369BFKpVIAkPu8/yqASoZ8bzZt2iSCg4OFh4eHsLW1Fd7e3mLSpEni9OnTVb4mRE2ZJEQdTj2mJufGjRvo2LEjxo0bh3Xr1pk6HCIiamA8BGABcnNz8c9//hPBwcFwdXVFdnY2Vq5ciaKiIoMcZiAiIvPDBMACKJVKZGVlYfbs2cjPz5dPqnvvvffky/mIiMiy8BAAERGRBeJlgERERBaICQAREZEF4jkAuHuL1T/++APNmjXTeZU0IiJ9CSFQVFQEb2/vWtfkIDI0JgC4u6zo/TehISJqKJcvXzbIglpEumACAMgrpF2+fFlr5bLqqNVq7N+/H6GhoVAoFMYMz2Sa+hib+vgAjrGxKywshI+Pj96rNBLVBxMA/O/mKM2bN9cpAXBwcEDz5s3N7p9OXTX1MTb18QEco7ngoUcyBR50IiIiskBMAIiIiCwQEwAiIiILZNJzABISErBt2zb8/PPPsLe3x+DBg/Gvf/0LnTp1kusIIRAXF4d169bh+vXrGDhwIN555x2NJWxLS0vxwgsv4JNPPsGtW7cwYsQIvPvuuzyrlojMXkVFBcrKykwdBpkBhUIBa2vrOtc3aQKQmpqKOXPm4IEHHsCdO3ewdOlShIaG4qeffoKjoyMAIDExEStWrEBycjI6duyI+Ph4hISE4MKFC/KZswsWLMCuXbuwdetWuLq6YuHChRg9ejTS09N1ejGIiBqTsrIyZGZmoqKiwtShkJlwcXGBp6dnnU4sNWkCsHfvXo3nSUlJcHd3R3p6OoYPHw4hBFatWoWlS5di/PjxAIBNmzbBw8MDW7ZswcyZM1FQUICNGzdi8+bNeOihhwAAH330EXx8fHDgwAGMHDmywcdFRFRfQgjk5OTA2toaPj4+XCiIaiSEQElJCfLy8gAAXl5ete7TqC4DLCgoAACoVCoAQGZmJnJzcxEaGirXUSqVCAwMxNGjRzFz5kykp6dDrVZr1PH29kb37t1x9OjRKhOA0tJSlJaWys8LCwsB3L2cSK1W1ynWynp1rW+OmvoYm/r4AI6xsasp5jt37qCkpATe3t5wcHBowKjIXNnb2wMA8vLy4O7uXusMeKNJAIQQiIqKwtChQ9G9e3cAd+9jDwAeHh4adT08PJCdnS3XsbW1RYsWLbTqVO5/v4SEBMT
FxWmV79+/X+c/tJSUFJ3qm6OmPsamPj6AY2ysSkpKqt1WXl4OALC1tW2ocKgJqPwMU6vV5pMAzJ07F6dPn8b333+vte3+YxlCiFqPb9RUJzo6GlFRUfLzytW4QkNDdVoIKCUlBSEhIWaz+Mg7By/qVF8S5Wh7+zdk2bXH7Ac71b6DmTHH91BXHGPjVjn7WBMuEkS60OX3pVEkAPPmzcPOnTtx+PBhjTP3PT09Adz9ln/v8Yy8vDx5VsDT0xNlZWW4fv26xixAXl4eBg8eXGV/SqUSSqVSq1yhUOj8D0SffUxFSPqdECkka7MZoz7M6T3UF8fYOJlbvNS0mPSsEiEE5s6di23btuG7776Dn5+fxnY/Pz94enpqTO2VlZUhNTVV/nDv168fFAqFRp2cnBycPXu22gSAiIjI0pl0BmDOnDnYsmULvvrqKzRr1kw+Zu/s7Ax7e3tIkoQFCxZg+fLl6NChAzp06IDly5fDwcEBU6ZMketGRkZi4cKFcHV1hUqlwgsvvIAePXrIVwUQERGRJpMmAGvXrgUABAUFaZQnJSUhIiICALB48WLcunULs2fPlhcC2r9/v8bds1auXAkbGxtMmjRJXggoOTmZawAQUZOzMuWXBu3v+ZCODdpfXQQFBaF3795YtWqVqUMxayY/BFDVo/LDH7h7QkNsbCxycnJw+/ZtpKamylcJVLKzs8Pq1atx7do1lJSUYNeuXfDx8Wng0RAREQBERERg3LhxWuWHDh2CJEm4ceNGvdrftm0bXn311Xq1YQ4OHz6MMWPGwNvbG5IkYceOHQZtnytLEBGRWahcElmlUmnMApuboKAgJCcn11qvuLgYvXr1wpo1a4wSBxMAIiIyidLSUsyfPx/u7u6ws7PD0KFDceLECXl7UFAQ5s6di6ioKLi5uSEkJEQuX7BgAQAgKysLkiRpPSoPLdfWR2V78+fPx+LFi6FSqeDp6YnY2NgaYx87dmyV/UqShJ07dxrk9QkLC0N8fLy8Eq6hMQEgIiKTWLx4Mb788kts2rQJJ0+ehL+/P0aOHIn8/Hy5zqZNm2BjY4N///vfeP/997Xa8PHxQU5Ojvw4deoUXF1dMXz48Dr3UdmPo6MjfvjhByQmJmLZsmU1Li6VlJSEnJwc/PrrrwCAPXv2yDGMGjXKEC+P0TWKdQCIiKhp2b17N5ycnDTKKlc3BO5Ob69duxbJyckICwsDAKxfvx4pKSnYuHEjFi1aBADw9/dHYmJitf1YW1vLa8bcvn0b48aNQ0BAAGJjY+vcBwD07NkTMTExAIAOHTpgzZo1+Pbbb+VZh/u5uroCAI4dOwZJkjB06FCzOyzBGQAiIjK44OBgZGRkaDw2bNggb//tt9+gVqsxZMgQuUyhUGDAgAE4f/68XNa/f/869xkZGYmioiJs2bIFVlZWde4DuJsA3MvLy0u+sU5NTp8+jbZt29b44b98+XI4OTnJjyNHjmDWrFlaZQ2NMwBERGRwjo6O8Pf31yi7cuWK/LMQAkDtS71X3hq+NvHx8di7dy9+/PFH+cO4rn0A2qsySpJUp9swnz59Wit5uN+sWbMwadIk+fnUqVMxYcIEjWP7rVq1qrUvQ+MMABERNTh/f3/Y2tpq3P9FrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp5rvlaJSqeDv7y8/7O3t4e7urlXW0DgDQEREDc7R0RHPPvssFi1aBJVKhTZt2iAxMRElJSWIjIyscztnz57F008/jRdffBHdunWTV5S1tbWFSqUySB81qaioQHZ2Nq5cuYJWrVoZ9OZNN2/exMWL/7uJW2ZmJjIyMuSx1BcTACIiM9IYV+bT12uvvYaKigo89dRTKCoqQv/+/bFv3z6t27vXJC0tDSUlJYiPj0d8fLxcHhgYiEOHDhmkj5rMnz8fM2bMQOfOnVFYWGjQBCAtLQ3BwcHy88q72IaHh9dpHYHaSKLyIIkFKywshLOzMwoKCnS6HfC
ePXswatQos7mjl65LiEqiHH63fkGmfUcsCDXMdFljYo7voa44xsatpv89t2/fRmZmJvz8/GBnZ2eiCMnc6PJ7w3MAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQFwJ0ER0XZSnUlNaBYyIiEyHMwBEREQWiDMARETm5GBCw/YXHN2w/dVBUFAQevfujVWrVpk6FLPGGQAiIjKoiIgIjBs3Tqv80KFDkCQJN27cqFf727Ztw6uvvlqvNsxBQkICHnjgATRr1gzu7u4YN24cLly4YLD2mQAQEZFZKCsrAwCoVCo0a9bMxNHoLygoqE5380tNTcWcOXNw/PhxpKSk4M6dOwgNDUVxcbFB4mACQEREJlFaWor58+fD3d0ddnZ2GDp0KE6cOCFvDwoKwty5cxEVFQU3NzeEhITI5QsWLAAAZGVlQZIkrUdQUFCd+qhsb/78+Vi8eDFUKhU8PT0RGxtbY+xjx46tsl9JkrBz506DvD579+5FREQEunXrhl69eiEpKQmXLl1Cenq6QdpnAkBERCaxePFifPnll9i0aRNOnjwJf39/jBw5Evn5+XKdTZs2wcbGBv/+97/x/vvva7Xh4+ODnJwc+XHq1Cm4urpi+PDhde6jsh9HR0f88MMPSExMxLJly5CSklJt7ElJScjJycGvv/4KANizZ48cw6hRowzx8mgpKCgAcHcGxBB4EiARERnc7t274eTkpFFWXl4u/1xcXIy1a9ciOTkZYWFhAID169cjJSUFGzduxKJFiwAA/v7+SExMrLYfa2treHp6AgBu376NcePGISAgALGxsXXuAwB69uyJmJgYAECHDh2wZs0afPvtt/Ksw/1cXV0BAMeOHYMkSRg6dKhRD0sIIRAVFYWhQ4eie/fuBmmTMwBERGRwwcHByMjI0Hhs2LBB3v7bb79BrVZjyJAhcplCocCAAQNw/vx5uax///517jMyMhJFRUXYsmULrKys6twHcDcBuJeXlxfy8vJq7fP06dNo27ZtjR/+y5cvh5OTk/w4cuQIZs2apVVWk7lz5+L06dP45JNPao2prjgDQEREBufo6Ah/f3+NsitXrsg/CyEAAJIkadQRQmiUOTo61qm/+Ph47N27Fz/++KP8YVzXPoC7icG9JElCRUVFrf2ePn1aK3m436xZszBp0iT5+dSpUzFhwgSMHz9eLmvVqlW1+8+bNw87d+7E4cOH0bp161pjqivOABARUYPz9/eHra0tvv/+e7lMrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp0411lGpVPD395cf9vb2cHd31yq7nxACc+fOxbZt2/Ddd9/Bz8/PIDFX4gwAERE1OEdHRzz77LNYtGgRVCoV2rRpg8TERJSUlCAyMrLO7Zw9exZPP/00XnzxRXTr1g25ubkAAFtbW6hUKoP0UZOKigpkZ2fjypUraNWqldbMQn3MmTMHW7ZswVdffYVmzZrJY3N2dq4yYdAVEwAiInPSCFfm09drr72GiooKPPXUUygqKkL//v2xb98+tGjRos5tpKWloaSkBPHx8YiPj5fLAwMDcejQIYP0UZP58+djxowZ6Ny5MwoLCw2aAKxduxYA5EsaKyUlJSEiIqLe7TMBICIig6pukZugoCD5uDwA2NnZ4e2338bbb79dZf1Dhw7VWh4REVHjh2FtfVTXz44dO6qtf6+wsDBcvny5TnVr6q8q975WxsBzAIiIiCwQEwAiIiILxEMAZHQrU37Ra7/nQzoaOBIiIqrEGQAiIiILxBkAavyMdf9zYQWgs3HaJjIQY58IRk1LXRYvqsQEgIioEVIoFJAkCX/99Rdatmxp0MvLqOkRQqCsrAx//fUXrKysYGtrW+s+TACIiBoha2trtG7dGleuXEFWVpapwyEz4eDggDZt2sDKqvYj/EwAiIgaKScnJ3To0AFqtdrUoZAZsLa2ho2NTZ1
ni5gAEBE1YtbW1rC2tjZ1GNQE8SoAIiIiC8QEgIiIyAKZNAE4fPgwxowZA29vb0iSpLX2ckREBCRJ0ngMGjRIo05paSnmzZsHNzc3ODo6YuzYsRr3nCYiIiJtJk0AiouL0atXL6xZs6baOg8//DBycnLkx549ezS2L1iwANu3b8fWrVvx/fff4+bNmxg9ejTKy8uNHT4REZHZMulJgGFhYQgLC6uxjlKphKenZ5XbCgoKsHHjRmzevBkPPfQQAOCjjz6Cj48PDhw4gJEjR1a5X2lpKUpLS+XnhYWFAAC1Wl3ns20r6+l7dq4k9EtQ6nM2sK59VtaXRHmD9ltJ7lMYJ09V/3+7TfkM6/r+npoDcx6jOcZMTYckGskyU5IkYfv27Rg3bpxcFhERgR07dsDW1hYuLi4IDAzEP//5T7i7uwMAvvvuO4wYMQL5+fka93bu1asXxo0bh7i4uCr7io2NrXLbli1b4ODgYNiBERFVo6SkBFOmTEFBQQGaN29u6nDIwjTqywDDwsIwceJE+Pr6IjMzEy+//DIefPBBpKenQ6lUIjc3F7a2thof/gDg4eGB3NzcatuNjo5GVFSU/LywsBA+Pj4IDQ2t8x+hWq1GSkoKQkJCoFAodB7bOwcv6rwPAMwJ9tdthyMr5B9/zMrXadcKyRr5qoFQ5f+AQb7O1VccFlX9NhhgrPeMwZDUwgopNzvq/R6ag/r+npoDcx5j5ewjkSk06gTg8ccfl3/u3r07+vfvD19fX3z99dcYP358tfsJIWpcCEGpVEKpVGqVKxQKnf+B6LMPAAhJv+t6de5L+t+60FZ6TsVbiXIopBrWl64lpnqPtaa+DUDf99CccIyNk7nFS02LWV0G6OXlBV9fX/z6668AAE9PT5SVleH69esa9fLy8uDh4WGKEImIiMyCWSUA165dw+XLl+Hl5QUA6NevHxQKBVJSUuQ6OTk5OHv2LAYPHmyqMImIiBo9kx4CuHnzJi5e/N/x4czMTGRkZEClUkGlUiE2NhYTJkyAl5cXsrKysGTJEri5ueGxxx4DADg7OyMyMhILFy6Eq6srVCoVXnjhBfTo0UO+KoCoVkdWGP0wA4Kjjds+EZGOTJoApKWlITg4WH5eeWJeeHg41q5dizNnzuDDDz/EjRs34OXlheDgYHz66ado1qyZvM/KlSthY2ODSZMm4datWxgxYgSSk5O5djYREVENTJoABAUFoaarEPft21drG3Z2dli9ejVWr15tyNCIiIiaNLM6B4CIiIgMgwkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIEa9c2AyEwcTKhx86BL1/Rs11W//YiIqFZ6JQDJycmYNGkSHBwcDB0PUdNUS5JUb1xqmIh0pNchgOjoaHh6eiIyMhJHjx41dExERERkZHrNAFy5cgVff/01kpOTERwcDD8/P0ybNg3h4eHw9PQ0dIx0j5Upv+hUX+/pdyIiatL0mgGwtrbG2LFjsW3bNly+fBkzZszAxx9/jDZt2mDs2LH46quvUFFh5LurERERkd7qfRWAu7s7hgwZgoCAAFhZWeHMmTOIiIhA+/btcejQIQOESERERIamdwLw559/4o033kC3bt0QFBSEwsJC7N69G5mZmfjjjz8wfvx4hIeHGzJWIiIiMhC9zgEYM2YM9u3bh44dO2L69Ol4+umnoVKp5O329vZYuHAhVq5cabBAiYiIyHD0SgDc3d2RmpqKgICAaut4eXkhMzNT78CIiIjIePQ6BBAYGIi+fftqlZeVleHDDz8EAEiSBF9f3/pFR0REREahVwIwbdo0FBQUaJUXFRVh2rRp9Q6KiIiIjEuvBEAIAUmStMqvXLkCZ2fnegdFRERExqXTOQB9+vSBJEmQJAkjRoyAjc3/di8vL0dmZiYefvhhgwdJREREhqVTAjBu3DgAQEZGBkaOHAknJyd5m62tLdq2bYsJEyYYNEAiIiIyPJ0SgJiYGABA27Z
t8fjjj8POzs4oQREREZFx6XUZIBf4ISIiMm91TgBUKhV++eUXuLm5oUWLFlWeBFgpPz/fIMERERGRcdQ5AVi5ciWaNWsm/1xTAkBERESNW50TgHun/SMiIowRCxERETWQOicAhYWFdW60efPmegVDREREDaPOCYCLi0ut0/6VCwSVl5fXOzAiIiIynjonAAcPHjRmHERERNSA6pwABAYGGjMOIqqPgwlVlwsrAJ2BIysAqaJ+fQRH129/ImpU6pwAnD59Gt27d4eVlRVOnz5dY92ePXvWOzAiIiIynjonAL1790Zubi7c3d3Ru3dvSJIEIYRWPZ4DQERE1PjVOQHIzMxEy5Yt5Z/Jshz7/ZqpQyAiIgOqcwLg6+tb5c9ERERkfvS6FwAAXLhwAatXr8b58+chSRI6d+6MefPmoVOnToaMj4iIiIzASp+dvvjiC3Tv3h3p6eno1asXevbsiZMnT6J79+74/PPPDR0jERERGZheMwCLFy9GdHQ0li1bplEeExODF198ERMnTjRIcERERGQces0A5Obm4umnn9Yqf/LJJ5Gbm1vvoIiIiMi49EoAgoKCcOTIEa3y77//HsOGDat3UERERGRcdT4EsHPnTvnnsWPH4sUXX0R6ejoGDRoEADh+/Dg+//xzxMXFGT5KIiIiMqg6JwDjxo3TKnv33Xfx7rvvapTNmTMHs2bNqndgREREZDx1TgAqKuq5jjgRERE1GnqdA2Aohw8fxpgxY+Dt7Q1JkrBjxw6N7UIIxMbGwtvbG/b29ggKCsK5c+c06pSWlmLevHlwc3ODo6Mjxo4diytXrjTgKIiIiMyP3gsBFRcXIzU1FZcuXUJZWZnGtvnz59e5jV69emHatGmYMGGC1vbExESsWLECycnJ6NixI+Lj4xESEoILFy6gWbNmAIAFCxZg165d2Lp1K1xdXbFw4UKMHj0a6enpsLa21nd4RERETZpeCcCpU6cwatQolJSUoLi4GCqVClevXoWDgwPc3d3rnACEhYUhLCysym1CCKxatQpLly7F+PHjAQCbNm2Ch4cHtmzZgpkzZ6KgoAAbN27E5s2b8dBDDwEAPvroI/j4+ODAgQMYOXKkPsMjIiJq8vRKAJ5//nmMGTMGa9euhYuLC44fPw6FQoEnn3wSzz33nEECy8zMRG5uLkJDQ+UypVKJwMBAHD16FDNnzkR6ejrUarVGHW9vb3Tv3h1Hjx6tNgEoLS1FaWmp/LywsBAAoFaroVar6xRfZb261r+fJBrmjokVkv6zIJX71qeN+lAL4x6hqmzf2P2YkkHHqOfvurHV92/RlMwxZmo69EoAMjIy8P7778Pa2hrW1tYoLS1Fu3btkJiYiPDwcPkbe31ULijk4eGhUe7h4YHs7Gy5jq2tLVq0aKFVp6YFiRISEqq8XHH//v1wcHDQKc6UlBSd6lfy02sv3V11HVzvNvJVAw0Qie72FDVMPyk3OzZMRyZkkDHu2VP/NoxI379FUyopKTF1CGTB9EoAFAoFJEkCcPfD9tKlS+jSpQucnZ1x6dIlgwZY2U8lIYRW2f1qqxMdHY2oqCj5eWFhIXx8fBAaGormzZvXKS61Wo2UlBSEhIRAoVDUaZ97vXPwos776OOBK8l671shWSNfNRCq/B9g1UAzFoYyoK2q1jpqYYWUmx0R4vQLFFLTvMrFoGMcFlV7HROo79+iKVXOPhKZgl4JQJ8+fZCWloaOHTsiODgYr7zyCq5evYrNmzejR48eBgnM09MTwN1v+V5eXnJ5Xl6ePCvg6emJsrIyXL9+XWMWIC8vD4MHV//NV6lUQqlUapUrFAqd/4Hosw8AiAaaVjfEB7eVKDe7BECXDzuFVNFkE4BKBhljI/9w1fdv0ZTMLV5qWvQ6MLh8+XL5Q/nVV1+Fq6srnn32WeTl5WHdunUGCczPzw+enp4a03plZWVITU2VP9z79esHhUKhUScnJwdnz56tMQEgIiKydHrNAPTv31/+uWXLltij57H
Bmzdv4uLF/02FZ2ZmIiMjAyqVCm3atMGCBQuwfPlydOjQAR06dMDy5cvh4OCAKVOmAACcnZ0RGRmJhQsXwtXVFSqVCi+88AJ69OghXxVARERE2vReBwC4O9V+4cIFSJKETp06oWXLljrtn5aWhuDgYPl55XH58PBwJCcnY/Hixbh16xZmz56N69evY+DAgdi/f7+8BgAArFy5EjY2Npg0aRJu3bqFESNGIDk5mWsAEBER1UCvBKCwsBBz5szB1q1bUV5+99iwtbU1Hn/8cbzzzjtwdnauUztBQUEQQlS7XZIkxMbGIjY2tto6dnZ2WL16NVavXq3TGIiIiCyZXucAPPPMM/jhhx+we/du3LhxAwUFBdi9ezfS0tIwffp0Q8dIREREBqbXDMDXX3+Nffv2YejQoXLZyJEjsX79ejz88MMGC46IiIiMQ68ZAFdX1yqn+Z2dnbUW5SEiIqLGR68E4B//+AeioqKQk5Mjl+Xm5mLRokV4+eWXDRYcERERGUedDwH06dNHY3W9X3/9Fb6+vmjTpg0A4NKlS1Aqlfjrr78wc+ZMw0dKREREBlPnBGDcuHFGDIOIiIgaUp0TgJiYGGPGQURERA2oXgsBpaen4/z585AkCV27dkWfPn0MFRcREREZkV4JQF5eHp544gkcOnQILi4uEEKgoKAAwcHB2Lp1q84rAhIREVHD0usqgHnz5qGwsBDnzp1Dfn4+rl+/jrNnz6KwsBDz5883dIxERERkYHrNAOzduxcHDhxAly5d5LKuXbvinXfeQWhoqMGCIyIiIuPQawagoqKiyvtYKxQKVFQ07fuqExERNQV6JQAPPvggnnvuOfzxxx9y2X//+188//zzGDFihMGCIyIiIuPQKwFYs2YNioqK0LZtW7Rv3x7+/v7w8/NDUVER78pHRERkBvQ6B8DHxwcnT55ESkoKfv75Zwgh0LVrVzz00EOGjo+IiIiMQOcE4M6dO7Czs0NGRgZCQkIQEhJijLiIiIjIiHQ+BGBjYwNfX1+Ul5cbIx4iIiJqAHrfDTA6Ohr5+fmGjoeIiIgagF7nALz99tu4ePEivL294evrC0dHR43tJ0+eNEhwREREZBx6JQDjxo2DJEkQQhg6HiIiImoAOiUAJSUlWLRoEXbs2AG1Wo0RI0Zg9erVcHNzM1Z8REREZAQ6nQMQExOD5ORkPPLII5g8eTIOHDiAZ5991lixERERkZHoNAOwbds2bNy4EU888QQAYOrUqRgyZAjKy8thbW1tlACJiIjI8HSaAbh8+TKGDRsmPx8wYABsbGw0lgQmIiKixk+nBKC8vBy2trYaZTY2Nrhz545BgyIiIiLj0ukQgBACERERUCqVctnt27cxa9YsjUsBt23bZrgIm7hBl9aZOgQiIrJAOiUA4eHhWmVPPvmkwYIhIiKihqFTApCUlGSsOIiIiKgB6bUUMBEREZk3JgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIF0uhkQEVmwgwnGbT842rjtE5EGzgAQERFZICYAREREFqhRJwCxsbGQJEnj4enpKW8XQiA2Nhbe3t6wt7dHUFAQzp07Z8KIiYiIzEOjTgAAoFu3bsjJyZEfZ86ckbclJiZixYoVWLNmDU6cOAFPT0+EhISgqKjIhBETERE1fo3+JEAbGxuNb/2VhBBYtWoVli5divHjxwMANm3aBA8PD2zZsgUzZ86sts3S0lKUlpbKzwsLCwEAarUaarW6TnFV1qtr/ftJohwAUCFZ67V/Q6iMrTHHWB21qD23raxTl7rmyqzGqOffUn3/Fk3JHGOmpkMSQghTB1Gd2NhYvP7663B2doZSqcTAgQOxfPlytGvXDr///jvat2+PkydPok+fPvI+jz76KFxcXLBp06Ya242Li9Mq37JlCxwcHIwyFiKi+5WUlGDKlCkoKChA8+bNTR0
OWZhGnQB88803KCkpQceOHfHnn38iPj4eP//8M86dO4cLFy5gyJAh+O9//wtvb295nxkzZiA7Oxv79u2rtt2qZgB8fHxw9erVOv8RqtVqpKSkICQkBAqFQuexvXPwIgDggSvJOu/bUCoka+SrBkKV/wOs/n/GwlwMaKuqtY5aWCHlZkeEOP0ChVTRAFE1PLMa47AovXar79+iKRUWFsLNzY0JAJlEoz4EEBYWJv/co0cPBAQEoH379ti0aRMGDRoEAJAkSWMfIYRW2f2USiWUSqVWuUKh0PkfiD77AID4/2l1c/hgtRLlZhHnvXT5sFNIFY3/w7GezGKM9fzw1vdv0ZTMLV5qWszgwOD/ODo6okePHvj111/l8wJyc3M16uTl5cHDw8MU4REREZkNs0oASktLcf78eXh5ecHPzw+enp5ISUmRt5eVlSE1NRWDBw82YZRERESNX6M+BPDCCy9gzJgxaNOmDfLy8hAfH4/CwkKEh4dDkiQsWLAAy5cvR4cOHdChQwcsX74cDg4OmDJliqlDJyIiatQadQJw5coVTJ48GVevXkXLli0xaNAgHD9+HL6+vgCAxYsX49atW5g9ezauX7+OgQMHYv/+/WjWrJmJIyciImrcGnUCsHXr1hq3S5KE2NhYxMbGNkxARERETYRZnQNAREREhtGoZwDMwTsHL8qX9FHjcez3a7XWqZCsAVfgx6x8jcscA9q5GjM0IqJGgTMAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFsjG1AEQNTbHfr+m974B7VwNGAkRkfFwBoCIiMgCcQaAiBqHgwn67SesAHQGjqwApIrq6wVH69c+URPFGQAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskA2pg6AiKhBHEwwfh/B0cbvg8hAOANARERkgTgDQGRAx36/ptd+Ae1cDRwJEVHNOANARERkgZgAEBERWaAmkwC8++678PPzg52dHfr164cjR46YOiQiIqJGq0kkAJ9++ikWLFiApUuX4tSpUxg2bBjCwsJw6dIlU4dGRETUKDWJBGDFihWIjIzEM888gy5dumDVqlXw8fHB2rVrTR0aERFRo2T2VwGUlZUhPT0dL730kkZ5aGgojh49WuU+paWlKC0tlZ8XFBQAAPLz86FWq+vUr1qtRklJCUrLCyAkaz2jB4pu39F7X2OrkARKSkpQdPsOrES5qcMxuMY0vms3y4zSrlpYoaSkBNekMiikCqP0YWqNaozXdLsKpKioCAAghDBGNEQ1MvsE4OrVqygvL4eHh4dGuYeHB3Jzc6vcJyEhAXFxcVrlfn5+RomRiCxFrF57FRUVwdnZ2bChENXC7BOASpIkaTwXQmiVVYqOjkZUVJT8vKKiAvn5+XB1da12n/sVFhbCx8cHly9fRvPmzfUPvBFr6mNs6uMDOMbGTgiBoqIieHt7mzoUskBmnwC4ubnB2tpa69t+Xl6e1qxAJaVSCaVSqVHm4uKiV//Nmzc3u386umrqY2zq4wM4xsaM3/zJVMz+JEBbW1v069cPKSkpGuUpKSkYPHiwiaIiIiJq3Mx+BgAAoqKi8NRTT6F///4ICAjAunXrcOnSJcyaNcvUoRERETVKTSIBePzxx3Ht2jUsW7YMOTk56N69O/bs2QNfX1+j9alUKhETE6N1KKEpaepjbOrjAzhGIqqeJHj9CRERkcUx+3MAiIiISHdMAIiIiCwQEwAiIiILxASAiIjIAjEB0ENTuvXw4cOHMWbMGHh7e0OSJOzYsUNjuxACsbGx8Pb2hr2
9PYKCgnDu3DnTBKuHhIQEPPDAA2jWrBnc3d0xbtw4XLhwQaOOuY9x7dq16Nmzp7wQTkBAAL755ht5u7mP734JCQmQJAkLFiyQy5raGIkaAhMAHTW1Ww8XFxejV69eWLNmTZXbExMTsWLFCqxZswYnTpyAp6cnQkJC5JuYNHapqamYM2cOjh8/jpSUFNy5cwehoaEoLi6W65j7GFu3bo3XXnsNaWlpSEtLw4MPPohHH31U/gA09/Hd68SJE1i3bh169uypUd6UxkjUYATpZMCAAWLWrFkaZZ07dxYvvfSSiSIyHABi+/bt8vOKigrh6ekpXnvtNbns9u3bwtnZWbz33nsmiLD+8vLyBACRmpoqhGiaYxRCiBYtWogNGzY0qfEVFRWJDh06iJSUFBEYGCiee+45IUTTfQ+JjI0zADqovPVwaGioRnlNtx42Z5mZmcjNzdUYr1KpRGBgoNmOt/LWzyqVCkDTG2N5eTm2bt2K4uJiBAQENKnxzZkzB4888ggeeughjfKmNEaihtQkVgJsKPrceticVY6pqvFmZ2ebIqR6EUIgKioKQ4cORffu3QE0nTGeOXMGAQEBuH37NpycnLB9+3Z07dpV/gA09/Ft3boVJ0+exIkTJ7S2NZX3kKihMQHQgy63Hm4Kmsp4586di9OnT+P777/X2mbuY+zUqRMyMjJw48YNfPnllwgPD0dqaqq83ZzHd/nyZTz33HPYv38/7Ozsqq1nzmMkMgUeAtCBPrceNmeenp4A0CTGO2/ePOzcuRMHDx5E69at5fKmMkZbW1v4+/ujf//+SEhIQK9evfDWW281ifGlp6cjLy8P/fr1g42NDWxsbJCamoq3334bNjY28jjMeYxEpsAEQAeWduthPz8/eHp6aoy3rKwMqampZjNeIQTmzp2Lbdu24bvvvoOfn5/G9qYwxqoIIVBaWtokxjdixAicOXMGGRkZ8qN///6YOnUqMjIy0K5dO7MfI5FJmO78Q/O0detWoVAoxMaNG8VPP/0kFixYIBwdHUVWVpapQ9NLUVGROHXqlDh16pQAIFasWCFOnTolsrOzhRBCvPbaa8LZ2Vls27ZNnDlzRkyePFl4eXmJwsJCE0deN88++6xwdnYWhw4dEjk5OfKjpKRErmPuY4yOjhaHDx8WmZmZ4vTp02LJkiXCyspK7N+/Xwhh/uOryr1XAQjRNMdIZGxMAPTwzjvvCF9fX2Frayv69u0rX1Jmjg4ePCgAaD3Cw8OFEHcvsYqJiRGenp5CqVSK4cOHizNnzpg2aB1UNTYAIikpSa5j7mP8+9//Lv8+tmzZUowYMUL+8BfC/MdXlfsTgKY4RiJj4+2AiYiILBDPASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAygKysLEiShIyMDFOHQkRUJ0wAqEkRQuChhx7CyJEjtba9++67cHZ2xqVLl0wQGRFR48IEgJoUSZKQlJSEH374Ae+//75cnpmZiRdffBFvvfUW2rRpY8IIiYgaByYA1OT4+PjgrbfewgsvvIDMzEwIIRAZGYkRI0YgIiJCq/7kyZPxxBNPaJSp1Wq4ubkhKSkJALB3714MHToULi4ucHV1xejRo/Hbb79VG0NycjJcXFw0ynbs2AFJkjTKdu3ahX79+sHOzg7t2rVDXFwc7ty5I2+PjY1FmzZtoFQq4e3tjfnz5+v4ahARVc3G1AEQGUN4eDi2b9+OadOmYcKECTh79izOnj1bZd2pU6di0qRJuHnzJpycnAAA+/btQ3FxMSZMmAAAKC4uRlRUFHr06IHi4mK88soreOyxx5CRkQErK/3y6H379uHJJ5/E22+/jWHDhuG3337DjBkzAAAxMTH44osvsHLlSmzduhXdunVDbm4u/vOf/+jVFxGRFtPejJDIeP7880/RsmVLYWVlJbZt21ZtvbKyMuHm5iY+/PBDuWzy5Mli4sSJ1e6Tl5c
nAMi3nM3MzBQAxKlTp4QQQiQlJQlnZ2eNfbZv3y7u/ZMbNmyYWL58uUadzZs3Cy8vLyGEEG+++abo2LGjKCsrq9N4iYh0wUMA1GS5u7tjxowZ6NKlCx577LFq6ykUCkycOBEff/wxgLvf9r/66itMnTpVrvPbb79hypQpaNeuHZo3bw4/Pz8AqNcJhenp6Vi2bBmcnJzkx/Tp05GTk4OSkhJMnDgRt27dQrt27TB9+nRs375d4/AAEVF98BAANWk2Njawsan913zq1KkIDAxEXl4eUlJSYGdnh7CwMHn7mDFj4OPjg/Xr18Pb2xsVFRXo3r07ysrKqmzPysoKQgiNMrVarfG8oqICcXFxGD9+vNb+dnZ28PHxwYULF5CSkoIDBw5g9uzZeP3115GamgqFQlGX4RMRVYsJABGAwYMHw8fHB59++im++eYbTJw4Eba2tgCAa9eu4fz583j//fcxbNgwAMD3339fY3stW7ZEUVERiouL4ejoCABaawT07dsXFy5cgL+/f7Xt2NvbY+zYsRg7dizmzJmDzp0748yZM+jbt289RktExASACMDdywenTJmC9957D7/88gsOHjwob2vRogVcXV2xbt06eHl54dKlS3jppZdqbG/gwIFwcHDAkiVLMG/ePPz4449ITk7WqPPKK69g9OjR8PHxwcSJE2FlZYXTp0/jzJkziI+PR3JyMsrLy+W2Nm/eDHt7e/j6+hrjJSAiC8NzAIj+39SpU/HTTz+hVatWGDJkiFxuZWWFrVu3Ij09Hd27d8fzzz+P119/vca2VCoVPvroI+zZswc9evTAJ598gtjYWI06I0eOxO7du5GSkoIHHngAgwYNwooVK+QPeBcXF6xfvx5DhgxBz5498e2332LXrl1wdXU1+NiJyPJI4v4DlURERNTkcQaAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCzQ/wGGQ4CjYVIb5gAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABXk0lEQVR4nO3deXxMV/8H8M9MMjMy2cg+WWSziy20YimxJERRtbZaRFNPn1LlQdVSBC2lpbqg9RShKFWPX7WUhIot0RJJLVEiiUhIkAiTPZOZ8/tjzE1GFjPJbJl836/XvJh779z5nizzyT33nHt5jDEGQgghhJgVvrELIIQQQojuUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwDdQVFQUeDwe97C0tISnpyemTp2Ku3fvctvFxsZy20RFRdW4r4EDB4LH48HHx0dtuY+PD3g8HoKDg2t83c6dO7l9x8bG1llv1Tp4PB4sLCzg6uqKcePG4fr161q0/Pl8fHwwfPhwne6Tx+Phvffee+52qnZW/XpERkaCx+OpbRccHFzr17U2ycnJiIyMxO3bt6utCw8Pr/b9qy9VvaqHUCiEr68vZs2ahcePH+vkPZ6Hx+MhMjKSe676ea+p7XU5cuSI2n6q8vHxQXh4eL1rJITUjAJeR7Zv3474+HjExMRg2rRp+PHHH/HSSy+hqKhIbTtbW1ts3bq12uvT09MRGxsLOzu7Gvdva2uL06dPIzU1tdq6bdu21fq62qxatQrx8fE4efIkPvzwQ8TExKBPnz5qf5Q0ZoGBgYiPj0dgYGCd223atAmbNm3Sat/JyclYvnx5jSG3ZMkSHDx4UKv9Pc/Ro0cRHx+Pw4cPY9SoUfj6668RFhYGY1xl+uWXX0Z8fDwkEolWrzty5AiWL19e47qDBw9iyZIluiiPEFIFBbyOBAQEICgoCAMGDMCyZcswf/58pKen4//+7//UtpswYQLOnj2LlJQUteXbtm2Dh4cH+vTpU+P++/btCw8PD2zbtk1teWpqKk6fPo0JEyZoVW/r1q0RFBSEfv36Yc6cOVi/fj3y8/Nr7V0AgOLiYq3ew5js7OwQFBT03D98OnTogA4dOujsff39/dGtWzed7Q8AunfvjqCgIISEhOCLL77Am2++ifPnzyMuLq7W1+jre+Xs7IygoCCIRCKd7bNbt27w9/fX2f4IIUoU8HoSFBQEAMjIyFBbHhISAi8vL7WgVigU2LFjB6ZMmQI+v+ZvCZ/Px+TJk7Fjxw4oFApu+bZt2+Dl5YXBgwfrtF5V9/ClS5cwduxYtGjRgvsQLi0txcKFC+Hr6wuhUAgPDw/MmDGj1m7jgwcPonPnzmjWrBn8/Pzw1Vdfqa0vLS3F3Llz0bVrV9jb28PBwQG9evXCL7/8Umu93333Hdq0aQORSIQOHTpg7969autr6qKvSU1d9Js3b0aXLl1gY2MDW1tbtGvXDosWLQKg7KIeN24cAGDAgAHVTrvU1EWvUCjw9ddfo2vXrrCyskLz5s0RFBSEQ4cO1VlbbZ79XgUHByMgIACnT59G7969IRaL8dZbbwEApFIp5s2bp/a9mj17drWeJalUimnTpsHR0RE2NjYYOnQobt68We29a+uiP3r0KAYNGgR7e3uIxWK0b98eq1ev5r4mGzduBAC1Uw6qfdTURX/nzh28+eabcHFxgUgkQvv27bFu3Tq1n/3bt2+Dx+Ph888/x/r16+Hr6wsbGxv06tUL58+fV9tfWloaXnvtNbi7u0MkEsHV1RWDBg1CUlKS5l94QhoZS2MXYK5u3boFQHnEUxWfz0d4eDi2bt2Kjz/+GBYWFoiOjkZWVhamTp2KWbNm1brPt956C6
tXr8axY8cQFhYGuVyOHTt2ICIiotY/DBpa7+jRo/Haa6/h3//+N4qKisAYw6hRo3DixAksXLgQL730Ei5fvoxly5YhPj4e8fHxakd3SUlJmD17NiIjI+Hm5obdu3dj1qxZKC8vx7x58wAAZWVlePToEebNmwcPDw+Ul5fj+PHjGD16NLZv347Jkyer1XTo0CGcPHkSK1asgLW1NTZt2oTXX38dlpaWGDt2bIO+Dnv37sX06dMxc+ZMfP755+Dz+bh16xaSk5MBKLuoV61ahUWLFmHjxo3cKYC6jkDDw8Oxa9cuREREYMWKFRAKhbh06ZLW57FVavpeZWdn480338T8+fOxatUq8Pl8FBcXo3///sjKysKiRYvQuXNnXLt2DUuXLsWVK1dw/Phx8Hg87nsaFxeHpUuX4oUXXsC5c+cQFhamUT1bt27FtGnT0L9/f3z77bdwcXHBzZs3cfXqVQDK0xZFRUX4+eefER8fz72utm7+hw8fonfv3igvL8fKlSvh4+OD3377DfPmzUNqamq1UyobN25Eu3btsGHDBu79hg0bhvT0dNjb2wMAhg0bBrlcjrVr16Jly5bIzc1FXFycwcYyEGIUjDTI9u3bGQB2/vx5JpPJWEFBAfvtt9+Ys7Mzs7W1ZTk5OYwxxk6ePMkAsP3797O0tDTG4/HYb7/9xhhjbNy4cSw4OJgxxtjLL7/MvL291d7D29ubvfzyy4wxxvr378/Gjh3LGGPs8OHDjMfjsfT0dLZ//34GgJ08ebLOelV17Nu3j8lkMlZcXMxOnz7NWrVqxSwsLNjff//NGGNs2bJlDABbunSp2uuPHj3KALC1a9eqLd+3bx8DwLZs2aJWN4/HY0lJSWrbhoSEMDs7O1ZUVFRjjRUVFUwmk7GIiAjWrVs3tXUAmJWVFfd1VW3frl071qpVq2rtrPr1ULWpqv79+7P+/ftzz9977z3WvHnzGutSqetrPWXKFLXv3+nTpxkAtnjx4jr3WRNVvTk5OUwmk7H8/Hy2a9cuZmVlxby8vFhJSQnXBgDsxIkTaq9fvXo14/P57MKFC2rLf/75ZwaAHTlyhDHG2O+//84AsC+//FJtu08++YQBYMuWLeOWqX7e09PTGWOMFRQUMDs7O9a3b1+mUChqbcuMGTOqfe1VvL292ZQpU7jnCxYsYADYn3/+qbbdu+++y3g8Hrtx4wZjjLH09HQGgHXq1IlVVFRw2/31118MAPvxxx8ZY4zl5uYyAGzDhg211keIOaIueh0JCgqCQCCAra0thg8fDjc3N/z+++9wdXWttq2vry+Cg4Oxbds25OXl4ZdffuG6VJ/nrbfewqFDh5CXl4etW7diwIAB9Rq1PWHCBAgEAojFYvTr1w9yuRw///wzOnfurLbdmDFj1J7/8ccfAFCtS3XcuHGwtrbGiRMn1JZ37NgRXbp0UVs2ceJESKVSXLp0iVu2f/9+9OnTBzY2NrC0tIRAIMDWrVtrHNk/aNAgta+rhYUFJkyYgFu3biErK0vzL0INXnzxRTx+/Bivv/46fvnlF+Tm5jZof7///jsAYMaMGfXeh5ubGwQCAVq0aIE333wTgYGBOHr0KJo1a8Zt06JFCwwcOFDtdb/99hsCAgLQtWtXVFRUcI8hQ4aonb44efIkAOCNN95Qe/3EiROfW1tcXBykUimmT59ebYZCff3xxx/o0KEDXnzxRbXl4eHhYIxxP4MqL7/8MiwsLLjnqp9h1SkMBwcH+Pv747PPPsP69euRmJio1tVPiLmigNeRnTt34sKFC0hMTMS9e/dw+fLlWgfMAUBERAR+/fVXrF+/HlZWVhp3LY8dOxbNmjXDF198gV9//RURERH1qnfNmjW4cOECLl26hDt37iAtLQ2jRo2qtt2z3ah5eXmwtLSs1pXP4/Hg5uaGvLw8teVubm7V9qlaptr2f//7H8aPHw8PDw/s2rUL8fHxuHDhAt566y2UlpbW+vq69llfkyZNwrZt25CRkYExY8
bAxcUFPXv2RExMTL329/DhQ1hYWNRYs6aOHz+OCxcuICkpCbm5uTh79my1gYE1dXffv38fly9fhkAgUHvY2tqCMcb98aL6njo6Oqq9XpOaHz58CADw9PSsb/OqycvLq7E97u7u3Pqqnq1bdYqopKQEgPJn88SJExgyZAjWrl2LwMBAODs74/3330dBQYHO6ibE1NA5eB1p3749evToofH2o0ePxowZM/Dpp59i2rRpsLKy0uh1YrEYr732GlavXg07OzuMHj26XvX6+flpVO+zR2WOjo6oqKjAw4cP1UKeMYacnBy88MILatvn5ORU26dqmeqDedeuXfD19cW+ffvU3q+srKzGmjTZZ0NMnToVU6dORVFREU6fPo1ly5Zh+PDhuHnzJry9vbXal7OzM+RyOXJycrSeWqbSpUsXODk51blNTUfPTk5OsLKyqjbzoup6oPJ7mpeXp/b1q+nr/CzVz0BDe06qcnR0RHZ2drXl9+7dA4Dnfi1q4u3tzU1PvXnzJn766SdERkaivLwc3377bcMKJsRE0RG8kVhZWWHp0qUYMWIE3n33Xa1e++6772LEiBFYunSpWjetIQwaNAiAMpSrOnDgAIqKirj1KteuXcPff/+ttmzPnj2wtbXlBqipLuJSNaRycnJqHUV/4sQJ3L9/n3sul8uxb98++Pv76/RI0traGmFhYVi8eDHKy8tx7do1ANWPEOuiGqi2efNmndWlqeHDhyM1NRWOjo7o0aNHtYfq1M6AAQMAALt371Z7/Z49e577Hr1794a9vT2+/fbbOufla/M1GzRoEJKTk9VO4QCVF3RS1Vtfbdq0wUcffYROnTpVew9CzAkdwRvRnDlzMGfOHK1f17Vr12rz6w0lJCQEQ4YMwYcffgipVIo+ffpwo+i7deuGSZMmqW3v7u6OkSNHIjIyEhKJBLt27UJMTAzWrFkDsVgMQBlE//vf/zB9+nSMHTsWmZmZWLlyJSQSSbXrBQDKI7iBAwdiyZIl3Cj6f/75p9pUufpQ9ab06dMHEokEOTk5WL16Nezt7bneiYCAAADAli1bYGtri2bNmsHX17fG3oOXXnoJkyZNwscff4z79+9j+PDhEIlESExMhFgsxsyZMxtcc21mz56NAwcOoF+/fvjPf/6Dzp07Q6FQ4M6dO4iOjsbcuXPRs2dPhIaGol+/fpg/fz6KiorQo0cPnDt3Dj/88MNz38PGxgbr1q3D22+/jcGDB2PatGlwdXXFrVu38Pfff+Obb74BAHTq1AmA8tRQWFgYLCws0LlzZwiFwmr7/M9//oOdO3fi5ZdfxooVK+Dt7Y3Dhw9j06ZNePfdd9GmTRutvg6XL1/Ge++9h3HjxqF169YQCoX4448/cPnyZSxYsECrfRHSmFDAE63weDz83//9HyIjI7F9+3Z88skncHJywqRJk7Bq1apqF0Dp2rUrpk6dimXLliElJQXu7u5Yv349/vOf/3DbTJ06FQ8ePMC3336Lbdu2wc/PDwsWLEBWVlaNVz8bOXIkOnbsiI8++gh37tyBv78/du/erfXFfmry0ksvISoqCj/99BPy8/Ph5OSEvn37YufOnVx3tK+vLzZs2IAvv/wSwcHBkMvl2L59e62XW42KikJgYCC2bt2KqKgoWFlZoUOHDtzcen2xtrbGmTNn8Omnn2LLli1IT0+HlZUVWrZsicGDB3NH8Hw+H4cOHcKcOXOwdu1alJeXo0+fPjhy5AjatWv33PeJiIiAu7s71qxZg7fffhuMMfj4+GDKlCncNhMnTsS5c+ewadMmrFixAowxpKen1zhA1NnZGXFxcVi4cCEWLlwIqVQKPz8/rF27tl5/ELu5ucHf3x+bNm1CZmYmeDwe/Pz8sG7dOr3+gUWIsfFYXf1qhBBCCGmU6Bw8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZsjsp8kpFArcu3cPtra2OrtWNiGEPA9jDAUFBXB3d2/w3R4JqQ+zD/
h79+7By8vL2GUQQpqozMxMnV5hkRBNmX3A29raAlD+ktnZ2Wn0GplMhujoaISGhkIgEOizPIOg9pg2c2sPYH5tqk97pFIpvLy8uM8gQgzN7ANe1S1vZ2enVcCLxWLY2dmZzYcTtcd0mVt7APNrU0PaQ6cGibHQiSFCCCHEDFHAE0IIIWaIAp4QQggxQ2Z/Dp4QQkyZXC6HTCYzdhmkkRAIBLCwsNBoWwp4QggxAsYYcnJy8PjxY2OXQhqZ5s2bw83N7bkDOCngCSHECFTh7uLiArFYTKPtyXMxxlBcXIwHDx4AACQSSZ3bU8ATQoiByeVyLtwdHR2NXQ5pRKysrAAADx48gIuLS53d9TTIjhBC6sAYUFys232qzrmLxWLd7pg0Caqfm+eN3aAjeEIIqUFJCZCbCzx6BAiF+nkP6pYn9aHpzw0FPCGEPKVQKAM9NxcoKqpcrq+AJ0SfKOAJIU1eUVHl0bpCYexqCNENOgdPCGmSKiqA+/eBa9eAf/5RBjyFu+kIDg7G7Nmzuec+Pj7YsGGDXt8zNjYWPB4PPB4Po0aN0ut71Ub1/s2bN2/wvijgCSFNilQKpKUBly8DWVlAaamxKyKauHDhAv71r38Z5L1u3LiBqKgorV4THh7OhbPqERQUpLZNWVkZZs6cCScnJ1hbW2PkyJHIyspS2yY7O1tnf8hQFz0hxOyVlwN5ecqj9PJyY1dD6sPZ2dlg7+Xi4lKvI+ihQ4di+/bt3HPhM4M3Zs+ejV9//RV79+6Fo6Mj5s6di+HDhyMhIYGb7ubm5gZ7e/sG1a9CR/CEELPEGJCfD6SkAFeuAPfumW64M8ZQVFRklAdjTOM6g4ODMXPmTMyePRstWrSAq6srtmzZgqKiIkydOhW2trbw9/fH77//rva65ORkDBs2DDY2NnB1dcWkSZOQm5vLrS8qKsLkyZNhY2MDiUSCdevWVXvvZ7vo169fj06dOsHa2hpeXl6YPn06CgsLufVRUVFo3rw5jh07hvbt28PGxgZDhw5Fdna2Ft+Z2ut79hQCAIhEIri5uXEPBwcHbt2TJ0+wdetWrFu3DoMHD0a3bt2wa9cuXLlyBcePH9e6Jk1QwBNCzEppqbLr/fJlZVe8VGrsip6vuLgYNjY2RnkUaznJf8eOHXBycsJff/2FmTNn4t1338W4cePQu3dvXLp0CUOGDMGkSZO4/WZnZ6N///7o2rUrLl68iKNHj+L+/fsYP348t88PPvgAJ0+exMGDBxEdHY3Y2FgkJCTUWQefz8dXX32Fq1evYseOHfjjjz8wf/78al/Xzz//HD/88ANOnz6NO3fuYN68eVq1V5v6YmNj4eLigjZt2mDatGncFecAICEhATKZDKGhodwyd3d3BAQEIC4uTuuaNEFd9ISQRq+26W1E97p06YKPPvoIALBw4UJ8+umncHJywrRp0wAAS5cuxebNm3H58mUEBQVh8+bNCAwMxKpVq7h9bNu2DV5eXrh58ybc3d2xdetW7Ny5EyEhIQCUf0R4enrWWUfVo2dfX1+sXLkS7777LjZt2sQtl8lk+Pbbb+Hv7w8AeO+997BixQqt2ltYWKhRfWFhYRg3bhy8vb2Rnp6OJUuWYODAgUhISIBIJEJOTg6EQiFatGih9jpXV1fk5ORoVZOmKOAJIY2Wanpbfj4glxu7mvoTi8Vq3cuGfm9tdO7cmfu/hYUFHB0d0alTJ26Zq6srAHBHrwkJCTh58iRsbGyq7Ss1NRUlJSUoLy9Hr169uOUODg5o27ZtnXWcPHkSq1atQnJyMqRSKSoqKlBaWoqioiJYW1tzbVOFO6C8dnvVo2pNpKamalTfhAkTuP8HBASgR48e8Pb2xuHDhzF69Oha988Y09sFj4zaRb9582Z07twZdnZ2sLOzQ69evdTO3TDGEBkZCXd3d1hZWSE4OBjXrl0zYsWEEGOrqAAePACSkyuntzXmcAeUU6Osra2N8tA2XAQCQbXaqy5T7U/xdM6hQqHAiB
EjkJSUpPZISUlBv379tBoDoJKRkYFhw4YhICAABw4cQEJCAjZu3AhA/fKtNdWq7fvVpz5A+ceEt7c3UlJSACgHz5WXlyM/P19tuwcPHnB/FOmaUQPe09MTn376KS5evIiLFy9i4MCBeOWVV7gQX7t2LdavX49vvvkGFy5cgJubG0JCQlBQUGDMsgkhRlB1eltmpvJSssT0BQYG4tq1a/Dx8UGrVq3UHtbW1mjVqhUEAgHOnz/PvSY/Px83b96sdZ8XL15ERUUF1q1bh6CgILRp0wb37t3TS/31qQ8A8vLykJmZyd3xrXv37hAIBIiJieG2yc7OxtWrV9G7d2+91G7UgB8xYgSGDRuGNm3aoE2bNvjkk09gY2OD8+fPgzGGDRs2YPHixRg9ejQCAgKwY8cOFBcXY8+ePcYsmxBiIOXlQHY2cPWqcjR8fr5ydDxpPGbMmIFHjx7h9ddfx19//YW0tDRER0fjrbfeglwuh42NDSIiIvDBBx/gxIkTuHr1KsLDw8Hn1x5P/v7+qKiowNdff420tDT88MMP+Pbbb/VSvyb1FRYWYt68eYiPj8ft27cRGxuLESNGwMnJCa+++ioAwN7eHhEREZg7dy5OnDiBxMREvPnmm+jUqRMGDx6sl9pN5hy8XC7H/v37UVRUhF69eiE9PR05OTlqIw5FIhH69++PuLg4vPPOOzXup6ysDGVlZdxz6dMhtDKZ7Ll33lFRbafp9qaO2mPazK09QMPaxJjyaD0vz3RGwMvl2rfHnL6fDeHu7o5z587hww8/xJAhQ1BWVgZvb28MHTqUC8nPPvsMhYWFGDlyJGxtbTF37lw8efKk1n127doV69evx5o1a7Bw4UL069cPq1evxuTJk/XShufVZ2FhgStXrmDnzp14/PgxJBIJBgwYgH379sHW1pbb7osvvoClpSXGjx+PkpISDBo0CFFRUXXe8rUheKy+Jxh05MqVK+jVqxdKS0thY2ODPXv2YNiwYYiLi0OfPn1w9+5duLu7c9v/61//QkZGBo4dO1bj/iIjI7F8+fJqy/fs2UO3ZiSEGExxcTEmTpyIJ0+ewM7OTm1daWkp0tPT4evri2bNmhmpQvKs2NhYDBgwAPn5+c+90E1wcDC6du2ql8vnRkVFYfbs2Xj8+HGN6zX9+TH6EXzbtm2RlJSEx48f48CBA5gyZQpOnTrFrX92AMjzRhwuXLgQc+bM4Z5LpVJ4eXkhNDS02i9ZbWQyGWJiYhASElJtkEZjRO0xbebWHkDzNikUwOPHyqN1U57eZmUlQ2qqdt8jqal0PxCteXp6YsSIEfjxxx8N/t42NjaoqKjQyR9+Rg94oVCIVq1aAQB69OiBCxcu4Msvv8SHH34IAMjJyeEGKQDPH3EoEokgEomqLRcIBFp/eNbnNaaM2mPazK09QO1tKipShvqjR5Uj4Os45Wp0qh5Ubb5H5va9bAp69uzJjXqvaVqfISQlJQGATrrtjR7wz2KMoaysDL6+vnBzc0NMTAy6desGACgvL8epU6ewZs0aI1dJCNGWXF55PXgaAU9MkZWVFXfA+TyxsbF6qUHT99eEUQN+0aJFCAsLg5eXFwoKCrB3717Exsbi6NGj4PF4mD17NlatWoXWrVujdevWWLVqFcRiMSZOnGjMsgkhWigoUIb648d0O1ZCDMmoAX///n1MmjQJ2dnZsLe3R+fOnXH06FHucoDz589HSUkJpk+fjvz8fPTs2RPR0dFqoxIJIaZHNYD8+vXK/xNCDMuoAb9169Y61/N4PERGRiIyMtIwBRFC6o0x4MmTyqN1ACgrM+1z64SYM5M7B08IaVzKypShnpdXebROF6MhxPgo4AkhWlMolFeVy80FjHSPFELIc1DAE0I0VlysDPWq09sIIaaJAp4QUie5vPJe68XFxq7GvG3ZYtj3+9e/tNs+ODiYuxBZYmIiunbtqvuiTJTqAmv29va1XmHO1NDwF0JIjQoKgPR05d3b7tyhcCdK06ZNQ3
Z2NgICAjTaPjY2Fq+88gokEgmsra3RtWtX7N69u9o2PB6v2uOff/5pcL017ZfH4+Gzzz7jtgkODq62/rXXXlPbT3Z2tl4uS6tPdARPCOHIZJUXo6lyzyZCOGKxGG5ubhpvHxcXh86dO+PDDz+Eq6srDh8+jMmTJ8POzg4jRoxQ2/bGjRtqlxR3dnZucL3Z2dlqz3///XdERERgzJgxasunTZuGFStWcM+trKzU1ru5ucHe3r7B9RgSBTwhTZxqeltenvJfGgFPNKW6Octvv/2GRYsW4caNG+jSpQu+//57dOrUCYDygmZVvf/++zh27BgOHjxYLeBdXFyee5OXqoKDg7mehF27dsHCwgLvvvsuVq5cyXWpP/vHyC+//IIBAwbAz89Pbbm2f7g0BtRFT0gTVVYG3L0LXLkCpKYq565TuJP6+OCDD/D555/jwoULcHFxwciRI+u8Xe6TJ0/g4OBQbXm3bt0gkUgwaNAgnDx5UqP33rFjBywtLfHnn3/iq6++whdffIHvv/++xm3v37+Pw4cPIyIiotq63bt3w8nJCR07dsS8efNQUFCg0fubMjqCJ6QJUd29LTdXeY6dEF1YtmwZdwXSHTt2wNPTEwcPHsT48eOrbfvzzz/jwoUL+O6777hlEokEW7ZsQffu3VFWVoYffvgBgwYNQmxsLPr161fne3t5eeGLL74Aj8dD27ZtceXKFXzxxReYNm1atW137NgBW1tbjB49Wm35G2+8wd3/5OrVq1i4cCH+/vtvxMTE1OfLYTIo4AlpAmh6G9GnXr16cf93cHBA27Ztcf369WrbxcbGIjw8HP/973/RsWNHbnnbtm3Rtm1btf1lZmbi888/R79+/XDmzBmEhYVx67/77ju88cYbAICgoCC1W4j36tUL69atg1wur3ZHtm3btuGNN96odivWqn8MBAQEoHXr1ujRowcuXbqEwMBAbb8cJoMCnhAzRdPbiDFVDV0AOHXqFEaMGIH169dj8uTJz319UFAQdu3aBUB5K3HVbVQB1HnL8NqcOXMGN27cwL59+567bWBgIAQCAVJSUijgCSGmo7BQGer5+XT3NmIY58+fR8uWLQEA+fn5uHnzJtq1a8etj42NxfDhw7FmzRr8S8PJ94mJiZBIJADqvo3r+fPnqz1v3bp1taP3rVu3onv37ujSpctz3/vatWuQyWTc+zdWFPCEmAHV9La8PKC01NjVkKZmxYoVcHR0hKurKxYvXgwnJyeMGjUKgDLcX375ZcyaNQtjxoxBTk4OAEAoFHID7TZs2AAfHx907NgR5eXl2LVrFw4cOIADBw48970zMzMxZ84cvPPOO7h06RK+/vprrFu3Tm0bqVSK/fv3V1sOAKmpqdi9ezeGDRsGJycnJCcnY+7cuejWrRv69OnTwK+McVHAE9JIMQZIpcqjdZreZh60vbKcqfj0008xa9YspKSkoEuXLjh06BCEQiEAICoqCsXFxVi9ejVWr17NvaZ///6IjY0FAJSXl2PevHm4e/curKys0LFjRxw+fBjDhg177ntPnjwZJSUlePHFF2FhYYGZM2dW6yXYu3cvGGN4/fXXq71eKBTixIkT+PLLL1FYWAgvLy+8/PLLWLZsWbVegMaGAp6QRqasrPJiNHSvdWIK+vbti6tXr9a4LioqClFRUXW+fv78+Zg/f3693lsgEGDDhg3YvHlzrdv861//qvXUgJeXF3f5XXNDAU9II8BY5d3baHobMaZNmzbh+++/R3x8vLFLMSgbGxtUVFRUG4FvyijgCTFhJSWV09sqKoxdDWnqdu/ejZKSEgBAy5YtERcXZ+SKDEc1ir8xddtTwBNiYlTT2/LygKIiY1dDSCUPDw+158HBwWBGHPyhOodvCLWN4jdlFPCEmIiiIuVV5mh6GyFEFyjgCTGiigrgwQPl/1NSAD7dHYIQoiMU8IQYwZMnldPb6NKxhBB9oIAnxEDKy5Whnpen/D8hhOgTBTwhesRY5d3bpFJjV0
MIaUoo4AnRA5reRggxNgp4QnREoai8extNbyP1kZBg2Pfr3l277YODg7mrviUmJqJr1666L8oIfHx8kJGRAUB5s5zmzZsbtyAdoTG7hDRQURGQkQH8/bfyXwp3Ys6mTZuG7OxsBAQEaLR9aWkpwsPD0alTJ1haWnI3odGFgoICzJ49G97e3rCyskLv3r1x4cIFtW3u37+P8PBwuLu7QywWY+jQoUhJSVHb5sKFCxrd2KaxMWrAr169Gi+88AJsbW3h4uKCUaNG4caNG2rbhIeHg8fjqT2CgoKMVDEhShUVwP37wLVrwD//KI/aae46aQrEYjHc3NxgaalZB7BcLoeVlRXef/99DB48WKe1vP3224iJicEPP/yAK1euIDQ0FIMHD8bdu3cBAIwxjBo1Cmlpafjll1+QmJgIb29vDB48GEVV/hJ3dnbm7mxnTowa8KdOncKMGTNw/vx5xMTEoKKiAqGhoWpfeAAYOnQosrOzuceRI0eMVDFp6qRSIC0NuHwZyMqiW7OSpi02NhY8Hg+HDx9Gly5d0KxZM/Ts2RNXrlzhtrG2tsbmzZsxbdo0uLm5abzv8PBwjBo1CsuXL4eLiwvs7OzwzjvvoPzpFJSSkhIcOHAAa9euRb9+/dCqVStERkbC19eXu/FMSkoKzp8/j82bN+OFF15A27ZtsWnTJhQWFuLHH3/U7RfDBBn1HPzRo0fVnm/fvh0uLi5ISEhAv379uOUikUjjH4yysjKUlZVxz6VPhy7LZDLINLz1lmo7Tbc3ddSehikvV55bf/RIP9PbFAqZ2r/mwNzaJJdr/zNnLr9vmvjggw/w5Zdfws3NDYsWLcLIkSNx8+ZNCASCBu33xIkTaNasGU6ePInbt29j6tSpcHJywieffIKKigrI5fJqN3+xsrLC2bNnAYDLgqrbWFhYQCgU4uzZs3j77bcbVJ+pM6lBdk+ePAGAal0lsbGxcHFxQfPmzdG/f3988skncHFxqXEfq1evxvLly6stj46Ohlgs1qqemJgYrbY3ddQe05aTY17tAcyvTdr8zBUXF+uxEtOybNkyhISEAAB27NgBT09PHDx4EOPHj2/QfoVCIbZt2waxWIyOHTtixYoV+OCDD7By5UrY2tqiV69eWLlyJdq3bw9XV1f8+OOP+PPPP9G6dWsAQLt27eDt7Y2FCxfiu+++g7W1NdavX4+cnBxkZ2c3uN2mzmQCnjGGOXPmoG/fvmqDN8LCwjBu3Dh4e3sjPT0dS5YswcCBA5GQkACRSFRtPwsXLsScOXO451KpFF5eXggNDYWdnZ1GtchkMsTExCAkJKTBf4GaAmqP5lT3Wjfk9DaFQoacnBi4uYWAz2/83x/A/NpkZSVDaqp2P3PSJnThg169enH/d3BwQNu2bXH9+nWNXnvnzh106NCBe75o0SIsWrQIANClSxe1A7NevXqhsLAQmZmZ8Pb2xg8//IC33noLHh4esLCwQGBgICZOnIhLly4BUN4r/sCBA4iIiICDgwMsLCwwePBghIWF6aLZJs9kAv69997D5cuXua4VlQkTJnD/DwgIQI8ePeDt7Y3Dhw9j9OjR1fYjEolqDH6BQKB1GNTnNaaM2lOz2qa3Gfq68Hy+wCzCsCpzaZPqDqHa/MyZ0+9affB4PI22c3d3527FClTvwa1r3/7+/jh16hSKiooglUohkUgwYcIE+Pr6ctt2794dSUlJePLkCcrLy+Hs7IyePXuiR48e2jWoETKJgJ85cyYOHTqE06dPw9PTs85tJRIJvL29q01zIERbRUXKUM/Pp+vBE9IQ58+fR8uWLQEo55HfvHkT7dq10+i1lpaWtd6K9e+//0ZJSQmsrKy497GxsamWE9bW1rC2tkZ+fj6OHTuGtWvXVtuXvb09AOXAu4sXL2LlypUat6+xMmrAM8Ywc+ZMHDx4ELGxsWp/ddUmLy8PmZmZkEgkBqiQmJuKisqj9ZISY1dDiHlYsWIFHB0d4erqisWLF8PJyUltvn
tycjLKy8vx6NEjFBQUcEfsz7tQTnl5OSIiIvDRRx8hIyMDy5Ytw3vvvQf+0+61Y8eOgTGGtm3b4tatW/jggw/Qtm1bTJ06ldvH/v374ezsjJYtW+LKlSuYNWsWRo0ahdDQUF1/GUyOUQN+xowZ2LNnD3755RfY2toiJycHgPIvLSsrKxQWFiIyMhJjxoyBRCLB7du3sWjRIjg5OeHVV181ZumkkZFKlaH++LHy+vCEmCJtryxnKj799FPMmjULKSkp6NKlCw4dOgShUMitHzZsGHelOADo1q0bAOVBXl0GDRqE1q1bo1+/figrK8Nrr72GyMhIbv2TJ0+wcOFCZGVlwcHBAWPGjMEnn3yidnokOzsbc+bMwf379yGRSDB58mQsWbJERy03bUYNeNVcxeDgYLXl27dvR3h4OCwsLHDlyhXs3LkTjx8/hkQiwYABA7Bv3z7Y2toaoWLSmJSXKwfM5ebS3dsI0ae+ffvi6tWrta6/fft2vfe9fPnyGmdGAcD48eOfO1L//fffx/vvv1/v92/MjN5FXxcrKyscO3bMQNUQc8CY+r3WCSG6tWnTJnz//feIj483dik607FjR6SlpRm7DJ0ziUF2hDRUaWnlvdbp7m2E6Mfu3btR8nTwSsuWLREXF2fkinTjyJEj3IWJNJ1O3RhQwJNGS6Go7IIvLDR2NYSYPw8PD7XnwcHBz+2Jra+oqCi97Lcm3t7eBnsvQ6KAJ42O6gJh167RgDlCCKkNBTxpFOTyyqN11cVo5HLDX4yGEF1S0C0IST1o+nNDAU9MWkFB5fQ2+iwk5kIoFILP5+PevXtwdnaGUCjU+MpvpOlijKG8vBwPHz4En89Xm4pYEwp4YnJkssoBc1VuDEiI2eDz+fD19UV2djbu3btn7HJIIyMWi9GyZUvugj+1oYAnJqHq9DaplM6tE9Oij5kZQqEQLVu25G57SogmLCwsYGlpqVGPDwU8Maqyssqj9SZ0+2zSCJSWAllZwN27gK0tUMsdqhuEx+OZ3U2giOmggCcGp1Aob/BC09uIqaka6nl5lcttbIxXEyH1RQFPDKa4WBnqjx7R3duI6agt1Alp7CjgiV6pprfl5VXOXyfE2CjUSVNAAU/0gqa3EVNDoU6aGgp4ojMyWeXFaGh6GzEFFOqkKaOAJw1C09uIqaka6o8e0c8kaboo4Em90PQ2Ykoo1AmpjgKeaEyhUJ5Tz81VnmMnxJhKS5WBnpVFoU5ITRoc8HK5HFeuXIG3tzdatGihi5qIiaHpbcRUqEJddU6dQp2Q2mkd8LNnz0anTp0QEREBuVyO/v37Iy4uDmKxGL/99huCg4P1UCYxNLlcGei5uTS9jRgXhToh9aN1wP/888948803AQC//vor0tPT8c8//2Dnzp1YvHgxzp07p/MiieEUFipDPT+fprcR46FQJ6ThtA743NxcuLm5AQCOHDmCcePGoU2bNoiIiMBXX32l8wKJ/qmmt+XlKT9YCTEGCnVCdEvrgHd1dUVycjIkEgmOHj2KTZs2AQCKi4thYWGh8wKJfjCmnNaWm6uc5kYfpsQYKNQJ0R+tA37q1KkYP348JBIJeDweQkJCAAB//vkn2rVrp/MCiW6VlVVejIamtxFjKC0F7t1Tjn6nUCdEf7QO+MjISAQEBCAzMxPjxo2DSCQCoLxH7YIFC3ReIGk41QforVs0YI4YhyrU795V/nFJoU6I/tVrmtzYsWMBAKVVTthOmTJFNxURnSkpUX6Y5uYqnxcWAny+cWsiTcvt2xTqhBiL1h/3crkcK1euhIeHB2xsbJCWlgYAWLJkCbZu3arzAol25HLg4UPg+nUgORl48ACoqDB2VaSpKC0F0tKAuDjl88uXlT+PFO6EGJ7WAf/JJ58gKioKa9euhVAo5JZ36tQJ33//vVb7Wr16NV544QXY2trCxcUFo0aNwo0bN9S2YYwhMjIS7u7usLKyQnBwMK5du6Zt2WavsFB5tHT5MnDnDnXFE8NRhf
qZM8DvvwNJSZW9RoQQ49E64Hfu3IktW7bgjTfeUBs137lzZ/zzzz9a7evUqVOYMWMGzp8/j5iYGFRUVCA0NBRFRUXcNmvXrsX69evxzTff4MKFC3Bzc0NISAgK6FqpqKgA7t8Hrl0DbtxQDliiuevEEGoKdTpSJ8S0aH0O/u7du2jVqlW15QqFAjIth2UfPXpU7fn27dvh4uKChIQE9OvXD4wxbNiwAYsXL8bo0aMBADt27ICrqyv27NmDd955R9vyzYLq7m00vY0YUlMcKMcYQ25uMm7ePAKJJBPDhg0zdkmEaEzrgO/YsSPOnDkDb29vteX79+9Ht27dGlTMkydPAAAODg4AgPT0dOTk5CA0NJTbRiQScZfHrSngy8rKUFblZuRSqRQAIJPJNP4DRLWdtn+w6FN5ufLSsY8eKf+vDYVCpvZvY0ftMZzSUiAnRxnsVae08XjKR234fJnav41JSUk+0tNPIC0tBqmpMSgoyAIA+Pn54dNPP9V4P6b0+UGaJq0DftmyZZg0aRLu3r0LhUKB//3vf7hx4wZ27tyJ3377rd6FMMYwZ84c9O3bFwEBAQCAnJwcAMqL61Tl6uqKjIyMGvezevVqLF++vNry6OhoiMVirWqKiYnRantTl5ND7TFlptoeoRDw8VE+tBUYaJptqkoulyMlJQWJiYlISkpCSkoKFFXOdQmFQnTo0AGBgYGIjo4Gr66/bKoopoEwxMi0DvgRI0Zg3759WLVqFXg8HpYuXYrAwED8+uuv3EVv6uO9997D5cuXcfbs2Wrrnv2FYozV+ku2cOFCzJkzh3sulUrh5eWF0NBQ2NnZaVSLTCZDTEwMQkJCIBAItGiFbpSWKo+W8vN1MwJeoZAhJycGbm4h4PMN3x5do/boXlkZkJ1d/Ui9vvh8GQIDY3DpUggUCtP7HkmlWUhNjUFaWjTS0/9AaWm+2nonp/bw9w+Fn18IWrZ8Cf7+lnBx0e4zQdV7SIix1Gse/JAhQzBkyBCdFTFz5kwcOnQIp0+fhqenJ7dcdc37nJwcSCQSbvmDBw+qHdWriEQi7uI7VQkEAq3Duj6vqS+FovLubVXGGOp03jqfLzCLQFSh9jRMWVnlFeX0dU5doRCYRMDLZCXIzDyDtLRjSEs7htxc9Zk4zZo1h4/PYPj5DYGf3xDY2XmprWdM2d2uzWeCMQ4OCKmqwfeDbwjGGGbOnImDBw8iNjYWvr6+aut9fX3h5uaGmJgY7vx+eXk5Tp06hTVr1hijZJ0rLFQeMT16RCPgif6pQv3uXfMe9c4YQ17eP08D/Sju3DmFiorKC3PxeHxIJC/Az28o/PyGwN39BfD5Rv04JETntP6J5vP5dZ6DksvlGu9rxowZ2LNnD3755RfY2tpy59zt7e1hZWUFHo+H2bNnY9WqVWjdujVat26NVatWQSwWY+LEidqWbjIqKiqvB093byP61lRCvbT0MdLTjyM9XXmULpVmqq23tfWAn98Q+PoOga/vYFhZORipUkIMQ+uAP3jwoNpzmUyGxMRE7Nixo8bBbXXZvHkzACA4OFht+fbt2xEeHg4AmD9/PkpKSjB9+nTk5+ejZ8+eiI6Ohq2trbalG53q7m2PH5vvhywxDU0h1BUKObKzLyIt7RjS04/h7t3zYKyyG8zCQoSWLftxoe7s3FHjAXKEmAOtA/6VV16ptmzs2LHo2LEj9u3bh4iICI33xTT41OHxeIiMjERkZKQ2ZZqM8nJlqOflaT+9jRBtNIVQLyi4i7S0aKSlHcXt28dRUvJIbb2jY7un59GHomXLfhAItJs5Q4g50dlJp549e2LatGm62l2jxpjyKD03V3nUToi+VA313FzzG8dRUVGKzMyzSEs7irS0Y3j48KraepHIXm1wnL19SyNVSojp0UnAl5SU4Ouvv1YbAd8UlZZWHq3TDV6IvqimtKlGv5tTqCsHx93gut0zMmJRUVFSZQse3N1fgK+vMtA9PHrS4DhCaqH1b0aLFi3Uzm
MxxlBQUACxWIxdu3bptLjGoLbpbYTokjmHemnpE9y+fYIb8S6V3lFbb2MjqTI4LgRisaORKiWkcdE64L/44gu1gOfz+XB2dkbPnj3RokULnRZnyoqKlB+0+fnKW7QSomvmGuoKhRw5OZe4bnfl4LjKXyILCyG8vF7iprA5OwfQ4DhC6kHrgFeNbm+KKioqj9ZLSp6/PSHaMtdQLyzM5i4yk54eg5KSPLX1Dg5tufPoLVv2h1BobaRKCTEfGgX85cuXNd5h586d612MqaLpbUSfVPdGio9Xjn43h1CvqCjD33//jRMnTiM19TgePFD/DBGJ7ODtPYgL9ebNfYxTKCFmTKOA79q1K3g83nOntfF4PK0udGPq7t9XdsHT9Daia+XllZeJffQICAxs3OHOGMOjRylVrhwXC5ms6s1WeJBIulcZHBcECwu6lCsh+qRRwKenp+u7DpOUna3ba8GTpq1qqFftfm+sP2NlZdIqg+OO4cmT22rrW7RoAS+v4fD1DXs6OM7JOIUS0kRpFPDP3vudEKKZ2kK9MWJM8XRwnDLQs7Liqg2O8/TsCz+/IWjVaiCGDctCQsLLJnGzGUKaonpPIE1OTsadO3dQ/kz/9ciRIxtcFCGNmTmFemFhDtLTo5+GejRKSnLV1js4tOa63b29gyEU2gBQ3i6Wx7trjJIJIU9pHfBpaWl49dVXceXKFbXz8qppLOZ0Dp4QTZlLqMvl5cjMPMdNYXvw4G+19UKhLXx8BnJT2Jo3961lT4QQY9M64GfNmgVfX18cP34cfn5++Ouvv5CXl4e5c+fi888/10eNhJgkcwh1xhjy829x3e4ZGSchk6lfscnNrTs32t3DoxcNjiOkkdA64OPj4/HHH3/A2dkZfD4ffD4fffv2xerVq/H+++8jMTFRH3USYhJUoa66oUtjDPWysgJkZPzBjXh//Fh9EK21tSt8fUOfXj0uBNbWLkaqlBDSEFoHvFwuh42N8jybk5MT7t27h7Zt28Lb2xs3btzQeYGEGFtjD3XGFLh/PwlpaceQmnoUd+/GQaGovFkCny+Ap2cf+PkNgb//ULi4dAaP10iH9hNCOFoHfEBAAC5fvgw/Pz/07NkTa9euhVAoxJYtW+Dn56ePGgkxuMYe6oWF97nBcenpMSgufqC2vkULf+48esuWwRCJbI1UqWkTiwEnJ6BVK6CgwNjVEKIdrQP+o48+QtHTu6p8/PHHGD58OF566SU4Ojpi3759Oi+QEENpzKEul5cjKyuOO5d+/776qTKh0Abe3gO5c+ktWvgbqVLTpgp0JyfA2Rmwtq5cTgFPGhutA37IkCHc//38/JCcnIxHjx5Vu8scIY2BTKYM9MYY6vn5qVUGx/2B8vJCtfWurt24QPf07A0LC6GRKjVdVQPdyQl4evaRELOgdcDv2LEDY8eOhbV15c0gHBwcdFoUIfokk1WOfm9MoV5eXoiMjJNITT2K9PRjyM9PVVsvFjvD1zcU/v5D4eMTAhsbVyNVaroo0ElTonXAz5s3D9OnT8eIESPw5ptvYujQobC0rPf1cggxCFWo370LPHjQOEJdOTju7ypXjjsHhULGrefzLbnBcX5+Q+Dq2pUGxz2DAp00ZVonc3Z2No4ePYoff/wRr732GqysrDBu3Di8+eab6N27tz5qJKReGmOoFxU9QHp6zNPBcdEoKrqvtr55cz8u0L29B0AksjNSpabJykp57pwCnZB6BLylpSWGDx+O4cOHo7i4GAcPHsSePXswYMAAeHp6IjU19fk7IURPGluoy+UyZGaewfXrP+DcueXIyVEfHCcQWMPbewA34t3BoZWRKjVNFOiE1K5BfetisRhDhgxBfn4+MjIycP36dV3VRYjGGluo5+enPT1CP4bbt/9Aebn68GxX165PLzKjHBxnaSkyUqWmhwKdEM3VK+BVR+67d+/G8ePH4eXlhddffx379+/XdX2E1Cozs3GEunJwXCwX6o8epaitF4ud0KNHBzRvPg
U+PsNgY+NmpEpNDwU6IfWndcC//vrr+PXXXyEWizFu3DjExsbSuXdiEKoj9Xv3AC8vIDHRNIOdMYYHDy5zl4LNzDxbbXCch0evp+fSh8LdPQAvvHAUFy8Oa/K3VrWyqpyDToFOSMNoHfA8Hg/79u3DkCFDaPQ80buaut/5fGXAm5Kiooe4fTuGu61qUVGO2np7ex/uPLqPz0C1wXE8nuzZ3TUZFOiE6I/WCb1nzx6dvfnp06fx2WefISEhAdnZ2Th48CBGjRrFrQ8PD8eOHTvUXtOzZ0+cP39eZzUQ09MYzqnL5TLcvXue63bPzk4AwLj1AoEY3t4DuHulOzi0pgtBgQKdEEMy6iF4UVERunTpgqlTp2LMmDE1bjN06FBs376dey4U0tW4zJFMBmRnKy8+Y6qh/vjx7SpXjjuBsjKp2noXl87c4Dgvr740OA7KQHdwoEAnxBiMGvBhYWEICwurcxuRSAQ3Nxp0ZI5MPdTLy4tw584ppKUdRVraMTx6dFNtvZWVI3x9Q552vYfCxkZipEpNR7NmyjB3dlY+DwlRnlIhhBiexgGflZUFT09PfdZSo9jYWLi4uKB58+bo378/PvnkE7i41H5/6rKyMpSVlXHPpVLlUZZMJoNMptm5TtV2VQdGNWaqdphCe2Qy4P59ZRf8s6GuaRDw+TK1f3VFOTjuCtLSYpCaGoPMzLOQy8u59TyeBTw9g+DnFwJ//1C4uXUDn29RZQ/1q0df7TGEZs0qR7g7OlbenEWhkCEnxzR+5nRBLle2Q9PPEG23JUQfeIwx9vzNgObNm+Prr7/GpEmT9FMIj1ftHPy+fftgY2MDb29vpKenY8mSJaioqEBCQgJEopq7PyMjI7F8+fJqy/fs2QOxWKyX2knjJZVK8ffffyMxMRFJSUl49OiR2npnZ2d069YN3bp1Q+fOndXuwUBIXYqLizFx4kQ8efIEdnZ0xUFieBoH/KZNm7BgwQKEhIRgy5YtcHR01G0hNQT8s7Kzs+Ht7Y29e/di9OjRNW5T0xG8l5cXcnNzNf4lk8lkiImJgZtbCPj8xj9tSXk0Zdj21HWk3lB8vgyBgTG4dClE62llCkUF7t79C6mp0UhLi8a9e+qD4ywtreDt3R/+/sqjdAeHNnofHNeQ9uib6gjd0VH5r6Z/3xjjZ06frKxkSE2NQUhICAQCzdojlUrh5OREAU+MRuMu+unTpyMsLAwRERHo2LEjtmzZgpEjR+qztmokEgm8vb2RkpJS6zYikajGo3uBQKDxL6YKny8wiw8nFX23R3VO/e5dZbjr+5y6QiHQKBCfPMngBsfdvn0CZWVP1NY7OwdwU9iUg+OacesYUz4MQdP26JPqHLpqpHtDB8WZy++QxdMzMdp8jmj7eUOIrmk1yM7X1xd//PEHvvnmG4wZMwbt27evNhf+0qVLOi2wqry8PGRmZkIiocFMpqKionJKmyFCXRMyWfHTwXHKUM/L+0dtvZWVA3x8Qp5eaCYUtrYeRqrU+FRH6KpQt7U1dkWEEF3RehR9RkYGDhw4AAcHB7zyyisNuthNYWEhbt26xT1PT09HUlISHBwc4ODggMjISIwZMwYSiQS3b9/GokWL4OTkhFdffbXe70karqKicvS7KYQ6YwwPH15Deroy0O/cOQ25vPI0DY/Hh4dHEDcnXSLp8czguKaDAp2QpkOrdP7vf/+LuXPnYvDgwbh69SqcVXNh6unixYsYMGAA93zOnDkAgClTpmDz5s24cuUKdu7cicePH0MikWDAgAHYt28fbOlTyeBUoX73Lp6OjjZuPQUFBbh27Sekpp5Aeno0Cgruqq23s/PiLgXr4zMIzZo1N06hRkaBTkjTpXHADx06FH/99Re++eYbTJ48WSdvHhwcjLrG+B07dkwn70Pqx5RCXaGowL17fz29ctxRZGdfhKJKQZaWzdCyZTB3r3RHx3ZN8spxFOiEEBWNA14ul+Py5ctGmQtPDMeUQl0qzawyOO44Sksfq6
13du4AX1/V4LiXIBBYGadQI6o6D93ZmQKdEFJJ44CPiYnRZx3EiEwl1GWyEmRmnkZq6tGng+Ouq61v1qwFfHwGw99/MF591QK3b082+qhzQ6NAJ4Roim4H10SZQqgzxpCbe527FGxm5mlUVJRy63k8PtzdX+SmsEkkL4DPtwCfL4OT0xHcvm34mg1NJKoMcwp0Qog2KOCbkIoK5ah3Y4Z6SUk+bt8+znW9FxRkqa23tfXkzqP7+AyClZWD4Ys0Igp0QoiuUMCbOdWROo8HHD2qfG5ICoUc2dkXuEC/d+9PMFb5l4WFhQgtW/bnRrw7ObVvUoPjRKLKG7MMHAjQBc8IIbpCAW+Gnu1+B4AePQx3xC6VZiE9PRqpqUefDo7LV1vv6Ngefn5D4O8/FF5e/ZrU4LiajtAVCuXFguhWqoQQXaKANxMVFcowV118Ri6vXKfv23VWVJTizp3T3FF6bu41tfUikf3T26oqu97t7Lz0W5AJqRroTk50hE4IMRwK+EasrlDXJ8YY8vL+4QL9zp1TqKgoqbIF7+ngOGWgu7u/CD6/afyoUaATQkxF0/jUNSPGCvXS0se4ffvE01A/Cqk0U229jY07dx7d13dwkxkcR4FOCDFVFPCNgDFCXTk47uLTK8cdw927f4KxyjdWDo7rBz+/IfD1HQJn545NYnCcUKh+tzUKdEKIqaKAN1HGCPWCgntVrhwXg5KSR2rrHR3bcd3uLVv2h0Ag1n9RRkaBTghprCjgTYihQ72iohSZmWe5bveHD6+qrReJ7OHjM4gLdXt7b/0WZAIo0Akh5oIC3sgMGerKwXE3kJp6FOnpx5CREVttcJxE0oO7cpyHR0+zHxwnFKqfQ7e3N3ZFhBCiG+b96W2iDBnqpaVPcOfOMfz111b8+ecsPHmSobbexkbCnUf39R0MsdhJf8WYAAp0QkhTQQFvIKpQV118Rl+hzpgC2dkJ3Ln0u3fjnxkcJ4SX10tct7uzcyezHhxHgU4Iaaoo4PXIUKFeWJiNtLRopKUdRXp6DEpK8tTWOzi0Rq9ebWBr+y94eg6CUGitn0JMgCrQVaFOgU4Iaaoo4HXMEKFeUVGGzMyzSE9XHqU/eHBZbb1QaAsfn8HcUbqDgwd69DiCixfDzO72qhTohBBSMwp4HdB3qDPG8OhRCjcnPSPjJGSy4ipb8CCRdIev75Cng+OCYGFRNchlui3IiFSj3AEgOBho3tyY1RBCiOmigK8nfYd6WZkUt2//wU1he/Lkttp6a2s3+PmFws9vKHx8BsPa2lm3BZiImo7QVTdnoSlshBBSOwp4Legz1BlTICfnktrgOIWi8t6ufL4AXl59uSlsLi6dzXJwHHW5E0KIblDAP4c+Q72wMAfp6dFPu95jUFz8UG29g0Nrrtvd2zsYQqH53U+UAp0QQvSDAr4OFy8q76uuq1CXy8uRmXmOGxx3/36S2nqh0AY+PoO4UG/Rwk83b2xCKNAJIcQwKODrcO+e8nxvQzx6dIs7j64cHFektt7NLZC7C5uHR69nBsc1flUD3cmJBsURQoihUMDrWFlZATIy/uDOpT9+nKa2Xix24aav+fqGwNraxUiV6gcFOiGEmAYK+AZiTIH795O4QM/KOldtcJynZx8u1F1du4DH4xuxYt2iQCeEENNk1IA/ffo0PvvsMyQkJCA7OxsHDx7EqFGjuPWMMSxfvhxbtmxBfn4+evbsiY0bN6Jjx47GKxpAUdEDpKdHP71pSwyKix+orW/Rwr/K4LgBEIlsjVSp7j0b6Pb2gBkO5ieEkEbPqAFfVFSELl26YOrUqRgzZky19WvXrsX69esRFRWFNm3a4OOPP0ZISAhu3LgBW1vDhaZcXo6srHikpR19OjguUW29QGANH5+B3BS2Fi38DVabvlGgE0JI42TUgA8LC0NYWFiN6xhj2LBhAxYvXozRo0cDAHbs2AFXV1fs2bMH77zzjl5ry87OxsWL3yI19TgyMv5AeXmh2npX125ct7
unZ29YWAj1Wo+hUKATQoh5MNlz8Onp6cjJyUFoaCi3TCQSoX///oiLi6s14MvKylBWVsY9l0qlAACZTAaZ7PmXbM3JycGAAQOQmpqqtlwsdoaf32D4+YXAzy8ENjauz7zSdC8Hy+fL1P6tSigEHByUYe7oqLw6XNVAZ0z5MCUKhUzt38bO3NoDmF+b5HJlOzT5DFHRZltC9MFkAz4nJwcA4OqqHqSurq7IyMio6SUAgNWrV2P58uXVlkdHR0MsFj/3fZXXfX8ECwsLtGvXDt26dUO3bt3g6+sLPl81OC5B84aYkMDAmDrXFxUpH41FTk7d7WlszK09gPm1KSZG8/YUFxc/fyNC9MhkA17l2cuxMsbqvETrwoULMWfOHO65VCqFl5cXQkNDYafhxct//fVX3LlzB//88woUCgHy84H8/PrVb2wCAeDkJIOHRwzE4hDY2wsafZe7QiFDTk4M3NxCwOc3/usGmFt7APNrk5WVDKmpMQgJCYFAoFl7VL2HhBiLyQa8m5sbAOWRvEQi4ZY/ePCg2lF9VSKRCCKRqNpygUCg8S/miy++iNzcXCgUgkZ3e1VloKtfKY4x5UV7mjcXmMWHrQqfT+0xdebSJgsL5b/afI5ouh0h+mKyAe/r6ws3NzfExMSgW7duAIDy8nKcOnUKa9asMXJ1pqOmQH/2CN3UzqETQgjRP6MGfGFhIW7dusU9T09PR1JSEhwcHNCyZUvMnj0bq1atQuvWrdG6dWusWrUKYrEYEydONGLVxqVJoBNCCCFGDfiLFy9iwIAB3HPVufMpU6YgKioK8+fPR0lJCaZPn85d6CY6Otqgc+CNTSBQjm53dqZAJ4QQojmjBnxwcDBYHf3HPB4PkZGRiIyMNFxRRkaBTgghRBdM9hx8U0GBTgghRB8o4A2MAp0Q0yUQACJR9QefD9y8aezqCNEOBbyeVQ101d3WKNAJMQ4er+YAFworg7wmdFE60hhRwOsYBTohxmVhUXOIq4KckKaCAr6BKNAJMbzautJFIsCSPtUIAUABrzVVoKvmoVOgE6J7tXWlq47Ca+tKJ4RUooB/Dgp0QvSDutIJ0S8K+Dr06we0aEGBTkh9CQSAlRV1pRNiDPQrVgc6WiekbrV1pVtYKG9w1LGjMuQJIYZHAU8IqVN9utJpWhkhxkcBTwjh5oFXnRNOXemENG70q0tIE0Cj0glpeijgCTETNCqdEFIVBTwhjciz3efPDmwjhBAVCnhCTEhdXekiEc3qIIRojgKeEAN7titdNaWsQwfA2trY1RFCzAUFPCF6oE1XumpKGZ0nJ4ToEgU8IfVAXemEEFNHAU9ILSwtaz8Sp6NtQoipo4AnTRqNSieEmCsKeGLW+Py6Q5y60gkh5ooCnjR6lpY1d6FTVzohpCmjgCeNQtWjcNW0srZtldPKqCudEEKqo4AnJkGbrnTVtDIrKwp3QgipDQU8MZiautJVD7pnOCGE6BYFPNEpGpVOCCGmwaQDPjIyEsuXL1db5urqipycHCNVRGhUOiGENA4mHfAA0LFjRxw/fpx7bkGHgXpHXemEENL4mXzAW1paws3NTePty8rKUFZWxj2XSqUAAJlMBplqdNZzqLZTKDTb3tSp2lG1PUKh+lSyqtPL6vobSsMvoV6pvj+afj9Nnbm1BzC/NtWnPebSdtJ48RhjzNhF1CYyMhKfffYZ7O3tIRKJ0LNnT6xatQp+fn51vubZbn0A2LNnD8RisT7LJYQQTnFxMSZOnIgnT57Azs7O2OWQJsikA/73339HcXEx2rRpg/v37+Pjjz/GP//8g2vXrsHR0bHG19R0BO/l5YXc3FyNf8lkMhliYmLg5hYCPt90+6SfvVZ61SPyql3pqvaEhIRAYAZ97NQe02dubapPe6RSKZycnCjgidGYdBd9WFgY9/9OnTqhV69e8Pf3x44dOzBnzpwaXyMSiSASiaotFwgEWn/Q8PkCowY8j6cMal2NSq/P18CUUXtMn7m1SZv2mF
O7SeNk0gH/LGtra3Tq1AkpKSnGLkVn+PzaA1wopFHphBBC6qdRBXxZWRmuX7+Ol156ydilaIVGpRNCCDE0kw74efPmYcSIEWjZsiUePHiAjz/+GFKpFFOmTDF2aWp03ZVOCCGENJRJB3xWVhZef/115ObmwtnZGUFBQTh//jy8vb0NXgt1pRNCCGlMTDrg9+7da9T3b9lSebcy6konhBDS2Jh0wBubgwMFOyGEkMaJb+wCCCGEEKJ7FPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwBNCCCFmyOwvdKO63b1UKtX4NTKZDMXFxZBKpWZxy0dqj2kzt/YA5tem+rRH9Zmj+gwixNDMPuALCgoAAF5eXkauhBDSFBUUFMDe3t7YZZAmiMfM/M9LhUKBe/fuwdbWFjwN7wgjlUrh5eWFzMxM2NnZ6blC/aP2mDZzaw9gfm2qT3sYYygoKIC7uzv4fDobSgzP7I/g+Xw+PD096/VaOzs7s/hwUqH2mDZzaw9gfm3Stj105E6Mif6sJIQQQswQBTwhhBBihijgayASibBs2TKIRCJjl6IT1B7TZm7tAcyvTebWHtI0mP0gO0IIIaQpoiN4QgghxAxRwBNCCCFmiAKeEEIIMUMU8IQQQogZarIBv2nTJvj6+qJZs2bo3r07zpw5U+f2p06dQvfu3dGsWTP4+fnh22+/NVClmtGmPf/73/8QEhICZ2dn2NnZoVevXjh27JgBq30+bb8/KufOnYOlpSW6du2q3wK1pG17ysrKsHjxYnh7e0MkEsHf3x/btm0zULXPp217du/ejS5dukAsFkMikWDq1KnIy8szULV1O336NEaMGAF3d3fweDz83//933NfY+qfB4QAAFgTtHfvXiYQCNh///tflpyczGbNmsWsra1ZRkZGjdunpaUxsVjMZs2axZKTk9l///tfJhAI2M8//2zgymumbXtmzZrF1qxZw/766y928+ZNtnDhQiYQCNilS5cMXHnNtG2PyuPHj5mfnx8LDQ1lXbp0MUyxGqhPe0aOHMl69uzJYmJiWHp6Ovvzzz/ZuXPnDFh17bRtz5kzZxifz2dffvklS0tLY2fOnGEdO3Zko0aNMnDlNTty5AhbvHgxO3DgAAPADh48WOf2pv55QIhKkwz4F198kf373/9WW9auXTu2YMGCGrefP38+a9eundqyd955hwUFBemtRm1o256adOjQgS1fvlzXpdVLfdszYcIE9tFHH7Fly5aZVMBr257ff/+d2dvbs7y8PEOUpzVt2/PZZ58xPz8/tWVfffUV8/T01FuN9aVJwJv65wEhKk2ui768vBwJCQkIDQ1VWx4aGoq4uLgaXxMfH19t+yFDhuDixYuQyWR6q1UT9WnPsxQKBQoKCuDg4KCPErVS3/Zs374dqampWLZsmb5L1Ep92nPo0CH06NEDa9euhYeHB9q0aYN58+ahpKTEECXXqT7t6d27N7KysnDkyBEwxnD//n38/PPPePnllw1Rss6Z8ucBIVWZ/c1mnpWbmwu5XA5XV1e15a6ursjJyanxNTk5OTVuX1FRgdzcXEgkEr3V+zz1ac+z1q1bh6KiIowfP14fJWqlPu1JSUnBggULcObMGVhamtaPdH3ak5aWhrNnz6JZs2Y4ePAgcnNzMX36dDx69Mjo5+Hr057evXtj9+7dmDBhAkpLS1FRUYGRI0fi66+/NkTJOmfKnweEVNXkjuBVnr11LGOsztvJ1rR9TcuNRdv2qPz444+IjIzEvn374OLioq/ytKZpe+RyOSZOnIjly5ejTZs2hipPa9p8fxQKBXg8Hnbv3o0XX3wRw4YNw/r16xEVFWUSR/GAdu1JTk7G+++/j6VLlyIhIQFHjx5Feno6/v3vfxuiVL0w9c8DQoAmeATv5OQECwuLakcbDx48qPZXuYqbm1uN21taWsLR0VFvtWqiPu1R2bdvHyIiIrB//34MHjxYn2VqTN
v2FBQU4OLFi0hMTMR7770HQBmQjDFYWloiOjoaAwcONEjtNanP90cikcDDw0PtVqPt27cHYwxZWVlo3bq1XmuuS33as3r1avTp0wcffPABAKBz586wtrbGSy+9hI8//rjRHfGa8ucBIVU1uSN4oVCI7t27IyYmRm15TEwMevfuXeNrevXqVW376Oho9OjRAwKBQG+1aqI+7QGUR+7h4eHYs2ePSZ0L1bY9dnZ2uHLlCpKSkrjHv//9b7Rt2xZJSUno2bOnoUqvUX2+P3369MG9e/dQWFjILbt58yb4fD48PT31Wu/z1Kc9xcXF4PPVP2osLCwAVB75Niam/HlAiBojDe4zKtU0n61bt7Lk5GQ2e/ZsZm1tzW7fvs0YY2zBggVs0qRJ3PaqaTH/+c9/WHJyMtu6datJTYvRtj179uxhlpaWbOPGjSw7O5t7PH782FhNUKNte55laqPotW1PQUEB8/T0ZGPHjmXXrl1jp06dYq1bt2Zvv/22sZqgRtv2bN++nVlaWrJNmzax1NRUdvbsWdajRw/24osvGqsJagoKClhiYiJLTExkANj69etZYmIiN+2vsX0eEKLSJAOeMcY2btzIvL29mVAoZIGBgezUqVPcuilTprD+/furbR8bG8u6devGhEIh8/HxYZs3bzZwxXXTpj39+/dnAKo9pkyZYvjCa6Ht96cqUwt4xrRvz/Xr19ngwYOZlZUV8/T0ZHPmzGHFxcUGrrp22rbnq6++Yh06dGBWVlZMIpGwN954g2VlZRm46pqdPHmyzt+Hxvh5QAhjjNHtYgkhhBAz1OTOwRNCCCFNAQU8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ6QGt2/fBo/HQ1JSkrFLIYSQeqGAJ41WeHg4Ro0aVW15bGwseDweHj9+XO99e3l5ITs7GwEBAfUvkBBCjKjJ3S6WkOcpLy+HUCiEm5ubsUshhJB6oyN4YvYOHDiAjh07QiQSwcfHB+vWrVNb7+Pjg48//hjh4eGwt7fHtGnTqnXRh4eHg8fjVXvExsYCAPLz8zF58mS0aNECYrEYYWFhSElJ4d4jKioKzZs3x7Fjx9C+fXvY2Nhg6NChyM7ONtSXgRDSxFDAE7OWkJCA8ePH47XXXsOVK1cQGRmJJUuWICoqSm27zz77DAEBAUhISMCSJUuq7efLL79EdnY295g1axZcXFzQrl07AMo/AC5evIhDhw4hPj4ejDEMGzYMMpmM20dxcTE+//xz/PDDDzh9+jTu3LmDefPm6bX9hJAmzMh3syOk3qZMmcIsLCyYtbW12qNZs2YMAMvPz2cTJ05kISEhaq/74IMPWIcOHbjn3t7ebNSoUWrbpKenMwAsMTGx2vseOHCAiUQidubMGcYYYzdv3mQA2Llz57htcnNzmZWVFfvpp58YY8p7ogNgt27d4rbZuHEjc3V1bfDXgRBCakJH8KRRGzBgAJKSktQe33//Pbf++vXr6NOnj9pr+vTpg5SUFMjlcm5Zjx49NHq/xMRETJ48GRs3bkTfvn2597C0tETPnj257RwdHdG2bVtcv36dWyYWi+Hv7889l0gkePDggXYNJoQQDdEgO9KoWVtbo1WrVmrLsrKyuP8zxsDj8dTWM8Zq3M/z5OTkYOTIkYiIiEBERESd+6vpvQUCgdp6Ho9X62sJIaSh6AiemLUOHTrg7Nmzasvi4uLQpk0bWFhYaLyf0tJSvPLKK2jXrh3Wr19f7T0qKirw559/csvy8vJw8+ZNtG/fvmENIISQeqIjeGLW5s6dixdeeAErV67EhAkTEB8fj2+++QabNm3Saj/vvPMOMjMzceLECTx8+JBb7uDggNatW+OVV17BtGnT8N1338HW1hYLFiyAh4cHXnnlFV03iRBCNEJH8MSsBQYG4qeffsLevXsREBCApUuXYsWKFQgPD9dqP6dOnUJ2djY6dOgAiUTCPeLi4gAA27
dvR/fu3TF8+HD06tULjDEcOXKkWrc8IYQYCo/RSUBCCCHE7NARPCGEEGKGKOAJIYQQM0QBTwghhJghCnhCCCHEDFHAE0IIIWaIAp4QQggxQxTwhBBCiBmigCeEEELMEAU8IYQQYoYo4AkhhBAzRAFPCCGEmKH/B4YHxYxK7zDVAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| hide\n", - "# Create single mixture and broadcast to N, H, K\n", - "weights = torch.ones((2,3))[None, :, :]\n", + "# Create single mixture and broadcast to N,H,K\n", + "weights = torch.ones((1,3))[None, :, :]\n", "lambdas = torch.Tensor([[5,10,15], [10,20,30]])[None, :, :]\n", "\n", "# Create repetitions for the batch dimension N.\n", @@ -3393,7 +1987,7 @@ "print('lambdas.shape (N,H,K) \\t', lambdas.shape)\n", "\n", "distr = PMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (lambdas, weights)\n", + "distr_args = (lambdas,)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -3498,44 +2092,38 @@ " if self.return_params:\n", " mu_names = [f\"-mu-{i}\" for i in range(1, n_components + 1)]\n", " std_names = [f\"-std-{i}\" for i in range(1, n_components + 1)]\n", - " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", - " self.param_names = [i for j in zip(mu_names, std_names, weight_names) for i in j]\n", - " self.output_names = self.output_names + self.param_names\n", + " mu_std_names = [i for j in zip(mu_names, std_names) for i in j]\n", + " self.output_names = self.output_names + mu_std_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 3 * n_components\n", + " self.outputsize_multiplier = 2 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " means, stds, weights = output.chunk(3, dim=-1)\n", - "\n", - " return (means, stds, weights)\n", + " means, stds = torch.tensor_split(output, 2, dim=-1)\n", + " return (means, stds)\n", "\n", - " def scale_decouple(\n", - " self,\n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " eps: 
float = 0.2,\n", - " ):\n", - " \"\"\"Scale Decouple\n", + " def scale_decouple(self, \n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float=0.2):\n", + " \"\"\" Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " means, stds, weights = output\n", + " means, stds = output\n", " stds = F.softplus(stds)\n", - " weights = F.softmax(weights, dim=-1)\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(means.size(dim=0), 1, -1)\n", - " scale = scale.view(means.size(dim=0), 1, -1)\n", + " scale = scale.view(means.size(dim=0), 1, -1) \n", " means = (means * scale) + loc\n", " stds = (stds + eps) * scale\n", - "\n", - " return (means, stds, weights)\n", + " return (means, stds)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -3557,11 +2145,17 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " means, stds, weights = distr_args\n", + " means, stds = distr_args\n", " B, H, K = means.size()\n", " Q = len(self.quantiles)\n", " assert means.shape == stds.shape\n", "\n", + " # Sample K ~ Mult(weights)\n", + " # shared across B, H\n", + " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", + " \n", + " weights = (1/K) * torch.ones_like(means, device=means.device)\n", + " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " means = means.flatten()\n", @@ -3601,15 +2195,17 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " means, stds, weights 
= distr_args\n", + " means, stds = distr_args\n", " B, H, K = means.size()\n", - " \n", + " \n", + " weights = (1/K) * torch.ones_like(means, device=means.device)\n", + " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", " \n", @@ -3632,7 +2228,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -3682,17 +2278,7 @@ "execution_count": null, "id": "8ebe4250", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -3716,40 +2302,7 @@ "execution_count": null, "id": "684d2382", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "means.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "stds.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", - "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", - "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfcAAAEyCAYAAADnUJkgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABDOklEQVR4nO3de1wU5f4H8M8gy3KHALkpIApoZt4yFVHBDEzTND12CjU085IXMjrpUTu5GAeMCq0sSy3g1CHL0jIzlUrJfqiB5rHMMBPQVCJvgKCwwvP7w5hclsvusrDL8nm/XvuSeWbmme9+WffLzDwzIwkhBIiIiMhiWJk6ACIiIjIuFnciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgW9xZ08OBBPPjgg/D394dSqYSXlxdCQ0Px9NNPaywXERGBiIiIFo9HkiSoVCqj9delSxeMHTvWaP01Zu/evZAkCXv37m2V7ekrIiICkiRBkiRYWVnByckJQUFBmDx5Mj766CPU1NRordOlSxdMnz5dr+1kZ2dDpVLhypUreq1Xd1u1+fzoo4/06qcxFRUVUKlU9f6O0tLSIEkSCgoKjLY9ImqYtakDsFSff/45HnjgAURERCA5ORk+Pj44f/48cnNzsWnTJrz88svysm+88YYJI20b+vfvj/3796Nnz56mDqVBXbt2xX//+18AQHl5OfLz8/HJJ59g8uTJGDZsGD777DO4uLjIy2/duhXOzs56bSM7Oxvx8fGYPn06XF1ddV7PkG3pq6KiAvHx8QCg9cfq/fffj/3798PHx6dFYyCim1jcW0hycjICAwOxa9cuWFv/leaHH34YycnJGsuac8EyNbVaDUmS4OzsjMGDB5s6nEbZ2dlpxfj4448jNTUVjz32GGbPno0PPvhAntevX78Wj+natWuws7NrlW01pmPHjujYsaNJYyBqT3hYvoVcvHgRHh4eGoW9lpWVZtrrHpYvKCiAJEl46aWXkJKSgsDAQDg6OiI0NBQHDhzQ6m/Dhg0ICQmBUqlEz549kZGRgenTp6NLly5NxllUVIQ5c+agc+fOsLGxQWBgIOLj43Hjxg2d3+vOnTvRv39/2NnZoUePHnjnnXe0lvnxxx8xfvx43HbbbbC1tUXfvn2Rnp6usUztoeJ3330XTz/9NDp16gSlUomTJ09qHZavzVFDr1u988476NOnD2xtbeHm5oYHH3wQx48f11hm+vTpcHR0xMmTJzFmzBg4OjrCz88PTz/9NCorK3XORX1mzJiBMWPGYPPmzSgsLJTb6x4qr6mpQUJCArp37w47Ozu4urqid+/eeOWVVwAAKpUKzzzzDAAgMDBQfq+1Oak9TbJlyxb069cPtra28p50Q6cArl+/jri4OHh7e8POzg7h4eH4/vvvNZZp6LTRrZ+xgoICuXjHx8fLsdVus6HD8sb+3axbtw59+vSBo6MjnJyc0KNHDyxbtkwrdiJLxz33FhIaGoqNGzciNjYWU6ZMQf/+/aFQKPTq4/XXX0ePHj2wZs0aAMC//vUvjBkzBvn5+fLh3fXr12POnDmYNGkSVq9ejZKSEsTHx+tUkIqKijBw4EBYWVnhueeeQ7du3bB//34kJCSgoKAAqampTfbxv//9D08//TT++c9/wsvLCxs3bsTMmTMRFBSE4cOHAwDy8vIwZMgQeHp64tVXX4W7uzvee+89TJ8+Hb///jsWL16s0efSpUsRGhqKN998E1ZWVvD09ERRUZHGMj4+Pti/f79G2x9//IGpU6eiU6dOcltSUhKWLVuGRx55BElJSbh48SJUKhVCQ0ORk5OD4OBgeVm1Wo0HHngAM2fOxNNPP41vvvkGzz//PFxcXPDcc881mYvGPPDAA9ixYwf27duHgICAepdJTk6GSqXCs88+i+HDh0OtVuPnn3+Wz68//vjjuHTpEl577TVs2bJFPsR965Gfw4cP4/jx43j22Wc
RGBgIBweHRuNatmwZ+vfvj40bN6KkpAQqlQoRERH4/vvv0bVrV53fn4+PD3bu3In77rsPM2fOxOOPPw4Aje6tG/t3s2nTJsybNw8LFy7ESy+9BCsrK5w8eRI//fSTzu+DyGIIahEXLlwQQ4cOFQAEAKFQKMSQIUNEUlKSKCsr01g2PDxchIeHy9P5+fkCgLjzzjvFjRs35PbvvvtOABDvv/++EEKI6upq4e3tLQYNGqTRX2FhoVAoFCIgIECjHYBYsWKFPD1nzhzh6OgoCgsLNZZ76aWXBABx7NixRt9jQECAsLW11Vj/2rVrws3NTcyZM0due/jhh4VSqRSnT5/WWH/06NHC3t5eXLlyRQghxJ49ewQAMXz4cK1t1c7bs2dPvbGUl5eLgQMHCh8fH1FQUCCEEOLy5cvCzs5OjBkzRmPZ06dPC6VSKaKjo+W2mJgYAUB8+OGHGsuOGTNGdO/evdE8CHHzd3jHHXc0OP+LL74QAMQLL7wgtwUEBIiYmBh5euzYsaJv376NbufFF18UAER+fr7WvICAANGhQweRl5dX77xbt1Wbz/79+4uamhq5vaCgQCgUCvH4449rvLdbP5+1YmJiND5jf/zxh9ZnrFZqaqpG3C3xu1mwYIFwdXXV2jZRe8TD8i3E3d0d+/btQ05ODlatWoXx48fjxIkTWLp0Ke68805cuHChyT7uv/9+dOjQQZ7u3bs3AMiHdvPy8lBUVISHHnpIYz1/f3+EhYU12f/27dsxYsQI+Pr64saNG/Jr9OjRAICsrKwm++jbty/8/f3laVtbW4SEhGgcfv76668xcuRI+Pn5aaw7ffp0VFRUaO2BT5o0qcnt3qq6uhp///vfcfz4cezYsUPeM96/fz+uXbumdTjaz88P99xzD7766iuNdkmSMG7cOI223r17a7wXQwkhmlxm4MCB+N///od58+Zh165dKC0t1Xs7vXv3RkhIiM7LR0dHa5zGCAgIwJAhQ7Bnzx69t62PlvjdDBw4EFeuXMEjjzyCTz/9VKf/Y0SWisW9hQ0YMABLlizB5s2bce7cOTz11FMoKCjQGlRXH3d3d41ppVIJ4OYgKeDmeX0A8PLy0lq3vra6fv/9d3z22WdQKBQarzvuuAMAdPpyrBtjbZy1MdbGWd8oaV9fX433UUvfEdVz587Fzp078dFHH6Fv374a222oP19fX63t2tvbw9bWVuu9XL9+Xa946lNbhGrfc32WLl2Kl156CQcOHMDo0aPh7u6OkSNHIjc3V+ft6Js7b2/vetvq5sbYWuJ3M23aNLzzzjsoLCzEpEmT4OnpiUGDBiEzM7MF3gGReWNxb0UKhQIrVqwAcHOAWXPVFtbff/9da17dc9T18fDwQFRUFHJycup9zZw5s9kx1sZ5/vx5rfZz587Jcdyq7oC4xqhUKmzcuBEbNmxAVFSU1nYBNLjtutttSdu2bYMkSfI4hPpYW1sjLi4Ohw8fxqVLl/D+++/jzJkzGDVqFCoqKnTajj65A+r/nBQVFWn80WZra1vvGI7m7Bm31O9mxowZyM7ORklJCT7//HMIITB27FijHH0haktY3FtIfV9aAOSRwI3twemqe/fu8Pb2xocffqjRfvr0aWRnZze5/tixY/Hjjz+iW7duGDBggNbLGDECwMiRI/H111/LxbzWf/7zH9jb2xt8idvbb7+N+Ph4rFy5st6R4KGhobCzs8N7772n0f7bb7/JpwpaQ2pqKr744gs88sgjGqcwGuPq6oq//e1vmD9/Pi5duiSPMq979Ka53n//fY1TBoWFhcjOztYYHd+lSxecOHFCo8BfvHhR6zOmT2wt/btxcHDA6NGjsXz5clRVVeHYsWPN6o+oreFo+RYyatQodO7cGePGjUOPHj1QU1ODI0eO4OWXX4ajoyOefPLJZm/DysoK8fHxmDNnDv72t7/hsccew5UrVxAfHw8fHx+tS+7qWrlyJTIzMzFkyBDExsaie/fuuH79OgoKCrBjxw6
8+eab6Ny5c7PjXLFihXx+/7nnnoObmxv++9//4vPPP0dycrLGjV10tX//fsydOxdhYWGIjIzUukRw8ODBcHV1xb/+9S8sW7YMjz76KB555BFcvHgR8fHxsLW1lY+iGMu1a9fkOK5du4ZTp07hk08+wfbt2xEeHo4333yz0fXHjRuHXr16YcCAAejYsSMKCwuxZs0aBAQEyCPH77zzTgDAK6+8gpiYGCgUCnTv3h1OTk4GxVxcXIwHH3wQs2bNQklJCVasWAFbW1ssXbpUXmbatGl46623MHXqVMyaNQsXL15EcnKy1k1xnJycEBAQgE8//RQjR46Em5sbPDw86r0ksyV+N7NmzYKdnR3CwsLg4+ODoqIiJCUlwcXFBXfffbfe/RG1aSYe0GexPvjgAxEdHS2Cg4OFo6OjUCgUwt/fX0ybNk389NNPGss2NFr+xRdf1OoX9YxGXr9+vQgKChI2NjYiJCREvPPOO2L8+PGiX79+Ta77xx9/iNjYWBEYGCgUCoVwc3MTd911l1i+fLm4evVqo+8xICBA3H///Vrt9Y2u/uGHH8S4ceOEi4uLsLGxEX369BGpqakay9SO4N68ebNWn3VHy9eOvm7odauNGzeK3r17CxsbG+Hi4iLGjx+vdSVATEyMcHBw0NruihUrtPqrT3h4uMb2HRwcRNeuXcXf/vY3sXnzZlFdXa21Tt0R7C+//LIYMmSI8PDwEDY2NsLf31/MnDlTHv1fa+nSpcLX11dYWVlp5KSh30d926rN57vvvitiY2NFx44dhVKpFMOGDRO5ubla66enp4vbb79d2Nraip49e4oPPvhAa7S8EEJ8+eWXol+/fkKpVAoA8jbrjpavZczfTXp6uhgxYoTw8vISNjY2wtfXVzz00EPi6NGj9eaEyJJJQugwjJfalCtXriAkJAQTJkzA+vXrTR0OERG1Mh6Wb+OKiorw73//GyNGjIC7uzsKCwuxevVqlJWVGeXQPxERtT0s7m2cUqlEQUEB5s2bh0uXLskD1N588035kjYiImpfeFieiIjIwvBSOCIiIgvD4k5ERGRhLP6ce01NDc6dOwcnJye9795FRGQoIQTKysrg6+vb5D0niIzN4ov7uXPntB5YQkTUWs6cOWOUm0ER6cPii3vtnbvOnDmjdUctapparcbu3bsRFRWl9/PoqWHMa8swp7yWlpbCz8/P4LsHEjWHxRf32kPxzs7OLO4GUKvVsLe3h7Ozs8m/LC0J89oyzDGvPB1IpsATQURERBaGxZ2IiMjCsLgTERFZGIs/505EZM6qq6uhVqtNHQaZOYVCgQ4dOui8PIs7EZEJCCFQVFSEK1eumDoUaiNcXV3h7e2t0yBNFnciIhOoLeyenp6wt7fnqHpqkBACFRUVKC4uBgD4+Pg0uQ6LOxFRK6uurpYLu7u7u6nDoTbAzs4OAFBcXAxPT88mD9FzQB0RUSurPcdub29v4kioLan9vOgyRoN77tSwPUmAsALQA9iXAtyzxNQRtW17kv76uTav1K7xUDzpQ5/PC/fciYiILAyLOxERkYVhcSciIrIwPOdORGQmVmeeaNXtPRUZ0qrb00VERAT69u2LNWvWmDqUNo177kREpLPp06djwoQJWu179+6FJEnNvinPli1b8Pzzzzerj7bgm2++wbhx4+Dr6wtJkvDJJ58YtX8WdyIiMrmqqioAgJubG5ycnEwcjeEiIiKQlpbW5HLl5eXo06cP1q5d2yJxsLgTEZHRVVZWIjY2Fp6enrC1tcXQoUORk5Mjz4+IiMCCBQsQFxcHDw8PREZGyu2LFi0CABQUFECSJK1XRESETtuo7S82NhaLFy+Gm5sbvL29oVKpGo39gQceqHe7kiRh27ZtRsnP6NGjkZCQgIkTJxqlv7pY3ImIyOgWL16Mjz/+GOnp6Th8+DCCgoIwatQoXLp0SV4mPT0d1tbW+L//+z+89dZbWn34+fnh/Pnz8uv777+Hu7s7hg8frvM2arfj4OC
AgwcPIjk5GStXrkRmZmaDsaempuL8+fP45ZdfAAA7duyQYxgzZowx0tPiOKCOiIj0sn37djg6Omq0VVdXyz+Xl5dj3bp1SEtLw+jRowEAGzZsQGZmJt5++20888wzAICgoCAkJyc3uJ0OHTrA29sbAHD9+nVMmDABoaGhUKlUOm8DAHr37o0VK1YAAIKDg7F27Vp89dVX8tGCumpvCbx//35IkoShQ4e2uVMF3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PjID2FpzNGjR9GlS5dGC3tiYiIcHR3l1759+zB37lytttZm0uLepUuXes9pzJ8/H8DNJ+GoVCr4+vrCzs4OEREROHbsmClDJiJq9xwcHBAUFKTx6tSpkzxfCAFA+3apQgiNNgcHB522l5CQgJ07d2Lbtm1yodV1G8DNon8rSZJQU1PT5HaPHj2q9YdBXXPnztX4I2fAgAFYuXKlVltrM2lxz8nJ0TifUnsOZPLkyQCA5ORkpKSkYO3atcjJyYG3tzciIyNRVlZmyrCJiKgRQUFBsLGxwbfffiu3qdVq5Obm4vbbb9err48//hgrV67Ehx9+iG7durXINhpSUFCA7t27N7qMm5ubxh85dnZ28PT01GprbSY9596xY0eN6VWrVqFbt24IDw+HEAJr1qzB8uXL5dGE6enp8PLyQkZGBubMmVNvn5WVlaisrJSnS0tLAdz8pevyJB26hbCCWtz8+08trADmr3nEX39Ly3llTo2qNp/mkFdziMFUHBwc8MQTT+CZZ56Bm5sb/P39kZycjIqKCsycOVPnfn788Uc8+uijWLJkCe644w4UFRUBAGxsbODm5maUbTSmpqYGhYWF+O2339CpUyejPujn6tWrOHnypDydn5+PI0eOyO+lucxmQF1VVRXee+89xMXFQZIknDp1CkVFRYiKipKXUSqVCA8PR3Z2doPFPSkpCfHx8Vrtu3fv5uMV9fbXU8syr4YAO3aYMBZLoP0UuMZG7JLhzCGvFRUVeq9jjneMM9SqVatQU1ODadOmoaysDAMGDMCuXbtw22236dxHbm4uKioqkJCQgISEBLk9PDwce/fuNco2GhMbG4vZs2ejR48eKC0tNWpxz83NxYgRI+TpuLg4AEBMTIxO18k3RRK1Jy5M7MMPP0R0dDROnz4NX19fZGdnIywsDGfPnoWvr6+83OzZs1FYWIhdu3bV2099e+5+fn64cOECnJ2dW/x9WJR9KVALK2ReDUGk4wkohi8ydURt274U+Uc5r5GRWucDyXBqtRqZmZlmkdfS0lJ4eHigpKRE67vn+vXryM/PR2BgIGxtbU0UIbU1+nxuzGbP/e2338bo0aM1Cjmg22CJWymVSiiVSq12hUJh8v/sbY7014AThVTD/DWXpD2Ah5/LlmEOeTX19ql9M4tL4QoLC/Hll1/i8ccfl9tqr22sPcdSq7i4GF5eXq0aHxERUVtiFsU9NTUVnp6euP/+++W2wMBAeHt7a5w7q6qqQlZWFoYMGWKKMImIiNoEkx+Wr6mpQWpqKmJiYmBt/Vc4kiRh0aJFSExMRHBwMIKDg5GYmAh7e3tER0ebMGIiIiLzZvLi/uWXX+L06dN47LHHtOYtXrwY165dw7x583D58mUMGjQIu3fvbnO3ASQiImpNJi/uUVFRaGjAviRJUKlUTT7Bh4iIiP5iFufciYiIyHhY3ImIiCwMizsREZGFYXEnIiKyMCYfUEdERH/ak9S62xuxtHW3p4OIiAj07dsXa9asMXUobRr33ImISGfTp0/HhAkTtNr37t0LSZJw5cqVZvW/ZcsWPP/8883qoy1ISkrC3XffDScnJ3h6emLChAnIy8szWv8s7kREZHJVVVUAbj4fvS3fyyQiIkKnp7plZWVh/vz5OHDgADIzM3Hjxg1ERUWhvLzcKHGwuBMRkdFVVlY
iNjYWnp6esLW1xdChQ5GTkyPPj4iIwIIFCxAXFwcPDw9ERkbK7YsWLQIAFBQUQJIkrVdERIRO26jtLzY2FosXL4abmxu8vb2bvHfKAw88UO92JUnCtm3bjJKfnTt3Yvr06bjjjjvQp08fpKam4vTp0zh06JBR+mdxJyIio1u8eDE+/vhjpKen4/DhwwgKCsKoUaNw6dIleZn09HRYW1vj//7v//DWW29p9eHn54fz58/Lr++//x7u7u4YPny4ztuo3Y6DgwMOHjyI5ORkrFy5UuO5JXWlpqbi/Pnz+OWXXwAAO3bskGMYM2aMMdKjpaSkBMDNIxfGwAF1RESkl+3bt8PR0VGjrbq6Wv65vLwc69atQ1paGkaPHg0A2LBhAzIzM/H222/jmWeeAQAEBQUhOTm5we106NBBfkLo9evXMWHCBISGhkKlUum8DQDo3bs3VqxYAQAIDg7G2rVr8dVXX8lHC+pyd3cHAOzfvx+SJGHo0KEteqpACIG4uDgMHToUvXr1Mkqf3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PiguLi4yW0ePXoUXbp0abSwJyYmwtHRUX7t27cPc+fO1WprzIIFC3D06FG8//77TcakK+65ExGRXhwcHBAUFKTR9ttvv8k/1z4vRJIkjWWEEBptDg4OOm0vISEBO3fuxHfffScXWl23Adws+reSJAk1NTVNbvfo0aNafxjUNXfuXDz00EPy9JQpUzBp0iRMnDhRbuvUqVOD6y9cuBDbtm3DN998g86dOzcZk664505EbdeeJM0XmYWgoCDY2Njg22+/ldvUajVyc3Nx++2369XXxx9/jJUrV+LDDz9Et27dWmQbDSkoKED37t0bXcbNzQ1BQUHyy87ODp6enlptdQkhsGDBAmzZsgVff/01AgMDjRJzLe65ExGRUTk4OOCJJ57AM888Azc3N/j7+yM5ORkVFRWYOXOmzv38+OOPePTRR7FkyRLccccdKCoqAgDY2NjAzc3NKNtoTE1NDQoLC/Hbb7+hU6dOWkcEmmP+/PnIyMjAp59+CicnJ/m9ubi41PvHgL5Y3ImIzIUZ3jHOUKtWrUJNTQ2mTZuGsrIyDBgwALt27cJtt92mcx+5ubmoqKhAQkICEhIS5Pbw8HDs3bvXKNtoTGxsLGbPno0ePXqgtLTUqMV93bp1ACBf1lcrNTUV06dPb3b/kmjoYeoWorS0FC4uLigpKYGzs7Opw2lb9iRBLaywo6wHxjj9DMU9S0wdUdvSyGFiOa9jxmidDyQ91Mmxeug/sGPHDrPIa2PfPdevX0d+fj4CAwNha2trogiprdHnc8Nz7kRERBbG5MX97NmzmDp1Ktzd3WFvb4++fftq3KFHCAGVSgVfX1/Y2dkhIiICx44dM2HERERE5s2kxf3y5csICwuDQqHAF198gZ9++gkvv/wyXF1d5WWSk5ORkpKCtWvXIicnB97e3oiMjERZWZnpAiciIjJjJh1Q98ILL8DPzw+pqalyW5cuXeSfhRBYs2YNli9fLl8zmJ6eDi8vL2RkZGDOnDmtHTIREZHZM2lx37ZtG0aNGoXJkycjKysLnTp1wrx58zBr1iwAQH5+PoqKihAVFSWvo1QqER4ejuzs7HqLe2VlJSorK+Xp0tJSADevf1Sr1S38jiyMsIJa3Dy4oxZWAPOnH9HwgTE5r8xp89TJcW0+zSGvusSgy41UiGrp83kxaXE/deoU1q1bh7i4OCxbtgzfffcdYmNjoVQq8eijj8rX/Xl5eWms5+XlhcLCwnr7TEpKQnx8vFb77t27YW9vb/w3YdF6yD9lXg0BduwwYSxtUY8ml2js4RWkizo5/jOf5pDXioqKBufZ2NjAysoK586dQ8eOHWFjY2PUy6zIsgghUFVVhT/++ANWVlawsbFpch2TXgpnY2ODAQMGIDs7W26
LjY1FTk4O9u/fj+zsbISFheHcuXPw8fGRl5k1axbOnDmDnTt3avVZ3567n58fLly4wEvh9LUvBWphhcyrIYh0PAHF8EWmjqht2ZfS4Cw5r5GRJr9kq02rk2P14IXIzMw0i7yWlpbCw8Ojwctwq6qqcP78+Ub/CCC6lb29PXx8fHQq7ibdc/fx8UHPnj012m6//XZ8/PHHACA/DaioqEijuBcXF2vtzddSKpVQKpVa7QqFwuT/2dsc6a9DQAqphvnTl9T0ITTFgdeguHU5C7qJSauom+M/P6Pm8P+9qe3b2NjA398fN27c0HiiGlF9OnToAGtra52P8Ji0uIeFhSEvL0+j7cSJEwgICAAABAYGwtvbG5mZmejXrx+Am3/tZmVl4YUXXmj1eImIjEmSJLP4Q4Qsj0mL+1NPPYUhQ4YgMTERDz30EL777jusX78e69evB3Dzg79o0SIkJiYiODgYwcHBSExMhL29PaKjo00ZOhERkdkyaXG/++67sXXrVixduhQrV65EYGAg1qxZgylTpsjLLF68GNeuXcO8efNw+fJlDBo0CLt37270+bpERETtmckfHDN27FiMHTu2wfmSJEGlUkGlUrVeUERERG2YyW8/S0RERMZl8j13IqIWVffpfLwigdoB7rkTERFZGBZ3IiIiC8PiTkREZGFY3ImIiCwMizsREZGF4Wh5IrIc+1IA9Lj5rw739ieyVNxzJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZGA6oo5vq3qJTl2V4G08iIrPEPXciIiILw+JORERkYXhYngxX36F8HqonIjI57rkTERFZGJMWd5VKBUmSNF7e3t7yfCEEVCoVfH19YWdnh4iICBw7dsyEERMREZk/gw7Lp6Wl4aGHHoK9vX2zA7jjjjvw5ZdfytMdOnSQf05OTkZKSgrS0tIQEhKChIQEREZGIi8vD05OTs3etimszjyh1fZUZIgJIiEiIktl0J770qVL4e3tjZkzZyI7O7tZAVhbW8Pb21t+dezYEcDNvfY1a9Zg+fLlmDhxInr16oX09HRUVFQgIyOjWdskIiKyZAbtuf/222/4/PPPkZaWhhEjRiAwMBAzZsxATEyMxmF1Xfzyyy/w9fWFUqnEoEGDkJiYiK5duyI/Px9FRUWIioqSl1UqlQgPD0d2djbmzJlTb3+VlZWorKyUp0tLSwEAarUaarXagHdrXJKo1mozh7gg6v87T/1nu7qB+dormMF7MReN5KzBvDJ/+qmTP50+r62UY7P4f03tliSEEM3poLi4GO+99x7S0tLw888/47777sPMmTMxbtw4WFk1XhC++OILVFRUICQkBL///jsSEhLw888/49ixY8jLy0NYWBjOnj0LX19feZ3Zs2ejsLAQu3btqrdPlUqF+Ph4rfaMjAyjnEYgItJFRUUFoqOjUVJSAmdnZ1OHQ+1Ms4s7ABw8eBDvvPMO0tPT4ePjgytXrsDV1RWpqamIiIjQuZ/y8nJ069YNixcvxuDBgxEWFoZz587Bx8dHXmbWrFk4c+YMdu7cWW8f9e25+/n54cKFC2bxH+z1PSe12uaPCDJBJHXsS6m3WS2skHk1BJGOJ6DQ5RGaw+KMHFgb1kBOgUbyyvzpp06Odfq8tlKOS0tL4eHhweJOJmHwde6///473n33XaSmpuLUqVOYMGECtm/fjnvvvRfXrl3Ds88+i5iYGBQWFurcp4ODA+6880788ssvmDBhAgCgqKhIo7gXFxfDy8urwT6USiWUSqVWu0KhgEKh0P0NthAhddBqM4e4mnr2tUKq0a24m8N7MRc65Esrr8yffhrIcaOf11bKsVn8v6Z2y6ABdePGjYOfnx/S0tIwa9YsnD17Fu+//z7uvfdeAICdnR2efvppnDlzRq9+Kysrcfz4cfj4+CAwMBDe3t7IzMyU51dVVSErKwtDhgwxJGwiIqJ2waA9d09PT2RlZSE0NLTBZXx8fJCfn99oP//4xz8wbtw4+Pv7o7i
4GAkJCSgtLUVMTAwkScKiRYuQmJiI4OBgBAcHIzExEfb29oiOjjYkbCIionbBoOIeHh6O/v37a7VXVVVh06ZNePTRRyFJEgICAhrt57fffsMjjzyCCxcuoGPHjhg8eDAOHDggr7d48WJcu3YN8+bNw+XLlzFo0CDs3r27zV7jTkRE1BoMKu4zZszAfffdB09PT432srIyzJgxA48++qhO/WzatKnR+ZIkQaVSQaVSGRImERFRu2TQOXchBCRJ0mr/7bff4OLi0uygiIiIyHB67bn369dPvgf8yJEjYW391+rV1dXIz8/HfffdZ/QgiYiISHd6Fffay9OOHDmCUaNGwdHRUZ5nY2ODLl26YNKkSUYNkIiIiPSjV3FfsWIFAKBLly74+9//Dltb2xYJioiIiAxn0IC6mJgYY8dBRERERqJzcXdzc8OJEyfg4eGB2267rd4BdbUuXbpklOCIiIhIfzoX99WrV8vXl69evbrR4k5ERESmo3Nxv/VQ/PTp01siFiKixu1JMnUERG2CzsW99rnouuATkIiIiExH5+Lu6ura5KH42pvbVFdXNzswIiIiMozOxX3Pnj0tGQeZqe8KLgHuN/8NC3Q1dTiWr77DziOWtn4cRNSm6Vzcw8PDWzIOIiIiMhKdi/vRo0fRq1cvWFlZ4ejRo40u27t372YHRkRERIbRubj37dsXRUVF8PT0RN++fSFJEoQQWsvxnDsREZFp6Vzc8/Pz0bFjR/lnIiIiMk86F/eAgIB6fyYiIiLzYtC95QEgLy8Pr732Go4fPw5JktCjRw8sXLgQ3bt3N2Z87cLqzBPyz09FhpgwEiIisgRWhqz00UcfoVevXjh06BD69OmD3r174/Dhw+jVqxc2b95s7BiJiIhIDwYV98WLF2Pp0qXYv38/UlJSkJKSguzsbCxbtgxLliwxKJCkpCRIkoRFixbJbUIIqFQq+Pr6ws7ODhERETh27JhB/VPz7T91EftPXTR1GDpZnXlCfhERtTcGFfeioiI8+uijWu1Tp05FUVGR3v3l5ORg/fr1WpfQJScnIyUlBWvXrkVOTg68vb0RGRmJsrIyQ8ImIiJqFww65x4REYF9+/YhKChIo/3bb7/FsGHD9Orr6tWrmDJlCjZs2ICEhAS5XQiBNWvWYPny5Zg4cSIAID09HV5eXsjIyMCcOXPq7a+yshKVlZXydO098dVqNdRqtV6xtQRJNH6ZoMliFPX/nVcjddD4FwDUDSx7c6bpcwxo5tnccgr8lcNGcykvbB45NQtN5EunvLZSPs3h+4baL0nUd7F6PbZt2yb/fO7cOTz33HN46KGHMHjwYADAgQMHsHnzZsTHx2Pu3Lk6BxATEwM3NzesXr0aERER6Nu3L9asWYNTp06hW7duOHz4MPr16ycvP378eLi6uiI9Pb3e/lQqFeLj47XaMzIyYG9vr3NcRETNUVFRgejoaJSUlPBhWtTqdC7uVla6HcHX5yY2mzZtwr///W/k5OTA1tZWo7hnZ2cjLCwMZ8+eha+vr7zO7NmzUVhYiF27dtXbZ3177n5+frhw4YJZ/Ad7fc/JRufPHxHU6PwWsy+l3uYDhSW45DYIbpcOwurPveGBXdwa7mdYXEtEp7db82xuOQVu7llmXg1BpOMJKKSaxvsxk5yahUZyCuiY11bKZ2lpKTw8PFjcySR0PixfU9PEF5Cezpw5gyeffBK7d++Gra1tg8vVfRJd7ZPnGqJUKqFUKrXaFQoFFAqF4QEbibjl8HZ9TBZjA1+EtQXdSlTLPzdajMwgx4Bmns0tp7dSSDVNF3czyalZ0CGnQBN5baV8msP3DbVfBg2oM4ZDhw6huLgYd911F6ytrWFtbY2srCy8+uqrsLa2hpeXFwBoDdArLi6W5xEREZE2g29iU15ejqysLJw+fRpVVVUa82JjY5tcf+TIkfjhhx802mbMmIEePXpgyZIl6Nq1K7y9vZGZmSmfc6+qqkJWVhZeeOEFQ8MmIiKyeAY
V9++//x5jxoxBRUUFysvL4ebmhgsXLsDe3h6enp46FXcnJyf06tVLo83BwQHu7u5y+6JFi5CYmIjg4GAEBwcjMTER9vb2iI6ONiRsIiKidsGg4v7UU09h3LhxWLduHVxdXXHgwAEoFApMnToVTz75pNGCW7x4Ma5du4Z58+bh8uXLGDRoEHbv3g0nJyejbYMsH2/vS0TtjUHF/ciRI3jrrbfQoUMHdOjQAZWVlejatSuSk5MRExMjX5eur71792pMS5IElUoFlUplUH9ERETtkUED6hQKhTxi3cvLC6dPnwYAuLi4yD8TERGRaRi0596vXz/k5uYiJCQEI0aMwHPPPYcLFy7g3XffxZ133mnsGNsVHkLWH3NGRKTJoD33xMRE+Pj4AACef/55uLu744knnkBxcTHWr19v1ACJiIhIPwbtuQ8YMED+uWPHjtixY4fRAiIiIqLmMfg6d+DmDWXy8vIgSRK6d++Ojh07GisuMmO3PvY1tKu7SWIwu0e57kkydQSkq/p+VyOWtn4cRC3IoMPypaWlmDZtGjp16oTw8HAMHz4cvr6+mDp1KkpKSowdIxEREenBoOL++OOP4+DBg9i+fTuuXLmCkpISbN++Hbm5uZg1a5axYyQiIiI9GHRY/vPPP8euXbswdOhQuW3UqFHYsGED7rvvPqMFR0RERPozaM/d3d0dLi4uWu0uLi647bbbmh0UERERGc6gPfdnn30WcXFx+M9//iNfEldUVIRnnnkG//rXv4waoCUwu8FfgNagInMYJEdERMahc3Hv16+fxnPUf/nlFwQEBMDf3x8AcPr0aSiVSvzxxx+YM2eO8SMlIiIinehc3CdMmNCCYRAREZGx6FzcV6xY0ZJxEBmFWZ4CISJqZc26ic2hQ4dw/PhxSJKEnj17ol+/fsaKi4iIiAxkUHEvLi7Gww8/jL1798LV1RVCCJSUlGDEiBHYtGkT71RHRERkQgZdCrdw4UKUlpbi2LFjuHTpEi5fvowff/wRpaWliI2NNXaMbcrqzBNt/tDw/lMX5RcREbU9Bu2579y5E19++SVuv/12ua1nz554/fXXERUVZbTgiIiISH8G7bnX1NRAoVBotSsUCtTU1Ojcz7p169C7d284OzvD2dkZoaGh+OKLL+T5QgioVCr4+vrCzs4OEREROHbsmCEhExERtRsGFfd77rkHTz75JM6dOye3nT17Fk899RRGjhypcz+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCTk5ORkpKCtWvXIicnB97e3oiMjERZWZkhYRMREbULBhX3tWvXoqysDF26dEG3bt0QFBSEwMBAlJWV4bXXXtO5n3HjxmHMmDEICQlBSEgI/v3vf8PR0REHDhyAEAJr1qzB8uXLMXHiRPTq1Qvp6emoqKhARkaGIWETERG1Cwadc/fz88Phw4eRmZmJn3/+GUII9OzZE/fee6/BgVRXV2Pz5s0oLy9HaGgo8vPzUVRUpHEOX6lUIjw8HNnZ2Q3eBa+yshKVlZXydGlpKQBArVZDrVYbHJ+uJFEtb69um75aNF6h+XddjdSh3sVq2xuar67TD1ohx4AZ5rRuHpqK48/ltfJX78Ktk9M2oYl86ZVXjRWNn+PW+L4haogkhBD6rHDjxg3Y2triyJEj6NWrV7MD+OGHHxAaGorr16/D0dERGRkZGDNmDLKzsxEWFoazZ8/C19dXXn727NkoLCzErl276u1PpVIhPj5eqz0jIwP29vbNjpeISBcVFRWIjo5GSUkJnJ2dTR0OtTN677lbW1sjICAA1dWG7TnV1b17dxw5cgRXrlzBxx9/jJiYGGRlZcnzb72fPXBzkF3dtlstXboUcXFx8nRpaSn8/PwQFRXVKv/BXt9zskX6nT8iyLgd7kvRmPyu4FK9i9VIHXDJbRDcLh2EVT17ywO7uGk2DIvTWqYlGJpno+exVp18NkUtrJB5NQSRjiegkJoYhNpKOW0TmsizXnm9VQvkuPaoIZE
pGPxUuKVLl+K9996Dm5tb0ys0wsbGBkFBN79wBwwYgJycHLzyyitYsmQJgJtPm6t98hxw8wY6Xl5eDfanVCqhVCq12hUKRb0j/I1NNHD4urmMHnudL776Cnfd+fUto/UF2go5BgzPc4t9BvQpJLdQSDVNF6FWymmboGOedcqrxgrGz3FrfN8QNcSg4v7qq6/i5MmT8PX1RUBAABwcHDTmHz582OCAhBCorKxEYGAgvL29kZmZKd/WtqqqCllZWXjhhRcM7p+IiMjSGVTcJ0yYAEmSoOfpei3Lli3D6NGj4efnh7KyMmzatAl79+7Fzp07IUkSFi1ahMTERAQHByM4OBiJiYmwt7dHdHR0s7ZLRERkyfQq7hUVFXjmmWfwySefQK1WY+TIkXjttdfg4eFh0MZ///13TJs2DefPn4eLiwt69+6NnTt3IjIyEgCwePFiXLt2DfPmzcPly5cxaNAg7N69G05OTgZtj4iIqD3Qq7ivWLECaWlpmDJlCuzs7JCRkYEnnngCmzdvNmjjb7/9dqPzJUmCSqWCSqUyqH8iIqL2SK/ivmXLFrz99tt4+OGHAQBTpkxBWFgYqqur0aFDywwkIyIiIv3odaeHM2fOYNiwYfL0wIEDYW1trXEbWiIiIjItvYp7dXU1bGxsNNqsra1x48YNowZFREREhtPrsLwQAtOnT9e4jvz69euYO3euxuVwW7ZsMV6EREREpBe9intMTIxW29SpU40WDFFjVmeeMHUI1Jr2JJk6AqI2S6/inpqa2lJxEBERkZEY9MhXIiIiMl8G3aGOLMf+UxeNtn5oV/fmhkNEREbAPXciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5kNPtPXeSNZoiIzACLOxERkYVhcSciIrIwLO5EREQWxqTFPSkpCXfffTecnJzg6emJCRMmIC8vT2MZIQRUKhV8fX1hZ2eHiIgIHDt2zEQRExERmT+T3n42KysL8+fPx913340bN25g+fLliIqKwk8//SQ/QjY5ORkpKSlIS0tDSEgIEhISEBkZiby8PDg5OZkyfKrH4NPrgT233IZ2xFLTBWMp6j4djTkloiaYtLjv3LlTYzo1NRWenp44dOgQhg8fDiEE1qxZg+XLl2PixIkAgPT0dHh5eSEjIwNz5swxRdhERERmzaweHFNSUgIAcHNzAwDk5+ejqKgIUVFR8jJKpRLh4eHIzs6ut7hXVlaisrJSni4tLQUAqNVqqNXqlgwfACCJ6hbp1+ixi5tnZGqkDo0uVju/qeVupRa3nO0xYtzGyG2LfQaEfme4anOk1nO9myu1/OfYLBiQG4Pz2gI5bY3vG6KGSEIIYeoggJvn1sePH4/Lly9j3759AIDs7GyEhYXh7Nmz8PX1lZedPXs2CgsLsWvXLq1+VCoV4uPjtdozMjJgb2/fcm+AiOgWFRUViI6ORklJCZydnU0dDrUzZrPnvmDBAhw9ehTffvut1jxJkjSmhRBabbWWLl2KuLg4ebq0tBR+fn6Iiopqlf9gr+852SL9zh8RZPjK+1IanPVdwaVGV62ROuCS2yC4XToIKx33nAd2cftrYlhcwwvqyRi5vTWPtf01K7e1GslxfdTCCplXQxDpeAIKqUa/bRkxp2ZNz5wCzchrC+S09qghkSmYRXFfuHAhtm3bhm+++QadO3eW2729vQEARUVF8PHxkduLi4vh5eVVb19KpRJKpVKrXaFQQKFQGDlybUKPw9f6aFbsjXzJ6VqwrUS1zstqfKkaMefGyO2teaztzyifC30LdG08Uo3+xb0VPsdmwcCcAgbktQVy2hrfN0QNMWlxF0Jg4cKF2Lp1K/bu3YvAwECN+YGBgfD29kZmZib69esHAKiqqkJWVhZeeOEFU4RMJmCWt7StO4Kd2jZekUAWxqTFff78+cjIyMCnn34KJycnFBUVAQBcXFxgZ2cHSZKwaNEiJCYmIjg4GMHBwUhMTIS9vT2io6NNGToREZH
ZMmlxX7duHQAgIiJCoz01NRXTp08HACxevBjXrl3DvHnzcPnyZQwaNAi7d+/mNe5EREQNMPlh+aZIkgSVSgWVStXyAREREVkA3lueiIjIwrC4ExERWRizuBSurTPL0dxERNRucc+diIjIwnDPnaitqe8ae16XTUS34J47ERGRhWFxJyIisjA8LN9O7D910dQhmAUOfiSi9oB77kRERBaGxZ2IiMjCsLgTERFZGBZ3IiIiC8PiTkREZGE4Wp7MEke1ExEZjnvuREREFobFnYiIyMKwuBMREVkYFnciIiILY9Li/s0332DcuHHw9fWFJEn45JNPNOYLIaBSqeDr6ws7OztERETg2LFjpgmWiIiojTDpaPny8nL06dMHM2bMwKRJk7TmJycnIyUlBWlpaQgJCUFCQgIiIyORl5cHJycnE0RsOrWjx5+KDDFxJHpqA48nvXVkfpvLLxFRPUxa3EePHo3Ro0fXO08IgTVr1mD58uWYOHEiACA9PR1eXl7IyMjAnDlz6l2vsrISlZWV8nRpaSkAQK1WQ61WG/kd3CSJ6hbptz4GvQdhhRqpg0Hbq11Pn/XVookDQjq8h9bM6a10zm9T77Gp7fy5fpO50rnDlvlsm5QBuTFaXo2Qz5b6viHShSSEEKYOAgAkScLWrVsxYcIEAMCpU6fQrVs3HD58GP369ZOXGz9+PFxdXZGenl5vPyqVCvHx8VrtGRkZsLe3b5HYiYjqqqioQHR0NEpKSuDs7GzqcKidMdub2BQVFQEAvLy8NNq9vLxQWFjY4HpLly5FXFycPF1aWgo/Pz9ERUW12H+w1/ecbJF+6zN/RJD+K+1LwXcFlwzaXo3UAZfcBsHt0kFY6bg3PbCLW+MLDItrfD5aN6e30jm/+1KatR21sELm1RBEOp6AQqppVl8AdMppm2NAjo2WVyPks/aoIZEpmG1xryVJksa0EEKr7VZKpRJKpVKrXaFQQKFQGD0+ABAGHvI2hEHvQarRuTA3xEpU69xHk1+qOryH1szprXTOrzEKMm7myijFvYU+2ybVjLw0O69GyGdLfd8Q6cJsi7u3tzeAm3vwPj4+cntxcbHW3nx7xYFgZFHqG3xpKm1gIChRY8z2OvfAwEB4e3sjMzNTbquqqkJWVhaGDBliwsiIiIjMm0n33K9evYqTJ/86t5qfn48jR47Azc0N/v7+WLRoERITExEcHIzg4GAkJibC3t4e0dHRJoyaiIjIvJm0uOfm5mLEiBHydO1AuJiYGKSlpWHx4sW4du0a5s2bh8uXL2PQoEHYvXu3WVzjzqeWNWz/qYvyz6Fd3U0YCRFR+2TS4h4REYHGrsSTJAkqlQoqlar1giIiImrjzPacOxERERnGbEfLm6s2cTjenEYdtzG8AoGILAH33ImIiCwMizsREZGF4WF5C3brqHWycHVPxfCGK0TtGvfciYiILAyLOxERkYXhYfk2pqHR+hqjvM3ot8ob2hARtT7uuRMREVkYM9rHMy9t4np2IiKienDPnYiIyMKwuBMREVkYHpa3QG3h+nZzPu0x+PT6mz/suTkAkIMCiait4Z47ERGRhWFxJyIisjA8LE+tb08SBp9u/NTBAf/ZrRRMw9rC6Y02g08qJGpV3HMnIiKyMG2iuL/xxhsIDAyEra0t7rrrLuzbt8/UIREREZktsz8s/8EHH2DRokV44403EBYWhrfeegujR4/GTz/9BH9/f1OH1+rkkdxkEm125Hx9h8X55Dgii2X2e+4pKSmYOXMmHn/8cdx+++1Ys2YN/Pz8sG7dOlOHRkREZJbMes+9qqoKhw4dwj//+U+N9qioKGRnZ9e7TmVlJSorK+XpkpISAMClS5egVqt13nbl1RIDIm55ZddvtOr2aiSBiooKlF2/AStR3ay+Ll6tkn9u6n2YMv+65vjW96MvtbBCRUUFLkpVUEg1BvfToIt1BgPWF2vdZVpSM3KljxbNq575KisrAwAIIYwbB5EOzLq4X7hwAdXV1fD
y8tJo9/LyQlFRUb3rJCUlIT4+Xqs9MDCwRWKklvKKqQNo41RGWob+ojJorbKyMri4uBg3FKImmHVxryVJksa0EEKrrdbSpUsRFxcnT9fU1ODSpUtwd3dvcB1qWGlpKfz8/HDmzBk4OzubOhyLwby2DHPKqxACZWVl8PX1NWkc1D6ZdXH38PBAhw4dtPbSi4uLtfbmaymVSiiVSo02V1fXlgqx3XB2djb5l6UlYl5bhrnklXvsZCpmPaDOxsYGd911FzIzMzXaMzMzMWTIEBNFRUREZN7Mes8dAOLi4jBt2jQMGDAAoaGhWL9+PU6fPo25c+eaOjQiIiKzZPbF/e9//zsuXryIlStX4vz58+jVqxd27NiBgIAAU4fWLiiVSqxYsULrVAc1D/PaMphXopskwes0iIiILIpZn3MnIiIi/bG4ExERWRgWdyIiIgvD4k5ERGRhWNwJAPDNN99g3Lhx8PX1hSRJ+OSTTzTmCyGgUqng6+sLOzs7RERE4NixY6YJto1ISkrC3XffDScnJ3h6emLChAnIy8vTWIZ51d+6devQu3dv+UY1oaGh+OKLL+T5zCkRizv9qby8HH369MHatWvrnZ+cnIyUlBSsXbsWOTk58Pb2RmRkpPxwDNKWlZWF+fPn48CBA8jMzMSNGzcQFRWF8vJyeRnmVX+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCZUyIAgqgOAGLr1q3ydE1NjfD29harVq2S265fvy5cXFzEm2++aYII26bi4mIBQGRlZQkhmFdjuu2228TGjRuZU6I/cc+dmpSfn4+ioiJERUXJbUqlEuHh4Q0+epe01T5+2M3NDQDzagzV1dXYtGkTysvLERoaypwS/YnFnZpU++AefR69S5qEEIiLi8PQoUPRq1cvAMxrc/zwww9wdHSEUqnE3LlzsXXrVvTs2ZM5JfqT2d9+lsyHPo/eJU0LFizA0aNH8e2332rNY1711717dxw5cgRXrlzBxx9/jJiYGGRlZcnzmVNq77jnTk3y9vYGAL0evUt/WbhwIbZt24Y9e/agc+fOcjvzajgbGxsEBQVhwIABSEpKQp8+ffDKK68wp0R/YnGnJgUGBsLb21vj0btVVVXIysrio3cbIYTAggULsGXLFnz99dcIDAzUmM+8Go8QApWVlcwp0Z94WJ4AAFevXsXJkyfl6fz8fBw5cgRubm7w9/fHokWLkJiYiODgYAQHByMxMRH29vaIjo42YdTmbf78+cjIyMCnn34KJycneW/SxcUFdnZ2kCSJeTXAsmXLMHr0aPj5+aGsrAybNm3C3r17sXPnTuaUqJYph+qT+dizZ48AoPWKiYkRQty8bGvFihXC29tbKJVKMXz4cPHDDz+YNmgzV18+AYjU1FR5GeZVf4899pgICAgQNjY2omPHjmLkyJFi9+7d8nzmlEgIPvKViIjIwvCcOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7URMKCgogSRKOHDli6lCIiHTC4k5thhAC9957L0aNGqU174033oCLiwtOnz5tgsiIiMwLizu1GZIkITU1FQcPHsRbb70lt+fn52PJkiV45ZVX4O/vb8IIiYjMA4s7tSl+fn545ZVX8I9//AP5+fkQQmDmzJkYOXIkpk+frrX8I488gocfflijTa1Ww8PDA6mpqQCAnTt3YujQoXB1dYW7uzvGjh2LX3/9tcEY0tLS4OrqqtH2ySefQJIkjbbPPvsMd911F2xtbdG1a1fEx8fjxo0b8nyVSgV/f38olUr4+voiNjZWz2wQEdWPj3ylNicmJgZbt27FjBkzMGnSJPz444/48ccf6112ypQpeOihh3D16lU4OjoCAHbt2oXy8nJMmjQJAFBeXo64uDjceeedKC8vx3PPPYcHH3wQR44cgZWVYX//7tq1C1OnTsWrr76KYcOG4ddff8Xs2bMBACtWrMBHH32E1at
XY9OmTbjjjjtQVFSE//3vfwZti4hIi2kfSkdkmN9//1107NhRWFlZiS1btjS4XFVVlfDw8BD/+c9/5LZHHnlETJ48ucF1iouLBQD5MaH5+fkCgPj++++FEEKkpqYKFxcXjXW2bt0qbv3vNGzYMJGYmKixzLvvvit8fHyEEEK8/PLLIiQkRFRVVen0fomI9MHD8tQmeXp6Yvbs2bj99tvx4IMPNricQqHA5MmT8d///hfAzb30Tz/9FFOmTJGX+fXXXxEdHY2uXbvC2dkZgYGBANCswXmHDh3CypUr4ejoKL9mzZqF8+fPo6KiApMnT8a1a9fQtWtXzJo1C1u3btU4ZE9E1Bw8LE9tlrW1Naytm/4IT5kyBeHh4SguLkZmZiZsbW0xevRoef64cePg5+eHDRs2wNfXFzU1NejVqxeqqqrq7c/KygpCCI02tVqtMV1TU4P4+HhMnDhRa31bW1v4+fkhLy8PmZmZ+PLLLzFv3jy8+OKLyMrKgkKh0OXtExE1iMWdLN6QIUPg5+eHDz74AF988QUmT54MGxsbAMDFixdx/PhxvPXWWxg2bBgA4Ntvv220v44dO6KsrAzl5eVwcHAAAK1r4Pv374+8vDwEBQU12I+dnR0eeOABPPDAA5g/fz569OiBH374Af3792/GuyUiYnGndkCSJERHR+PNN9/EiRMnsGfPHnnebbfdBnd3d6xfvx4+Pj44ffo0/vnPfzba36BBg2Bvb49ly5Zh4cKF+O6775CWlqaxzHPPPYexY8fCz88PkydPhpWVFY4ePYoffvgBCQkJSEtLQ3V1tdzXu+++Czs7OwQEBLRECoioneE5d2oXpkyZgp9++gmdOnVCWFiY3G5lZYVNmzbh0KFD6NWrF5566im8+OKLjfbl5uaG9957Dzt27MCdd96J999/HyqVSmOZUaNGYfv27cjMzMTdd9+NwYMHIyUlRS7erq6u2LBhA8LCwtC7d2989dVX+Oyzz+Du7m70905E7Y8k6p48JCIiojaNe+5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7ERGRhWFxJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZmP8HzQms27H+BcMAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABcFElEQVR4nO3dd1iTV/sH8G8CCSQs2UO2RK2guCp1FRyoqDi6bG2rtNYurVqrba1txVF81WqHVrsc1Mmvtb6v1klVUOuoUqy4QVBR9pA9EvL8/njMA4EASQgkhPtzXbk0z8o5EHLnnOec+/AYhmFACCGEEKPC13cBCCGEEKJ7FOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOB14MqVK5gxYwa6dOkCkUgEkUgEiUSCt956C5cuXVI6NjIyEjweD3w+H6mpqQ2uVVZWBmtra/B4PERERHDb7969Cx6PBx6Ph8jISJXleP3117ljmqMoh+IhFArh4+ODuXPn4tGjR5pUv0mKcn/55Zc6u2ZcXBx4PB5+++23Zo9V1LOukJAQhISEKG1r6ufamEOHDjV6jre3t9LvryVCQkKUflcikQiBgYH4+uuvIZfLdfIaTVH8vOPi4rhtERER8Pb21vhaGzduxLZt2xpsV7xPVO0jhGiHAnwL/fDDD+jXrx8uXLiAuXPn4o8//sDBgwcxb948XLt2DU8++STu3LnT4DxLS0ts3bq1wfZff/0VUqkUAoFA5etZWVlh27ZtDT7YS0tL8euvv8La2lqj8h85cgTnzp3DwYMHMWnSJKxfvx5hYWEwlgzGb7zxBs6dO9fscefOncMbb7yh0bUPHTqEpUuXqty3b98+fPbZZxpdrym+vr44d+4czp07h5iYGHTu3Bnvv/8+Fi1apLPX0MRnn32Gffv2aXxeYwHe1dUV586dw7hx43RQOkIIAJjquwDt2V9//YV3330X48aNw2+//QahUMjtGz58OGbNmoVff/0VIpGowblTpkxBdHQ0li5dCj6/9nvW5s2bMXnyZOzfv1/la06ZMgU///wzjh8/jtDQUG57TEwMampqMGnSJOzYsUPtOvTr1w8ODg4AgNDQUOTn52P79u04e/YsBg8erPKc8vJyiMVitV9Dn9zd3eHu7t7scU899ZROX7dPnz46vZ5IJFIqY1hYGLp3744NGzZgxYoVKr8QMgyDyspKle+/lurSpYtOr2dmZqbz3wEhHR214FsgKioKJiYm+OGHH5SCe13PP/883NzcGmx//fXXkZ6ejtjYWG7b7du3cebMGbz++uuNvma3bt0waNAgbNmyRWn7li1b8Mwzz8DGxkbL2rAUH7L37t0DwHYPBwQE4NSpUxg0aBDEYjFXvvv37+OVV16Bk5MTzMzM8MQTT2Dt2rUqu43lcjm++OILeHp6wtzcHP3798fx48eVjklJScFrr70GiUQCsViMzp07Izw8HElJSSrLWllZifnz58PFxQUikQjBwcFITExUOkZVF70q9bvoy8vLsWDBAvj4+MDc3Bx2dnbo378/du/eDYDtov7uu++4cxWPu3fvAlDdRf/o0SN88MEH8PX1hZmZGZycnDB27FjcvHmz2fLVJxAI0K9fP5SXlyM3N5crx+zZs/H999/jiSeegJmZGaKjowEAycnJmDp1qtLvSlH+um7evIkxY8ZALBbDwcEBb7/9NkpKShocp6qLXi6XY/369ejduzdEIhE6deqEp556ivuy6u3tjWvXriE+Pp77eSmu0VgX/ZkzZzBixAhYWVlBLBZj0KBBOHjwoNIx27ZtA4/Hw8mTJ/HOO+/AwcEB9vb2eOaZZ5CRkaF07IkTJxASEgJ7e3uIRCJ4en
ri2WefRXl5udo/e0LaC2rBa6mmpgYnT55E//794erqqvH5EokEQ4cOxZYtWzB69GgAbJD29vbGiBEjmjx3xowZmDVrFgoLC2Fra4tbt27h7NmzWLFiBfbu3atVfRRSUlIAAI6Ojty2zMxMvPLKK/jwww8RFRUFPp+P3NxcDBo0CNXV1Vi+fDm8vb3xxx9/YMGCBbhz5w42btyodN0NGzbAy8uLu2+8evVqhIWFIT4+HgMHDgQAZGRkwN7eHv/5z3/g6OiIgoICREdHIygoCImJiejWrZvSNT/55BP07dsXP//8M4qKihAZGYmQkBAkJibC19e3RT+H+fPnY/v27VixYgX69OmDsrIyXL16Ffn5+QDYLuqysjL89ttvSrcAGnsvlJSUYMiQIbh79y4++ugjBAUFobS0FKdOnUJmZia6d++ucRnv3LkDU1NT2Nractv++9//4vTp0/j888/h4uICJycnXL9+HYMGDYKnpyfWrl0LFxcXHD16FHPmzEFeXh6WLFkCAMjOzkZwcDAEAgE2btwIZ2dn7Ny5E7Nnz1arPBEREdixYwdmzJiBZcuWQSgU4p9//uG+9Ozbtw/PPfccbGxsuPeHmZlZo9eLj49HaGgoevXqhc2bN8PMzAwbN25EeHg4du/ejSlTpigd/8Ybb2DcuHHYtWsX0tPTsXDhQrzyyis4ceIEAPZLxLhx47i/u06dOuHhw4c4cuQIqqur202vFCFqY4hWsrKyGADMiy++2GCfTCZjpFIp95DL5dy+JUuWMACY3NxcZuvWrYyZmRmTn5/PyGQyxtXVlYmMjGQYhmEsLCyY6dOnc+elpaUxAJg1a9YwJSUljKWlJbNhwwaGYRhm4cKFjI+PDyOXy5lZs2Yx6vxaFeXIyspipFIpU1hYyOzYsYMRiUSMh4cHU1FRwTAMwwQHBzMAmOPHjyud//HHHzMAmAsXLihtf+eddxgej8fcunVLqdxubm7cNRmGYYqLixk7Oztm5MiRjZZRJpMx1dXVjEQiYd5//31u+8mTJxkATN++fZV+tnfv3mUEAgHzxhtvNKhnXcHBwUxwcLDSNgDMkiVLuOcBAQHMpEmTGi0bwzBN/qy9vLyUfn/Lli1jADCxsbFNXlOV4OBgxt/fn3s/ZWRkcD//559/XqkONjY2TEFBgdL5o0ePZtzd3ZmioiKl7bNnz2bMzc254z/66COGx+Mxly9fVjouNDSUAcCcPHmS2zZ9+nTGy8uLe37q1CkGALN48eIm6+Lv79/gZ88wte+TrVu3ctueeuopxsnJiSkpKeG2yWQyJiAggHF3d+d+91u3bmUAMO+++67SNVevXs0AYDIzMxmGYZjffvuNAdCgfoQYK+qibwX9+vWDQCDgHmvXrlV53PPPPw+hUIidO3fi0KFDyMrKUmvktaWlJZ5//nls2bIFMpkMv/zyC1577TW1uqLrc3FxgUAggK2tLV555RX07dsXR44cgbm5OXeMra0thg8frnTeiRMn0KNHDwwYMEBpe0REBBiG4VpNCs8884zSNa2srBAeHo5Tp06hpqYGACCTyRAVFYUePXpAKBTC1NQUQqEQycnJuHHjRoOyT506VanOXl5eGDRoEE6ePKnxz6G+AQMG4PDhw/j4448RFxeHioqKFl3v8OHD6Nq1K0aOHKnV+deuXePeT25ubli7di1efvll/PTTT0rHDR8+XKlFX1lZiePHj2Py5MkQi8WQyWTcY+zYsaisrMT58+cBACdPnoS/vz8CAwOVrjl16lS16gcAs2bN0qp+9ZWVleHChQt47rnnYGlpyW03MTHBq6++igcPHuDWrVtK50yYMEHpea9evQDU3m7q3bs3hEIh3nzzTURHR6ucxUKIMaEuei05ODhAJBJxHx517dq1C+Xl5cjMzGzwoVOXhYUFpkyZgi1btsDLywsjR46El5eXWq8/Y8YMDBkyBF988QVyc3O1npL1559/wsbGBgKBAO7u7rC3t29wjKpu5/z8fJ
XTpBTjDRRd2QouLi4NjnVxcUF1dTVKS0thY2OD+fPn47vvvsNHH32E4OBg2Nrags/n44033lAZYBu75r///ttofdX17bffwt3dHTExMVi1ahXMzc0xevRorFmzBhKJROPr5ebmwtPTU+vydOnSBXv27AGPx4O5uTl8fHxUdinX/13l5+dDJpNh/fr1WL9+vcpr5+Xlccf6+Pg02K/q51xfbm4uTExM1DpWHYWFhWAYRuV7r7H3WP33rqL7X/He6dKlC/7880+sXr0as2bNQllZGXx9fTFnzhzMnTtXJ+UmxJBQgNeSiYkJhg8fjmPHjiEzM1Ppg6hHjx4AwN17bMrrr7+On3/+GVeuXMHOnTvVfv3BgwejW7duWLZsGUJDQ+Hh4aFxHQAgMDCQG0XfGFU9A/b29sjMzGywXTGoqf41s7KyGhyblZUFoVDItdB27NiBadOmISoqSum4vLw8dOrUSeX5qrap+pKiKQsLCyxduhRLly5FdnY215oPDw/XalCco6MjHjx4oHV5FAMTm1P/d2Vra8u1ehtrXSuCur29faM/0+Y4OjqipqYGWVlZWo1JqU/x5U6T95g6hg4diqFDh6KmpgaXLl3C+vXrMW/ePDg7O+PFF19scbkJMSTURd8CixYtQk1NDd5++21IpVKtrjFw4EC8/vrrmDx5MiZPnqzRuZ9++inCw8PxwQcfaPXaLTFixAhcv34d//zzj9L2X375BTweD8OGDVPa/vvvv6OyspJ7XlJSggMHDmDo0KEwMTEBwAan+oOuDh48iIcPH6osw+7du5Xm69+7dw9nz55tkMSmpZydnREREYGXXnoJt27d4kZc128hNiUsLAy3b99ucOuitYnFYgwbNgyJiYno1asX+vfv3+Ch+EI0bNgwXLt2rUEPyK5du5p9nbCwMADApk2bmjzOzMxMrZ+XhYUFgoKC8PvvvysdL5fLsWPHDri7u6Nr167NXqcxJiYmCAoK4mYS1H8fE2IMqAXfAoMHD8Z3332H9957D3379sWbb74Jf39/ruWhGNHeXPKZzZs3a/X6r7zyCl555RWtzm2p999/H7/88gvGjRuHZcuWwcvLCwcPHsTGjRvxzjvvNPjwNTExQWhoKObPnw+5XI5Vq1ahuLhYKVHM+PHjsW3bNnTv3h29evVCQkIC1qxZ0+g89pycHEyePBkzZ85EUVERlixZAnNzc50kfwkKCsL48ePRq1cv2Nra4saNG9i+fTsGDhzIdY337NkTALBq1SqEhYXBxMQEvXr1Ujllct68eYiJicHEiRPx8ccfY8CAAaioqEB8fDzGjx/f4AuRLn3zzTcYMmQIhg4dinfeeQfe3t4oKSlBSkoKDhw4wH3pmDdvHrZs2YJx48ZhxYoV3Ch6dXoshg4dildffRUrVqxAdnY2xo8fDzMzMyQmJkIsFuO9994DwP7M9uzZg5iYGPj6+sLc3Jz7Oda3cuVKhIaGYtiwYViwYAGEQiE2btyIq1evYvfu3RqPOfn+++9x4sQJjBs3Dp6enqisrOSmm2o7NoIQQ0YBvoXefvttDBw4EN988w2++uorZGRkgMfjwd3dHYMGDcLx48cbDFAzBo6Ojjh79iwWLVqERYsWobi4GL6+vli9ejXmz5/f4PjZs2ejsrISc+bMQU5ODvz9/XHw4EGlZDrffPMNBAIBVq5cidLSUvTt2xe///47Pv30U5VliIqKwsWLF/Haa6+huLgYAwYMwJ49e3SShGX48OHYv38/vvrqK5SXl6Nz586YNm0aFi9ezB0zdepU/PXXX9i4cSOWLVsGhmGQlpamcmyClZUVzpw5g8jISPz4449YunQpbG1t8eSTT+LNN99scXmb0qNHD/zzzz9Yvnw5Pv30U+Tk5KBTp06QSCQYO3Ysd5yLiwvi4+Mxd+5cvPPOOxCLxZg8eTI2bNiAiRMnNvs627ZtQ9++fbF582Zs27YNIpEIPXr0wCeffMIds3TpUmRmZmLmzJkoKSmBl5
dXo7eygoODceLECSxZsgQRERGQy+UIDAzE/v37MX78eI1/Dr1798axY8ewZMkSZGVlwdLSEgEBAdi/fz9GjRql8fUIMXQ8hjGSnKSEEEII4dA9eEIIIcQIUYAnhBBCjBAFeEIIIcQIUYAnhBBCjBAFeEIIIcQIGf00OblcjoyMDFhZWWmVq50QQrTBMAxKSkrg5uYGPp/aUqTtGX2Az8jI0DqNKyGEtFR6enqjyZoIaU1GH+CtrKwAsH9kzWWUU5BKpTh27BhGjRoFgUDQmsVrE1Qfw2Zs9QGMr07a1Ke4uBgeHh7cZxAhbc3oA7yiW97a2lqjAC8Wi2FtbW00H05UH8NlbPUBjK9OLakP3Rok+kI3hgghhBAjRAGeEEIIMUIU4AkhhBAjZPT34AkhxJDV1NRAKpXquxiknRAIBDAxMVHrWArwhBCiBwzDICsrC48ePdJ3UUg706lTJ7i4uDQ7gJMCPCGE6IEiuDs5OUEsFtNoe9IshmFQXl6OnJwcAICrq2uTx1OAJ4SQNlZTU8MFd3t7e30Xh7QjIpEIAJCTkwMnJ6cmu+tpkB0hhDSjokK311PccxeLxbq9MOkQFO+b5sZuUAueEEJUkMuBvDwgJwdQc0yTxqhbnmhD3feNXlvwmzZtQq9evbgscwMHDsThw4e5/QzDIDIyEm5ubhCJRAgJCcG1a9f0WGJCiLGTSoGHD4ErV4D0dKCqSt8lIkQ7eg3w7u7u+M9//oNLly7h0qVLGD58OCZOnMgF8dWrV2PdunXYsGEDLl68CBcXF4SGhqKkpESfxSaEGKHyciAtDUhKArKygJoafZeIkJbRaxd9eHi40vMvvvgCmzZtwvnz59GjRw98/fXXWLx4MZ555hkAQHR0NJydnbFr1y689dZbKq9ZVVWFqjpfuYuLiwGw9yrUnWuqOM5Y5qZSfQybsdUHaF91Ki5mu+FLSxs/pqZG8/q0h7obspCQEPTu3Rtff/01AMDb2xvz5s3DvHnzWu014+LiMGzYMADAxIkT8d///rfVXqsxiu53GxubFk+hNJh78DU1Nfj1119RVlaGgQMHIi0tDVlZWRg1ahR3jJmZGYKDg3H27NlGA/zKlSuxdOnSBtuPHTum8YCW2NhYzSph4Kg+hs3Y6gMYX500qU95eXkrlqTjuXjxIiwsLNrktW7dugUnJyeNzomIiEB0dLTStqCgIJw/f557XlVVhQULFmD37t2oqKjAiBEjsHHjRqXlhDMzMxETE4MlS5a0rBIwgACflJSEgQMHorKyEpaWlti3bx969OiBs2fPAgCcnZ2Vjnd2dsa9e/cavd6iRYswf/587rliycZRo0ZptJpcbGwsQkNDjWYlLKqP4TK2+gCGWyeZjB04l5fH/l9dIpEUd+5oVh9F7yHRDUdHxzZ7LScnJ3Tq1Enj88aMGYOtW7dyz4VCodL+efPm4cCBA9izZw/s7e3xwQcfYPz48UhISOCmu7m4uMDGxqZF5VfQe4Dv1q0bLl++jEePHmHv3r2YPn064uPjuf31RwsyDNPkCEIzMzOYmZk12C4QCDT+oNHmHENG9TFsxlYfwHDqVFkJZGcDBQXs6HgA4GswAkkxil6T+mhSb0UCE33QJMlOSEgIevbsCRMTE0RHR0MoFGL58uV4+eWXMXv2bPz2229wcnLChg0bEBYWxp13/fp1LFiwAKdOnYKFhQVGjRqFr776Cg4ODgCAsrIyvPPOO/j9999hZWWFBQsWNHjt+l3069atw9atW5Gamgo7OzuEh4dj9erVsLS0BABs27YN8+bNQ0xMDObNm4f09HQMGTIEW7dubTZBTH2qynfgwAGlWwgAG39cXFxUXqOoqAibN2/G9u3bMXLkSADAjh074OHhgT///BOjR4/WqEzq0Ps8eKFQCD8/P/Tv3x8rV65EYGAgvvnmG+6HlJWVpXR8Tk5Og1Y9IYSoUlICpKQA166xrXZFcDc05eXlsLS01MtD0y8W0dHRcHBwwN9//4
333nsP77zzDp5//nkMGjQI//zzD0aPHo1XX32Vu25mZiaCg4PRu3dvXLp0CUeOHEF2djZeeOEF7poLFy7EyZMnsW/fPhw7dgxxcXFISEhoshx8Ph/ffvstrl69iujoaJw4cQIffvhhg5/rl19+ie3bt+PUqVO4f/++yi8PzVG3fHFxcXByckLXrl0xc+ZMLuMcACQkJEAqlSrddnZzc0NAQADXY61reg/w9TEMg6qqKvj4+MDFxUXpnld1dTXi4+MxaNAgPZaQEGLIGAbIzwdu3ABu3waKivRdIuMSGBiITz/9FBKJBIsWLYJIJIKDgwNmzpwJiUSCzz//HPn5+bhy5QoAdjp03759ERUVhe7du6NPnz7YsmULTp48idu3b6O0tBSbN2/Gl19+idDQUPTs2RPR0dGoaWYaw7x58zBs2DD4+Phg+PDhWL58Of7v//5P6RipVIrvv/8e/fv3R9++fTF79mwcP35co/qqW76wsDDs3LkTJ06cwNq1a3Hx4kUMHz6cG/SdlZUFoVAIW1tbpfOcnZ0bNGR1Ra9d9J988gnCwsLg4eGBkpIS7NmzB3FxcThy5Ah4PB7mzZuHqKgoSCQSSCQSREVFQSwWY+rUqfosNiHEANXUALm57Ij49jaAXSwWo7SpYfyt/Nqa6NWrF/d/ExMT2Nvbo2fPntw2RQ+rovWakJCAkydPcl3ndd25cwcVFRWorq7GwIEDue12dnbo1q1bk+U4efIkoqKicP36dRQXF0Mmk6GyshJlZWXcYDyxWIwuXbpw57i6uiq1qtVx584dtco3ZcoU7v8BAQHo378/vLy8cPDgQW4mmCrN3XZuCb0G+OzsbLz66qvIzMyEjY0NevXqhSNHjiA0NBQA8OGHH6KiogLvvvsuCgsLERQUhGPHjsHKykqfxSaEGJCqKjaoG3IXfHN4PF6bjRBvqfpjC3g8ntI2RbCSP/5lyOVyhIeHY9WqVQ2u5erqiuTkZI3LcO/ePYwdOxZvv/02li9fDjs7O5w5cwYzZsxQmp6oqqwMw2j0Wpoer+Dq6govLy+ufi4uLqiurkZhYaFSKz4nJ6fVeqX1GuA3b97c5H4ej4fIyEhERka2TYEIIe1GaSk7cI5WWzVsffv2xd69e+Ht7Q1T04Yhx8/PDwKBAOfPn4enpycAoLCwELdv30ZwcLDKa166dAkymQxr164F//Foyfrd87qiTfkAID8/H+np6dyAvn79+kEgECA2NpYbf5CZmYmrV69i9erVrVJ2g7sHTwghjWEYoLAQuHkTuHWLgnt7MGvWLBQUFOCll17C33//jdTUVBw7dgyvv/46ampqYGlpiRkzZmDhwoU4fvw4rl69ioiICC5wq9KlSxfIZDKsX78eqamp2L59O77//vtWKb865SstLcWCBQtw7tw53L17F3FxcQgPD4eDgwMmT54MgE1cM2PGDHzwwQc4fvw4EhMT8corr6Bnz57cqHpd0/s0OUIIaU7dhV8oN3z74ubmhr/++gsfffQRRo8ejaqqKnh5eWHMmDFckFyzZg1KS0sxYcIEWFlZ4YMPPkBRE6Mje/fujXXr1mHVqlVYtGgRnn76aaxcuRLTpk1rlTo0Vz4TExMkJSXhl19+waNHj+Dq6ophw4YhJiZG6ZbyV199BVNTU7zwwgtcoptt27Y1ueRrS1CAJ4QYrOrq2vvrlBte/+Li4hpsu3v3boNt9e9bSyQS/P77741e19LSEtu3b8f27du5bQsXLmzydd5//328//77StteffVV7v8RERGIiIhQ2j9p0iSt7qmrKt/Bgwe5/4tEIhw9erTZ65ibm2P9+vVYv369xmXQBgV4QojBKS9n768XFrLd8oS0JXd3d4SHh2P37t1t/tqWlpaQyWQwNzdv8bUowBNCDEZRERvYacFIog9BQUHcqHdV0/rawuXLlwFAJ932FOAJIXoll7OJaXJy2JSyhOiLSCSCn5+fWsequl2hC+q+vjoowBNC9EIqZRPT5O
ZqtvALIUQ9FOAJIW2qooJtrefn0/11QloTBXhCSJsoLmbvr9MqqoS0DQrwhJBWo1j4JTubbbkTQtoOBXhCiM4p7qlfv07z1wnRFwrwhBCdqapiW+u5uexzqRRoIuMoIaQVUYAnhLRY/YVf2uuqbvr2449t+3pvvqnZ8SEhIYiPjwcAJCYmonfv3rovlIFSrJJnY2ODR+1kEQT6bk0I0Qot/NIxzZw5E5mZmQgICFDr+Li4OEycOBGurq6wsLBA7969sXPnzgbH8Hi8Bo+bN2+2uLyqrsvj8bBmzRrumJCQkAb7X3zxRaXrZGZm4uuvv25xedoSteAJIRqpqald+KW6Wt+lIW1NLBbDxcVF7ePPnj2LXr164aOPPoKzszMOHjyIadOmwdraGuHh4UrH3rp1C9bW1txzR0fHFpc3MzNT6fnhw4cxY8YMPPvss0rbZ86ciWXLlnHPRSKR0n4XFxfY2Ni0uDxtiQI8IUQttPALqS8uLg7Dhg3DH3/8gU8++QS3bt1CYGAgfv75Z/Ts2RMA8MknnyidM2fOHBw9ehT79u1rEOCdnJzQqVMntV8/JCSE60nYsWMHTExM8M4772D58uVcl3r9LyP/+9//MGzYMPj6+ipt1/SLS3tAXfSEkCaVlwNpacDVq+x9dgrupL6FCxfiyy+/xMWLF+Hk5IQJEyZAKpU2enxRURHs7OwabO/Tpw9cXV0xYsQInDx5Uq3Xjo6OhqmpKS5cuIBvv/0WX331FX7++WeVx2ZnZ+PgwYOYMWNGg307d+6Eg4MD/P39sWDBApQYwYII1IInhKj06BEb0EtL9V0SYuiWLFmC0NBQAGzAdXd3x759+/DCCy80OPa3337DxYsX8cMPP3DbXF1d8eOPP6Jfv36oqqrC9u3bMWLECMTFxeHpp59u8rU9PDzw1VdfgcfjoVu3bkhKSsJXX32FmTNnNjg2OjoaVlZWeOaZZ5S2v/zyy/Dx8YGLiwuuXr2KRYsW4d9//0VsbKw2Pw6DQQGeEMJRLPySnc1OeSNEHQMHDuT+b2dnh27duuHGjRsNjouLi0NERAR++ukn+Pv7c9u7deuGbt26KV0vPT0dX375JZ5++mmcPn0aYWFh3P4ffvgBL7/8MgDgqaee4rrjFeeuXbsWNTU1DVZk27JlC15++eUGS7HW/TIQEBAAiUSC/v37459//kHfvn01/XEYDArwhBBIpbX312nhF6ILdYMuAMTHxyM8PBzr1q3DtGnTmj3/qaeewo4dOwAA/fv355ZRBQBnZ2eNy3P69GncunULMTExzR7bt29fCAQCJCcnU4AnhLRPFRVsa72ggBZ+Ido7f/48PD09AQCFhYW4ffs2unfvzu2Pi4vD+PHjsWrVKryp5uT7xMREuLq6Amh6Gdfz5883eC6RSBq03jdv3ox+/fohMDCw2de+du0apFIp9/rtFQV4QjogWviF6NKyZctgb28PZ2dnLF68GA4ODpg0aRIANriPGzcOc+fOxbPPPousrCwAgFAo5Abaff311/D29oa/vz+qq6uxY8cO7N27F3v37m32tdPT0zF//ny89dZb+Oeff7B+/XqsXbtW6Zji4mL8+uuvDbYDwJ07d7Bz506MHTsWDg4OuH79Oj744AP06dMHgwcPbuFPRr8owBPSQSgWfsnJoYVfDJWmmeUMxX/+8x/MnTsXycnJCAwMxP79+yEUCgEA27ZtQ3l5OVauXImVK1dy5wQHByMuLg4AUF1djQULFuDhw4cQiUTw9/fHwYMHMXbs2GZfe9q0aaioqMCAAQNgYmKC9957r0EvwZ49e8AwDF566aUG5wuFQhw/fhzffPMNSktL4eHhgXHjxmHJkiUNegHaGwrwhBg5mYzNDZ+by95rJ0TXhgwZgqtXr6rct23bNmzbtq3J8z/88EN8+OGHWr22QCDA119/jU2bNjV6zJtvvtnorQEPDw8u/a6xoQBPiJGqrGRb6/n5lBue6M7GjRvx888/49y5c/ouSp
uytLSETCZrMALfkFGAJ8TIlJSw99eLivRdEmJsdu7ciYrH93c8PT1x9uxZPZeo7ShG8benbnsK8IQYAcXCL9nZbOY5QlpD586dlZ6HhISA0eP0C8U9/LbQ2Ch+Q0YBnpB2jBZ+IYQ0Rq+56FeuXIknn3wSVlZWcHJywqRJk3Dr1i2lYyIiIhos4/fUU0/pqcSEGIbqaiA9HUhKAh48oOBOCGlIrwE+Pj4es2bNwvnz5xEbGwuZTIZRo0ahrKxM6bgxY8YgMzOTexw6dEhPJSZEv8rKgNRUduGXnBxa+IUQ0ji9dtEfOXJE6fnWrVvh5OSEhIQEpQUGzMzM1F7Gr6qqClV1kmgXP87kIZVKm1zdqC7Fceoeb+ioPoZNnfoUFbEBvd53X4Mll0uV/m3vamo0f88Zy/uTtF8GdQ++6PGw3/rLCMbFxXHrBAcHB+OLL76Ak5OTymusXLkSS5cubbD92LFjEIvFGpWnva8kVB/Vx7AZW30AICvLuOqkye+onEY7Ej3jMfocAlkHwzCYOHEiCgsLcfr0aW57TEwMLC0t4eXlhbS0NHz22WeQyWRISEiAmZlZg+uoasF7eHggLy8P1tbWapVFKpUiNjYWoaGhEAgELa+cnlF9DFv9+kil7MC5vLz22wUvl0uRlRULF5dQ8Pnt/3ckEklx545m77ni4mI4ODigqKiowWdPZWUl0tLS4OPj067mVRPDoO77x2Ba8LNnz8aVK1dw5swZpe1Tpkzh/h8QEID+/fvDy8sLBw8ebLCmL8B256sK/AKBQONgoM05hozqY9hqagTIyREoLfzC1+somZbj8wVGEeAVU581ec9p895MSND4lBbp10+z40NCQrisb4mJiejdu7fuC6UH3t7euHfvHgB2sZxOnTrpt0A6YhAfH++99x7279+PkydPwt3dvcljXV1d4eXlheTk5DYqHSGtS7Hgy82bbNY5w+hTI0S1mTNnIjMzEwEBAWodX1lZiYiICPTs2ROmpqbcIjS6UFJSgnnz5sHLywsikQiDBg3CxYsXlY7Jzs5GREQE3NzcIBaLMWbMmAbx4+LFi2otbNPe6DXAMwyD2bNn4/fff8eJEyfg4+PT7Dn5+flIT09v98v4kY6NYdgu+GvX2FHxhLQXYrEYLi4uMDVVrwO4pqYGIpEIc+bMwciRI3ValjfeeAOxsbHYvn07kpKSMGrUKIwcORIPHz4EwMaYSZMmITU1Ff/73/+QmJgILy8vjBw5Umm2lqOjY4OxX8ZArwF+1qxZ2LFjB3bt2gUrKytkZWUhKyuLS4VYWlqKBQsW4Ny5c7h79y7i4uIQHh4OBwcHTJ48WZ9FJ0QrMhmQmQlcuQLcu8fmiyekvYqLiwOPx8PBgwcRGBgIc3NzBAUFISkpiTvGwsICmzZtwsyZM9WeDQWwOVAmTZqEpUuXwsnJCdbW1njrrbdQ/TjpQ0VFBfbu3YvVq1fj6aefhp+fHyIjI+Hj48MtPJOcnIzz589j06ZNePLJJ9GtWzds3LgRpaWl2L17t25/GAZIrwF+06ZNKCoqQkhICFxdXblHTEwMADbnb1JSEiZOnIiuXbti+vTp6Nq1K86dOwcrKyt9Fp0QjVRWsgE9KQnIyGADPSHGYuHChfjyyy9x8eJFODk5YcKECTqZJnj8+HHcuHEDJ0+exO7du7Fv3z5ulpRMJkNNTU2DQWYikYgby6UYcF33GBMTEwiFwgbjvYyRXgfZNTeAXyQS4ejRo21UGkJ0jxZ+IR3BkiVLEBoaCgCIjo6Gu7s79u3bhxdeeKFF1xUKhdiyZQvEYjH8/f2xbNkyLFy4EMuXL4eVlRUGDhyI5cuX44knnoCzszN2796NCxcuQCKRAAC6d+8OLy8vLFq0CD/88AMsLCywbt06ZGVlITMzs8X1NnQGMciOEGPCMEBBAXDjBnD7NgV3YvwGDhzI/d/Ozg7dunXDjRs31Dr3/v37sLS05B5RUVHcvsDAQKX8JQMHDkRpaSnS09
MBANu3bwfDMOjcuTPMzMzw7bffYurUqdyKbwKBAHv37sXt27dhZ2cHsViMuLg4hIWFtatV4bRlMNPkCGnvamqA3Fw24xwlMSMdHY/HU+s4Nzc3bilWoGGis6au3aVLF8THx6OsrAzFxcVwdXXFlClTlAZs9+vXD5cvX0ZRURGqq6vh6OiIoKAg9O/fX7MKtUMU4AlpoaoqNqjn5QFyub5LQ0jbO3/+PDw9PQGw88hv376N7t27q3Wuqalpo0ux/vvvv6ioqIBIJOJex9LSssF0agsLC1hYWKCwsBBHjx7F6tWrG1zLxsYGADvw7tKlS1i+fLna9WuvKMAToqWyMvb++qNHNHeddGzLli2Dvb09nJ2dsXjxYjg4OCjNd79+/Tqqq6tRUFCAkpISrsXeXKKc6upqzJgxA59++inu3buHJUuWYPbs2eA/zgB19OhRMAyDbt26ISUlBQsXLkS3bt3w2muvcdf49ddf4ejoCE9PTyQlJWHu3LmYNGkSRo0apesfg8GhAE+IBhiGDejZ2e1n4RfSfmiaWc5Q/Oc//8HcuXORnJyMwMBA7N+/H0KhkNs/duxYLlMcAPTp0wdA8wOtR4wYAYlEgqeffhpVVVV48cUXERkZye0vKirCokWL8ODBA9jZ2eHZZ5/FF198oZRFMDMzE/Pnz0d2djZcXV0xbdo0fPbZZzqquWGjAE+IGuRytgs+J4ftkieE1BoyZAiuXr3a6P67d+9qfe2lS5eqXEAMAF544YVmR+rPmTMHc+bM0fr12zMaRU9IE6RS4OFDNjFNejoFd0I2btwIS0tLpWQ27Z2/vz/CwsL0XQydoxY8ISqUl7Pd8IWFdH+dEIWdO3dymUY9PT1x9uxZPZdINw4dOsQl5lF31dH2gAI8IXUUFbGBvaRE3yUhxPB07txZ6XlISEiz99G1tW3btla5ripeXl5t9lptiQI86fDkcnYVt5wcyg1PCDEeFOBJhyWTsUE9N5dywxP9kFPiBKIFdd83FOBJh1NZyXbDFxRQYhqiH0KhEHw+HxkZGXB0dIRQKFQ78xvpuBiGQXV1NXJzc8Hn85WmIqpCAZ50GMXFbIudcsMTfePz+fDx8UFmZiYyMjL0XRzSzojFYnh6enIJfxpDAZ4YNcXCL9nZwOPBv4QYBKFQCE9PT27ZU0LUYWJiAlNTU7V6fCjAE6NEC7+Q9oDH40EgEChlXiNEVyjAE6NSVcW21vPz6f46IaRjowBPjEJpae3CL4QQQijAk3aMYdhMc7TwCyGENEQBnrQ7ivFIN27Q/HVCCGkMBXjSblRXs4PmcnJqnzczS4QQQjosCvDE4NVf+IUGzxFCSPMowBOD9egRG9hLS/VdEkIIaX8owBODQgu/EEKIbrQ4wNfU1CApKQleXl6wtbXVRZlIBySVsolpaOEXQgjRDY2HKM2bNw+bN28GwAb34OBg9O3bFx4eHoiLi9N1+YiRq6gA7t4FkpKAzEwK7oQQoisaB/jffvsNgYGBAIADBw4gLS0NN2/exLx587B48WKdF5AYp+JiIDkZuH6d7ZJnGH2XiJCG5HK2VyktTd8lIURzGnfR5+XlwcXFBQBw6NAhPP/88+jatStmzJiBb7/9VucFJMaDFn4h7UF1Nfsezcxk/5VKAS8vwNlZ3yUjRDMat+CdnZ1x/fp11NTU4MiRIxg5ciQAoLy8HCYmJhpda+XKlXjyySdhZWUFJycnTJo0Cbdu3VI6hmEYREZGws3NDSKRCCEhIbh27ZqmxSZ6JJOxH5ZJSWx3PAV3YmhKS9kepVOngEOHgIsXgQcPaKEi0r5pHOBfe+01vPDCCwgICACPx0NoaCgA4MKFC+jevbtG14qPj8esWbNw/vx5xMbGQiaTYdSoUSirk3d09erVWLduHTZs2ICLFy/CxcUFoaGhKCkp0bTopI1VVQH377OBPSODPiyJ4WAYIC+PfW8eO8Y+kpLYbZRngRgLjbvoIyMjERAQgPT0dDz//PMwMzMDwK
5R+/HHH2t0rSNHjig937p1K5ycnJCQkICnn34aDMPg66+/xuLFi/HMM88AAKKjo+Hs7Ixdu3bhrbfe0rT4pA3Qwi/EEEmlyl3v1dX6LhEhrUuraXLPPfccAKCyzkTl6dOnt7gwRUVFAAA7OzsAQFpaGrKysjBq1CjuGDMzMwQHB+Ps2bMqA3xVVRWqqqq458XFxQAAqVQKqZpNSMVx6h5v6NqiPgzDBvScnNbvgpfLpUr/tnfGVh/AcOpUVsYG86wsdvxH3da5ummO5fIapKaeRX7+da7HUh3G8vlB2i+NA3xNTQ2ioqLw/fffIzs7G7dv34avry8+++wzeHt7Y8aMGVoVhGEYzJ8/H0OGDEFAQAAAICsrCwB7378uZ2dn3Lt3T+V1Vq5ciaVLlzbYfuzYMYjFYo3KFBsbq9Hxhs7Y6pOVRfUxdIZQJ3NzwNubfairvLwcly9fxsWLF5GQkIDi4mL07NkTPXr00OgahOiTxgH+iy++QHR0NFavXo2ZM2dy23v27ImvvvpK6wA/e/ZsXLlyBWfOnGmwj8fjKT1nGKbBNoVFixZh/vz53PPi4mJ4eHhg1KhRsLa2VqssUqkUsbGxCA0NhUAg0KAWhqk16lNdzd6vzM+vXd2trcjlUmRlxcLFJRR8fvv//RhbfYC2rZNMxvYcZWe3rOu9sDANyckHkZx8CPfuxSv1PohENrC3t8fIkSMhFArVup6i95AQfdE4wP/yyy/48ccfMWLECLz99tvc9l69euHmzZtaFeK9997D/v37cerUKbi7u3PbFdPxsrKy4Orqym3Pyclp0KpXMDMz48YF1CUQCDQObtqcY8h0UR9Fl+ejR7Vz1/W1ohufLzCagAgYX32A1qtTeTl7Lz0zU/uBcXJ5DR4+PIeUlD+QnHwAeXnXlfbb2Ung5xcOiSQcgwcPgJtbLIRCodp/Q8b02UHaJ40D/MOHD+Hn59dgu1wu1/ieE8MweO+997Bv3z7ExcXBx8dHab+Pjw9cXFwQGxuLPn36AACqq6sRHx+PVatWaVp00gK08AvRt4KC2qCubeO4srIIqalHkZLyB+7cOYSKinxuH49nAg+PoZBIxsPPLxz29l25fSYmdD+dtD8aB3h/f3+cPn0aXl5eStt//fVXLgira9asWdi1axf+97//wcrKirvnbmNjA5FIBB6Ph3nz5iEqKgoSiQQSiQRRUVEQi8WYOnWqpkUnGlIs/JKdzU55I6QtKbreMzPZQXLavgcLC+8gOfkAUlL+wP378ZDLa/Mhm5vbokuXMPj5jYev7xiIRLSeBjEeGgf4JUuW4NVXX8XDhw8hl8vx+++/49atW/jll1/wxx9/aHStTZs2AQBCQkKUtm/duhUREREAgA8//BAVFRV49913UVhYiKCgIBw7dgxWVlaaFp2oSSplP1jz8ig3PGlbFRW1AT0nR9uudxkePDiHlJQDSE7+A/n5N5T229l1g0QSDj+/8fDwGAw+nxbVJMZJ43d2eHg4YmJiEBUVBR6Ph88//xx9+/bFgQMHNJpCArBd9M3h8XiIjIxEZGSkpkUlGqqoYFvrBQWUG560ncLC2qCube4Etuv9CJKTDyA19TAqKgq4fXy+6eOudzao29lJNLo2jwdYWmpXLkL0SauvrqNHj8bo0aN1XRaiJ0VFbGuJBv2StiCTsb1DivvpddJpaKSgIOVxK/0A0tNPq+h6HwuJJBy+vqNhbt5Jo2vz+YCTE+DmBri4AHZ2wO3b2pWTEH2hvqkOimFq769r+wFLiLoqK9kWemYm+2VSm6mVcrkM6el/caPeCwqU162wt+/+uJUeDnf3gRp3vQsE7IIyiqBuSp+OpJ3T+C3M5/MbnYMOsIlwiOGSydjlL3NzKTc8aX23b7NBvbBQu/MrKgqRmnrk8aj3w6isrL0Qn28KT89g+PmNf9z13nB2T3PMzQFXV/bh5KS/KZ+EtAaNA/y+ffuUnkulUiQmJiI6OlplBjliOB48YO9x0mIapD
XU1LBfHBUD5Hr0AG7e1Pz9lp9/mxsgl55+GgxT22gQiezRpcvYx6PeR8Pc3Ebjclpasq10V1fA3l7j0wlpNzQO8BMnTmyw7bnnnoO/vz9iYmK0zmRHWkdJCbuSG8De96QWCtElRde7IqgrZl1o8j6rqZHiwYO/uKlsBQXKN7sdHHpwXe+dOz8FPl+zZakBwNaWDehuboCaCS0Jafd0dpcpKChIKXUt0R+GYbtEs7PZjF/UYie6VFRUez+9sFC7GRcVFQV1Rr0fQWXlI24fny+Ap2cwN+rd1tZX4+vz+WzrXNFS13AZCkKMgk4CfEVFBdavX6+UZpa0PUUXaW4uLYVJdEcur+16z8xkvzRqimEY5Off4gbIPXjwV72udwf4+Y2Fn184fH1HwcxM82a2iQk7SE5xT13NlPGEGC2NA7ytra3SIDuGYVBSUgKxWIwdO3botHBEPdXVbGtd25zchNRXVVW7dnpOjnYDMmUyGdLSTiI5mW2pFxamKO13dAyAn994SCThcHML0qrrXShkR7y7ubGD5GjkOyG1NP5z+Oqrr5QCPJ/Ph6OjI4KCgmBrS2ke25KqhV8I0VZJSe3cdG2THZWX5yM19TBSUvZj3bqDSkum8vkCeHkNexzUx6NTJ58mrtQ4sbj2frqDA5uIhhDSkMYBXpFCluiP4v56WZm+S0LaM8VaA4qgrs37ie16v4nkZDbhzMOHZ8Ewtd1IYrEj/PzGwc8vHD4+oTAz0y7FtLU1G9Dd3IBOnbS6BCEdjloB/sqVK2pfsFevXloXhjROLme74HNyaOEXoj2ptPZeena2dl3vNTXVuH//NJdF7tGjVKX9jo490bXrWEyYYIeiojkAzDV+DR6PzR6nGCRHqWIJ0ZxaAb53797g8XjN5o7n8XiU6EbHFAu/5OZql/2LkNLS2lzv+fnajdMoL8/DnTuHkZx8AGlpR1FVVZvX2MRE+LjrPRwSyXjY2HiBz5eiW7dDuHTJRO3X4/MBR0egc2f2vrq55t8LCCF1qBXg09LSWrscpJ7ycraFpe00JNJxKdIQK1rqJSXaXINBXt51btT7w4fn6nW9O8HPbxwkErbrXSjUrolN6WEJaT1q/TnVX/udtJ6iIjawa/OhTDouqZR93yiSzmgzTZLteo9HcvIfSEk5gEePlL/YOzkF1hn1/iR4PO2yJlF6WELahtbfl69fv4779++jut4nyYQJE1pcqI5GLmdHLdPCL0QTZWW1Xe/aTpEsK8vFnTuHkJLyB1JTj6K6uvabpYmJGby9h8PPLxx+fuNgY+OpdVktLNiud1dX9t46jXwnpPVpHOBTU1MxefJkJCUlKd2XV0ydo3vw6pPJau+vy2TNH086NoZhvwgqgro2y/syDIPc3GvcALmHD88DqL0HZGHhzLXSvb1HQii0aFGZu3en9LCE6IvGAX7u3Lnw8fHBn3/+CV9fX/z999/Iz8/HBx98gC+//LI1ymh0KivZ1npBASWmIU2TyWoTzmjb9S6TVeH+/bjHXe9/oKjortJ+Z+c+XFB3de2ndde7Ij2sovu9sBDo2pW64AnRF40D/Llz53DixAk4OjqCz+eDz+djyJAhWLlyJebMmYPExMTWKKdRKClhP6yLivRdEmLIystrB8jl5mrb9Z6DlJSDSEn5A2lpx1BdXcrtMzU1h7f3CG6ZVWtr7VNMN5YeVi7XfolYQohuaBzga2pqYPl4UqqDgwMyMjLQrVs3eHl54datWzovYHun6FbNydEuhzfpOG7eZIO6Nl8A2a73JG5FtocPL6Bu17ulpSsX0L29R7So653SwxLSPmj8pxkQEIArV67A19cXQUFBWL16NYRCIX788Uf4+mq+6pOxUiz8om0eb2LcZDL2/aHI9R4QANy+rVlrXSarxL17cVxQLy6+r7TfxaUvNzfdxaWv1l3vgHJ6WHt76nYnpD3QOMB/+umnKHuc03LFihUYP348hg4dCnt7e8TExOi8gO1NVRX7gU0Lv5D6Ki
tr08LWTVykSbAsLc3GnTsHHyeciYVUWptflu16H/l4mdVxsLLq3KLyUnpYQto3jQP86NGjuf/7+vri+vXrKCgoaLDKXEdTWlq78AshCo8e1QZ1bd4bDMMgJ+dfbm56RsbfSvstLd3qjHofDoGgZQufK9LDurlRelhC2juNA3x0dDSee+45WFjU3sOzs7PTaaHaC4ZhP7Rp4ReioLg1oxj1XlGh+TVkskrcvXsCKSl/PO56T1fa7+ranwvqzs59WvTFWpEeVpHzndLDEmI8NA7wCxYswLvvvovw8HC88sorGDNmDEw72Cibmho2FWh2tnbTlohxqayszSCXna3dmgEFBQVITNyC5OTDj7vea0dkmpqK4OMT+niQ3DhYWbm1qLymprWD5Jyd2XSxhBDjo3FkzszMxJEjR7B79268+OKLEIlEeP755/HKK69g0KBBrVFGg1FdXXt/nfL5dGxFRbWt9IICzc9nGAbZ2ZeRnHwAd+4cQEbGJaX9VladuQFyXl7DIRCIWlReM7PaQXKUHpaQjkHjAG9qaorx48dj/PjxKC8vx759+7Br1y4MGzYM7u7uuHPnTmuUU69o4Rcilyt3vWsz5VEqrcC9eye4Ue8lJQ+V9ru59Yef3wT4+Y2Hs3PvFo9psbCo7Xq3t6f0sIR0NC3qWxeLxRg9ejQKCwtx79493LhxQ1flMgjFxWxXPC380jFVVdUmnMnJ0S6dcElJxuOEMweQlvYnZLLam/ICgRg+PqGQSMbiueeEuHPnZcjlLesv79SpNqjb2LToUoSQdk6rAK9oue/cuRN//vknPDw88NJLL+HXX3/Vdfn0KjWVujI7muLi2lHv2vTYMAyDrKx/uGVWs7ISlPZbW3twA+S8vIbB1NQcfL4UtraHtCovjwc4ONR2v4tbNoieEGJENA7wL730Eg4cOACxWIznn38ecXFxWt97P3XqFNasWYOEhARkZmZi3759mDRpErc/IiIC0dHRSucEBQXh/PnzWr0eIfXJ5eyYCkXXuzazIaTScty9e/xx1/tBlJZm1NnLg5vbgMdz08fDyalXi7ve66aHdXFh768TQkh9Ggd4Ho+HmJgYjB49usWj58vKyhAYGIjXXnsNzz77rMpjxowZg61bt3LPhYpk14RoqbqaHVORkaF9psGSkofc3PS7d49DJqtd51cgsICPzyhIJOHo0mUsLC2dW1xmSg9LCNGUxh8Tu3bt0tmLh4WFISwsrMljzMzM4OLiorPXJB1TSUnt/fT8fG263uXIyvoHycnsMqvZ2cqLKllbe0IiCYdEEg5Pz2CYmrZ8QrlYzAb1zp0pPSwhRHMG3w6Ii4uDk5MTOnXqhODgYHzxxRdwcnJq9PiqqipUVVVxz4sfL5otlUohVbOppjhOLjeOJPKKenSk+ihWM1PMTS+tXUwNPJ56I8qrq8tw9+4J3L59ECkph1FamllnLw+dOw+ARDIOXbuOg6NjQL2ud/V/1ny+lPvX2rq2+71+etj2lPrY2N5zNTVsPdT9DNH0WEJaA49hDGPiF4/Ha3APPiYmBpaWlvDy8kJaWho+++wzyGQyJCQkwKyRG4+RkZFYunRpg+27du2CmEYgkWbk5uYiISEBFy9eRFJSEqrrZDIyNzdH79698eSTT6Jfv37oRAnaSRPKy8sxdepUFBUVwdraWt/FIR2Q2gH+wYMHcHfXft3oZguiIsDXl5mZCS8vL+zZswfPPPOMymNUteA9PDyQl5en9h+ZVCpFbGwsXFxCwee3/zRfcrkUWVnGWZ+KCgGys2sTzmjaymUYOTIyEpCcfBDJyQeRnf2v0n4bGy9IJOMgkYyDl9fTMDVt+Yg2Pp/tcldkkhMKjev3Axjfe04kkuLOnViEhoZCoGbqv+LiYjg4OFCAJ3qjdhd9QEAA1q9fj1dffbU1y9MkV1dXeHl5ITk5udFjzMzMVLbuBQKB2n+YCny+wCg+nBSMoT4Mw3a9A0B8vABFRZrXp7q6DGlpsUhJYU
e9l5Vl19nLg7v7QPj5saPeHR39lbrete0mNzVlg7mbG3tfve5bUXFNY/j91GcsdTIxYf/V5HNE088bQnRN7QAfFRWFWbNm4b///S9+/PFH2Nvbt2a5VMrPz0d6ejpcXV3b/LWJ/shktbnes7LY5/37a5aAqKjoPjc3/d69k6ipqe3lEQqt4Os7GhJJOHx9w2Bh4aiTctdND+voWBskCCGkLagd4N99912EhYVhxowZ8Pf3x48//ogJEya06MVLS0uRkpLCPU9LS8Ply5dhZ2cHOzs7REZG4tlnn4Wrqyvu3r2LTz75BA4ODpg8eXKLXpcYvvJydhpbVhY7T71uy1md0eRs1/vFx3PTDyAn54rS/k6dfB7neg+Hp+fTMDHRzfRLSg9LCDEUGo2i9/HxwYkTJ7BhwwY8++yzeOKJJxrMhf/nn3/Uvt6lS5cwbNgw7vn8+fMBANOnT8emTZuQlJSEX375BY8ePYKrqyuGDRuGmJgYWFlZaVJs0g4out4VWeQeT37QSHV1KVJTjz1eZvUgystzuH08Hh+dOw+CRDIefn7hcHB4osUJZxQoPSwhxBBpPE3u3r172Lt3L+zs7DBx4sQWJbsJCQlBU2P8jh49qvW1ieGTydhEM4oscnXGRqqtqOget3gL2/VeO+rdzMwavr5j4Oc3Hl26hEEsdtBJuSk9LCGkPdAoOv/000/44IMPMHLkSFy9ehWOjrq5V0k6joqK2lZ6bq7mg9bk8hpkZJzHjRvbcfr0p8jJuaq039a2CzdAztNzqM663k1M2AxyikFylB6WEGLo1A7wY8aMwd9//40NGzZg2rRprVkmYmTqdr0XFWl+flVVCdLSjj1eO/0QystzuX08Hh/u7oO5XO/29t111vWuSA/r6sqOgKf0sISQ9kTtj6yamhpcuXKlVefCE+MgkymvnV5Z2fw59T16lMbler93L04pI5qZmQ2efLIn7O3fgK9vOEQiO52VXSSq7Xp3cKD0sISQ9kvtAB8bG9ua5SDtXGWl8trpNTWanS+X1+Dhw/PcVLa8vGtK+21t/R630sPh5RWEoKBYXLo0tsXrpwOAlRUb0N3cAFvbFl+OEEIMAnU6Eq09elQb1BXJZzRRVVWM1NSjj0e9H0JFRR63j8czgYfHEG7tdHv7btw+Re72lrCzY1vqnTsDlpYtvhwhhBgcCvBEbTU1bNe7IuFMebnm1ygsTOVGvd+/H6/U9W5u3gm+vmGQSMbD13eMTrve+Xy2y13RUjdv+WJvhBBi0CjAkyYput4Vq7Jp1/V+jgvqeXnXlfbb2XXlBsi5uw+GiYnu0ns2lR6WEEKMHQV40kBRUe0AucJCzddOr6wsetz1zo56r6go4PaxXe9D64x676rTslN6WEIIYVGAJ5DLa7veMzO163ovKEjhBsilp5+CXC7j9pmb26JLlzD4+YWjS5cxMDfvpLvCg9LDEkKIKhTgO6iqKuWud5ms+XPqkstlePDgLBfU8/NvKu23t+9ep+t9EPh83b/Vunen9LCEENIYCvAdSGlpbSu9oECbrvdHuHPnyOOu98OorKwdOs/nm8LD42kuqNvZ+em07Dxe7RrqLi7sCP6uXWmeOiGENIYCvBGTy4H8fDag29kBJ05onhq2oCAZyckHHne9nwbD1I6yE4ns0KXLWPj5hcPXd5TOu975fHaQnKsr+1Ckh5XL2QBPCCGkcRTgjUx1NdvlnpnJ/iuVsoHSTs0ZZ3K5DOnpfyElhQ3qBQW3lfbb2z8BiYRdZrVz56d03vUuENQGdEoPSwgh2qOPTyNQWlqb672gQPNWekVFIVJTjyA5+QBSUw+jsvIRt4/PN4WnZ/DjtdPHw9a2i24LD0oPSwghrYECfDvEMLVd71lZQEmJ5tfIz7/FzU1PTz9Tr+vdHl26jIVEEg4fn1EwN9f9KDYrq9qgrm7vAiGEEPVRgG8npFLlrvfq6ubPqUsmk+Hu3Tjcvn0YKSl/oKAgWWm/g4M/N0CO7XrX/Q
RyRXpYNzc2wBNCCGk9FOANmGLUe1YWkJenTdd7Ae7cOYyUlP9h3bqDKK8zwZ3PF8DLK+Tx2unjYGvrq+PSU3pYQgjRJwrwBoRh2Hvoiq734mJNz2eQn3+LGyD34MFfYJjabwVisQO6dBn3uOs9FGZm1jquAaWHJYQQQ0EBXs9kstqu96wszbvea2qkSE8//fh++gEUFt5R2u/o2BMSSRgmTLBDcfFcALpvRguFta10Sg9LCCGGgQK8HpSX145616brvbw8H6mphx+Pej+Cqqrapr6JiRBeXsPg5zcefn7j0amTN/h8Kbp3P4RLl0w0fq3GiMW1QZ3SwxJCiOGhAN9GFF3vmZnadb3n5d143PX+Bx4+PFuv690Jfn7j4Oc3/nHXe+uMYLOxqc353qlTq7wEIYQQHaEA30pkMiAnp3aQXGWlZufX1FTj/v1T3FS2R49SlfY7OfXi5qa7uQ0Aj6f7yeN108O6urKLuhBCCGkfKMDrUEVFba73nBzNu97LynLrdL0fRXV17QR3tut9ODeVzcbGU8elZ/H5gJNTbVBXpIclhBDSvlCAb6FHj2q73jXNj84wDHJzryEl5Q+kpBzAgwfnANSuAGNh4fy46z0cPj4jIRRa6rLoHIGAHfHu6sr+S+lhCSGk/aOPcg3V1LBrpytGvVdUaHa+TFaF+/fjuWVWi4ruKu13du4NP7/xkEjC4erav1W63gF2Trqile7oSOlhCSHE2FCAV0NlpXLXe01N8+fUVVaWgzt3DiE5+Q+kpR1FdXUpt8/ExAze3iMeB/XxsLb20HHpa1lZ1c5Rp/SwhBBi3CjAN+H2bTawFxRodh7b9X6VGyD38OF5KHe9u0AiYaexeXuPhFDYeqPXFOlhAWDYMGqpE0JIR0EBvgk3b6o/UI7teo/jgnpR0T2l/c7OfbgBcq6u/Vqt671uelhXV3alNrkcyMholZcjhBBioPQa4E+dOoU1a9YgISEBmZmZ2LdvHyZNmsTtZxgGS5cuxY8//ojCwkIEBQXhu+++g7+/v/4KXUdpafbjrvcDSEs7Bqm0jNtnamr+uOudzfVube3eauUwMWG73jt3pvSwhBBCWHoN8GVlZQgMDMRrr72GZ599tsH+1atXY926ddi2bRu6du2KFStWIDQ0FLdu3YKVHpYjYxgGOTlXuAFyGRl/o27Xu6WlKzdAztt7BAQCcauVRSisXZnNyYnSwxJCCFGm1wAfFhaGsLAwlfsYhsHXX3+NxYsX45lnngEAREdHw9nZGbt27cJbb72l8ryqqipUVVVxz4sfp42TSqWQSqVqlUtxHJ8vhUxWibt345CcfAjJyYdQXHxf6VhX176QSMZBIhkHF5c+4CnlbFXv9dQlFtdOZ7OzU04P29StBLlcqvRve0f1MXzGVqeaGrYe6n6GaHosIa3BYO/Bp6WlISsrC6NGjeK2mZmZITg4GGfPnm00wK9cuRJLly5tsP3YsWMQi9VrURcWFuLSpUu4eDEK//77r9IXBqFQiMDAQDz55JPo378/7Ljh6FkADqtdv5aoqmJH9GsqKytW94XRI6qP4TO2OsXGql+fusszE6IPBhvgs7KyAADOzs5K252dnXHv3j1VpwAAFi1ahPnz53PPi4uL4eHhgVGjRsHauvnlUTMyMuDt7a20zcqqMySSsZBIxsHbexgEAhEAIDWVfegSjwfY2tYut6rmd5ImyeVSZGXFwsUlFHx++79BT/UxfMZWJ5FIijt3YhEaGgqBmoNcijVddIIQHTPYAK/Aq7dMGcMwDbbVZWZmBjMV+VUFAoFaf5heXl7w9/dHVVUV3Nymws9vEpydeyu9pq5WZFNoq/SwfL7AKD5sFag+hs9Y6qQY46Lu54jiWEL0yWADvIuLCwC2Je+qmMgNICcnp0GrXtfOnj2LkydP4tKlsZDLW+ePlNLDEmL4TE3ZrI+00BJpjww2rPj4+MDFxQWxsbHo06cPAKC6uhrx8fFYtWpVq762SCRqletSel
hCDA+Px/aamZs3fCha7jRejrRHeg3wpaWlSElJ4Z6npaXh8uXLsLOzg6enJ+bNm4eoqChIJBJIJBJERUVBLBZj6tSpeiy1ZqysaqezUXpYQvRHIFAdyIVC5RkphBgLvQb4S5cuYdiwYdxzxeC46dOnY9u2bfjwww9RUVGBd999l0t0c+zYMb3MgdeEYpCcmxsb4AkhbYPHqw3c9YM55YogHY1eA3xISAgYhml0P4/HQ2RkJCIjI9uuUFpQpIdVtNRbqYefEPKYQKA6kLfWAFVC2iODvQdv6BTpYRX31GnALCG6xec3bIUrnlNrnJDmUYDXAKWHJUT36rbG6wZyao0T0jIU4JshFtfeT7e3p8E4hGhDVWtcEcjpizIhrYMCfBOCg9kBc4QQ9Sha4wIBu0Rxly6ApSXb+0UIaVsU4JtgY6PvEhBiePh81aPUzc1rcztIpcCVK+wsEhqfQoh+UIAnhKgkFKoO5NQaJ6R9oABPSAemaI2rCuSUaZGQ9o0CPCEdgKI1Xj+YU2ucEONFAZ4QI1G3NV4/kFNrnJCOhwI8Ie1M/da44kGD2QghdVGAJ8QAmZg0Pm+cWuOEEHVQgCdEj8zM2DXHAcDdnZ0zTq1xQoguUIAnpJWZmKgepa5ojUulwK1b7IJFFNgJIbpCAZ4QHVEE8PqBnII2IUQfKMATogFFa1zV3HFap4AQYkgowBNSD4/X+Lxxao0TQtoLCvCkw6rfGq8bzKk1Tghp7yjAE6OmqjWueJjSu58QYsToI44YBVPTxueNU2ucENIRUYAn7QaPVztvPCMD8PConTdOrXFCCFFGH4vE4JiaNj5vnMdj543fvAnY29OgN0IIaQwFeKIXita4qulm1BonhJCWo49S0qoUrfH6gZzujRNCSOuiAE9arH5rvG4wp9Y4IYToB338ErXVb40rHkIhtcYJIcTQUIAnShprjZubs4lhCCGEtA8U4DsogUB1IKfWOCGEGAcK8EaMx6vNn56RAXh61s4bp9Y4IYQYN76+C9CUyMhI8Hg8pYeLi4u+i2VwBALAygpwdATc3QE/PyAgAOjbF+jRA/D2Zo+zswMsLCi4E0JIR2DwLXh/f3/8+eef3HOTDhqd+PzG54130B8JIYSQJhh8gDc1NdWo1V5VVYWqqirueXFxMQBAKpVCKpWqdQ3FcXK5esfrUt1743X/FQpVHy+Xs4+mKOqjbv0NHdXH8BlbnbSpj7HUnbRfPIZhGH0XojGRkZFYs2YNbGxsYGZmhqCgIERFRcHX17fJc5YuXdpg+65duyAWi1uzuIQQwikvL8fUqVNRVFQEa2trfReHdEAGHeAPHz6M8vJydO3aFdnZ2VixYgVu3ryJa9euwd7eXuU5qlrwHh4eyMvLU/uPTCqVIjY2Fi4uoeDztU92rmlrvLUo6hMaGgqBESRvp/oYPmOrkzb1KS4uhoODAwV4ojcG3UUfFhbG/b9nz54YOHAgunTpgujoaMyfP1/lOWZmZjAzM2uwXSAQaPxBw+cLmg3w9e+N133wDWwIozY/A0NG9TF8xlYnTepjTPUm7ZNBB/j6LCws0LNnTyQnJ7f5awuFqlc4a+vWOCGEEKKOdhXgq6qqcOPGDQwdOrRNXs/Lq3beuKG1xgkhhJCmGHTYWrBgAeLj45GWloYLFy7gueeeQ3FxMaZPn94mr29rC4jFFNwJIYS0Pwbdgn/w4AFeeukl5OXlwdHREU899RTOnz8PLy8vfReNEEIIMWgGHeD37Nmj7yIQQggh7RJ1PhNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQMepqcLijW0lEsG6sOqVSK8vJyFBcXG0U+aaqPYTO2+gDGVydt6qP4zDHg9byIkTP6AF9SUgIA8PDw0HNJCCEdUUlJCWxsbPRdDNIBGfRysbogl8uRkZEBKysr8Hg8tc5RLDGbnp5uFMs8Un0Mm7HVBzC+OmlTH4ZhUFJSAjc3N/Ap3z
XRA6NvwfP5fLi7u2t1rrW1tVF8OClQfQybsdUHML46aVofarkTfaKvlYQQQogRogBPCCGEGCEK8CqYmZlhyZIlMDMz03dRdILqY9iMrT6A8dXJ2OpDOgajH2RHCCGEdETUgieEEEKMEAV4QgghxAhRgCeEEEKMEAV4QgghxAh12AC/ceNG+Pj4wNzcHP369cPp06ebPD4+Ph79+vWDubk5fH198f3337dRSdWjSX1+//13hIaGwtHREdbW1hg4cCCOHj3ahqVtnqa/H4W//voLpqam6N27d+sWUEOa1qeqqgqLFy+Gl5cXzMzM0KVLF2zZsqWNSts8Teuzc+dOBAYGQiwWw9XVFa+99hry8/PbqLRNO3XqFMLDw+Hm5gYej4f//ve/zZ5j6J8HhAAAmA5oz549jEAgYH766Sfm+vXrzNy5cxkLCwvm3r17Ko9PTU1lxGIxM3fuXOb69evMTz/9xAgEAua3335r45Krpml95s6dy6xatYr5+++/mdu3bzOLFi1iBAIB888//7RxyVXTtD4Kjx49Ynx9fZlRo0YxgYGBbVNYNWhTnwkTJjBBQUFMbGwsk5aWxly4cIH566+/2rDUjdO0PqdPn2b4fD7zzTffMKmpqczp06cZf39/ZtKkSW1cctUOHTrELF68mNm7dy8DgNm3b1+Txxv65wEhCh0ywA8YMIB5++23lbZ1796d+fjjj1Ue/+GHHzLdu3dX2vbWW28xTz31VKuVUROa1keVHj16MEuXLtV10bSibX2mTJnCfPrpp8ySJUsMKsBrWp/Dhw8zNjY2TH5+flsUT2Oa1mfNmjWMr6+v0rZvv/2WcXd3b7UyakudAG/onweEKHS4Lvrq6mokJCRg1KhRSttHjRqFs2fPqjzn3LlzDY4fPXo0Ll26BKlU2mplVYc29alPLpejpKQEdnZ2rVFEjWhbn61bt+LOnTtYsmRJaxdRI9rUZ//+/ejfvz9Wr16Nzp07o2vXrliwYAEqKiraoshN0qY+gwYNwoMHD3Do0CEwDIPs7Gz89ttvGDduXFsUWecM+fOAkLqMfrGZ+vLy8lBTUwNnZ2el7c7OzsjKylJ5TlZWlsrjZTIZ8vLy4Orq2mrlbY429alv7dq1KCsrwwsvvNAaRdSINvVJTk7Gxx9/jNOnT8PU1LDe0trUJzU1FWfOnIG5uTn27duHvLw8vPvuuygoKND7fXht6jNo0CDs3LkTU6ZMQWVlJWQyGSZMmID169e3RZF1zpA/Dwipq8O14BXqLx3LMEyTy8mqOl7Vdn3RtD4Ku3fvRmRkJGJiYuDk5NRaxdOYuvWpqanB1KlTsXTpUnTt2rWtiqcxTX4/crkcPB4PO3fuxIABAzB27FisW7cO27ZtM4hWPKBZfa5fv445c+bg888/R0JCAo4cOYK0tDS8/fbbbVHUVmHonweEAB2wBe/g4AATE5MGrY2cnJwG38oVXFxcVB5vamoKe3v7ViurOrSpj0JMTAxmzJiBX3/9FSNHjmzNYqpN0/qUlJTg0qVLSExMxOzZswGwAZJhGJiamuLYsWMYPnx4m5RdFW1+P66urujcubPSUqNPPPEEGIbBgwcPIJFIWrXMTdGmPitXrsTgwYOxcOFCAECvXr1gYWGBoUOHYsWKFe2uxWvInweE1NXhWvBCoRD9+vVDbGys0vbY2FgMGjRI5TkDBw5scPyxY8fQv39/CASCViurOrSpD8C23CMiIrBr1y6DuheqaX2sra2RlJSEy5cvc4+3334b3bp1w+XLlxEUFNRWRVdJm9/P4MGDkZGRgdLSUm7b7du3wefz4e7u3qrlbY429SkvLwefr/xRY2JiAqC25dueGPLnASFK9DS4T68U03w2b97MXL9+nZk3bx5jYWHB3L17l2EYhvn444+ZV199lTteMS3m/fffZ65fv85s3rzZoKbFaFqfXbt2Maampsx3333HZGZmco9Hjx7pqwpKNK1PfYY2il7T+pSUlDDu7u7Mc889x1
y7do2Jj49nJBIJ88Ybb+irCko0rc/WrVsZU1NTZuPGjcydO3eYM2fOMP3792cGDBigryooKSkpYRITE5nExEQGALNu3TomMTGRm/bX3j4PCFHokAGeYRjmu+++Y7y8vBihUMj07duXiY+P5/ZNnz6dCQ4OVjo+Li6O6dOnDyMUChlvb29m06ZNbVzipmlSn+DgYAZAg8f06dPbvuCN0PT3U5ehBXiG0bw+N27cYEaOHMmIRCLG3d2dmT9/PlNeXt7GpW6cpvX59ttvmR49ejAikYhxdXVlXn75ZebBgwdtXGrVTp482eTfQ3v8PCCEYRiGloslhBBCjFCHuwdPCCGEdAQU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEBXu3r0LHo+Hy5cv67sohBCiFQrwpN2KiIjApEmTGmyPi4sDj8fDo0ePtL62h4cHMjMzERAQoH0BCSFEjzrccrGENKe6uhpCoRAuLi76LgohhGiNWvDE6O3duxf+/v4wMzODt7c31q5dq7Tf29sbK1asQEREBGxsbDBz5swGXfQRERHg8XgNHnFxcQCAwsJCTJs2Dba2thCLxQgLC0NycjL3Gtu2bUOnTp1w9OhRPPHEE7C0tMSYMWOQmZnZVj8GQkgHQwGeGLWEhAS88MILePHFF5GUlITIyEh89tln2LZtm9Jxa9asQUBAABISEvDZZ581uM4333yDzMxM7jF37lw4OTmhe/fuANgvAJcuXcL+/ftx7tw5MAyDsWPHQiqVctcoLy/Hl19+ie3bt+PUqVO4f/8+FixY0Kr1J4R0YHpezY4QrU2fPp0xMTFhLCwslB7m5uYMAKawsJCZOnUqExoaqnTewoULmR49enDPvby8mEmTJikdk5aWxgBgEhMTG7zu3r17GTMzM+b06dMMwzDM7du3GQDMX3/9xR2Tl5fHiEQi5v/+7/8YhmHXRAfApKSkcMd89913jLOzc4t/DoQQogq14Em7NmzYMFy+fFnp8fPPP3P7b9y4gcGDByudM3jwYCQnJ6Ompobb1r9/f7VeLzExEdOmTcN3332HIUOGcK9hamqKoKAg7jh7e3t069YNN27c4LaJxWJ06dKFe+7q6oqcnBzNKkwIIWqiQXakXbOwsICfn5/StgcPHnD/ZxgGPB5PaT/DMCqv05ysrCxMmDABM2bMwIwZM5q8nqrXFggESvt5PF6j5xJCSEtRC54YtR49euDMmTNK286ePYuuXbvCxMRE7etUVlZi4sSJ6N69O9atW9fgNWQyGS5cuMBty8/Px+3bt/HEE0+0rAKEEKIlasETo/bBBx/gySefxPLlyzFlyhScO3cOGzZswMaNGzW6zltvvYX09HQcP34cubm53HY7OztIJBJMnDgRM2fOxA8//AArKyt8/PHH6Ny5MyZOnKjrKhFCiFqoBU+MWt++ffF///d/2LNnDwICAvD5559j2bJliIiI0Og68fHxyMzMRI8ePeDq6so9zp49CwDYunUr+vXrh/Hjx2PgwIFgGAaHDh1q0C1PCCFthcfQTUBCCCHE6FALnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFC/w9tmPyLlQOVXwAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -3766,7 +2319,7 @@ "print('stds.shape (N,H,K) \\t', stds.shape)\n", "\n", "distr = GMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (means, stds, weights)\n", + "distr_args = (means, stds)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -3866,55 +2419,47 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " total_count_names = [\n", - " f\"-total_count-{i}\" for i in range(1, n_components + 1)\n", - " ]\n", + " total_count_names = [f\"-total_count-{i}\" for i in range(1, n_components + 1)]\n", " probs_names = [f\"-probs-{i}\" for i in range(1, n_components + 1)]\n", - " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", - " self.param_names = [i for j in zip(total_count_names, probs_names, weight_names) for i in j]\n", - " self.output_names = self.output_names + self.param_names\n", + " param_names = [i for j in zip(total_count_names, probs_names) for i in j]\n", + " self.output_names = self.output_names + param_names\n", "\n", " # Add first output entry for the sample_mean\n", - " self.output_names.insert(0, \"\")\n", + " self.output_names.insert(0, \"\") \n", "\n", - " self.outputsize_multiplier = 3 * n_components\n", + " self.outputsize_multiplier = 2 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " mu, alpha, weights = output.chunk(3, dim=-1)\n", + " mu, alpha = torch.tensor_split(output, 2, dim=-1)\n", + " return (mu, alpha)\n", "\n", - " return mu, alpha, weights\n", - "\n", - " def scale_decouple(\n", - " self,\n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = 
None,\n", - " eps: float = 1e-6,\n", - " ):\n", - " \"\"\"Scale Decouple\n", + " def scale_decouple(self, \n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float=0.2):\n", + " \"\"\" Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", " # Efficient NBinomial parametrization\n", - " mu, alpha, weights = output\n", - " mu = F.softplus(mu) + eps\n", - " alpha = F.softplus(alpha) + eps # alpha = 1/total_counts\n", - " weights = F.softmax(weights, dim=-1)\n", + " mu, alpha = output\n", + " mu = F.softplus(mu) + 1e-8\n", + " alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(mu.size(dim=0), 1, -1)\n", " mu *= loc\n", - " alpha /= loc + 1.0\n", + " alpha /= (loc + 1.)\n", "\n", " # mu = total_count * (probs/(1-probs))\n", " # => probs = mu / (total_count + mu)\n", " # => probs = mu / [total_count * (1 + mu * (1/total_count))]\n", " total_count = 1.0 / alpha\n", - " probs = (mu * alpha / (1.0 + mu * alpha))\n", - " probs = torch.clamp(probs, eps, 1 - eps)\n", - " return (total_count, probs, weights)\n", + " probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 \n", + " return (total_count, probs)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -3936,10 +2481,16 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " total_count, probs, weights = distr_args\n", + " total_count, probs = distr_args\n", " B, H, K = total_count.size()\n", " Q = len(self.quantiles)\n", " assert total_count.shape == probs.shape\n", + "\n", + " # Sample K ~ Mult(weights)\n", + " # shared across B, H\n", + " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", + " \n", + " weights = (1/K) * 
torch.ones_like(probs, device=probs.device)\n", " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", @@ -3982,15 +2533,17 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " total_count, probs, weights = distr_args\n", + " total_count, probs = distr_args\n", " B, H, K = total_count.size()\n", " \n", + " weights = (1/K) * torch.ones_like(probs, device=probs.device)\n", + " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", "\n", @@ -4014,7 +2567,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -4055,40 +2608,7 @@ "execution_count": null, "id": "b67e2931", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "counts.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "probs.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "samples.shape (N,H,num_samples) torch.Size([2, 2, 2000])\n", - "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", - "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABHt0lEQVR4nO3deVxU1f8/8NcFhmEVBGRTQEzUFHfTxAX8KJi7aZo7+DHTXJAwNbMSzKSw1FzSNAM/mdmn0jIzlUpNv5gLSi750SxATYkUBRSFEc7vD3/cHIdlGAZmez0fj3nonHvuOe8zA8x7zj33XkkIIUBEREQWxcrQARAREVHdYwJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJgYEeOHMHTTz8Nf39/KJVKeHl5oWvXrpg9e7ZavbCwMISFhdV6PJIkIS4uTm/tNW7cGAMHDtRbe5XZv38/JEnC/v3766S/6goLC4MkSZAkCVZWVnB2dkbTpk0xYsQIfPHFFygtLdXYp3HjxoiKiqpWP6mpqYiLi8OtW7eqtd+jfZW9nl988UW12qlMYWEh4uLiyn2PkpOTIUkSMjMz9dYfEVXMxtABWLJvv/0WgwcPRlhYGBITE+Hj44Nr167h+PHj2Lp1K95991257vvvv2/ASE1Dhw4dcPjwYbRs2dLQoVSoSZMm+OSTTwAAd+7cQUZGBr766iuMGDECPXr0wDfffAMXFxe5/vbt21GvXr1q9ZGamor4+HhERUXB1dVV6/106au6CgsLER8fDwAaCe2AAQNw+PBh+Pj41GoMRPQAEwADSkxMRGBgIPbs2QMbm3/eilGjRiExMVGtrjF/qBmaSqWCJEmoV68ennzySUOHUyl7e3uNGJ977jkkJSXh3//+N55//nl89tln8rb27dvXekx3796Fvb19nfRVmQYNGqBBgwYGjYHIkvAQgAHduHEDHh4eah/+Zays1N+aRw8BZGZmQpIkvPPOO1i2bBkCAwPh5OSErl274ueff9Zob8OGDWjWrBmUSiVatmyJLVu2ICoqCo0bN64yzuzsbEyZMgWNGjWCra0tAgMDER8fj/v372s91t27d6NDhw6wt7dHixYt8NFHH2nUOXPmDIYMGYL69evDzs4O7dq1w6ZNm9TqlE1Lf/zxx5g9ezYaNmwIpVKJixcvahwCKHuNKno87KOPPkLbtm1hZ2cHNzc3PP300zh37pxanaioKDg5OeHixYvo378/nJyc4Ofnh9mzZ6OoqEjr16I8EydORP/+/fH5558jKytLLn90Wr60tBSLFy9G8+bNYW9vD1dXV7Rp0wbvvfceACAuLg5z5swBAAQGBspjLXtNyg7JbNu2De3bt4ednZ38jbyiww337t1DbGwsvL29YW9vj9DQUJw8eVKtTkWHqB7+GcvMzJQ/4OPj4+XYyvqs6BCAvt+btWvXom3btnBycoKzszNatGiBV155RSN2InPHGQAD6tq1Kz788ENER0dj7Nix6NChAxQKRbXaWLNmDVq0aIEVK1YAAF577TX0798fGRkZ8lTy+vXrMWXKFAwfPhzLly9HXl4e4uPjtfrQys7ORufOnWFlZYXXX38djz32GA4fPozFixcjMzMTSUlJVbbxyy+/YPbs2Xj55Zfh5eWFDz/8EJMmTULTpk3Rs2dPAMD58+cREhICT09PrFy5Eu7u7ti8eTOioqLw119/Ye7cuWptzp8/H127dsW6detgZWUFT09PZGdnq9Xx8fHB4cOH1cr+/vtvjBs3Dg0bNpTLEhIS8Morr2D06NFISEjAjRs3EBcXh65du+LYsWMICgqS66pUKgwePBiTJk3C7Nmz8dNPP+GNN96Ai4sLXn/99Spfi8oMHjwYu3btwsGDBxEQEFBuncTERMTFxeHVV19Fz549oVKp8L///U8+3v/cc88hNzcXq1atwrZt2+Tp9IdnkE6cOIFz587h1Vd
fRWBgIBwdHSuN65VXXkGHDh3w4YcfIi8vD3FxcQgLC8PJkyfRpEkTrcfn4+OD3bt346mnnsKkSZPw3HPPAUCl3/r1/d5s3boV06ZNw8yZM/HOO+/AysoKFy9exK+//qr1OIjMhiCDuX79uujevbsAIAAIhUIhQkJCREJCgigoKFCrGxoaKkJDQ+XnGRkZAoBo3bq1uH//vlx+9OhRAUB8+umnQgghSkpKhLe3t+jSpYtae1lZWUKhUIiAgAC1cgBi4cKF8vMpU6YIJycnkZWVpVbvnXfeEQDE2bNnKx1jQECAsLOzU9v/7t27ws3NTUyZMkUuGzVqlFAqleLSpUtq+/fr1084ODiIW7duCSGE2LdvnwAgevbsqdFX2bZ9+/aVG8udO3dE586dhY+Pj8jMzBRCCHHz5k1hb28v+vfvr1b30qVLQqlUijFjxshlkZGRAoD473//q1a3f//+onnz5pW+DkI8eA9btWpV4fbvvvtOABBvv/22XBYQECAiIyPl5wMHDhTt2rWrtJ+lS5cKACIjI0NjW0BAgLC2thbnz58vd9vDfZW9nh06dBClpaVyeWZmplAoFOK5555TG9vDP59lIiMj1X7G/v77b42fsTJJSUlqcdfGezNjxgzh6uqq0TeRJeIhAANyd3fHwYMHcezYMbz11lsYMmQILly4gPnz56N169a4fv16lW0MGDAA1tbW8vM2bdoAgDyNfP78eWRnZ2PkyJFq+/n7+6Nbt25Vtr9z50706tULvr6+uH//vvzo168fAODAgQNVttGuXTv4+/vLz+3s7NCsWTO1qe4ff/wRvXv3hp+fn9q+UVFRKCws1PgmP3z48Cr7fVhJSQmeffZZnDt3Drt27ZK/YR8+fBh3797VmPr28/PDv/71L/zwww9q5ZIkYdCgQWplbdq0URuLroQQVdbp3LkzfvnlF0ybNg179uxBfn5+tftp06YNmjVrpnX9MWPGqB0yCQgIQEhICPbt21ftvqujNt6bzp0749atWxg9ejS+/vprrX7HiMwVEwAj0KlTJ8ybNw+ff/45rl69ihdffBGZmZkaCwHL4+7urvZcqVQCeLCwC3iwzgAAvLy8NPYtr+xRf/31F7755hsoFAq1R6tWrQBAqz+gj8ZYFmdZjGVxlrf629fXV20cZaq7Unzq1KnYvXs3vvjiC7Rr106t34ra8/X11ejXwcEBdnZ2GmO5d+9eteIpT9kHVdmYyzN//ny88847+Pnnn9GvXz+4u7ujd+/eOH78uNb9VPe18/b2Lrfs0ddG32rjvRk/fjw++ugjZGVlYfjw4fD09ESXLl2QkpJSCyMgMm5MAIyMQqHAwoULATxYFFdTZR++f/31l8a2R4+Zl8fDwwMRERE4duxYuY9JkybVOMayOK9du6ZRfvXqVTmOhz26iK8ycXFx+PDDD7FhwwZERERo9Augwr4f7bc27dixA5IkyesiymNjY4PY2FicOHECubm5+PTTT3H58mX07dsXhYWFWvVTndcOKP/nJDs7Wy2xs7OzK3dNSU2+YdfWezNx4kSkpqYiLy8P3377LYQQGDhwoF5mcYhMCRMAAyrvDxsAeYVzZd8EtdW8eXN4e3vjv//9r1r5pUuXkJqaWuX+AwcOxJkzZ/DYY4+hU6dOGg99xAgAvXv3xo8//ih/4Jf5z3/+AwcHB51P79u4cSPi4+OxaNGicle4d+3aFfb29ti8ebNa+ZUrV+TDEnUhKSkJ3333HUaPHq12uKQyrq6ueOaZZzB9+nTk5ubKq+cfnQWqqU8//VTt8ERWVhZSU1PVVv03btwYFy5cUEsCbty4ofEzVp3Yavu9cXR0RL9+/bBgwQIUFxfj7NmzNWqPyNTwLAAD6tu3Lxo1aoRBgwahRYsWKC0tRXp6Ot599104OTlh1qxZNe7DysoK8fHxmDJlCp555hn8+9//xq1btxAfHw8fHx+N0w0ftWjRIqSkpCAkJATR0dFo3rw57t27h8zMTOzatQv
r1q1Do0aNahznwoUL5fUGr7/+Otzc3PDJJ5/g22+/RWJiotrFcbR1+PBhTJ06Fd26dUN4eLjG6ZFPPvkkXF1d8dprr+GVV17BhAkTMHr0aNy4cQPx8fGws7OTZ2P05e7du3Icd+/exR9//IGvvvoKO3fuRGhoKNatW1fp/oMGDUJwcDA6deqEBg0aICsrCytWrEBAQIC8Ir5169YAgPfeew+RkZFQKBRo3rw5nJ2ddYo5JycHTz/9NCZPnoy8vDwsXLgQdnZ2mD9/vlxn/Pjx+OCDDzBu3DhMnjwZN27cQGJiosaFhZydnREQEICvv/4avXv3hpubGzw8PMo9HbU23pvJkyfD3t4e3bp1g4+PD7Kzs5GQkAAXFxc88cQT1W6PyKQZeBGiRfvss8/EmDFjRFBQkHBychIKhUL4+/uL8ePHi19//VWtbkVnASxdulSjXZSzynr9+vWiadOmwtbWVjRr1kx89NFHYsiQIaJ9+/ZV7vv333+L6OhoERgYKBQKhXBzcxMdO3YUCxYsELdv3650jAEBAWLAgAEa5eWtGj99+rQYNGiQcHFxEba2tqJt27YiKSlJrU7ZyvTPP/9co81HzwIoW1Ve0eNhH374oWjTpo2wtbUVLi4uYsiQIRpnOERGRgpHR0eNfhcuXKjRXnlCQ0PV+nd0dBRNmjQRzzzzjPj8889FSUmJxj6Prsx/9913RUhIiPDw8BC2trbC399fTJo0ST6rocz8+fOFr6+vsLKyUntNKno/yuur7PX8+OOPRXR0tGjQoIFQKpWiR48e4vjx4xr7b9q0STz++OPCzs5OtGzZUnz22WcaZwEIIcT3338v2rdvL5RKpQAg9/noWQBl9PnebNq0SfTq1Ut4eXkJW1tb4evrK0aOHClOnTpV7mtCZM4kIbRYekxm59atW2jWrBmGDh2K9evXGzocIiKqYzwEYAGys7Px5ptvolevXnB3d0dWVhaWL1+OgoICvRxmICIi08MEwAIolUpkZmZi2rRpyM3NlRfVrVu3Tj6dj4iILAsPARAREVkgngZIRERkgZgAEBERWSCuAcCDW6xevXoVzs7O1b5KGhGRroQQKCgogK+vb5XX5CDSNyYAeHBZ0UdvQkNEVFcuX76slwtqEVUHEwBAvkLa5cuXNa5cVhGVSoW9e/ciIiICCoWiNsOrcxybaTLXsZnruAAgPz8ffn5+Ol+lkagmmADgn5uj1KtXr1oJgIODA+rVq2d2f5Q4NtNkrmMz13E9jIceyRB40ImIiMgCMQEgIiKyQEwAiIiILBDXABARGbHS0lIUFxcbOgwyAQqFAtbW1lrXZwJARGSkiouLkZGRgdLSUkOHQibC1dUV3t7eWi0sZQJARGSEhBC4du0arK2t4efnxwsFUaWEECgsLEROTg4AwMfHp8p9mAAQERmh+/fvo7CwEL6+vnBwcDB0OGQC7O3tAQA5OTnw9PSs8nAAU0oiIiNUUlICALC1tTVwJGRKypJFlUpVZV3OAJiA5SkXyi1/MbxZHUdCRHWNFwmi6qjOzwtnAIiIiCwQEwAiIiILxASAiIjIAnENgAkrb20A1wUQmbeK1gTVFmP8mxIWFoZ27dphxYoVhg7FpHEGgIiI9CoqKgpDhw7VKN+/fz8kScKtW7dq1P62bdvwxhtv1KgNU/DTTz9h0KBB8PX1hSRJ+Oqrr/TaPmcAjExdZ/dERKaiuLgYtra2cHNzM3QoNRIWFoaoqChERUVVWu/OnTto27YtJk6ciOHDh+s9Ds4AEBGRQRQVFSE6Ohqenp6ws7ND9+7dcezYMXl7WFgYZsyYgdjYWHh4eCA8PFwuj4mJAQBkZmZCkiSNR1hYmFZ9lLUXHR2NuXPnws3NDd7e3oiLi6s09sGDB5fbryRJ2LFjh15en379+mHx4sUYNmyYXtp7FBMAIiIyiLlz5+LLL7/Epk2bcOLECTRt2hR9+/ZFbm6uXGfTpk2wsbHB//3f/+GDDz7QaMPPzw/Xrl2
THydPnoS7uzt69uypdR9l/Tg6OuLIkSNITEzEokWLkJKSUmHsSUlJuHbtGn777TcAwK5du+QY+vfvr4+Xp9YZNAGozvGNKVOmQJIkjUUfRUVFmDlzJjw8PODo6IjBgwfjypUrtRs4ERFVaufOnXByclJ79OvXT95+584drF27FkuXLkW/fv3QsmVLbNiwAfb29ti4caNcr2nTpkhMTETz5s3RokULjX6sra3h7e0Nb29vuLq6YurUqejatSvi4uK07gMA2rRpg4ULFyIoKAgTJkxAp06d8MMPP1Q4Pnd3d3h7e+Pvv/+GJEno3r27HIeNjWkcXTdoAlB2fGP16tWV1vvqq69w5MgR+Pr6amyLiYnB9u3bsXXrVhw6dAi3b9/GwIED5ctoEhFR3evVqxfS09PVHh9++KG8/ffff4dKpUK3bt3kMoVCgc6dO+PcuXNyWadOnbTuc9KkSSgoKMCWLVtgZWWldR/AgwTgYT4+PvKNdSpz6tQpNG7cGM7OzhXWWbJkiVoidPDgQUydOlWjrK4ZNE3p16+fWkZYnj///BMzZszAnj17MGDAALVteXl52LhxIz7++GP06dMHALB582b4+fnh+++/R9++fWstdnO1POUCJFGCQABr9l2EkKyN8jQgIjJujo6OaNq0qVrZw7OzQggAmpeuFUKolTk6OmrV3+LFi7F7924cPXpU/jDWtg/gQWLwMEmStLoN86lTpzSSh0dNnToVI0eOlJ+PHTsWw4cPVzu237Bhwyr70jejnqcoLS3F+PHjMWfOHLRq1Upje1paGlQqFSIiIuQyX19fBAcHIzU1tcIEoKioCEVFRfLz/Px8AA9unqDNDRTK6j78r75IomYzFzWNRxIlcgxl/+p7jIZUW++bMTDXsZnruADzHJO2mjZtCltbWxw6dAhjxowB8OD1OH78uLzAT1tffvklFi1ahO+++w6PPfZYrfRRkczMTAQHB1dax83NTe3MBXt7e3h6emokSHXNqBOAt99+GzY2NoiOji53e3Z2NmxtbVG/fn21ci8vL2RnZ1fYbkJCAuLj4zXK9+7dW+3bbla2SEQXgTXcf9eump1G+HD/je/9rpc2jZG+3zdjYq5jM8dxFRYWGjoEg3F0dMQLL7yAOXPmwM3NDf7+/khMTERhYSEmTZqkdTtnzpzBhAkTMG/ePLRq1Ur+2192uqA++qhMaWkpsrKycOXKFTRs2FCvN2+6ffs2Ll68KD/PyMhAenq6PJaaMtoEIC0tDe+99x5OnDhR7Re0vOmdh82fPx+xsbHy8/z8fPj5+SEiIgL16tXTqg+VSoWUlBSEh4drTB3VxJp9F6uuVInpvWqWUa7ZdxGSKEHje78j0+4xCMm6xm0ak9p634yBuY7NXMcF/DP7WB3mdEjurbfekmd6CwoK0KlTJ+zZs0fjS11ljh8/jsLCQixevBiLFy+Wy0NDQ7F//3699FGZ6OhoPP/882jRogXy8/P1mgAcP34cvXr1kp+XfW5FRkYiOTm5xu0bbQJw8OBB5OTkqGU5JSUlmD17NlasWIHMzEx4e3ujuLgYN2/eVHszc3JyEBISUmHbSqUSSqVSo1yhUFT7D4wu+1RGSNY12r+msTzcv5CsISRrs/ujC+j/fTMm5jo2cxyXuY2nTEUfTmFhYfJxeQCws7PDypUrsXLlynLr79+/v8ryqi6oU1UfFfWj7VX3+vXrh8uXL2tVt7L+yvPo66VvRnsdgPHjx+PUqVNqK0h9fX0xZ84c7NmzBwDQsWNHKBQKtanBa9eu4cyZM5UmAERERJbOoDMAVR3fcHd3V6uvUCjg7e2N5s2bAwBcXFwwadIkzJ49G+7u7nBzc8NLL72E1q1by2cFEBERkSaDJgD6OL6xfPly2NjYYOTIkbh79y569+6N5ORkWFvXbCqdiIjInBk0Aaju8Y3MzEyNMjs7O6xatQqrVq3SY2RERETmzWjXABAREVHtYQJARERkgZgAEBERWSCjvQ4A6WZ
5iuZV+8zpwiFERKQfnAEgIiKyQEwAiIiILBAPARARmZJ9CXXbX6/5ddufFsLCwtCuXTusWLHC0KGYNM4AEBGRXkVFRWHo0KEa5fv374ckSbh161aN2t+2bRveeOONGrVhChISEvDEE0/A2dkZnp6eGDp0KM6fP6+39pkAEBGRSSguLgYAuLm5wdnZ2cDR6C4sLEyrq90eOHAA06dPx88//4yUlBTcv38fERERuHPnjl7iYAJAREQGUVRUhOjoaHh6esLOzg7du3fHsWPH5O1hYWGYMWMGYmNj4eHhgfDwcLk8JiYGwIMrxEqSpPEICwvTqo+y9qKjozF37ly4ubnB29sbcXFxlcY+ePDgcvuVJAk7duzQy+uze/duREVFoVWrVmjbti2SkpJw6dIlpKWl6aV9JgBERGQQc+fOxZdffolNmzbhxIkTaNq0Kfr27Yvc3Fy5zqZNm2BjY4P/+7//wwcffKDRhp+fH65duyY/Tp48CXd3d/Ts2VPrPsr6cXR0xJEjR5CYmIhFixap3Wn2UUlJSbh27Rp+++03AMCuXbvkGPr376+Pl0dDXl4egAczIPrARYAWrOyaAU9eWi+XPQmgVLLGdfcQPHElGVaiBNjnXu5CIF5zgIgqsnPnTjg5OamVlZSUyP+/c+cO1q5di+TkZPTr1w8AsGHDBqSkpGDjxo2YM2cOAKBp06ZITEyssB9ra2t4e3sDAO7du4ehQ4eia9euiIuL07oPAGjTpg0WLlwIAAgKCsLq1avxww8/yLMOjyq7W+3hw4chSRK6d+9eq4clhBCIjY1F9+7dERwcrJc2OQNARER616tXL6Snp6s9PvzwQ3n777//DpVKhW7dusllCoUCnTt3xrlz5+SyTp06ad3npEmTUFBQgC1btsDKykrrPoAHCcDDfHx8kJOTU2Wfp06dQuPGjSv98F+yZAmcnJzkx8GDBzF16lSNssrMmDEDp06dwqefflplTNriDAAREemdo6MjmjZtqlZ25coV+f9ld4KVJEmtjhBCrczR0VGr/hYvXozdu3fj6NGj8oextn0ADxKDh0mShNLS0ir7PXXqlEby8KipU6di5MiR8vOxY8di+PDhGDZsmFzWsGHDCvefOXMmduzYgZ9++gmNGjWqMiZtcQaAiIjqXNOmTWFra4tDhw7JZSqVCsePH8fjjz9erba+/PJLLFq0CP/973/x2GOP1UofFcnMzETz5s0rrePm5oamTZvKD3t7e3h6emqUPUoIgRkzZmDbtm348ccfERgYqJeYy3AGgIiI6pyjoyNeeOEFzJkzB25ubvD390diYiIKCwsxadIkrds5c+YMJkyYgHnz5qFVq1bIzs4GANja2sLNzU0vfVSmtLQUWVlZuHLlCho2bKgxs1AT06dPx5YtW/D111/D2dlZHpuLi0u5CUN1MQEgIjIlRnhlPl299dZbKC0txfjx41FQUIBOnTphz549qF+/vtZtHD9+HIWFhVi8eDEWL14sl4eGhmL//v166aMy0dHReP7559GiRQvk5+frNQFYu3YtAMinNJZJSkpCVFRUjdtnAkBERHpV0UVuwsLC5OPyAGBnZ4eVK1di5cqV5dbfv39/leVRUVGVfhhW1UdF/Xz11VcV1n9Yv379cPnyZa3qVtZfeR5+rWqDQdcA/PTTTxg0aBB8fX0hSZLaC65SqTBv3jy0bt0ajo6O8PX1xYQJE3D16lW1NoqKijBz5kx4eHjA0dERgwcPVltoQkRERJoMmgDcuXMHbdu2xerVqzW2FRYW4sSJE3jttddw4sQJbNu2DRcuXMDgwYPV6sXExGD79u3YunUrDh06hNu3b2PgwIFq55sSERGROoMeAujXr598cYZHubi4aFyFadWqVejcuTMuXboEf39/5OXlYePGjfj444/Rp08fAMDmzZvh5+eH77//Hn379i237aKiIhQVFcnP8/PzATyYdVCpVFrFXlZP2/rakoT+E5eKYizrq1SyVisve172r0p
YAeW0UV6sK/ae0yib3qupRpmh1Nb7ZgzMdWzmOi7APMdEpsOk1gDk5eVBkiS4uroCANLS0qBSqRARESHX8fX1RXBwMFJTUytMABISEhAfH69RvnfvXjg4OFQrpsouFakL/Z7k8cCuXZpX7Hu4r+vuIeVuz3Xr8mD/AgC7dlW4v679G5K+3zdjYq5jM8dxFRYWGjoEsmAmkwDcu3cPL7/8MsaMGYN69eoBALKzs2Fra6uxmtPLy0s+XaI88+fPR2xsrPw8Pz8ffn5+iIiIkNuuikqlQkpKCsLDwzUuIFETa/Zd1FtbZSr6Bl7W1xNXktXKSyVr5Lp1gVvuEViJEnRu7Ab0iK1wf137N4Taet+MgbmOzVzHBfwz+1iZ2l4IRuZFm4sXlTGJBEClUmHUqFEoLS3F+++/X2X98q7y9DClUgmlUqlRrlAoqv0HRpd9KiMemY7Xh4riK+vLqoLDDlaiBFaiBMcz/sbPJRmaFbSM1Rj/aOv7fTMm5jo2cxxXZeNRKBSQJAl///03GjRooNfTy8j8CCFQXFyMv//+G1ZWVrC1ta1yH6NPAFQqFUaOHImMjAz8+OOPat/Qvb29UVxcjJs3b6rNAuTk5CAkpPxpbSIiU2BtbY1GjRrhypUryMzMNHQ4ZCIcHBzg7+8PK6uq1/gbdQJQ9uH/22+/Yd++ffLdl8p07NgRCoUCKSkp8nWWr127hjNnzlR69ygiIlPg5OSEoKAgLhYkrVhbW8PGxkbr2SKDJgC3b9/GxYv/HEfOyMhAeno63Nzc4Ovri2eeeQYnTpzAzp07UVJSIh/Xd3Nzg62tLVxcXDBp0iTMnj0b7u7ucHNzw0svvYTWrVvLZwUQ1bl9CVXXMaOruVHtsra2hrW1/g8NEhk0ATh+/Dh69eolPy9bmBcZGYm4uDjs2LEDANCuXTu1/fbt2ydfGnH58uWwsbHByJEjcffuXfTu3RvJyckW8wvz5KX1WtR6p9bjICIi02LQBODRy0I+SpvVr3Z2dli1ahVWrVqlz9CIiIjMGm8HTEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGM+kJApCcVnJf+5KUbWjdR1emGP/s/X62QylXV+fM8d56ISG84A0BERGSBOANgAQ7/of03fSIisgycASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJABr0S4E8//YSlS5ciLS0N165dw/bt2zF06FB5uxAC8fHxWL9+PW7evIkuXbpgzZo1aNWqlVynqKgIL730Ej799FPcvXsXvXv3xvvvv49GjRoZYETVszzlgqFDMD9V3U+g+0t1EwcRkZEzaAJw584dtG3bFhMnTsTw4cM1ticmJmLZsmVITk5Gs2bNsHjxYoSHh+P8+fNwdnYGAMTExOCbb77B1q1b4e7ujtmzZ2PgwIFIS0uDtbV1XQ+JalNVH+7mRJux8uZIRFQDOh0CSE5ORmFhYY0779evHxYvXoxhw4ZpbBNCYMWKFViwYAGGDRuG4OBgbNq0CYWFhdiyZQsAIC8vDxs3bsS7776LPn36oH379ti8eTNOnz6N77//vsbxERERmSudZgDmz5+P6OhojBgxApMmTUJISIi+40JGRgays7MREREhlymVSoSGhiI1NRVTpkxBWloaVCqVWh1fX18EBwcjNTUVffv2LbftoqIiFBUVyc/z8/MBACqVCiqVSqv4yuppW788kijRed8ypZL+ZznK2qxO25WNRevXSNT+khR9vG9V0mYcVfWvQxt1MjYDMNdxAeY5JjIdOiUAV65cwbfffovk5GT06tULgYGBmDhxIiIjI+Ht7a2XwLKzswEAXl5eauVeXl7IysqS69ja2qJ+/foadcr2L09CQgLi4+M1yvfu3QsHB4dqxZmSklKt+g8L1HnPf1x313/yVSbXrYvWdQPvVryeYdcubdc6tNC6P539//erJu9b1bQ
Yx65dtdZG7Y7NcMxxXPqYSSXSlU4JgLW1NQYPHozBgwcjJycHmzdvRnJyMl577TU89dRTmDRpEgYNGgQrq5p/o5MkSe25EEKj7FFV1Zk/fz5iY2Pl5/n5+fDz80NERATq1aunVVwqlQopKSkIDw+HQqHQap9Hrdl3Uaf9HvbEleQat/GoUskauW5d4JZ7BFZazlIcaxRV4bbpvZpq1/HBZdrVqwHVkzNr/L5VSZtx9IitfLsObejjZ9IYmeu4gH9mH4kMocaLAD09PdGtWzecP38eFy5cwOnTpxEVFQVXV1ckJSUhLCxMp3bLZhKys7Ph4+Mjl+fk5MizAt7e3iguLsbNmzfVZgFycnIqPSyhVCqhVCo1yhUKRbX/wOiyTxmhh+l7bT+gdW1b2/YrG4vWr49Uql29mvj/sdTkfauSNuOoqu8atFGrYzMgcxyXuY2HTIvOX9H/+usvvPPOO2jVqhXCwsKQn5+PnTt3IiMjA1evXsWwYcMQGRmpc2CBgYHw9vZWm/YrLi7GgQMH5A/3jh07QqFQqNW5du0azpw5UyvrEoiIiMyFTjMAgwYNwp49e9CsWTNMnjwZEyZMgJubm7zd3t4es2fPxvLlyytt5/bt27h48Z9p8IyMDKSnp8PNzQ3+/v6IiYnBkiVLEBQUhKCgICxZsgQODg4YM2YMAMDFxQWTJk3C7Nmz4e7uDjc3N7z00kto3bo1+vTpo8vQiIiILIJOCYCnpycOHDiArl27VljHx8cHGRkZlbZz/Phx9OrVS35edlw+MjISycnJmDt3Lu7evYtp06bJFwLau3evfA0AAFi+fDlsbGwwcuRI+UJAycnJvAYAERFRJXRKAEJDQ9GhQweN8uLiYmzduhUTJkyAJEkICAiotJ2wsDAIISrcLkkS4uLiEBcXV2EdOzs7rFq1CqtWrdI6fiIiIkun0xqAiRMnIi8vT6O8oKAAEydOrHFQREREVLt0SgAqOs3uypUrcHFxqXFQREREVLuqdQigffv2kCQJkiShd+/esLH5Z/eSkhJkZGTgqaee0nuQREREpF/VSgDK7tSXnp6Ovn37wsnJSd5ma2uLxo0bl3tTHyKjcXAZgBYP/q3oXHtTucnOozcMElZQG5upjIOIDKJaCcDChQsBAI0bN8azzz4LOzu7WgmKiIiIapdOZwHU5AI/REREZHhaJwBubm64cOECPDw8UL9+/UqvtZ+bm6uX4IiIiKh2aJ0ALF++XL4Az/Lly6u8IQ+RmkePVwM4/McNjbKuTdzrIhoiIoundQLw8LR/VFRUbcRCREREdUTrBKA6t63U9pa6REREZBhaJwCurq5VTvuXXSCopKT2blFLxunJS+sr3riP0/pERMZG6wRg3759tRkHERER1SGtE4DQ0NDajIOIiIjqkNYJwKlTpxAcHAwrKyucOnWq0rpt2rSpcWBEBlPOGQtqeIU9IjIDWicA7dq1Q3Z2Njw9PdGuXTtIklTurXy5BoCIiMj4aZ0AZGRkoEGDBvL/iYiIyHRpnQAEBASU+38ii1PVIQJjwUMZRFQJne4FAADnz5/HqlWrcO7cOUiShBYtWmDmzJlo3ry5PuMjIiKiWmCly05ffPEFgoODkZaWhrZt26JNmzY4ceIEgoOD8fnnn+stuPv37+PVV19FYGAg7O3t0aRJEyxatAilpf/cxlUIgbi4OPj6+sLe3h5hYWE4e/as3mIgIiIyRzrNAMydOxfz58/HokWL1MoXLlyIefPmYcSIEXoJ7u2338a6deuwadMmtGrVCsePH8fEiRPh4uKCWbNmAQASExOxbNkyJCcno1mzZli8eDHCw8Nx/vx5+d4FREREpE6nBCA7OxsTJkzQKB83bhyWLl1a46DKHD58GEOGDMGAAQMAAI0bN8ann36K48ePA3jw7X/FihVYsGABhg0bBgDYtGkTvLy8sGXLFkyZMkVvsVDd4A2CiIjqhk4JQFhYGA4ePIimTZuqlR86dAg9evTQS2AA0L1
7d6xbtw4XLlxAs2bN8Msvv+DQoUNYsWIFgAdnI2RnZyMiIkLeR6lUIjQ0FKmpqRUmAEVFRSgqKpKfl93nQKVSQaVSaRVbWT1t65dHEjU/XbJUsq5xGxW1qa+2VaL8I03atl/R/jWJRZ9t6hZIFT83OsRX7bHV4Ge3Lunjd81YmeOYyHRonQDs2LFD/v/gwYMxb948pKWl4cknnwQA/Pzzz/j8888RHx+vt+DmzZuHvLw8tGjRAtbW1igpKcGbb76J0aNHA3gwEwEAXl5eavt5eXkhKyurwnYTEhLKjXPv3r1wcHCoVowpKSnVqv+wQJ33/Md19xA9tFK+XLcuemlnV0EFG7T8Yl/h/jWQcruZ/hutjl27qqjQQuemtR5blTEYl5r8rhmrwsJCQ4dAFkwS5V3NpxxWVtp9q9DnhYC2bt2KOXPmYOnSpWjVqhXS09MRExODZcuWITIyEqmpqejWrRuuXr0KHx8feb/Jkyfj8uXL2L17d7ntljcD4Ofnh+vXr2t9J0OVSoWUlBSEh4dDoVDoNL41+y7qtN/DnriSXOM2HlUqWSPXrQvcco/ASg+zFJ0bu5VbfjQzt0b760IlrJByuxnCnS5AIZVWvUNt6RFb+faDy6rdZLXHVlUMRkIfv2vGKj8/Hx4eHsjLy+NdVKnOaT0D8PDK+7oyZ84cvPzyyxg1ahQAoHXr1sjKykJCQgIiIyPh7e0N4MFMwMMJQE5OjsaswMOUSiWUSqVGuUKhqPYfGF32KSP0MMWujw/oytrWR/sVfRhp23ZtfFArpFLDJgBV/czUIDatx2ZiH6Y1+V0zVuY2HjItBj4QWrnCwkKNmQdra2s5GQkMDIS3t7fa1GBxcTEOHDiAkJDamxonIiIydTpfCOjOnTs4cOAALl26hOLiYrVt0dHRNQ4MAAYNGoQ333wT/v7+aNWqFU6ePIlly5bh3//+N4AHhxtiYmKwZMkSBAUFISgoCEuWLIGDgwPGjBmjlxiIiIjMkU4JwMmTJ9G/f38UFhbizp07cHNzw/Xr1+Hg4ABPT0+9JQCrVq3Ca6+9hmnTpiEnJwe+vr6YMmUKXn/9dbnO3LlzcffuXUybNg03b95Ely5dsHfvXl4DgIiIqBI6JQAvvvgiBg0ahLVr18LV1RU///wzFAoFxo0bJ1+gRx+cnZ2xYsUK+bS/8kiShLi4OMTFxemtXyIiInOn0xqA9PR0zJ49G9bW1rC2tkZRURH8/PyQmJiIV155Rd8xEhERkZ7plAAoFApIkgTgwTn3ly5dAgC4uLjI/yciIiLjpdMhgPbt2+P48eNo1qwZevXqhddffx3Xr1/Hxx9/jNatW+s7RiIiItIznRKAJUuWoKDgweXZ3njjDURGRuKFF15A06ZNkZSUpNcAiXh/ACIi/dMpAejUqZP8/wYNGmCXiV1SlIgA7Euouk6v+bUfBxEZhM7XAQAeXHHv/PnzkCQJzZs3R4MGDfQVF5F50+bDl4ioFum0CDA/Px/jx49Hw4YNERoaip49e8LX1xfjxo1DXl6evmMkIiIiPdMpAXjuuedw5MgR7Ny5E7du3UJeXh527tyJ48ePY/LkyfqOkYiIiPRMp0MA3377Lfbs2YPu3bvLZX379sWGDRvw1FNP6S04IiIiqh06zQC4u7vDxcVFo9zFxQX169evcVBERERUu3RKAF599VXExsbi2rVrcll2djbmzJmD1157TW/BERERUe3Q+hBA+/bt5av/AcBvv/2GgIAA+Pv7AwAuXboEpVKJv//+G1OmTNF/pERERKQ3WicAQ4cOrcUwiIiIqC5pnQAsXLiwNuMgIiKiOlSjCwGlpaXh3LlzkCQJLVu2RPv27fUVFxEREdUinRKAnJwcjBo1Cvv374erqyuEEMjLy0OvXr2wdetWXhGQiIjIyOl0FsDMmTORn5+Ps2fPIjc3Fzdv3sSZM2eQn5+P6OhofcdIREREeqZTArB7926sXbsWjz/+uFzWsmVLrFmzBt99953
eggOAP//8E+PGjYO7uzscHBzQrl07pKWlyduFEIiLi4Ovry/s7e0RFhaGs2fP6jUGIiIic6NTAlBaWgqFQqFRrlAoUFpaWuOgyty8eRPdunWDQqHAd999h19//RXvvvsuXF1d5TqJiYlYtmwZVq9ejWPHjsHb2xvh4eHy7YqJiIhIk04JwL/+9S/MmjULV69elcv+/PNPvPjii+jdu7fegnv77bfh5+eHpKQkdO7cGY0bN0bv3r3x2GOPAXjw7X/FihVYsGABhg0bhuDgYGzatAmFhYXYsmWL3uIgIiIyNzotAly9ejWGDBmCxo0bw8/PD5Ik4dKlS2jdujU2b96st+B27NiBvn37YsSIEThw4AAaNmyIadOmyTccysjIQHZ2NiIiIuR9lEolQkNDkZqaWuEFiYqKilBUVCQ/z8/PBwCoVCqoVCqtYiurp239NfsuapRJ5dSrrlLJWg+tlN+mvtpWifLzzJq0X1Gb2u6n6/7GrFbGpuXPd22q7u+aKTHHMZHpkIQQQtedU1JS8L///Q9CCLRs2RJ9+vTRZ2yws7MDAMTGxmLEiBE4evQoYmJi8MEHH2DChAlITU1Ft27d8Oeff8LX11fe7/nnn0dWVhb27NlTbrtxcXGIj4/XKN+yZQscHBz0OgYioooUFhZizJgxyMvLQ7169QwdDlmYaicA9+/fh52dHdLT0xEcHFxbcQEAbG1t0alTJ6Smpspl0dHROHbsGA4fPiwnAFevXoWPj49cZ/Lkybh8+TJ2795dbrvlzQD4+fnh+vXrWv8SqlQqpKSkIDw8vNz1EI8qbwZAH564kqz3Nksla+S6dYFb7hFYiZIat9e5sVu55Uczc/XeZlVUwgopt5sh3OkCFJL+1qsYg1oZW49Y/bRTA9X9XTMl+fn58PDwYAJABlHtQwA2NjYICAhASUnNPxiq4uPjg5YtW6qVPf744/jyyy8BAN7e3gAe3Ijo4QQgJycHXl5eFbarVCqhVCo1yhUKRbX/wGi7j6iFqXoAevmArqxtfbRf0YdRTdqu6QecQio1uwSgjF7HZkQfuLr8fho7cxsPmRad7wY4f/585Obq/g1OG926dcP58+fVyi5cuICAgAAAQGBgILy9vZGSkiJvLy4uxoEDBxASElKrsREREZkynRYBrly5EhcvXoSvry8CAgLg6Oiotv3EiRN6Ce7FF19ESEgIlixZgpEjR+Lo0aNYv3491q9fDwCQJAkxMTFYsmQJgoKCEBQUhCVLlsDBwQFjxozRSwxERETmSKcEYOjQoZAkCTVYP6iVJ554Atu3b8f8+fOxaNEiBAYGYsWKFRg7dqxcZ+7cubh79y6mTZuGmzdvokuXLti7dy+cnZ1rNTYiIiJTVq0EoLCwEHPmzMFXX30FlUqF3r17Y9WqVfDw8Kit+DBw4EAMHDiwwu2SJCEuLg5xcXG1FgMREZG5qdYagIULFyI5ORkDBgzA6NGj8f333+OFF16ordiIiIiollRrBmDbtm3YuHEjRo0aBQAYO3YsunXrhpKSElhb184qdyIiItK/as0AXL58GT169JCfd+7cGTY2NmqXBCYiIiLjV60EoKSkBLa2tmplNjY2uH//vl6DIiIiotpVrUMAQghERUWpXUTn3r17mDp1qtqpgNu2bdNfhERERKR31UoAIiMjNcrGjRunt2CIiIioblQrAUhKSqqtOIiq5fAfN8ot79rEvY4jISIyTeZ3T1QiIiKqEhMAIiIiC8QEgIiIyALpdC8Aouqo6Hg9mYB9CZVv7zW/buIgIr3jDAAREZEF4gyAkXvy0npDh0BERGaIMwBEREQWiAkAERGRBWICQEREZIG4BoDM3sNnIZRK1gAvFkhExBkAIiIiS2RSCUBCQgIkSUJMTIxcJoRAXFwcfH19YW9vj7CwMJw9e9ZwQRIREZkAk0kAjh07hvXr16NNmzZq5YmJiVi2bBlWr16NY8eOwdvbG+Hh4SgoKDBQpERERMbPJBKA27dvY+z
YsdiwYQPq168vlwshsGLFCixYsADDhg1DcHAwNm3ahMLCQmzZssWAERMRERk3k1gEOH36dAwYMAB9+vTB4sWL5fKMjAxkZ2cjIiJCLlMqlQgNDUVqaiqmTJlSbntFRUUoKiqSn+fn5wMAVCoVVCqVVjGV1dO2viRKtKr3qFLJWqf9aqKsT0P0XVMqoZnTPjyOsv+XV8/UlY2pTsem5c9/zbqo3u+aKTHHMZHpMPoEYOvWrThx4gSOHTumsS07OxsA4OXlpVbu5eWFrKysCttMSEhAfHy8RvnevXvh4OBQrfhSUlK0qhdYrVb/cd09RMc9ay7XrYvB+tbVrvKO/JSz6j/ldrNaj8VQ6nRsu3bVWVfa/q6ZksLCQkOHQBbMqBOAy5cvY9asWdi7dy/s7OwqrCdJktpzIYRG2cPmz5+P2NhY+Xl+fj78/PwQERGBevXqaRWbSqVCSkoKwsPDoVAoqqy/Zt9Frdp91BNXknXaryZKJWvkunWBW+4RWOk4c2EonRu7aZQdzcyV/182tnCnC1BIpXUZWq1TCSuk3G5mfGPrEVt1nUpU93fNlJTNPhIZglEnAGlpacjJyUHHjh3lspKSEvz0009YvXo1zp8/D+DBTICPj49cJycnR2NW4GFKpRJKpVKjXKFQVPsPjLb7CB2n0w35AWwlSkwuATie8bdGWXkT4gqp1Lg+JPXI6Mampw9tXX4/jZ25jYdMi1EfCO3duzdOnz6N9PR0+dGpUyeMHTsW6enpaNKkCby9vdWmBouLi3HgwAGEhBhu6pyIiMjYGfUMgLOzM4KDg9XKHB0d4e7uLpfHxMRgyZIlCAoKQlBQEJYsWQIHBweMGTPGECETERGZBKNOALQxd+5c3L17F9OmTcPNmzfRpUsX7N27F87OzoYOjYiIyGiZXAKwf/9+teeSJCEuLg5xcXEGiYeIiMgUGfUaACIiIqodTACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgskMldCtjYLU+5YOgQiIiIqsQZACIiIgvEBICIiMgC8RAAERnWvoTKtwsrAC3qJBQiS8IZACIiIgvEGQAiql1VfcMnIoPgDAAREZEFYgJARERkgYz6EEBCQgK2bduG//3vf7C3t0dISAjefvttNG/eXK4jhEB8fDzWr1+PmzdvokuXLlizZg1atWplwMi19+Sl9YYOgYiILJBRzwAcOHAA06dPx88//4yUlBTcv38fERERuHPnjlwnMTERy5Ytw+rVq3Hs2DF4e3sjPDwcBQUFBoyciIjIuBn1DMDu3bvVniclJcHT0xNpaWno2bMnhBBYsWIFFixYgGHDhgEANm3aBC8vL2zZsgVTpkwpt92ioiIUFRXJz/Pz8wEAKpUKKpVKq9jK6j1aXxIl2g3u/yuVrKtVvy6UxWSMsdVU2ZhUwqhzX52UjcncxiaPS8vfTVNijmMi0yEJIYShg9DWxYsXERQUhNOnTyM4OBh//PEHHnvsMZw4cQLt27eX6w0ZMgSurq7YtGlTue3ExcUhPj5eo3zLli1wcHCotfiJiB5WWFiIMWPGIC8vD/Xq1TN0OGRhTCYBEEJgyJAhuHnzJg4ePAgASE1NRbdu3fDnn3/C19dXrvv8888jKysLe/bsKbet8mYA/Pz8cP36da1/CVUqFVJSUhAeHg6FQiGXr9l3sVrjeuJKcrXq14VSyRq5bl3glnsEVtWc0TB2ZWMLd7oAhVRq6HD0SiWskHK7mdmNTR7XI79r5iA/Px8eHh5MAMggjPoQwMNmzJiBU6dO4dChQxrbJElSey6E0Ch7mFKphFKp1ChXKBTV/gPz6D6imtPmxvwBayVKjDq+mlBIpWb1Ifkwcx2bLr+fxs7cxkOmxSQSgJkzZ2LHjh346aef0KhRI7nc29sbAJCdnQ0fHx+5PCcnB15eXnUeJ5mOo5m55SY3XZu4GyAaIqK6Z9SrhYQQmDF
jBrZt24Yff/wRgYGBatsDAwPh7e2NlJQUuay4uBgHDhxASEhIXYdLRERkMox6BmD69OnYsmULvv76azg7OyM7OxsA4OLiAnt7e0iShJiYGCxZsgRBQUEICgrCkiVL4ODggDFjxhg4eiIiIuNl1AnA2rVrAQBhYWFq5UlJSYiKigIAzJ07F3fv3sW0adPkCwHt3bsXzs7OdRwtERGR6TDqBECbExQkSUJcXBzi4uJqPyAiIiIzYdRrAIiIiKh2MAEgIiKyQEZ9CICISHZwGVDR9Q16za/bWIjMAGcAiIiILBBnAIgecviPGxplvDgQEZkjJgBEVWBSQETmiIcAiIiILBATACIiIgvEBICIiMgCcQ0AEZm+fQmVb+dpgkQaOANARERkgZgAEBERWSAeAiDSQXmnBlaEpwwSkTHiDAAREZEF4gwAUS3jhYTMRFULDQEuNiSTwgSghtbsuwghWZe77clL6+s4GiIiIu0wASAi88dv70QazGYNwPvvv4/AwEDY2dmhY8eOOHjwoKFDIiIiMlpmkQB89tlniImJwYIFC3Dy5En06NED/fr1w6VLlwwdGhERkVEyi0MAy5Ytw6RJk/Dcc88BAFasWIE9e/Zg7dq1SEjQYuqPqI5V5zTC8nARIRHVlMknAMXFxUhLS8PLL7+sVh4REYHU1NRy9ykqKkJRUZH8PC8vDwCQm5sLlUqlVb8qlQqFhYUoKsmrcBFgwb37WrVlbEolgcLCQhTcuw8rUWLocPTKXMZ243axRplKWKGwsBA3pGIopFIDRFU76mxcN6pIysp5zavdxiMKCgoAAEKIau1HpA8mnwBcv34dJSUl8PLyUiv38vJCdnZ2ufskJCQgPj5eozwwMLBWYiQiUxBnsDYKCgrg4uKih/6JtGfyCUAZSZLUngshNMrKzJ8/H7GxsfLz0tJS5Obmwt3dvcJ9HpWfnw8/Pz9cvnwZ9erV0z1wI8SxmSZzHZu5jgt48HeqoKAAvr6+hg6FLJDJJwAeHh6wtrbW+Lafk5OjMStQRqlUQqlUqpW5urrq1H+9evXM7o9SGY7NNJnr2Mx1XPzmT4Zi8mcB2NraomPHjkhJSVErT0lJQUhIiIGiIiIiMm4mPwMAALGxsRg/fjw6deqErl27Yv369bh06RKmTp1q6NCIiIiMklkkAM8++yxu3LiBRYsW4dq1awgODsauXbsQEBBQa30qlUosXLhQ41CCOeDYTJO5js1cx0VkaJLg+SdEREQWx+TXABAREVH1MQEgIiKyQEwAiIiILBATACIiIgvEBEAH5nDr4YSEBDzxxBNwdnaGp6cnhg4divPnz6vVEUIgLi4Ovr6+sLe3R1hYGM6ePWugiHWXkJAASZIQExMjl5ny2P7880+MGzcO7u7ucHBwQLt27ZCWliZvN9Wx3b9/H6+++ioCAwNhb2+PJk2aYNGiRSgt/ef6/6Y6NiKjJKhatm7dKhQKhdiwYYP49ddfxaxZs4Sjo6PIysoydGjV0rdvX5GUlCTOnDkj0tPTxYABA4S/v7+4ffu2XOett94Szs7O4ssvvxSnT58Wzz77rPDx8RH5+fkGjLx6jh49Kho3bizatGkjZs2aJZeb6thyc3NFQECAiIqKEkeOHBEZGRni+++/FxcvXpTrmOrYFi9eLNzd3cXOnTtFRkaG+Pzzz4WTk5NYsWKFXMdUx0ZkjJgAVFPnzp3F1KlT1cpatGghXn75ZQNFpB85OTkCgDhw4IAQQojS0lLh7e0t3nrrLbnOvXv3hIuLi1i3bp2hwqyWgoICERQUJFJSUkRoaKicAJjy2ObNmye6d+9e4XZTHtuAAQPEv//9b7WyYcOGiXHjxgkhTHtsRMaIhwCqoezWwxEREWrlld162FSU3RLZzc0NAJCRkYHs7Gy1sSqVSoSGhprMWKdPn44BAwagT58+auWmPLYdO3agU6dOGDFiBDw9PdG+fXts2LBB3m7KY+vevTt++OEHXLhwAQDwyy+/4NChQ+j
fvz8A0x4bkTEyiysB1hVdbj1sCoQQiI2NRffu3REcHAwA8njKG2tWVladx1hdW7duxYkTJ3Ds2DGNbaY8tj/++ANr165FbGwsXnnlFRw9ehTR0dFQKpWYMGGCSY9t3rx5yMvLQ4sWLWBtbY2SkhK8+eabGD16NADTft+IjBETAB1U59bDpmDGjBk4deoUDh06pLHNFMd6+fJlzJo1C3v37oWdnV2F9UxxbKWlpejUqROWLFkCAGjfvj3Onj2LtWvXYsKECXI9UxzbZ599hs2bN2PLli1o1aoV0tPTERMTA19fX0RGRsr1THFsRMaIhwCqQZdbDxu7mTNnYseOHdi3bx8aNWokl3t7ewOASY41LS0NOTk56NixI2xsbGBjY4MDBw5g5cqVsLGxkeM3xbH5+PigZcuWamWPP/44Ll26BMC037c5c+bg5ZdfxqhRo9C6dWuMHz8eL774IhISEgCY9tiIjBETgGowp1sPCyEwY8YMbNu2DT/++CMCAwPVtgcGBsLb21ttrMXFxThw4IDRj7V37944ffo00tPT5UenTp0wduxYpKeno0mTJiY7tm7dummcrnnhwgX5xlem/L4VFhbCykr9T5K1tbV8GqApj43IKBlwAaJJKjsNcOPGjeLXX38VMTExwtHRUWRmZho6tGp54YUXhIuLi9i/f7+4du2a/CgsLJTrvPXWW8LFxUVs27ZNnD59WowePdpkT7l6+CwAIUx3bEePHhU2NjbizTffFL/99pv45JNPhIODg9i8ebNcx1THFhkZKRo2bCifBrht2zbh4eEh5s6dK9cx1bERGSMmADpYs2aNCAgIELa2tqJDhw7yqXOmBEC5j6SkJLlOaWmpWLhwofD29hZKpVL07NlTnD592nBB18CjCYApj+2bb74RwcHBQqlUihYtWoj169erbTfVseXn54tZs2YJf39/YWdnJ5o0aSIWLFggioqK5DqmOjYiY8TbARMREVkgrgEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEg0oPMzExIkoT09HRDh0JEpBUmAGRWhBDo06cP+vbtq7Ht/fffh4uLi3znPCIiS8YEgMyKJElISkrCkSNH8MEHH8jlGRkZmDdvHt577z34+/sbMEIiIuPABIDMjp+fH9577z289NJLyMjIgBACkyZNQu/evREVFaVRf/To0Rg1apRamUqlgoeHB5KSkgAAu3fvRvfu3eHq6gp3d3cMHDgQv//+e4UxJCcnw9XVVa3sq6++giRJamXffPMNOnbsCDs7OzRp0gTx8fG4f/++vD0uLg7+/v5QKpXw9fVFdHR0NV8NIqLy2Rg6AKLaEBkZie3bt2PixIkYPnw4zpw5gzNnzpRbd+zYsRg5ciRu374NJycnAMCePXtw584dDB8+HABw584dxMbGonXr1rhz5w5ef/11PP3000hPT9e4h7229uzZg3HjxmHlypXo0aMHfv/9dzz//PMAgIULF+KLL77A8uXLsXXrVrRq1QrZ2dn45ZdfdOqLiEiDYW9GSFR7/vrrL9GgQQNhZWUltm3bVmG94uJi4eHhIf7zn//IZaNHjxYjRoyocJ+cnBwBQL4VbUZGhgAgTp48KYQQIikpSbi4uKjts337dvHwr1yPHj3EkiVL1Op8/PHHwsfHRwghxLvvviuaNWsmiouLtRovEVF18BAAmS1PT088//zzePzxx/H0009XWE+hUGDEiBH45JNPADz4tv/1119j7Nixcp3ff/8dY8aMQZMmTVCvXj0EBgYCQI0WFKalpWHRokVwcnKSH5MnT8a1a9dQWFiIESNG4O7du2jSpAkmT56M7du3qx0eICKqCR4CILNmY2MDG5uqf8zHjh2L0NBQ5OTkICUlBXZ2dujXr5+8fdCgQfDz88OGDRvg6+uL0tJSBAcHo7i4uNz2rKysIIRQK1OpVGrPS0tLER8fj2HDhmnsb2dnBz8
/P5w/fx4pKSn4/vvvMW3aNCxduhQHDhyAQqHQZvhERBViAkAEICQkBH5+fvjss8/w3XffYcSIEbC1tQUA3LhxA+fOncMHH3yAHj16AAAOHTpUaXsNGjRAQUEB7ty5A0dHRwDQuEZAhw4dcP78eTRt2rTCduzt7TF48GAMHjwY06dPR4sWLXD69Gl06NChBqMlImICQATgwemDY8aMwbp163DhwgXs27dP3la/fn24u7tj/fr18PHxwaVLl/Dyyy9X2l6XLl3g4OCAV155BTNnzsTRo0eRnJysVuf111/HwIED4efnhxEjRsDKygqnTp3C6dOnsXjxYiQnJ6OkpERu6+OPP4a9vT0CAgJq4yUgIgvDNQBE/9/YsWPx66+/omHDhujWrZtcbmVlha1btyItLQ3BwcF48cUXsXTp0krbcnNzw+bNm7Fr1y60bt0an376KeLi4tTq9O3bFzt37kRKSgqeeOIJPPnkk1i2bJn8Ae/q6ooNGzagW7duaNOmDX744Qd88803cHd31/vYicjySOLRA5VERERk9jgDQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQW6P8BNNz4zl/QBbAAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABb6ElEQVR4nO3deVxU1fsH8M8MDMMMm7IPsiui4r7hmpiCS25tVlZKmlmWaVaWaYlLmJZmm5ZlYi5lZfb1Vy6gCWm4oGIumJosgjAgCLLDLOf3xzQXLusM2yw879drXjD33rnzHGaYZ85yzxEwxhgIIYQQYlaEhg6AEEIIIS2PEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEnwzRUVFQSAQwNraGmlpabX2h4SEoGfPnrxtvr6+EAgE3M3a2hpdunTB4sWLkZubyzs2IiICAoEAQqEQycnJtc5fUlICe3t7CAQChIeHNxpvzee2tbVFcHAwvvvuO/0K3ght3DXL0xzh4eGwtbXV6VhfX1/e3yM1NRUCgQBRUVHcNu1rl5qaqlcckZGR+PXXX2ttj42NhUAgQGxsrF7nq4s2Xu1NKBTCyckJEydOxKlTp5p9fl2Eh4fD19eXt00gECAiIkKv82RmZiIiIgIXL16stU/7PiGEtDxK8C2koqICy5cv1/n44cOH49SpUzh16hQOHTqEefPm4auvvsL48ePrPN7W1hbbt2+vtf2nn36CQqGASCRq0nNrk9ysWbOwZcsWnc9h7Pbv34933323wWMeeughnDp1CjKZTK9z15fg+/fvj1OnTqF///56na8hCxYswKlTp3DixAmsXbsWf//9N0aPHo3ExMQWew59nDp1Cs8//7xej8nMzMTKlSvrTPDPP/98m31hIaS9sTR0AOZi/Pjx2LNnD9544w306dOn0eM7dOiAIUOGcPdHjx6NoqIirF69Gjdu3EDXrl15xz/xxBPYsWMHVq5cCaGw6nvZtm3b8PDDD+PAgQM6x1rzuceOHQsfHx9s3LgRL730Up2PUalUUCqVEIvFOj+PIfXr16/RY1xcXODi4tJiz2lvb8/7u7YEb29v7pzDhw9Hly5dMGbMGGzevBlff/11nY8pKyuDtbV1q9SMW7p8np6e8PT0bNFzEkI0qAbfQpYsWQInJye89dZbTT6Hg4MDANRZG589ezbS09MRExPDbbtx4wZOnjyJ2bNnN/k5AU3CDwwM5LoYtM3D69evx5o1a+Dn5wexWIzjx48DAA4cOIChQ4dCKpXCzs4OoaGh9dbC0tPT8cgjj8De3h4ODg545plncPfuXd4xe/fuRVhYGGQyGSQSCbp37463334bJSUldZ7z6tWrGDNmDGxsbODi4oJXXnkFpaWlvGNqNtHXpa4m+sTEREyaNAmurq4Qi8Xw8PDAQw89hIyMDACaJuqSkhLs2LGDaz4PCQkBUH8T/ZkzZzB58mQ4OTnB2toanTt3xqJFixqMrT7aBKt9rbRliI6OxuzZs+Hi4gKpVIqKigoAmr/t0KFDYWNjA1tbW4wbN67O2n9UVBQCAwMhFovRvXv3erts6mqiv3PnDl544QV4eXnBysoKHh4eeOyxx5CdnY3Y2FgMGjQIAPDcc89xfzPtOepqoler1Vi/fj26desGsVgMV1dXzJw5k3sNtLTdXwkJCRg5ciSkUin8/f3xwQcfQK1W8863Zs0aBAYGQiKRoEOHDujduzc++eQTHf/qhJgmSvAtxM7ODsuXL8eRI0fwxx9/NHo8YwxKpRJKpRLFxcU4fvw4Nm3ahOHDh8PPz6/W8QEBARg5ciS+/fZbbtu3334LX19fjBkzplmxKxQKpKWl1arNfvrpp/
jjjz/w0Ucf4dChQ+jWrRv27NmDqVOnwt7eHt9//z22bduG/Px8hISE4OTJk7XO/fDDD6NLly74+eefERERgV9//RXjxo2DQqHgjrl58yYmTpyIbdu24fDhw1i0aBF+/PFHTJ48uc5YJ06ciDFjxuDXX3/FK6+8gq+++gpPPPFEs/4GgGY8Q2hoKLKzs/HFF18gJiYGmzZtgre3N4qKigBomqglEgnXF37q1Cls3ry53nMeOXIEI0eOxO3bt7Fx40YcOnQIy5cvR3Z2dpNi/PfffwGg1ms1e/ZsiEQi7Ny5Ez///DNEIhEiIyPx1FNPoUePHvjxxx+xc+dOFBUVYeTIkUhKSuIeGxUVheeeew7du3fHvn37sHz5cqxevVqn9/GdO3cwaNAg7N+/H4sXL8ahQ4ewadMmODg4ID8/H/379+e6lpYvX879zRpq5n/ppZfw1ltvITQ0FAcOHMDq1atx+PBhDBs2rNaYDrlcjqeffhrPPPMMDhw4gAkTJmDp0qXYtWsXd8z69esRERGBp556Cr///jv27t2LOXPmoKCgoNHyEWLSGGmW7du3MwAsISGBVVRUMH9/fzZw4ECmVqsZY4yNGjWKBQUF8R7j4+PDANS6DR48mGVlZfGOXbFiBQPA7t69y7Zv387EYjHLy8tjSqWSyWQyFhERwRhjzMbGhs2aNavReH18fNjEiROZQqFgCoWCpaSksFmzZjEA7M0332SMMZaSksIAsM6dO7PKykrusSqVinl4eLBevXoxlUrFbS8qKmKurq5s2LBhteJ+7bXXeM+/e/duBoDt2rWrzvjUajVTKBQsLi6OAWB///03t08b5yeffMJ7zPvvv88AsJMnT/LKWf3voS3T9u3buW3a1y4lJYUxxti5c+cYAPbrr782+Des7299/PhxBoAdP36c29a5c2fWuXNnVlZW1uA5a9LGu27dOqZQKFh5eTk7f/48GzRoEAPAfv/9d14ZZs6cyXv87du3maWlJVuwYAFve1FREXN3d2fTp09njFW9pv379+fes4wxlpqaykQiEfPx8eE9HgBbsWIFd3/27NlMJBKxpKSkesuSkJBQ62+vpX2faF27do0BYPPnz+cdd+bMGQaAvfPOO9y2UaNGMQDszJkzvGN79OjBxo0bx92fNGkS69u3b73xEWKuqAbfgqysrLBmzRqcO3cOP/74Y4PHjhgxAgkJCUhISMBff/2Fbdu24e7du3jwwQfrHXn++OOPw8rKCrt378bBgwchl8t1Gjlf08GDByESiSASieDn54cff/wRCxYswJo1a3jHTZkyhdddcP36dWRmZuLZZ5/ljQOwtbXFo48+itOnT9dqKn/66ad596dPnw5LS0uuuR8AkpOTMWPGDLi7u8PCwgIikQijRo0CAFy7dq1W/DXPOWPGDADgnbMpunTpgo4dO+Ktt97Cl19+yavlNsWNGzdw69YtzJkzB9bW1k06x1tvvQWRSARra2sMGDAAt2/fxldffYWJEyfyjnv00Ud5948cOQKlUomZM2dyLUVKpRLW1tYYNWoU142gfU1nzJjBayr38fHBsGHDGo3v0KFDGD16NLp3796k8tWkfQ1rvq8HDx6M7t2749ixY7zt7u7uGDx4MG9b7969eVe0DB48GH///Tfmz5+PI0eOoLCwsEViJcTY0SC7Fvbkk0/io48+wrJly/DII4/Ue5yDgwMGDhzI3R82bBh69OiBoUOHYsOGDVi7dm2tx9jY2OCJJ57At99+Cx8fH25wnL5GjBiBjz/+GAKBAFKpFJ07d4aVlVWt42qOLs/Ly6tzOwB4eHhArVYjPz8fUqmU2+7u7s47ztLSEk5OTty5iouLMXLkSFhbW2PNmjXo2rUrpFIp13dfVlZW5+Or0z6H9pxN5eDggLi4OLz//vt45513kJ+fD5lMhrlz52L58uV6XakAgBtr0JxBZAsXLsQzzzwDoVCIDh06wM/Pr87BczVfE20XgLb/uybtFzTt36zm66
Td1tglhHfv3m3RQXKNvcdqXopa870AAGKxmPe+Wbp0KWxsbLBr1y58+eWXsLCwwAMPPIB169bx/gcJMTeU4FuYQCDAunXrEBoaiq1bt+r12N69ewMA/v7773qPmT17Nr755htcunQJu3fvblKMNb9c1KdmItF+mGZlZdU6NjMzE0KhEB07duRtl8vl6NSpE3dfqVQiLy+PO9cff/yBzMxMxMbGcrV2APX2j9Z8vPY5qsfXHL169cIPP/wAxhguXbqEqKgorFq1ChKJBG+//bZe59L2k9ccHKYPT0/PJr1Wzs7OAICff/65wS+B2r+Z9m9YXV3banJxcWlW+eqLJysrq9YXh8zMTK5c+rC0tMTixYuxePFiFBQU4OjRo3jnnXcwbtw4pKen876QEmJOqIm+FYwdOxahoaFYtWoViouLdX6c9jphV1fXeo8ZOnQoZs+ejYcffhgPP/xwc0PVS2BgIDp16oQ9e/aAMcZtLykpwb59+7iR9dXV/BLy448/QqlUciPPtYmp5uV3X331Vb1x1Dznnj17AIA7Z0sQCATo06cPPv74Y3To0AEXLlzg9tWsIdana9eu6Ny5M7799ltuVHtbGTduHCwtLXHr1i0MHDiwzhugeU1lMhm+//573mualpaG+Pj4Rp9nwoQJOH78OK5fv17vMdrXVpe/2YMPPggAvEFyAJCQkIBr1641e0Bphw4d8Nhjj+Hll1/GvXv39J7kiBBTQjX4VrJu3ToMGDAAOTk5CAoKqrW/oKAAp0+fBqAZGX7t2jVERkZCLBbj5ZdfbvDc27Zta5WYGyMUCrF+/Xo8/fTTmDRpEubNm4eKigp8+OGHKCgowAcffFDrMb/88gssLS0RGhqKq1ev4t1330WfPn0wffp0AJquiY4dO+LFF1/EihUrIBKJsHv37npbMaysrLBhwwYUFxdj0KBBiI+Px5o1azBhwgSMGDGiWeX77bffsHnzZkybNg3+/v5gjOGXX35BQUEBQkNDueN69eqF2NhY/N///R9kMhns7OwQGBhY5zm/+OILTJ48GUOGDMFrr70Gb29v3L59G0eOHGlyC4wufH19sWrVKixbtgzJyckYP348OnbsiOzsbJw9exY2NjbcnAqrV6/G888/j4cffhhz585FQUEBIiIi6my2r2nVqlU4dOgQHnjgAbzzzjvo1asXCgoKcPjwYSxevBjdunVD586dIZFIsHv3bnTv3h22trbw8PCAh4dHrfMFBgbihRdewGeffQahUIgJEyYgNTUV7777Lry8vPDaa6/p/beYPHkyevbsiYEDB8LFxQVpaWnYtGkTfHx8EBAQoPf5CDEZhh3jZ/qqj6KvacaMGQxAo6PoLSwsmLe3N3vsscdYYmIi79jqo+gbos8o+oceeqjBY7QjuD/88MM69//6668sODiYWVtbMxsbGzZmzBj2119/1Rn3+fPn2eTJk5mtrS2zs7NjTz31FMvOzuYdGx8fz4YOHcqkUilzcXFhzz//PLtw4UKtkdezZs1iNjY27NKlSywkJIRJJBLm6OjIXnrpJVZcXFyrnPqOov/nn3/YU089xTp37swkEglzcHBggwcPZlFRUbxzX7x4kQ0fPpxJpVIGgI0aNYoxVvcoesYYO3XqFJswYQJzcHBgYrGYde7cudbVBTU19hrULENd7z/GNK/V6NGjmb29PROLxczHx4c99thj7OjRo7zjvvnmGxYQEMCsrKxY165d2bfffstmzZrV6Ch6xhhLT09ns2fPZu7u7kwkEjEPDw82ffp03uv8/fffs27dujGRSMQ7R81R9IxpRvavW7eOde3alYlEIubs7MyeeeYZlp6ezjuuritUGGO14t6wYQMbNmwYc3Z2ZlZWVszb25vNmTOHpaam1vk3I8RcCBir1i5HCCGEELNAffCEEEKIGaIETwghhJghSvCEEEKIGaIETwghhJghSvCEEEKIGTL76+DVajUyMzNhZ2fXKutjE0JIXRhjKCoqgoeHB2
/tBkLaitkn+MzMTHh5eRk6DEJIO5Went6i8/UToiuzT/B2dnYANP9k9vb2Oj1GoVAgOjoaYWFhei8wYoyoPMbN3MoDmF+ZmlKewsJCeHl5cZ9BhLQ1s0/w2mZ5e3t7vRK8VCqFvb292Xw4UXmMl7mVBzC/MjWnPNQ1SAyFOoYIIYQQM0QJnhBCCDFDlOAJIYQQM2T2ffCEEGLMVCoVFAqFocMgJkIkEsHCwkKnYynBE0KIATDGIJfLUVBQYOhQiInp0KED3N3dGx3ASQmeEEIMQJvcXV1dIZVKabQ9aRRjDKWlpcjJyQEAyGSyBo+nBE8IIW1MpVJxyd3JycnQ4RATIpFIAAA5OTlwdXVtsLmeBtkRQkgDGAPKylr2nNo+d6lU2rInJu2C9n3T2NgNqsETQkgdlEogNxe4exewbKVPSmqWJ02h6/uGEjwhhFRTVgbk5AD37gFqtWZbayV4QloTvW0JIe0eY8D9+5rEXlRk6GgIaRnUB08IabdUKkAuB65cAW7douRuTEJCQrBo0SLuvq+vLzZt2tSqzxkbGwuBQACBQIBp06a16nPVR/v8HTp0aPa5KMETQtqdsjIgLQ24dAm4cweorDR0RKQxCQkJeOGFF9rkua5fv46oqCi9HhMeHs4lZ+1tyJAhvGMqKiqwYMECODs7w8bGBlOmTEFGRgbvmKysrBb7IkMJnhDSbhQUADduAElJmgF02j52YvxcXFza7KoDV1fXJtWgx48fj6ysLO528OBB3v5FixZh//79+OGHH3Dy5EkUFxdj0qRJUKlU3DHu7u5wcHBobhEAUIInhJg5lQrIzgYuXzbeZnjGGEpKSgxyY4zpHGdISAgWLFiARYsWoWPHjnBzc8PWrVtRUlKC5557DnZ2dujcuTMOHTrEe1xSUhImTpwIW1tbuLm54dlnn0Vubi63v6SkBDNnzoStrS1kMhk2bNhQ67lrNtFv3LgRvXr1go2NDby8vDB//nwUFxdz+6OiotChQwccOXIE3bt3h62tLZeA9VVXfDW7EABALBbD3d2duzk6OnL77t+/j23btmHDhg0YO3Ys+vXrh127duHy5cs4evSo3jHpghI8IcQslZcDt29rmuEzMoy7Gb60tBS2trYGuZWWluoV644dO+Ds7IyzZ89iwYIFeOmll/D4449j2LBhuHDhAsaNG4dnn32WO29WVhZGjRqFvn374ty5czh8+DCys7Mxffp07pxvvvkmjh8/jv379yM6OhqxsbE4f/58g3EIhUJ8+umnuHLlCnbs2IE//vgDS5YsqfV3/eijj7Bz5078+eefuH37Nt544w29yqtPfLGxsXB1dUXXrl0xd+5cbsY5ADh//jwUCgXCwsK4bR4eHujZsyfi4+P1jkkXNIqeEGJWCgpoNHxr6tOnD5YvXw4AWLp0KT744AM4Oztj7ty5AID33nsPW7ZswaVLlzBkyBBs2bIF/fv3R2RkJHeOb7/9Fl5eXrhx4wY8PDywbds2fPfddwgNDQWg+RLh6enZYBzVa89+fn5YvXo1XnrpJWzevJnbrlAo8OWXX6Jz584AgFdeeQWrVq3Sq7zFxcU6xTdhwgQ8/vjj8PHxQUpKCt599108+OCDOH/+PMRiMeRyOaysrNCxY0fe49zc3CCXy/WKSVeU4AkhJk+lqpqUpqLC0NHoTyqV8pqX2/q59dG7d2/udwsLCzg5OaFXr17cNjc3NwDgaq/nz5/H8ePHYWtrW+tct27dQllZGSorKzF06FBuu6OjIwIDAxuM4/jx44iMjERSUhIKCwuhVCpRXl6OkpIS2NjYcGXTJndAM3d79Vq1Lm7duqVTfE888QT3e8+ePTFw4ED4+Pjg999/xyOPPFLv+RljrTbhESV4QojJKi/X1Nbz8kx7wJxAIOCSkrETiUS8+wKBgLdNm6zU/70garUakydPxrp162qdSyaT4ebNm3rHkJaWhokTJ+LFF1/E6tWr4ejoiJMnT2LOnDm86VvrilWfMQ
cA9D5eSyaTwcfHhyufu7s7KisrkZ+fz6vF5+TkYNiwYU16jsZQHzwhxOTcvw/cvAlcvaqptZtycjd3/fv3x9WrV+Hr64suXbrwbjY2NujSpQtEIhFOnz7NPSY/Px83btyo95znzp2DUqnEhg0bMGTIEHTt2hWZmZmtEn9T4gOAvLw8pKencyu+DRgwACKRCDExMdwxWVlZuHLlCiV4Qkj7plJpautXrgD//gsUFho6IqKLl19+Gffu3cNTTz2Fs2fPIjk5GdHR0Zg9ezZUKhVsbW0xZ84cvPnmmzh27BiuXLmC8PBwCIX1p6fOnTtDqVTis88+Q3JyMnbu3Ikvv/yyVeLXJb7i4mK88cYbOHXqFFJTUxEbG4vJkyfD2dkZDz/8MADAwcEBc+bMweuvv45jx44hMTERzzzzDHr16oWxY8e2SuzURE8IMWrm0gzfXnl4eOCvv/7CW2+9hXHjxqGiogI+Pj4YP348lyQ//PBDFBcXY8qUKbCzs8Prr7+O+/fv13vOvn37YuPGjVi3bh2WLl2KBx54AGvXrsXMmTNbpQyNxWdhYYHLly/ju+++Q0FBAWQyGUaPHo29e/fCzs6OO+7jjz+GpaUlpk+fjrKyMowZMwZRUVENLvnaHJTgCSFGSTs3PNXUjUdsbGytbampqbW21ey3DggIwC+//FLveW1tbbFz507s3LmT2/bmm282+DyvvfYaXnvtNd62Z599lvs9PDwc4eHhvP3Tpk1rUp96XfH9/vvv3O8SiQRHjhxp9DzW1tb47LPP8Nlnn+kdQ1NQgieEGA2VSlNTz8kxzdHwxDx4enpi8uTJ+P7779v8uW1tbaFUKmFtbd3sc1GCJ4QYXHm5ZrBcXp4myRNiCMHBwdyo97ou62sLFy9eBIAWabY3+CC7O3fu4JlnnoGTkxOkUin69u3LmyGIMYaIiAh4eHhAIpEgJCQEV69eNWDEhJCWUn00fE4OJXdiWBKJhBvh7+7u3uCxsbGxrbK6nfb5/fz8mn0ugyb4/Px8DB8+HCKRCIcOHUJSUhI2bNjAm+R//fr12LhxIz7//HMkJCTA3d0doaGhKKJpqggxSWq1JplfvUqj4QlpTQZtol+3bh28vLywfft2bpuvry/3O2MMmzZtwrJly7iZgHbs2AE3Nzfs2bMH8+bNa+uQCSFNVFFRNRqeauqEtD6DJvgDBw5g3LhxePzxxxEXF4dOnTph/vz53JzGKSkpkMvlvMn5xWIxRo0ahfj4+DoTfEVFBSqqjc4p/K96oFAoeDMcNUR7nK7HGzsqj3Ezt/IA/DIVFWn61025pq5S6f8amdPrSUyTQRN8cnIytmzZgsWLF+Odd97B2bNn8eqrr0IsFmPmzJncBPzauY213NzckJaWVuc5165di5UrV9baHh0drfecy9VnHDIHVB7jZm7lAcyvTPqUR99V2ghpaQZN8Gq1GgMHDuRWGerXrx+uXr2KLVu28CYsqDkRf0OT8y9duhSLFy/m7hcWFsLLywthYWGwt7fXKS6FQoGYmBiEhobWmsvYFFF5jJs5laeyUlNbz81V4M6dGLi7h0IoNO0yAYBEosCtW/q9RoWm3GRBzIJBE7xMJkOPHj1427p37459+/YBADeKUS6Xc/P5AprJ+WvW6rXEYjHEYnGt7SKRSO8Pz6Y8xphReYybKZensFDTv66d3Es7l4hQKDKLBK+9Ykmf18hUX0tiPgya4IcPH47r16/ztt24cQM+Pj4ANGv8uru7IyYmBv369QMAVFZWIi4urs6ViQghbUetrpqUprzc0NGYh61b2/b5XnhBv+NDQkIQFxcHAEhMTETfvn1bPigjpW01dnBwQEFBgWGD0ZFBL5N77bXXcPr0aURGRuLff//Fnj17sHXrVrz88ssANH/QRYsWITIyEvv37+cm+ZdKpZgxY4YhQyek3aqoADIygEuXgNu3Kbm3N3PnzkVWVhZ69uyp0/GxsbGYOnUqZDIZbGxs0LdvX+zevbvWMQ
KBoNbtn3/+aXa8dZ1XIBDgww8/5I4JCQmptf/JJ5/knScrK6tVrntvTQatwQ8aNAj79+/H0qVLsWrVKvj5+WHTpk14+umnuWOWLFmCsrIyzJ8/H/n5+QgODkZ0dDRvAn9CSOsrKtLU1k2k8kJaiVQqbXQSmOri4+PRu3dvvPXWW3Bzc8Pvv/+OmTNnwt7eHpMnT+Yde/36dd5YKRcXl2bHm5WVxbt/6NAhzJkzB48++ihv+9y5c7Fq1SruvkQi4e13d3eHg4NDs+NpSwafqnbSpEmYNGlSvfsFAgEiIiIQERHRdkERQgBUNcPfvQuUlRk6GmJsYmNjMXr0aPz222945513cP36dfTp0wfffPMNevXqBQB45513eI959dVXceTIEezfv79Wgnd1deVNdNaYkJAQriVh165dsLCwwEsvvYTVq1dzTeo1v4z873//w+jRo+Hv78/bru8XF1Ng8KlqCSHGp7KS3wxPyZ005M0338RHH32EhIQEuLq6YsqUKQ3OA3D//n04OjrW2t6vXz/IZDKMGTMGx48f1+m5d+zYAUtLS5w5cwaffvopPv74Y3zzzTd1HpudnY3ff/8dc+bMqbVv9+7dcHZ2RlBQEN544w2zmC3V4DV4Qojx0DbD379fNRKekMasWLECoaGhADQJ19PTE/v378f06dNrHfvzzz8jISEBX331FbdNJpNh69atGDBgACoqKrBz506MGTMGsbGxeOCBBxp8bi8vL3z88ccQCAQIDAzE5cuX8fHHH3MTplW3Y8cO2NnZcTOjaj399NPcoO4rV65g6dKl+Pvvv01+HgdK8IS0c2o1cO+eJrFTTZ00xdChQ7nfHR0dERgYiGvXrtU6LjY2FuHh4fj6668RFBTEbQ8MDERgYCDvfOnp6fjoo4/wwAMP4MSJE5gwYQK3/6uvvuLGag0ZMoQ3L8rQoUOxYcMGqFSqWiuyffvtt3j66adrLcVa/ctAz549ERAQgIEDB+LChQvo37+/vn8Oo0EJnpB2qmpSGkCpNHQ0xNzUnIwsLi4OkydPxsaNG3kTmdVnyJAh2LVrFwBg4MCB3DKqQO3ZTXVx4sQJXL9+HXv37m302P79+0MkEuHmzZuU4AkhpoOa4UlLO336NLy9vQFoVgm9ceMGunXrxu2PjY3FpEmTsG7dOryg48X3iYmJ3ARn2mVc63vumvcDAgJq1d63bduGAQMGoE+fPo0+99WrV6FQKHgTrJkiSvCEtAPUDE9a06pVq+Dk5AQ3NzcsW7YMzs7OmDZtGgBNcn/ooYewcOFCPProo9waI1ZWVtxAu02bNsHX1xdBQUGorKzErl27sG/fPm5W04akp6dj8eLFmDdvHi5cuIDPPvsMGzZs4B1TWFiIn376qdZ2ALh16xZ2796NiRMnwtnZGUlJSXj99dfRr18/DB8+vJl/GcOiBE+IGaNmeNOi78xyxuKDDz7AwoULcfPmTfTp0wcHDhyAlZUVACAqKgqlpaVYu3Yt1q5dyz1m1KhRiI2NBaCZofSNN97AnTt3IJFIEBQUhN9//x0TJ05s9LlnzpyJsrIyDB48GBYWFliwYEGtVoIffvgBjDE89dRTtR5vZWWFY8eO4ZNPPkFxcTG8vLzw0EMPYcWKFbVaAUwNJXhCzFBxcdWkNNQMT1rbiBEjcOXKlTr3RUVFISoqqsHHL1myBEuWLGnSc4tEImzatAlbtmyp95gXXnih3q4BLy8vbvpdc0MJnhAzwVhVMzytVEpay+bNm/HNN9/g1KlThg6lTdna2kKpVNYagW/MKMETYuIUCk1Sp2Z40tp2796Nsv8GcXh7eyM+Pt7AEbUd7Sh+U2q2pwRPiImiZnjS1jp16sS7HxISAmbAN5+2D78t1DeK35hRgifEhFAzPCFEV5TgCTEBCoVmNPzdu9QMTwjRDSV4QoxYSYmmtp6fT83whBD9UIInxMhom+Hv3tUkeEIIaQpK8IQYCW0zfG6u5ndCCGkOSvCEGJh2sFxSEl
BjfQ5CCGkySvCEGABjmn71nBzN4i/abZTg27fz59v2+QYM0O/4kJAQbta3xMRE9O3bt+WDMgBfX1+kpaUB0CyW06FDB8MG1EKEhg6AkPZEoQCysoDLl4GUFOpjJ6Zn7ty5yMrKQs+ePXU6vry8HOHh4ejVqxcsLS25RWhaQlFRERYtWgQfHx9IJBIMGzYMCQkJvGOys7MRHh4ODw8PSKVSjB8/Hjdv3uQdk5CQoNPCNqaGEjwhbaC0VJPQL18GMjOpj52YLqlUCnd3d1ha6tYArFKpIJFI8Oqrr2Ls2LEtGsvzzz+PmJgY7Ny5E5cvX0ZYWBjGjh2LO3fuAAAYY5g2bRqSk5Pxv//9D4mJifDx8cHYsWNRUu3btYuLC7eynTmhBE9IK9GOhv/nH+DaNc3vdKkbMSexsbEQCAT4/fff0adPH1hbWyM4OBiXL1/mjrGxscGWLVswd+5cuLu763zu8PBwTJs2DStXroSrqyvs7e0xb948VFZWAgDKysqwb98+rF+/Hg888AC6dOmCiIgI+Pn5cQvP3Lx5E6dPn8aWLVswaNAgBAYGYvPmzSguLsb333/fsn8MI0QJnpAWplRSMzxpX95880189NFHSEhIgKurK6ZMmQJFCzRTHTt2DNeuXcPx48fx/fffY//+/Vi5ciUAQKlUQqVS1Vr8RSKR4OTJkwCAiooKAOAdY2FhASsrK+4Yc0YJnpAWUloKpKYCly5RMzxpX1asWIHQ0FD06tULO3bsQHZ2Nvbv39/s81pZWeHbb79FUFAQHnroIaxatQqffvop1Go17OzsMHToUKxevRqZmZlQqVTYtWsXzpw5g6ysLABAt27d4OPjg6VLlyI/Px+VlZX44IMPIJfLuWPMGSV4QppBOxr++nVNM3xeHjXDk/Zn6NCh3O+Ojo4IDAzEtWvXdHrs7du3YWtry90iIyO5fX369IFUKuU9T3FxMdLT0wEAO3fuBGMMnTp1glgsxqeffooZM2ZwK76JRCLs27cPN27cgKOjI6RSKWJjYzFhwgSTWhWuqQya4CMiIiAQCHi36n00jDFERETAw8MDEokEISEhuHr1qgEjJkSjejN8crJmZTdCSBWBjtd8enh44OLFi9ztxRdf1PncnTt3RlxcHJf0z549C4VCAT8/P+7YAQMG4OLFiygoKEBWVhYOHz6MvLw83jHmyuA1+KCgIGRlZXG36oMz1q9fj40bN+Lzzz9HQkIC3N3dERoaiiLthcOEtDFtMzyNhiekyunTp7nf8/PzcePGDXTr1k2nx1paWqJLly7crfpo9r///ptbf177PLa2tvD09OSdw8bGBjKZDPn5+Thy5AimTp1a63kcHBzg4uKCmzdv4ty5c3UeY24MPtGNpaVlnSMrGWPYtGkTli1bhkceeQQAsGPHDri5uWHPnj2YN29eW4dK2inGNGuu5+RQTZ2QuqxatQpOTk5wc3PDsmXL4OzszLvePSkpCZWVlbh37x6Kiopw8eJFAGh0opzKykrMmTMHy5cvR1paGlasWIFXXnkFQqGmbnrkyBEwxhAYGIh///0Xb775JgIDA/Hcc89x5/jpp5/g4uICb29vXL58GQsXLsS0adMQFhbW0n8Go2PwBH/z5k14eHhALBYjODgYkZGR8Pf3R0pKCuRyOe9FEIvFGDVqFOLj4+tN8BUVFdzISQAoLCwEACgUCp1HdWqPa4lRoMaAytM0SqWmTz0vD/jvypxWoVYreD/NgbmVSaXS/z3XlPenvjPLGYsPPvgACxcuxM2bN9GnTx8cOHAAVlZW3P6JEydyM8UBQL9+/QBoKnINGTNmDAICAvDAAw+goqICTz75JCIiIrj99+/fx9KlS5GRkQFHR0c8+uijeP/99yESibhjsrKysHjxYmRnZ0Mmk2HmzJl49913W6jkxs2gCT44OBjfffcdunbtiuzsbKxZswbDhg3D1atXIZfLAQBubm68x7i5ufHeKDWtXbuWu4yiuujoaN5gDV3ExM
Todbyxo/IYN7ncvMoDmF+Z9HnPlWoXGWgHRowYgStXrtS7PzU1tcnnXrlyZZ2f6QAwffp0TJ8+vcHHv/rqq3j11Veb/PymzKAJfsKECdzvvXr1wtChQ9G5c2fs2LEDQ4YMAVB7oAZjrMHBG0uXLsXixYu5+4WFhfDy8kJYWBjs7e11ikuhUCAmJgahoaG8b4KmisrTOMaA+/cNs0SrWq2AXB4Dd/dQCIWm//oA5lcmiUSBW7f0e89pWw/NzebNm/HNN9/g1KlThg6lxQQFBSE5OdnQYbQ4gzfRV2djY4NevXrh5s2bXP+NXC6HTCbjjsnJyalVq69OLBZDLBbX2i4SifROBk15jDGj8tSmVGqWZ717t6oZXmigoadCocgskmF15lIm7RVV+rznzOl/TWv37t3coDdvb2/Ex8cbOKKWcfDgQa5LRdeKoCkwqgRfUVGBa9euYeTIkfDz84O7uztiYmK4/prKykrExcVh3bp1Bo6UmLqyMs2guXv3ALXa0NEQYho6derEux8SEtJoP3pTRUVFtcp56+Lj49Nmz9WWDJrg33jjDUyePBne3t7IycnBmjVrUFhYiFmzZkEgEGDRokWIjIxEQEAAAgICEBkZCalUihkzZhgybGKitM3w1ZdoJYQQc2XQBJ+RkYGnnnoKubm5cHFxwZAhQ3D69Gnu29SSJUtQVlaG+fPnIz8/H8HBwYiOjoadnZ0hwyYmRqXSNMFXb4YnxBioqfmINIGu7xuDJvgffvihwf0CgQARERG8yyII0RU1wxNjZWVlBaFQiMzMTLi4uMDKykrnmd9I+8UYQ2VlJe7evQuhUMi7FLEuRtUHT0hL0E5KQ83wxFgJhUL4+fkhKysLmZmZhg6HmBipVApvb29uwp/6UIInZkGlqhoNX22eI0KMlpWVFby9vbllTwnRhYWFBSwtLXVq8aEET0xaebmmtp6XR83wxPQIBAKzu3yVGA9K8MRk/fuvZvEXQgghtVGCJyZD2wyfna25X1xsuElpCCHE2FGCJ0avZjM8NcUTQkjjKMETo6WdlMZMp/QmhJBWRQmeGBWVSlNTz8mh0fCEENIclOCJUaDR8IQQ0rIowRODomZ4QghpHZTgSZujZnhiChjTvE+zsgCRCLC1NXREhOin2QlepVLh8uXL8PHxQceOHVsiJmKmKiqqmuFp4i5ijJRKzXs0MxOQy6sWJ/LxoQRPTI/eCX7RokXo1asX5syZA5VKhVGjRiE+Ph5SqRS//fYbQkJCWiFMYsoKCzUfmvfvGzoSQmorL9ck86wszfuUvnwSc6F3gv/555/xzDPPAAD+7//+DykpKfjnn3/w3XffYdmyZfjrr79aPEhietTqqrnhy8sNHQ0hfMXFmlp6VpZmtUHGDB0RIS1P7wSfm5sLd3d3AMDBgwfx+OOPo2vXrpgzZw4+/fTTFg+QmBZqhifGiDFNIs/K0txopUHSHuid4N3c3JCUlASZTIbDhw9j8+bNAIDS0lJYWFi0eIDENFAzPDE2KpWmBUlbU6cBnaS90TvBP/fcc5g+fTpkMhkEAgFCQ0MBAGfOnEG3bt1aPEBivNTqqtHw1AxPjEFFRVV/enY2tSKR9k3vBB8REYGePXsiPT0djz/+OMRiMQDNGrVvv/12iwdIjE9FhaZmlJtLH6DE8IqLq5re8/KoP50QrSZdJvfYY48BAMqrVdtmzZrVMhERo0XN8MRY5OdXNb3TJEmE1E3vBK9SqRAZGYkvv/wS2dnZuHHjBvz9/fHuu+/C19cXc+bMaY04iYFom+Hv3gXKygwdDWmv1GrNl0u5XJPYqUuIkMbpvZr2+++/j6ioKKxfvx5WVlbc9l69euGbb75p0eCI4VRWAhkZwKVLwO3blNxJ21MogPR04MwZ4LffgPh4IDmZkjshutK7Bv/dd99h69atGDNmDF588UVue+/evfHPP/+0aHCk7RUVaWpKBQWGjoS0R6WlVU3vtPAQIc2jd4K/c+cOunTpUmu7Wq
2GQqFokaBI21KrNdcI5+RQTZ20vYKCqqRO4zsIaTl6J/igoCCcOHECPj4+vO0//fQT+vXr12KBkdZXWVk1KY1SaehoSHuhVmvGdGRlafrUS0sNHREh5knvBL9ixQo8++yzuHPnDtRqNX755Rdcv34d3333HX777bcmB7J27Vq88847WLhwITZt2gQAYIxh5cqV2Lp1K/Lz8xEcHIwvvvgCQUFBTX4eUtUMf/8+XVJE2oZCobkuXZvUqbGPkNan9yC7yZMnY+/evTh48CAEAgHee+89XLt2Df/3f//HTXqjr4SEBGzduhW9e/fmbV+/fj02btyIzz//HAkJCXB3d0doaCiKaJ7JJsnLA5KSgBs3NM2ilNxJa0tNBU6eBH7/HTh7VjNojpI7IW1D7wQPAOPGjUNcXByKi4tRWlqKkydPIiwsrEkBFBcX4+mnn8bXX3/NW26WMYZNmzZh2bJleOSRR9CzZ0/s2LEDpaWl2LNnT5Oeqz2qrNTUmgDNhyv1sZPWdP8+8M8/wJ9/au5fuqRpLaLBcoS0vWavB99cL7/8Mh566CGMHTsWa9as4banpKRALpfzvjiIxWJuedp58+bVeb6KigpUVJt0uvC/WTAUCoXOgwC1x5nyoMGSEk0/5/37gEqlKYdabbrlqU5bDiqP4WkHaMrl/P50oVDB+2nqBAL9PxNM+fODmAe9E7xQKIRAIKh3v0qPuUt/+OEHXLhwAQkJCbX2yeVyAJrFbapzc3NDWlpavedcu3YtVq5cWWt7dHQ0pFKpzrEBQExMjF7HGzu5nMpjzEy5PI6OmltN/fubbpnqos9nQimNHiQGpneC379/P+++QqFAYmIiduzYUWdirU96ejoWLlyI6OhoWFtb13tczS8TjLEGv2AsXboUixcv5u4XFhbCy8sLYWFhsLe31yk2hUKBmJgYhIaGQiQS6fQYQ6qs1PSv1zcaXq1WQC6Pgbt7KIRC4y9PY6g8ba+8vGqQXG5u403uQqEC/fvH4MKFUKjVxlkmfXh6lkMq/RWPPvqozp8JhTSHLjEwvRP81KlTa2177LHHEBQUhL179+o8Ve358+eRk5ODAQMGcNtUKhX+/PNPfP7557h+/ToATU1eJpNxx+Tk5NSq1VcnFou5BXCqE4lEeifrpjymLRUXV01Kox0wJ2xgVIVQKDLaBNIUVJ7WVVhYtYjLvXtNO4daLTLJBM8YQ0FBMlJSYpCSEoP09D/Qt28PPPnkkzp/JhjzZwdpH1qsDz44OBhz587V+fgxY8bg8uXLvG3PPfccunXrhrfeegv+/v5wd3dHTEwMd319ZWUl4uLisG7dupYK2+QwVjUpDbUAkpbEmKYVKCtLM/FMSYmhI2pbpaV5SEv7g0vq9++n8vbfunULjC49ISakRRJ8WVkZPvvsM3h6eur8GDs7O/Ts2ZO3zcbGBk5OTtz2RYsWITIyEgEBAQgICEBkZCSkUilmzJjREmGbFIVCM2ju7l2alIa0HKVS82UxM1MzSK6y0tARtR2lsgIZGX9xCV0uvwCgKoELhSJ06jQUfn6hGDZsNAYNym6we5AQY6N3gu/YsSPvTc4YQ1FREaRSKXbt2tWiwS1ZsgRlZWWYP38+N9FNdHQ07OzsWvR5jFldzfCENEd5uSaZZ2Vp3lt6jIs1aYypkZNzuVqz+wkolfzrRp2dg+DnFwo/v1B4ez8AKytbAICPjwIWFgcNETYhTaZ3gv/44495CV4oFMLFxQXBwcG869ibIjY2lndfIBAgIiICERERzTqvqaFmeNLSioqqmt7z89vPl8XCwgwuoaemHkNpaQ5vv62tDL6+Y/9L6mNhayur50yEmB69E3x4eHgrhEEAaoYnLUf7JVE7SK69TP5YUVGItLRYLqnfu3edt18ksoG39yiulu7s3IOa3YnZ0inBX7p0SecT1pxuljSupERTW29PNSvS8lQqzZdD7cps1eZ7MlsqlQKZmWf/q6EfxZ07p8FYVZ+DQCCETDaIS+
idOg2BhYWVASMmpO3olOD79u0LgUDQ6AhSgUCg10Q37RljmoSek9P+RiuTllNRUdWfnp1t/v3pjDHk5V3/L6HHIC0tFpWV/OaJjh27VOtHD4FE0ryuQ0JMlU4JPiUlpbXjaDe0zfC5ubToBmma4uKqpve8PPNv9SkpyUFq6tH/mt2Poqgog7dfInGCr+8Y+PmFwtd3LDp08DVMoIQYGZ0SfM2134n+qBmeNEf1/nRznyBNoShFevoJrh89J4ffRWhhIYaX1wiulu7m1hcCQZPWzSLErDX5OvikpCTcvn0blTUunJ0yZUqzgzIX1AxPmkrbn65N6uXlho6o9ajVKmRnJ3I19IyMk1Cp+J8rbm59uYTu6TkCIpHEQNESYjr0TvDJycl4+OGHcfnyZV6/vHYkKvXBa5rec3M1H9DUDE90VVlZNUguO9u8r6QoKEjhEnpa2jGUlfHnwrW39/qvyT0Uvr4PwsbG1UCREmK69E7wCxcuhJ+fH44ePQp/f3+cPXsWeXl5eP311/HRRx+1Rowmo7RU88FMzfBEV6WlmoRubQ1ER5tvUi8ry+emgU1NPYr8/Fu8/WKxPXx8RsPXV3M9uqNjV7p8jZBm0jvBnzp1Cn/88QdcXFwgFAohFAoxYsQIrF27Fq+++ioSExNbI06jRc3wRF8FBVWXst2/r1kgaODAxldoMyVKZQUuX76M48dPISXlOLKyzoGxqgIKhZbw8BjCTTDj4TEYQmGLLY1BCEETErxKpYKtrWb6RmdnZ2RmZiIwMBA+Pj7cCnDtgVJZNSkNNcOThqjVVf3pcrl5zk7IGMPdu1e4Gvrt23FQKPgFdXLqziV0b+8QiMXtZ8ppQgxB7wTfs2dPXLp0Cf7+/ggODsb69ethZWWFrVu3wt/fvzViNCqlpZra+r171AxP6qdQaLprtP3p5vglsKgok0voKSlHUVIi5+3v0KEDPD0nwNc3DL6+Y2Fvr/tiVISQ5tM7wS9fvhwl/7VFr1mzBpMmTcLIkSPh5OSEvXv3tniAxoAxTbNqTo7mGmRC6lJWVtX0nptrXk3uAFBRUYTbt+O4a9Jzc5N4+y0tJdw0sP7+ozBxYjrOn3/IJNeDJ8Qc6J3gx40bx/3u7++PpKQk3Lt3r9Yqc+ZAqaTR8KRh9+9XLeJSUGDoaFqWWq1EZmYCl9Dv3DkFtbr6KEABZLKB3AQznp7DYGkpBgAIhQoIBBl1n5gQ0ib0TvA7duzAY489BhsbG26bo6NjiwZlLJKSGj+GtC9qtWb2OG1N3Zz60xljuHfvJpfQ09KOo6LiPu+YDh38uYTu6/sgJBLz/N8nxBzoneDfeOMNzJ8/H5MnT8YzzzyD8ePHw9LSPEe/qtWaEc6kfVMqNYPjtLcaczuZtJKSu0hNPcYl9cLC27z91tYd4es7hltStWNH8x9nQ4i50DszZ2Vl4fDhw/j+++/x5JNPQiKR4PHHH8czzzyDYcOGtUaMhLS58vKqpve7d82nP12hKENGxkmkpGgSenY2/7JWCwsreHoO5xK6u3t/CIUWBoqWENIceid4S0tLTJo0CZMmTUJpaSn279+PPXv2YPTo0fD09MStW7caPwkhRqiwUJPQ5XLNVRLmgDE1srMvcgk9I+MklEr+vLeurr25hO7lNRJWVjb1nI0QYkqa1bYulUoxbtw45OfnIy0tDdeuXWupuAhpdYxp+tO1NXVzmajo/v00LqGnph5DWVkub7+trQc3r7uv7xjY2robKFLjJRAA9vaAk5Pm5uUFZNCYQWJimpTgtTX33bt34+jRo/Dy8sJTTz2Fn376qaXjI6RFKZWayx21NXVz6E8vLy9AWlost0b6vXs3efutrGzh7R3CJXUnp25md8VLc4lEQMeOgLMz4OiouVUfWiSVGi42QppK7wT/1FNP4f/+7/8glUrx+OOPIzY2lvreiVErL9ck86wsTXI39fWQVKpK3LlzhkvomZlnedPACgQW6NQpmGt29/AIhoUFXYtenY
2Npmbu6Kj5aW+vqbUTYk70TvACgQB79+7FuHHjzHb0PDF9RUVVTe+mvvgPYwy5ude4hH77dhwqK/kzLjk6BsLPT5PQvb1DYG3tYKBojY9QCHToUNXc7uioWdyHEHOnd4bes2dPa8RBSLMwphkYp10/vajI0BE1T3GxnLt0LSXlKIqLM3n7JRJnLqH7+o6Fg4O3gSI1PlZW/Kb2jh0BC7oQgLRDVAUnJkul0szzrk3qFRWGjqjpysvL8e+/h5GcfBwpKTG4e/cKb7+lpTW8vEZy/eiurr0hENAkDYCmeV3b1O7kBPy3FhYh7R4leGJSKio0/ekWFsDhw6Y7hbBarYJcfv6/Gno0MjPjoVTyp4F1d+/3Xw09FF5ew2FpSe3KFhZVyVz7U0TDCwipk84JPiMjA56eLbsa1JYtW7BlyxakpqYCAIKCgvDee+9hwoQJADR9jytXrsTWrVuRn5+P4OBgfPHFFwgKCmrROIhxKy6uqqXn5WkGQw0caHqD5fLzb/2X0GOQlvYHyssLePsdHHy4hO7r+yCkUmfDBGpEpNKqpnZnZ01tnWaXJEQ3Oif4nj174rPPPsOzzz7bYk/u6emJDz74AF26dAGgmed+6tSpSExMRFBQENavX4+NGzciKioKXbt2xZo1axAaGorr16/Dzo7WkjZn1fvTCwv5+0xltHNpaR7S0v7gllQtKEjh7ReLHeDj8yA6d34QU6ZYIiNjNhizMlC0hicU8q89d3Sky9MIaQ6dE3xkZCRefvll/Prrr9i6dSucnJya/eSTJ0/m3X///fexZcsWnD59Gj169MCmTZuwbNkyPPLIIwA0XwDc3NywZ88ezJs3r9nPT4yHSqWZElab1MvLG3+MsVEqK5CR8RdXS5fLLwCoGr4vFIrQqdNQrh9dJhsAodASQqECMtlB3LkjMOnR/vqystIMgNMm9I4d+deeE0KaR+d/p/nz52PChAmYM2cOgoKCsHXrVkyZMqXFAlGpVPjpp59QUlKCoUOHIiUlBXK5HGFhYdwxYrEYo0aNQnx8fL0JvqKiAhXVRlsV/lf9UygUUOjYYas9Tq020Q7eGrTlMLbyVFZqrkuXyzU/q3dBN9QMKxQqeD8NhTE1cnIuIzn5GFJSjuH27ZNQKst4x7i49ICf31j4+4+Bt/dIWFlVHwHGACiMpjwtqa4y2dpWJfSOHYG6GuGMdc5/lUpTDl0/Q/Q9lpDWIGBM/zrD559/jtdeew3du3evdS38hQsX9DrX5cuXMXToUJSXl8PW1hZ79uzBxIkTER8fj+HDh+POnTvw8PDgjn/hhReQlpaGI0eO1Hm+iIgIrFy5stb2PXv2QErtfaSZcnNz8ffff+PixYu4dOkS7t/nL6fasWNH9OnTh7uZ61LKpHGlpaWYMWMG7t+/D3t7e0OHQ9ohvRvE0tLSsG/fPjg6OmLq1KnNnuwmMDAQFy9eREFBAfbt24dZs2YhLi6O219zSk3GWIPTbC5duhSLFy/m7hcWFsLLywthYWE6/5MpFArExMTA3T0UQqHpD9FVqxWQyw1Xnvv3Nc3u2dma35tLKFSgf/8YXLgQCrW6dctTUVGItLS4/2rpR5GXd4O3XySygY/PA/DzGwN//zFwdu7BvT+TkzW3xrRleVqLtbWmVq4dEGdnp0BOjvn8D0kkCty6FYPQ0FCIdBy2X1hz8AghbUyv7Pz111/j9ddfx9ixY3HlyhW4uLg0OwArKytukN3AgQORkJCATz75BG+99RYAQC6XQyaTccfn5OTAzc2t3vOJxWKIxeJa20Uikc7/mFpCocgsPpy02qo8anVVf7pcDpSWttbziFo8IapUCmRmnuUGxt25cxqMVQ3XFwiEkMkGcf3onToNgYVF1cA4xpo+a15rlKc1CASa5vXqk8nUvPZc29RuLv9D2oly9Pkc0ffzhpCWpnOCHz9+PM6ePYvPP/8cM2fObLWAGGOoqKiAn58f3N3dERMTg379+gEAKisrERcXh3Xr1r
Xa85OmUSg0NfTMTM1PU+l+ZIwhL+86Nw1sWlosKiv50+B17NiFS+je3iGQSDoaKFrDsLSsSuTa0e2UuwgxfjoneJVKhUuXLrXotfDvvPMOJkyYAC8vLxQVFeGHH35AbGwsDh8+DIFAgEWLFiEyMhIBAQEICAhAZGQkpFIpZsyY0WIxkKYrK9Mk9KwsIDfXeAdI1VRSksObBraoiL8OqETiBF/fMdw0sB06+BomUAORSvmXqjk4mM6liYSQKjon+JiYmBZ/8uzsbDz77LPIysqCg4MDevfujcOHDyM0NBQAsGTJEpSVlWH+/PncRDfR0dF0DbwBFRRomt0zMzW/mwKFohTp6Se4hJ6T8zdvv4WFGF5eI7iE7u7er91MA6tdiKX6ZDK0EAsh5sGgV51u27atwf0CgQARERGIiIhom4BILWq1ZvY4bU29tfrTW5JarUJ2diKX0DMyTkKl4i/87ubWl2t29/QcAZFIYqBo25aVFX/e9g4d6NpzQswV/WuTWpRKTS1de6usbPwxhlZQkMIl9LS0Yygru8fbb2/vxZsG1sbG1UCRti07O/687dT4RUj7QQmeANDMHKddP/3uXePvTy8uLsa1a78gOTkWqakxyM+/xdsvFtvDx2c0fH1D4ec3Fo6OXRu8vNIcWFhUXaqmHeFu1X5nviWk3aME344VFmoSulyumfvdmCmVFbhz5xQ32l0uPw91tW8hQqElPDyG/NfsPhYeHoMhFJr329vauqqp3clJMxiOFmIhhGiZ9ycg4WFMM9pdW1MvKTF0RPVjjOHu3Svc9ei3b8dBoeAPAHB27gZf3zD4+Y2Ft3cIxGLzbX8WCDQJvHr/OU3MSAhpCCV4M6dUaq5LB4AjR4x7EZeiokwuoaekHEVJiZy338bGDb6+Y+HvPxpTpwKpqTNNYmKYphCJNM3t1SeTocFwhBB90EeGGSovr7qULSdHs23gQOMbLFdRUYTbt+O4a9Jzc5N4+y0tJfD2HgU/v7Hw8wuFi0svCAQCCIUKODsfRGqqYeJuLV5eVcnc3p6uPSeENA8leDNRVFTV9J6fz58u1Vj6ZdVqJTIzE7iEfufOKajV1ZaQgwAy2UDuenRPz2GwtKw97bCp0157Xv1StXv3gH79jOe1IoSYPkrwJooxTVLQrp9eVNT4Y9oaYwz37t3kEnpa2nFUVPBXm+nQwZ9L6L6+D0IiMb/V18TiqkvVHB01Te/auc0B479igRBimijBmxCVStPkrk3q1Za9NxolJXeRlvbHf9ekx6Cw8DZvv7V1R/j4PMhNMtOxo7+BIm099vb8a89rLsRCCCFtgRK8kauo0PSna5dbVakaf0xbUijKkJHxF5fQs7MTefstLKzg6Tkcvr6afnR39/4QCi3qOZvpsbDgL8Li5EQLsRBCjAMleCNUXFzVn37vXtOXH20NjKmRnX0RKSmaZveMjJNQKvlD811de3MJ3ctrJKysbAwUbcuTSvnzttNCLIQQY0UJ3khU708vLDR0NHz376dxCT019RjKynJ5+21tPbgmd1/fMbC1dTdQpC1LKKxqbtfeJO1jynpCiBmgBG8gKpVmSlhtUjem69PLywuQlhbLzRp3795N3n4rK1t4e4dwSd3JqZtZTAOrXYhF29TesSNde04IMV308dWGKiv5/elKZeOPaQsqVSXu3DnDJfTMzLNgrGpot0BgAQ+PwVxC9/AIhoWF6Xc029ry5223tzd0RIQQ0nIowbeykpKqWnpennFcEsUYQ27uNS6h374dh8rKYt4xjo6B3AQz3t4hsLZ2MFC0LUMo1NTItU3tjo6ay9cIIcRcUYJvBQUFmgFymZnG05+en5+Py5d3Izn5OFJSjqK4OJO3XyJx5hK6r+9YODh4GyjSlmFtXXvdc5pEhhDSnlCCbwFqNb8/vazM0BEBlZUlSE//87/L16Jx9+5V3n5LS2t4eY3kmt1dXXtDIDDNDCgQaNY5rz5vO117Tghp7yjBN5FCoelHz8zU/F
QoDBuPWq2CXH6eux49IyMeanVVUAKBAG5ufeHnFwZf31B4eQ2HpaW1ASNuOu3At8DAqoRO154TQggfJXg9lJZW1dJzcw3fn56ff4tL6Glpf6C8vIC338HBB35+ofD3H41p01S4ceNJk1x9TSrl953b2Wleg8BAanYnhJD6UIJvREFB1cpsBQWGjaW0NI+bBjY19SgKClJ4+8VihxrTwHbmVl+ztz9ooKj1o12IpfpkMtY1GhoM/cWKEEJMASX4Bhw9qplVzlCUygreNLBy+QUAVdPaCYUidOo0lEvoMtkACIWm9ZJaWfGnee3Qga49J4SQlkAfpQ0oLW3b52NMjZycy1xCT08/AaWSP2LP2TmIS+je3g/Aysq0RpPZ2fETup2doSMihBDzRAnewAoLM7iEnpp6DKWlObz9trYybl53X98xsLPzMFCk+rOw0Fx7Xn0yGSsrQ0dFCCHtAyX4NlZRUchNA5uSEoN7967z9otENvD2HsXV0p2de5jMNLASCf/acwcHGgRHCCGGYtAEv3btWvzyyy/4559/IJFIMGzYMKxbtw6BgYHcMYwxrFy5Elu3bkV+fj6Cg4PxxRdfICgoyICR606lUiAz8yw3MO7OndNgrGrNV4FACJlsEJfQO3UaAgsL46/mCgSaBF49oUulho6KEEKIlkETfFxcHF5++WUMGjQISqUSy5YtQ1hYGJKSkmBjo1lidP369di4cSOioqLQtWtXrFmzBqGhobh+/TrsjLADlzGGvLzrXEJPSzuOysoi3jEdO3ap1o8eAomko4Gi1Z1IpGlurz6ZDA2GI4QQ42XQj+jDhw/z7m/fvh2urq44f/48HnjgATDGsGnTJixbtgyPPPIIAGDHjh1wc3PDnj17MG/ePEOEXUtJSQ5SU4/+1+x+FEVFGbz9EokTfH3HcNPAdujga5hA9WBjw7/23N6e1j0nhBBTYlR1sPv37wMAHB0dAQApKSmQy+UICwvjjhGLxRg1ahTi4+PrTPAVFRWoqKjg7hf+Nxm8QqGAQsfp5rTHCYV1H69QlOL27ZNISfkDKSlHkZ19ibffwkIML6/h8PcfAz+/MXB371tjGti2nfZOW476yiMUVjW3Ozpqauo1rz1nTHMzBtoZ+qrP1GfKzK08gPmVSaXSlEPXzxB9jyWkNRhNgmeMYfHixRgxYgR69uwJAJDL5QAANzc33rFubm5IS0ur8zxr167FypUra22Pjo6GVM9O4v79YwAAKpUKKSkpuHjxIv7++29cu3YNyhprvfr5+aFPnz7o27cvunfvDjG3VJkcAL+lwlC05WkIY8C9e20QTAuQyxsvjykxt/IA5lemmBjdy1Pa1tfZElKD0ST4V155BZcuXcLJkydr7as5ipwxVu/I8qVLl2Lx4sXc/cLCQnh5eSEsLAz2Oi74rVAosGvXLhw4UInk5Fikph5HWRk/69nbe8HPbwz8/B6En9+DsLFxBaBZ4/3yZZ2eps106KBAly4xUKlC4egown/DG0yWWq2AXB4Dd/dQCIWmN/VuTeZWHsD8yiSRKHDrVgxCQ0Mh0nHhg0JjWUqStFtGkeAXLFiAAwcO4M8//4Snpye33d3dHYCmJi+TybjtOTk5tWr1WmKxuFrtuYpIJNLpH1Mul2PEiBG4detWjfPaw8dnNHx9Q+HnNxaOjl15XzKMZfpUC4uqke3anxYWmql2vbxEZvFhqyUUUnmMnbmUycJC81PXzxHtsYQYkkETPGMMCxYswP79+xEbGws/Pz/efj8/P7i7uyMmJgb9+vUDAFRWViIuLg7r1q1rlZjc3NxQUlICCwsLeHgMga9vGPz8xsLDY7BRTgMrlfLnbXdwqD0Yzli+fBBCCGk7Bs1YL7/8Mvbs2YP//e9/sLOz4/rcHRwcIJFIIBAIsGjRIkRGRiIgIAABAQGIjIyEVCrFjBkzWiUmgUCA//3vf0hOTsbVq48a1epr1QfDaUe4SySGjooQQogxMmiC37JlCwAgJCSEt3
379u0IDw8HACxZsgRlZWWYP38+N9FNdHR0q14D369fP2RlZbXa+XVlZVVVO3dy0oxup2vPCSGE6MLgTfSNEQgEiIiIQEREROsHZGC2tvx523UcE0gIIYTUQvVBAxEKNTXy6pPJ1DE2kBBCCGkSSvBtxNqa33feoQMtxEIIIaT1UIJvBQKBZp1zbVO7kxNM/tpzQgghpoUSfAuwtORfe+7oqFmchRBCCDEUSvBNIJXy+87ruvacEEIIMSRK8I0QCjX95dX7z2suxEIIIYQYG0rwDRg+XJPYtdNUEkLaD0tLzVwUIhFNKEVMEyX4Bjg50Uh3QsyNQKBJ2iJRVQKv62f1bjda+ZWYIkrwhBCzIRQ2nLS1iZ2Q9oASPCHEJFhaNl7rpu40QqpQgieEGJRAwO/vru8ndZcRoh9K8ISQViMUaqZgbih5W1rSZaaEtAZK8ISQJrGwaLi2nZkJ9O5Nfd6EGAoleEJILY31dTfWZE6jzgkxPErwhLQjAoFuo8ypyZwQ00cJnhAzYWHReM3bkv7jCWk36N+dEBOgy8QsNMqcEFIdJXhCDEjbZA5o1jyQSOpO4NRkTgjRFyV4QlqJrk3mCgWQmgr4+tKIc0JIy6EET0gT6DIxC82qRggxJErwhFSjXYikrpHl9S1EQgghxogSPGk3aCESQkh7QgmemAVaiIQQQvgowROjVtfa3dppUAMCqkad0yVihBDCZ9CPxT///BOTJ0+Gh4cHBAIBfv31V95+xhgiIiLg4eEBiUSCkJAQXL161TDBkhanXYjEzg5wdATc3QEvL6BzZ6BbN8085v37A716ae77+2v2u7pqHm9jo3k8JXdCCKnNoDX4kpIS9OnTB8899xweffTRWvvXr1+PjRs3IioqCl27dsWaNWsQGhqK69evw87OzgARE11RkzkhhBiWQRP8hAkTMGHChDr3McawadMmLFu2DI888ggAYMeOHXBzc8OePXswb968tgyVVNPchUgIIYS0PqPtg09JSYFcLkdYWBi3TSwWY9SoUYiPj683wVdUVKCiooK7X1hYCABQKBRQ6LjElfY4tdo8lsTSlqOx8giFmuRc/Rrv6v3f2lp5Y5eIqVSaW2vRvj66vp7GztzKA5hfmZpSHnMpOzFdRpvg5XI5AMDNzY233c3NDWlpafU+bu3atVi5cmWt7dHR0ZBKpXrGEKPX8cbO3MoTE0PlMXbmViZ9ylNaWtqKkRDSOKNN8FqCGtVFxlitbdUtXboUixcv5u4XFhbCy8sLYWFhsLe31+k5FQoFYmJi4O4eCqHQ+C+MbqjWralxK3D0aAxCQ0MhMoMLvbWvD5XHeJlbmZpSHm3rISGGYrQJ3t3dHYCmJi+TybjtOTk5tWr11YnFYojF4lrbRSKR3h80QqHIoAm+pdbu1rYUNuVvYMyoPMbP3MqkT3nMqdzENBltgvfz84O7uztiYmLQr18/AEBlZSXi4uKwbt06A0fXfLR2NyGEkNZk0BRSXFyMf//9l7ufkpKCixcvwtHREd7e3li0aBEiIyMREBCAgIAAREZGQiqVYsaMGQaMunG0EAkhhBBDM2iCP3fuHEaPHs3d1/adz5o1C1FRUViyZAnKysowf/585OfnIzg4GNHR0Qa7Br6+hUho7W5CCCHGxqAJPiQkBIyxevcLBAJEREQgIiKi7YKqxtNTMxUqLURCCCHE1FAvbwOcnSmpE0IIMU003xghhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWbI7Ce60c6Up8/SjQqFAqWlpSgsLDSLFaGoPMbN3MoDmF+ZmlIe7WdOQ7N1EtKazD7BFxUVAQC8vLwMHAkhpD0qKiqCg4ODocMg7ZCAmfnXS7VajczMTNjZ2UGg4yowhYWF8PLyQnp6Ouzt7Vs5wtZH5TFu5lYewPzK1JTyMMZQVFQEDw8PCIXUG0rantnX4IVCITw9PZv0WHt7e7
P4cNKi8hg3cysPYH5l0rc8VHMnhkRfKwkhhBAzRAmeEEIIMUOU4OsgFouxYsUKiMViQ4fSIqg8xs3cygOYX5nMrTykfTD7QXaEEEJIe0Q1eEIIIcQMUYInhBBCzBAleEIIIcQMUYInhBBCzFC7TfCbN2+Gn58frK2tMWDAAJw4caLB4+Pi4jBgwABYW1vD398fX375ZRtFqht9yvPLL78gNDQULi4usLe3x9ChQ3HkyJE2jLZx+r4+Wn/99RcsLS3Rt2/f1g1QT/qWp6KiAsuWLYOPjw/EYjE6d+6Mb7/9to2ibZy+5dm9ezf69OkDqVQKmUyG5557Dnl5eW0UbcP+/PNPTJ48GR4eHhAIBPj1118bfYyxfx4QAgBg7dAPP/zARCIR+/rrr1lSUhJbuHAhs7GxYWlpaXUen5yczKRSKVu4cCFLSkpiX3/9NROJROznn39u48jrpm95Fi5cyNatW8fOnj3Lbty4wZYuXcpEIhG7cOFCG0deN33Lo1VQUMD8/f1ZWFgY69OnT9sEq4OmlGfKlCksODiYxcTEsJSUFHbmzBn2119/tWHU9dO3PCdOnGBCoZB98sknLDk5mZ04cYIFBQWxadOmtXHkdTt48CBbtmwZ27dvHwPA9u/f3+Dxxv55QIhWu0zwgwcPZi+++CJvW7du3djbb79d5/FLlixh3bp1422bN28eGzJkSKvFqA99y1OXHj16sJUrV7Z0aE3S1PI88cQTbPny5WzFihVGleD1Lc+hQ4eYg4MDy8vLa4vw9KZveT788EPm7+/P2/bpp58yT0/PVouxqXRJ8Mb+eUCIVrtroq+srMT58+cRFhbG2x4WFob4+Pg6H3Pq1Klax48bNw7nzp2DQqFotVh10ZTy1KRWq1FUVARHR8fWCFEvTS3P9u3bcevWLaxYsaK1Q9RLU8pz4MABDBw4EOvXr0enTp3QtWtXvPHGGygrK2uLkBvUlPIMGzYMGRkZOHjwIBhjyM7Oxs8//4yHHnqoLUJuccb8eUBIdWa/2ExNubm5UKlUcHNz4213c3ODXC6v8zFyubzO45VKJXJzcyGTyVot3sY0pTw1bdiwASUlJZg+fXprhKiXppTn5s2bePvtt3HixAlYWhrXW7op5UlOTsbJkydhbW2N/fv3Izc3F/Pnz8e9e/cM3g/flPIMGzYMu3fvxhNPPIHy8nIolUpMmTIFn332WVuE3OKM+fOAkOraXQ1eq+bSsYyxBpeTrev4urYbir7l0fr+++8RERGBvXv3wtXVtbXC05uu5VGpVJgxYwZWrlyJrl27tlV4etPn9VGr1RAIBNi9ezcGDx6MiRMnYuPGjYiKijKKWjygX3mSkpLw6quv4r333sP58+dx+PBhpKSk4MUXX2yLUFuFsX8eEAK0wxq8s7MzLCwsatU2cnJyan0r13J3d6/zeEtLSzg5ObVarLpoSnm09u7dizlz5uCnn37C2LFjWzNMnelbnqKiIpw7dw6JiYl45ZVXAGgSJGMMlpaWiI6OxoMPPtgmsdelKa+PTCZDp06deEuNdu/eHYwxZGRkICAgoFVjbkhTyrN27VoMHz4cb775JgCgd+/esLGxwciRI7FmzRqTq/Ea8+cBIdW1uxq8lZUVBgwYgJiYGN72mJgYDBs2rM7HDB06tNbx0dHRGDhwIEQiUavFqoumlAfQ1NzDw8OxZ88eo+oL1bc89vb2uHz5Mi5evMjdXnzxRQQGBuLixYsIDg5uq9Dr1JTXZ/jw4cjMzERxcTG37caNGxAKhfD09GzVeBvTlPKUlpZCKOR/1FhYWACoqvmaEmP+PCCEx0CD+wxKe5nPtm3bWFJSElu0aBGzsbFhqampjDHG3n77bfbss89yx2svi3nttddYUlIS27Ztm1FdFqNvefbs2cMsLS3ZF198wbKysrhbQUGBoYrAo295ajK2UfT6lqeoqIh5enqyxx57jF29epXFxcWxgIAA9vzzzxuqCDz6lmf79u3M0tKSbd68md
26dYudPHmSDRw4kA0ePNhQReApKipiiYmJLDExkQFgGzduZImJidxlf6b2eUCIVrtM8Iwx9sUXXzAfHx9mZWXF+vfvz+Li4rh9s2bNYqNGjeIdHxsby/r168esrKyYr68v27JlSxtH3DB9yjNq1CgGoNZt1qxZbR94PfR9faoztgTPmP7luXbtGhs7diyTSCTM09OTLV68mJWWlrZx1PXTtzyffvop69GjB5NIJEwmk7Gnn36aZWRktHHUdTt+/HiD/w+m+HlACGOM0XKxhBBCiBlqd33whBBCSHtACZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCalDamoqBAIBLl68aOhQCCGkSSjBE5MVHh6OadOm1doeGxsLgUCAgoKCJp/by8sLWVlZ6NmzZ9MDJIQQA2p3y8US0pjKykpYWVnB3d3d0KEQQkiTUQ2emL19+/YhKCgIYrEYvr6+2LBhA2+/r68v1qxZg/DwcDg4OGDu3Lm1mujDw8MhEAhq3WJjYwEA+fn5mDlzJjp27AipVIoJEybg5s2b3HNERUWhQ4cOOHLkCLp37w5bW1uMHz8eWVlZbfVnIIS0M5TgiVk7f/48pk+fjieffBKXL19GREQE3n33XURFRfGO+/DDD9GzZ0+cP38e7777bq3zfPLJJ8jKyuJuCxcuhKurK7p16wZA8wXg3LlzOHDgAE6dOgXGGCZOnAiFQsGdo7S0FB999BF27tyJP//8E7dv38Ybb7zRquUnhLRjBl7NjpAmmzVrFrOwsGA2Nja8m7W1NQPA8vPz2YwZM1hoaCjvcW+++Sbr0aMHd9/Hx4dNmzaNd0xKSgoDwBITE2s97759+5hYLGYnTpxgjDF248YNBoD99ddf3DG5ublMIpGwH3/8kTGmWRMdAPv333+5Y7744gvm5ubW7L8DIYTUhWrwxKSNHj0aFy9e5N2++eYbbv+1a9cwfPhw3mOGDx+OmzdvQqVScdsGDhyo0/MlJiZi5syZ+OKLLzBixAjuOSwtLREcHMwd5+TkhMDAQFy7do3bJpVK0blzZ+6+TCZDTk6OfgUmhBAd0SA7YtJsbGzQpUsX3raMjAzud8YYBAIBbz9jrM7zNEYul2PKlCmYM2cO5syZ0+D56npukUjE2y8QCOp9LCGENBfV4IlZ69GjB06ePMnbFh8fj65du8LCwkLn85SXl2Pq1Kno1q0bNm7cWOs5lEolzpw5w23Ly8vDjRs30L179+YVgBBCmohq8MSsvf766xg0aBBWr16NJ554AqdOncLnn3+OzZs363WeefPmIT09HceOHcPdu3e57Y6OjggICMDUqVMxd+5cfPXVV7Czs8Pbb7+NTp06YerUqS1dJEII0QnV4IlZ69+/P3788Uf88MMP6NmzJ9577z2sWrUK4eHhep0nLi4OWVlZ6NGjB2QyGXeLj48HAGzfvh0DBgzApEmTMHToUDDGcPDgwVrN8oQQ0lYEjDoBCSGEELNDNXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDP0/abufTubPN7wAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -4105,7 +2625,7 @@ "print('probs.shape (N,H,K) \\t', probs.shape)\n", "\n", "model = NBMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (counts, probs, weights)\n", + "distr_args = (counts, probs)\n", "samples, sample_mean, quants = model.sample(distr_args, num_samples=2000)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index 6bafac332..c1852c18a 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -22,7 +22,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting.\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting. \n", "\n", "\n", "**References**
\n", @@ -30,13 +30,10 @@ "\n", "\n", ":::{.callout-warning collapse=\"false\"}\n", - "#### Exogenous Variables, Losses, and Parameters Availability\n", + "#### Losses\n", "\n", - "Given the sampling procedure during inference, DeepNPTS only supports `DistributionLoss` as training loss.\n", + "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast, rather than a sampled distributional output. Consequently, DeepNPTS only supports point losses as training loss.\n", "\n", - "Note that DeepNPTS generates a non-parametric forecast distribution using Monte Carlo. We use this sampling procedure also during validation to make it closer to the inference procedure. Therefore, only the `MQLoss` is available for validation.\n", - "\n", - "Aditionally, Monte Carlo implies that historic exogenous variables are not available for the model.\n", ":::" ] }, @@ -47,17 +44,15 @@ "outputs": [], "source": [ "#| export\n", - "import numpy as np\n", - "\n", "import torch\n", "import torch.nn as nn\n", + "import torch.nn.functional as F\n", "import neuralforecast.losses.pytorch as losses\n", "from typing import Optional\n", - "from functools import partial\n", "\n", "\n", "from neuralforecast.common._base_windows import BaseWindows\n", - "from neuralforecast.losses.pytorch import MQLoss, GMM, PMM, NBMM\n" + "from neuralforecast.losses.pytorch import MAE\n" ] }, { @@ -102,7 +97,7 @@ "class DeepNPTS(BaseWindows):\n", " \"\"\" DeepNPTS\n", "\n", - " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. 
This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", "\n", " **Parameters:**
\n", " `h`: int, Forecast horizon.
\n", @@ -111,7 +106,6 @@ " `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", " `dropout`: float=0.1, dropout.
\n", " `n_layers`: int=2, number of dense layers.
\n", - " `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", " `stat_exog_list`: str list, static exogenous columns.
\n", " `hist_exog_list`: str list, historic exogenous columns.
\n", " `futr_exog_list`: str list, future exogenous columns.
\n", @@ -152,15 +146,14 @@ " batch_norm: bool = True,\n", " dropout: float = 0.1,\n", " n_layers: int = 2,\n", - " trajectory_samples: int = 100,\n", " futr_exog_list = None,\n", " hist_exog_list = None,\n", " stat_exog_list = None,\n", " exclude_insample_y = False,\n", - " loss = GMM(),\n", - " valid_loss = MQLoss(level=[80, 90]),\n", + " loss = MAE(),\n", + " valid_loss = MAE(),\n", " max_steps: int = 1000,\n", - " learning_rate: float = 1e-5,\n", + " learning_rate: float = 1e-3,\n", " num_lr_decays: int = 3,\n", " early_stop_patience_steps: int =-1,\n", " val_check_steps: int = 100,\n", @@ -178,25 +171,12 @@ " optimizer_kwargs = None,\n", " **trainer_kwargs):\n", "\n", - " if hist_exog_list is not None:\n", - " raise Exception('DeepNPTS does not support historical exogenous variables.')\n", - "\n", " if exclude_insample_y:\n", " raise Exception('DeepNPTS has no possibility for excluding y.')\n", - " \n", - " supported_losses = (losses.GMM,\n", - " losses.PMM,\n", - " losses.NBMM)\n", "\n", - " if not isinstance(loss, supported_losses):\n", - " raise Exception('DeepNPTS only supports GMM, PMM or NBMM as loss function.') \n", - " \n", - " if not isinstance(valid_loss, losses.MQLoss):\n", - " raise Exception('DeepNPTS only supports MQLoss as validation loss.')\n", + " if not isinstance(loss, losses.BasePointLoss):\n", + " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function.') \n", " \n", - " # Overwrite n_components, it has to be the input_size in DeepNPTS\n", - " loss.n_components = input_size\n", - " \n", " # Inherit BaseWindows class\n", " super(DeepNPTS, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -226,16 +206,15 @@ " **trainer_kwargs)\n", "\n", " self.h = h\n", - " self.h_backup = self.h # Used because h=1 during training\n", - " self.use_softmax = True\n", " self.hidden_size = hidden_size\n", " self.dropout = dropout\n", - " self.trajectory_samples = trajectory_samples\n", "\n", " self.futr_exog_size = 
len(self.futr_exog_list)\n", " self.stat_exog_size = len(self.stat_exog_list)\n", + " self.hist_exog_size = len(self.hist_exog_list)\n", "\n", - " input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size\n", + " input_dim = input_size * (1 + self.futr_exog_size + self.hist_exog_size) + self.stat_exog_size + self.h * self.futr_exog_size\n", + " \n", " # Create DeepNPTSNetwork\n", " modules = [] \n", " for i in range(n_layers):\n", @@ -246,503 +225,57 @@ " if dropout > 0.0:\n", " modules.append(nn.Dropout(dropout))\n", "\n", + " modules.append(nn.Linear(hidden_size, input_size * self.h))\n", " self.deepnptsnetwork = nn.Sequential(*modules)\n", - " self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07))\n", - "\n", - " # Add output layers for Mixture distribution \n", - " output_modules = []\n", - " if dropout > 0.0:\n", - " output_modules.append(nn.Dropout(self.dropout))\n", - " \n", - " if isinstance(loss, GMM):\n", - " output_modules.append(nn.Linear(hidden_size, input_size + 1))\n", - " elif isinstance(loss, PMM):\n", - " output_modules.append(nn.Linear(hidden_size, input_size))\n", - " elif isinstance(loss, NBMM):\n", - " output_modules.append(nn.Linear(hidden_size, input_size))\n", - "\n", - " self.output_layer = nn.Sequential(*output_modules)\n", - " self.output_layer.apply(self._init_weights)\n", - "\n", - "\n", - " @staticmethod\n", - " def _init_weights(module, scale=1.0):\n", - " if type(module) == nn.Linear:\n", - " nn.init.uniform_(module.weight, -scale, scale)\n", - " nn.init.zeros_(module.bias)\n", - "\n", - " def _domain_map(self, o_t, insample_y):\n", - " if isinstance(self.loss, GMM):\n", - " weights = o_t[:, :-1] # [B, L + 1] -> [B, L]\n", - " kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1]\n", - " kernel_width = torch.repeat_interleave(input=kernel_width,\n", - " repeats=weights.shape[1],\n", - " dim=-1) # [B, 1] -> [B, L]\n", - " output = torch.cat([insample_y, kernel_width, weights], dim=-1) # [B, L] + [B, L] + [B, 
L] = [B, 3 * L]\n", - " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L]\n", - " elif isinstance(self.loss, PMM):\n", - " weights = o_t # [B, L] -> [B, L]\n", - " output = torch.cat([insample_y, weights], dim=-1) # [B, L] + [B, L] = [B, 2 * L]\n", - " output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] \n", - " elif isinstance(self.loss, NBMM):\n", - " weights = torch.ones_like(o_t) # [B, L] -> [B, L]\n", - " output = torch.cat([insample_y, o_t, weights], dim=-1) # [B, L] + [B, L] + [B, L] = [B, 3 * L]\n", - " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * \n", - "\n", - " else:\n", - " raise NotImplementedError\n", - " \n", - " return output\n", - "\n", - " # Override BaseWindows method\n", - " def training_step(self, batch, batch_idx):\n", - " \n", - " # Only train one-step ahead\n", - " self.h = 1\n", - " self.quantiles = self.loss.quantiles\n", - "\n", - " # Create and normalize windows [Ws, L+H, C]\n", - " y_idx = batch[\"y_idx\"]\n", - " windows = self._create_windows(batch, step=\"train\")\n", - " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h :, y_idx])\n", - " windows = self._normalization(windows=windows, y_idx=y_idx)\n", - "\n", - " # Parse windows\n", - " (\n", - " insample_y,\n", - " insample_mask,\n", - " outsample_y,\n", - " outsample_mask,\n", - " _,\n", - " futr_exog,\n", - " stat_exog,\n", - " ) = self._parse_windows(batch, windows)\n", - "\n", - " windows_batch = dict(\n", - " insample_y=insample_y, # [Ws, L]\n", - " insample_mask=insample_mask, # [Ws, L]\n", - " futr_exog=futr_exog, # [Ws, L+H]\n", - " hist_exog=None, \n", - " stat_exog=stat_exog, # [Ws, 1]\n", - " y_idx=y_idx # [Ws, 1]\n", - " ) \n", - "\n", - " # Model Predictions\n", - " output = self.train_forward(windows_batch)\n", - "\n", - " _, y_loc, y_scale = self._inv_normalization(\n", - " y_hat=outsample_y, \n", - " temporal_cols=batch[\"temporal_cols\"], \n", - " y_idx=y_idx\n", - " )\n", - " # outsample_y = original_insample_y\n", - " 
outsample_y = original_outsample_y\n", - " distr_args = self.loss.scale_decouple(\n", - " output=output, loc=y_loc, scale=y_scale\n", - " )\n", - " loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask)\n", - "\n", - " if torch.isnan(loss):\n", - " print(\"Model Parameters\", self.hparams)\n", - " print(\"insample_y\", torch.isnan(insample_y).sum())\n", - " print(\"outsample_y\", torch.isnan(outsample_y).sum())\n", - " print(\"output\", torch.isnan(output).sum())\n", - " raise Exception(\"Loss is NaN, training stopped.\")\n", - "\n", - " self.log(\"train_loss\", loss, prog_bar=True, on_epoch=True)\n", - " self.train_trajectories.append((self.global_step, float(loss)))\n", - "\n", - " self.h = self.h_backup \n", - " \n", - " return loss\n", - "\n", - " # Override BaseWindows method\n", - " def validation_step(self, batch, batch_idx):\n", - "\n", - " self.h = self.h_backup\n", - " self.quantiles = self.valid_loss.quantiles\n", - "\n", - " if self.val_size == 0:\n", - " return np.nan\n", - "\n", - " # TODO: Hack to compute number of windows\n", - " windows = self._create_windows(batch, step=\"val\")\n", - " n_windows = len(windows[\"temporal\"])\n", - " y_idx = batch[\"y_idx\"]\n", - "\n", - " # Number of windows in batch\n", - " windows_batch_size = self.inference_windows_batch_size\n", - " if windows_batch_size < 0:\n", - " windows_batch_size = n_windows\n", - " n_batches = int(np.ceil(n_windows / windows_batch_size))\n", - "\n", - " valid_losses = []\n", - " batch_sizes = []\n", - " for i in range(n_batches):\n", - " # Create and normalize windows [Ws, L+H, C]\n", - " w_idxs = np.arange(\n", - " i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows)\n", - " )\n", - " windows = self._create_windows(batch, step=\"val\", w_idxs=w_idxs)\n", - " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h:, 0])\n", - " windows = self._normalization(windows=windows, y_idx=y_idx)\n", - "\n", - " # Parse windows\n", - " (\n", - " 
insample_y,\n", - " insample_mask,\n", - " _,\n", - " outsample_mask,\n", - " _,\n", - " futr_exog,\n", - " stat_exog,\n", - " ) = self._parse_windows(batch, windows)\n", - " \n", - " windows_batch = dict(\n", - " insample_y=insample_y, # [Ws, L]\n", - " insample_mask=insample_mask, # [Ws, L]\n", - " futr_exog=futr_exog, # [Ws, L+H]\n", - " hist_exog=None, # [Ws, L]\n", - " stat_exog=stat_exog,\n", - " y_idx=y_idx,\n", - " ) # [Ws, 1]\n", - "\n", - " # Model Predictions\n", - " output_batch = self(windows_batch)\n", - " # Monte Carlo already returns y_hat with mean and quantiles\n", - " output_batch = output_batch[:,:, 1:] # Remove mean\n", - " valid_loss_batch = self.valid_loss(y=original_outsample_y, y_hat=output_batch, mask=outsample_mask)\n", - " valid_losses.append(valid_loss_batch)\n", - " batch_sizes.append(len(output_batch))\n", - "\n", - " valid_loss = torch.stack(valid_losses)\n", - " batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device)\n", - " valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes)\n", - "\n", - " if torch.isnan(valid_loss):\n", - " raise Exception(\"Loss is NaN, training stopped.\")\n", - "\n", - " self.log(\"valid_loss\", valid_loss, prog_bar=True, on_epoch=True)\n", - " self.validation_step_outputs.append(valid_loss)\n", - " return valid_loss\n", - "\n", - " # Override BaseWindows method\n", - " def predict_step(self, batch, batch_idx):\n", - "\n", - " self.h == self.h_backup\n", - " self.quantiles = self.loss.quantiles\n", - "\n", - " # TODO: Hack to compute number of windows\n", - " windows = self._create_windows(batch, step='predict')\n", - " n_windows = len(windows['temporal'])\n", - " y_idx = batch['y_idx']\n", - "\n", - " # Number of windows in batch\n", - " windows_batch_size = self.inference_windows_batch_size\n", - " if windows_batch_size < 0:\n", - " windows_batch_size = n_windows\n", - " n_batches = int(np.ceil(n_windows/windows_batch_size))\n", - "\n", - " y_hats = []\n", - " for i in 
range(n_batches):\n", - " # Create and normalize windows [Ws, L+H, C]\n", - " w_idxs = np.arange(i*windows_batch_size, \n", - " min((i+1)*windows_batch_size, n_windows))\n", - " windows = self._create_windows(batch, step='predict', w_idxs=w_idxs)\n", - " windows = self._normalization(windows=windows, y_idx=y_idx)\n", - "\n", - " # Parse windows\n", - " insample_y, insample_mask, _, _, _, futr_exog, stat_exog = self._parse_windows(batch, windows)\n", - " windows_batch = dict(insample_y=insample_y, # [Ws, L]\n", - " insample_mask=insample_mask, # [Ws, L]\n", - " futr_exog=futr_exog, # [Ws, L+H]\n", - " stat_exog=stat_exog,\n", - " y_idx=y_idx)\n", - " \n", - " # Model Predictions\n", - " y_hat = self(windows_batch)\n", - " # Monte Carlo already returns y_hat with mean and quantiles\n", - " y_hats.append(y_hat)\n", - " y_hat = torch.cat(y_hats, dim=0)\n", - " return y_hat\n", - "\n", - " def train_forward(self, windows_batch):\n", - " # Parse windows_batch\n", - " x_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", - " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", - " stat_exog = windows_batch['stat_exog'] # [B, S]\n", - "\n", - " batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len\n", - "\n", - " # Concatenate x_t with future exogenous\n", - " if self.futr_exog_size > 0: \n", - " futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F]\n", - " x_t = torch.cat((x_t, futr_exog_t), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", - " \n", - " x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)]\n", - "\n", - " # Concatenate x_t with static exogenous\n", - " if self.stat_exog_size > 0:\n", - " x_t = torch.cat((x_t, stat_exog), dim=1) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S]\n", - "\n", - " # Run through DeepNPTSNetwork\n", - " h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size]\n", - " o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1]\n", - "\n", - " output 
= self._domain_map(o_t, windows_batch['insample_y']) # [B, L + 1], [B, L] -> [B, 3 * L]\n", - " output = self.loss.domain_map(output) # [B, 3 * L] -> ([B, L], [B, L], [B, L])\n", - "\n", - " return output\n", "\n", " def forward(self, windows_batch):\n", " # Parse windows_batch\n", - " insample_y_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " x = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " hist_exog = windows_batch['hist_exog'] # [B, L, X]\n", " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", " stat_exog = windows_batch['stat_exog'] # [B, S]\n", - " y_idx = windows_batch['y_idx']\n", - "\n", - " batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len\n", - " device = insample_y_t.device\n", - " dtype = insample_y_t.dtype\n", - "\n", - " # Repeat insample_y for trajectory samples\n", - " insample_y_t = torch.repeat_interleave(input=insample_y_t, \n", - " repeats=self.trajectory_samples, \n", - " dim=0) # [B, L, 1] -> [B * n_samples, L, 1]\n", - " \n", - " # Input x_t is insample_y at time t\n", - " x_t = insample_y_t\n", "\n", - " # Repeat futr_exog if available for trajectory samples and add to x_t \n", + " batch_size, seq_len = x.shape[:2] # B = batch_size, L = seq_len\n", + " insample_y = windows_batch['insample_y'].unsqueeze(-1) \n", + " \n", + " # Concatenate x_t with future exogenous of input\n", " if self.futr_exog_size > 0: \n", - " futr_exog = torch.repeat_interleave(input=futr_exog, \n", - " repeats=self.trajectory_samples, \n", - " dim=0) # [B, L + h, F] -> [B * n_samples, L + h, F] \n", - " x_t = torch.cat((x_t, futr_exog[:, :seq_len]), dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", + " x = torch.cat((x, futr_exog[:, :seq_len]), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", " \n", - " x_t = x_t.reshape(batch_size * self.trajectory_samples, -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + " # Concatenate x_t with 
historic exogenous\n", + " if self.hist_exog_size > 0: \n", + " x = torch.cat((x, hist_exog), dim=2) # [B, L, 1 + F] + [B, L, X] -> [B, L, 1 + F + X] \n", "\n", - " # Repeat stat_exog if available for trajectory samples and add to x_t\n", - " if self.stat_exog_size > 0:\n", - " stat_exog = torch.repeat_interleave(\n", - " input=stat_exog, \n", - " repeats=self.trajectory_samples, \n", - " dim=0) # [B, S] -> [B * n_samples, S] \n", - " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", + " x = x.reshape(batch_size, -1) # [B, L, 1 + F + X] -> [B, L * (1 + F + X)]\n", "\n", - " # Scales for inverse normalization\n", - " y_scale = self.scaler.x_scale[:, :, y_idx]\n", - " y_loc = self.scaler.x_shift[:, :, y_idx]\n", - " y_scale = torch.repeat_interleave(input=y_scale, \n", - " repeats=self.trajectory_samples, \n", - " dim=0)\n", - " y_loc = torch.repeat_interleave(input=y_loc, \n", - " repeats=self.trajectory_samples, \n", - " dim=0)\n", - " # Create forecasts tensor\n", - " forecasts = torch.zeros((batch_size, \n", - " self.h,\n", - " len(self.quantiles) + 1), \n", - " device=device, \n", - " dtype=dtype)\n", - " \n", - " # Recursive predictions\n", - " for t in range(self.h):\n", - " # Run input throught DeepNPTSNetwork\n", - " h_t = self.deepnptsnetwork(x_t) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size]\n", - " o_t = self.output_layer(h_t) # [B * n_samples, hidden_size] -> [B * n_samples, L (+ 1)]\n", - " output = self._domain_map(o_t, insample_y_t.squeeze(-1)) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L]\n", - " output = self.loss.domain_map(output) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L])\n", - "\n", - " # Inverse normalization\n", - " distr_args = self.loss.scale_decouple(output=output, \n", - " loc=y_loc, \n", - " scale=y_scale)\n", + " # Concatenate x with static exogenous\n", + " if 
self.stat_exog_size > 0:\n", + " x = torch.cat((x, stat_exog), dim=1) # [B, L * (1 + F + X)] + [B, S] -> [B, L * (1 + F + X) + S]\n", "\n", - " # Sample and create probabilistic outputs\n", - " samples_t_flat, _, _ = self.loss.sample(distr_args=distr_args, \n", - " num_samples=1)\n", + " # Concatenate x_t with future exogenous of horizon\n", + " if self.futr_exog_size > 0:\n", + " futr_exog = futr_exog[:, seq_len:] # [B, L + h, F] -> [B, h, F]\n", + " futr_exog = futr_exog.reshape(batch_size, -1) # [B, L + h, F] -> [B, h * F]\n", + " x = torch.cat((x, futr_exog), dim=1) # [B, L * (1 + F + X) + S] + [B, h * F] -> [B, L * (1 + F + X) + S + h * F] \n", "\n", - " samples_t_flat = samples_t_flat.squeeze()\n", - " samples_t = samples_t_flat.reshape(batch_size, \n", - " self.trajectory_samples) # [B * n_samples] -> [B, n_samples] \n", - " \n", - " samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] \n", - " quantiles_t = torch.quantile(input=samples_t, \n", - " q=self.quantiles, \n", - " dim=-1) # [B, n_samples] -> [Q, B]\n", - " forecasts[:, t, 0] = samples_t_mean\n", - " forecasts[:, t, 1:] = quantiles_t.permute(1, 0)\n", + " # Run through DeepNPTSNetwork\n", + " weights = self.deepnptsnetwork(x) # [B, L * (1 + F + X) + S + h * F] -> [B, L * h]\n", "\n", - " insample_y_t_next = self.scaler.scaler(samples_t_flat, \n", - " y_loc.squeeze(), \n", - " y_scale.squeeze()) # [B * n_samples] -> [B * n_samples]\n", - " insample_y_t_next = insample_y_t_next.unsqueeze(-1)\\\n", - " .unsqueeze(-1) # [B * n_samples] -> [B * n_samples, 1, 1]\n", + " # Apply softmax for weighted input predictions\n", + " weights = weights.reshape(batch_size, seq_len, -1) # [B, L * h] -> [B, L, h]\n", + " x = F.softmax(weights, dim=1) * insample_y # [B, L, h] * [B, L, 1] = [B, L, h]\n", + " output = torch.sum(x, dim=1).unsqueeze(-1) # [B, L, h] -> [B, h, 1]\n", "\n", - " # Update insample_y_t \n", - " insample_y_t = torch.cat([insample_y_t[:, 1:], \n", - " insample_y_t_next], \n", - " 
dim=1) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1]\n", - " \n", - " # Update input\n", - " x_t = insample_y_t\n", - " # Concatenate x_t with future exogenous\n", - " if self.futr_exog_size > 0: \n", - " x_t = torch.cat((x_t, \n", - " futr_exog[:, t:seq_len + t]), \n", - " dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", - " \n", - " x_t = x_t.reshape(batch_size * self.trajectory_samples\n", - " , -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + " forecast = self.loss.domain_map(output) # [B, h, 1] -> [B, h, 1]\n", "\n", - " # Concatenate x_t with static exogenous\n", - " if self.stat_exog_size > 0:\n", - " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", - " \n", - " return forecasts\n", - "\n" + " return forecast" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DeepNPTS\n", - "\n", - "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", - "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", - "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", - "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", - "> max_steps:int=1000, learning_rate:float=0.001,\n", - "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", - "> val_check_steps:int=100, batch_size:int=32,\n", - "> valid_batch_size:Optional[int]=None,\n", - "> windows_batch_size:int=1024,\n", - "> inference_windows_batch_size:int=-1,\n", - "> start_padding_enabled=False, step_size:int=1,\n", - "> scaler_type:str='standard', random_seed:int=1,\n", - "> num_workers_loader=0, 
drop_last_loader=False, optimizer=None,\n", - "> optimizer_kwargs=None, **trainer_kwargs)\n", - "\n", - "DeepNPTS\n", - "\n", - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", - "\n", - "**Parameters:**
\n", - "`h`: int, Forecast horizon.
\n", - "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - "`hidden_size`: int=32, hidden size of dense layers.
\n", - "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - "`dropout`: float=0.1, dropout.
\n", - "`n_layers`: int=2, number of dense layers.
\n", - "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", - "`stat_exog_list`: str list, static exogenous columns.
\n", - "`hist_exog_list`: str list, historic exogenous columns.
\n", - "`futr_exog_list`: str list, future exogenous columns.
\n", - "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`max_steps`: int=1000, maximum number of training steps.
\n", - "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - "`batch_size`: int=32, number of different series in each batch.
\n", - "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - "`step_size`: int=1, step size between each window of temporal data.
\n", - "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - "`alias`: str, optional, Custom name of the model.
\n", - "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - "**References**
\n", - "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DeepNPTS\n", - "\n", - "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", - "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", - "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", - "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", - "> max_steps:int=1000, learning_rate:float=0.001,\n", - "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", - "> val_check_steps:int=100, batch_size:int=32,\n", - "> valid_batch_size:Optional[int]=None,\n", - "> windows_batch_size:int=1024,\n", - "> inference_windows_batch_size:int=-1,\n", - "> start_padding_enabled=False, step_size:int=1,\n", - "> scaler_type:str='standard', random_seed:int=1,\n", - "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", - "> optimizer_kwargs=None, **trainer_kwargs)\n", - "\n", - "DeepNPTS\n", - "\n", - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", - "\n", - "**Parameters:**
\n", - "`h`: int, Forecast horizon.
\n", - "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - "`hidden_size`: int=32, hidden size of dense layers.
\n", - "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - "`dropout`: float=0.1, dropout.
\n", - "`n_layers`: int=2, number of dense layers.
\n", - "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", - "`stat_exog_list`: str list, static exogenous columns.
\n", - "`hist_exog_list`: str list, historic exogenous columns.
\n", - "`futr_exog_list`: str list, future exogenous columns.
\n", - "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`max_steps`: int=1000, maximum number of training steps.
\n", - "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - "`batch_size`: int=32, number of different series in each batch.
\n", - "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - "`step_size`: int=1, step size between each window of temporal data.
\n", - "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - "`alias`: str, optional, Custom name of the model.
\n", - "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - "**References**
\n", - "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS, title_level=3)" ] @@ -751,73 +284,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### DeepNPTS.fit\n", - "\n", - "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ], - "text/plain": [ - "---\n", - "\n", - "### DeepNPTS.fit\n", - "\n", - "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" ] @@ -826,53 +293,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### DeepNPTS.predict\n", - "\n", - "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ], - "text/plain": [ - "---\n", - "\n", - "### DeepNPTS.predict\n", - "\n", - "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" ] @@ -891,194 +312,26 @@ "metadata": {}, "outputs": [], "source": [ + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "\n", "from neuralforecast import NeuralForecast\n", - "from neuralforecast.losses.pytorch import MQLoss, DistributionLoss, GMM\n", - "from neuralforecast.tsdataset import TimeSeriesDataset\n", - "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic" + "from neuralforecast.utils import AirPassengersPanel, AirPassengersStatic" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Seed set to 1\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "b74158f17d254e4884139ee5c48e5706", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Sanity Checking: | | 0/? 
[00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "import pandas as pd\n", "import pytorch_lightning as pl\n", "import matplotlib.pyplot as plt\n", "\n", "from neuralforecast import NeuralForecast\n", "#from neuralforecast.models import DeepAR\n", "from neuralforecast.losses.pytorch import DistributionLoss, HuberMQLoss\n", "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic\n", "\n", "#AirPassengersPanel['y'] = AirPassengersPanel['y'] + 10\n", "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds<AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 132 train\n", "Y_test_df = AirPassengersPanel[AirPassengersPanel.ds>=AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 12 test\n", "\n", "nf = NeuralForecast(\n", " models=[DeepNPTS(h=12,\n", " input_size=12,\n", " trajectory_samples=100,\n", " loss=GMM(),\n", " # learning_rate=1e-5,\n", " n_layers = 2,\n", " dropout=0.0,\n", " stat_exog_list=['airline1'],\n", " futr_exog_list=['trend'],\n", " max_steps=1000,\n", @@ -1100,29 +353,9 @@ "plot_df = plot_df[plot_df.unique_id=='Airline1'].drop('unique_id', axis=1)\n", "plt.plot(plot_df['ds'], plot_df['y'], c='black', label='True')\n", "plt.plot(plot_df['ds'], plot_df['DeepNPTS'], c='red', label='mean')\n", "plt.plot(plot_df['ds'], plot_df['DeepNPTS-median'], c='blue', label='median')\n", "plt.fill_between(x=plot_df['ds'][-12:], \n", " y1=plot_df['DeepNPTS-lo-90'][-12:].values, \n", " y2=plot_df['DeepNPTS-hi-90'][-12:].values,\n", " alpha=0.4, label='level 90')\n", "plt.legend()\n", "plt.grid()\n", "plt.plot()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/nbs/models.ipynb b/nbs/models.ipynb index d48214601..9e437cea8 100644 --- a/nbs/models.ipynb +++ b/nbs/models.ipynb @@ -1133,18 +1133,7 @@ 
"execution_count": null, "id": "95850f3c", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2024-04-06 10:40:24,017\tINFO worker.py:1724 -- Started a local Ray instance.\n", - "2024-04-06 10:40:25,556\tINFO tune.py:220 -- Initializing Ray automatically. For cluster usage or custom Ray initialization, call `ray.init(...)` before `Tuner(...)`.\n", - "2024-04-06 10:40:25,559\tINFO tune.py:583 -- [output] This uses the legacy output and progress reporter, as Jupyter notebooks are not supported by the new engine, yet. For more information, please see https://github.com/ray-project/ray/issues/36949\n", - "Seed set to 1\n" - ] - } - ], + "outputs": [], "source": [ "%%capture\n", "# Use your own config or AutoNHITS.default_config\n", @@ -1164,139 +1153,7 @@ "execution_count": null, "id": "7c905530", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[36m(_train_tune pid=27632)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\ray\\tune\\integration\\pytorch_lightning.py:194: `ray.tune.integration.pytorch_lightning.TuneReportCallback` is deprecated. Use `ray.tune.integration.pytorch_lightning.TuneReportCheckpointCallback` instead.\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\pytorch_lightning\\utilities\\parsing.py:199: Attribute 'loss' is an instance of `nn.Module` and is already saved during checkpointing. It is recommended to ignore them using `self.save_hyperparameters(ignore=['loss'])`.\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\pytorch_lightning\\utilities\\parsing.py:199: Attribute 'valid_loss' is an instance of `nn.Module` and is already saved during checkpointing. 
It is recommended to ignore them using `self.save_hyperparameters(ignore=['valid_loss'])`.\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m Seed set to 11\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m GPU available: True (cuda), used: True\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m TPU available: False, using: 0 TPU cores\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m IPU available: False, using: 0 IPUs\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m HPU available: False, using: 0 HPUs\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m `Trainer(val_check_interval=1)` was configured so validation will run after every batch.\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m You are using a CUDA device ('NVIDIA GeForce RTX 3090') that has Tensor Cores. To properly utilize them, you should set `torch.set_float32_matmul_precision('medium' | 'high')` which will trade-off precision for performance. For more details, read https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html#torch.set_float32_matmul_precision\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m Missing logger folder: C:\\Users\\ospra\\ray_results\\_train_tune_2024-04-06_10-40-40\\_train_tune_4d1da_00000\\lightning_logs\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0]\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m | Name | Type | Params\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m -----------------------------------------------\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 0 | padder_train | ConstantPad1d | 0 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 1 | loss | MAE | 0 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 2 | scaler | TemporalNorm | 0 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 3 | lin_hist | Linear | 16 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 4 | drop_hist | Dropout | 0 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 5 
| net_bwd | Sequential | 944 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 6 | feature_lin | Linear | 9 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 7 | temporal_lin | Linear | 156 \n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m -----------------------------------------------\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 1.1 K Trainable params\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 0 Non-trainable params\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 1.1 K Total params\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m 0.004 Total estimated model params size (MB)\n", - "\u001b[36m(_train_tune pid=27632)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\pytorch_lightning\\trainer\\connectors\\data_connector.py:441: The 'val_dataloader' does not have many workers which may be a bottleneck. Consider increasing the value of the `num_workers` argument` to `num_workers=19` in the `DataLoader` to improve performance.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Sanity Checking: | | 0/? 
[00:00 0) * 1) eps = 1e-10 - lambdas, weights = distr_args + lambdas = distr_args[0] B, H, K = lambdas.size() + weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) + y = y[:, :, None] mask = mask[:, :, None] @@ -1305,7 +1313,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1367,22 +1375,18 @@ def __init__( if self.return_params: mu_names = [f"-mu-{i}" for i in range(1, n_components + 1)] std_names = [f"-std-{i}" for i in range(1, n_components + 1)] - weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] - self.param_names = [ - i for j in zip(mu_names, std_names, weight_names) for i in j - ] - self.output_names = self.output_names + self.param_names + mu_std_names = [i for j in zip(mu_names, std_names) for i in j] + self.output_names = self.output_names + mu_std_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 3 * n_components + self.outputsize_multiplier = 2 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - means, stds, weights = output.chunk(3, dim=-1) - - return (means, stds, weights) + means, stds = torch.tensor_split(output, 2, dim=-1) + return (means, stds) def scale_decouple( self, @@ -1397,16 +1401,14 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. 
""" - means, stds, weights = output + means, stds = output stds = F.softplus(stds) - weights = F.softmax(weights, dim=-1) if (loc is not None) and (scale is not None): loc = loc.view(means.size(dim=0), 1, -1) scale = scale.view(means.size(dim=0), 1, -1) means = (means * scale) + loc stds = (stds + eps) * scale - - return (means, stds, weights) + return (means, stds) def sample(self, distr_args, num_samples=None): """ @@ -1428,11 +1430,17 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - means, stds, weights = distr_args + means, stds = distr_args B, H, K = means.size() Q = len(self.quantiles) assert means.shape == stds.shape + # Sample K ~ Mult(weights) + # shared across B, H + # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) + + weights = (1 / K) * torch.ones_like(means, device=means.device) + # Avoid loop, vectorize weights = weights.reshape(-1, K) means = means.flatten() @@ -1473,16 +1481,18 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - means, stds, weights = distr_args + means, stds = distr_args B, H, K = means.size() + weights = (1 / K) * torch.ones_like(means, device=means.device) + y = y[:, :, None] mask = mask[:, :, None] @@ -1510,7 +1520,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1568,29 +1578,25 @@ def __init__( f"-total_count-{i}" for i in range(1, n_components + 1) ] probs_names = [f"-probs-{i}" for i in range(1, n_components + 1)] - weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] - self.param_names = [ - i for j in 
zip(total_count_names, probs_names, weight_names) for i in j - ] - self.output_names = self.output_names + self.param_names + param_names = [i for j in zip(total_count_names, probs_names) for i in j] + self.output_names = self.output_names + param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 3 * n_components + self.outputsize_multiplier = 2 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - mu, alpha, weights = output.chunk(3, dim=-1) - - return mu, alpha, weights + mu, alpha = torch.tensor_split(output, 2, dim=-1) + return (mu, alpha) def scale_decouple( self, output, loc: Optional[torch.Tensor] = None, scale: Optional[torch.Tensor] = None, - eps: float = 1e-6, + eps: float = 0.2, ): """Scale Decouple @@ -1599,10 +1605,9 @@ def scale_decouple( Also adds domain protection to the distribution parameters. """ # Efficient NBinomial parametrization - mu, alpha, weights = output - mu = F.softplus(mu) + eps - alpha = F.softplus(alpha) + eps # alpha = 1/total_counts - weights = F.softmax(weights, dim=-1) + mu, alpha = output + mu = F.softplus(mu) + 1e-8 + alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts if (loc is not None) and (scale is not None): loc = loc.view(mu.size(dim=0), 1, -1) mu *= loc @@ -1612,9 +1617,8 @@ def scale_decouple( # => probs = mu / (total_count + mu) # => probs = mu / [total_count * (1 + mu * (1/total_count))] total_count = 1.0 / alpha - probs = mu * alpha / (1.0 + mu * alpha) - probs = torch.clamp(probs, eps, 1 - eps) - return (total_count, probs, weights) + probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 + return (total_count, probs) def sample(self, distr_args, num_samples=None): """ @@ -1636,11 +1640,17 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - total_count, probs, weights = distr_args + total_count, probs = distr_args B, H, K = total_count.size() Q = 
len(self.quantiles) assert total_count.shape == probs.shape + # Sample K ~ Mult(weights) + # shared across B, H + # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) + + weights = (1 / K) * torch.ones_like(probs, device=probs.device) + # Avoid loop, vectorize weights = weights.reshape(-1, K) total_count = total_count.flatten() @@ -1682,16 +1692,18 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - total_count, probs, weights = distr_args + total_count, probs = distr_args B, H, K = total_count.size() + weights = (1 / K) * torch.ones_like(probs, device=probs.device) + y = y[:, :, None] mask = mask[:, :, None] @@ -1722,7 +1734,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): diff --git a/neuralforecast/models/__init__.py b/neuralforecast/models/__init__.py index b4e7153f1..7e474ef62 100644 --- a/neuralforecast/models/__init__.py +++ b/neuralforecast/models/__init__.py @@ -3,7 +3,7 @@ 'TFT', 'VanillaTransformer', 'Informer', 'Autoformer', 'PatchTST', 'FEDformer', 'StemGNN', 'HINT', 'TimesNet', 'TimeLLM', 'TSMixer', 'TSMixerx', 'MLPMultivariate', 'iTransformer', 'BiTCN', - 'DeepNPTS' + 'DeepNPTS', 'DeepNPTSMultivariate' ] from .rnn import RNN @@ -34,3 +34,4 @@ from .itransformer import iTransformer from .bitcn import BiTCN from .deepnpts import DeepNPTS +from .deepnptsmultivariate import DeepNPTSMultivariate diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index d4da85974..678f89c11 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -4,23 +4,21 @@ __all__ = ['DeepNPTS'] # 
%% ../../nbs/models.deepnpts.ipynb 3 -import numpy as np - import torch import torch.nn as nn +import torch.nn.functional as F import neuralforecast.losses.pytorch as losses from typing import Optional -from functools import partial from ..common._base_windows import BaseWindows -from ..losses.pytorch import MQLoss, GMM, PMM, NBMM +from ..losses.pytorch import MAE # %% ../../nbs/models.deepnpts.ipynb 7 class DeepNPTS(BaseWindows): """DeepNPTS - Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. + Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. **Parameters:**
`h`: int, Forecast horizon.
@@ -29,7 +27,6 @@ class DeepNPTS(BaseWindows): `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
`dropout`: float=0.1, dropout.
`n_layers`: int=2, number of dense layers.
- `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
`stat_exog_list`: str list, static exogenous columns.
`hist_exog_list`: str list, historic exogenous columns.
`futr_exog_list`: str list, future exogenous columns.
@@ -72,15 +69,14 @@ def __init__( batch_norm: bool = True, dropout: float = 0.1, n_layers: int = 2, - trajectory_samples: int = 100, futr_exog_list=None, hist_exog_list=None, stat_exog_list=None, exclude_insample_y=False, - loss=GMM(), - valid_loss=MQLoss(level=[80, 90]), + loss=MAE(), + valid_loss=MAE(), max_steps: int = 1000, - learning_rate: float = 1e-5, + learning_rate: float = 1e-3, num_lr_decays: int = 3, early_stop_patience_steps: int = -1, val_check_steps: int = 100, @@ -99,22 +95,13 @@ def __init__( **trainer_kwargs ): - if hist_exog_list is not None: - raise Exception("DeepNPTS does not support historical exogenous variables.") - if exclude_insample_y: raise Exception("DeepNPTS has no possibility for excluding y.") - supported_losses = (losses.GMM, losses.PMM, losses.NBMM) - - if not isinstance(loss, supported_losses): - raise Exception("DeepNPTS only supports GMM, PMM or NBMM as loss function.") - - if not isinstance(valid_loss, losses.MQLoss): - raise Exception("DeepNPTS only supports MQLoss as validation loss.") - - # Overwrite n_components, it has to be the input_size in DeepNPTS - loss.n_components = input_size + if not isinstance(loss, losses.BasePointLoss): + raise Exception( + "DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function." 
+ ) # Inherit BaseWindows class super(DeepNPTS, self).__init__( @@ -147,16 +134,19 @@ def __init__( ) self.h = h - self.h_backup = self.h # Used because h=1 during training - self.use_softmax = True self.hidden_size = hidden_size self.dropout = dropout - self.trajectory_samples = trajectory_samples self.futr_exog_size = len(self.futr_exog_list) self.stat_exog_size = len(self.stat_exog_list) + self.hist_exog_size = len(self.hist_exog_list) + + input_dim = ( + input_size * (1 + self.futr_exog_size + self.hist_exog_size) + + self.stat_exog_size + + self.h * self.futr_exog_size + ) - input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size # Create DeepNPTSNetwork modules = [] for i in range(n_layers): @@ -167,391 +157,61 @@ def __init__( if dropout > 0.0: modules.append(nn.Dropout(dropout)) + modules.append(nn.Linear(hidden_size, input_size * self.h)) self.deepnptsnetwork = nn.Sequential(*modules) - self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07)) - - # Add output layers for Mixture distribution - output_modules = [] - if dropout > 0.0: - output_modules.append(nn.Dropout(self.dropout)) - - if isinstance(loss, GMM): - output_modules.append(nn.Linear(hidden_size, input_size + 1)) - elif isinstance(loss, PMM): - output_modules.append(nn.Linear(hidden_size, input_size)) - elif isinstance(loss, NBMM): - output_modules.append(nn.Linear(hidden_size, input_size)) - - self.output_layer = nn.Sequential(*output_modules) - self.output_layer.apply(self._init_weights) - - @staticmethod - def _init_weights(module, scale=1.0): - if type(module) == nn.Linear: - nn.init.uniform_(module.weight, -scale, scale) - nn.init.zeros_(module.bias) - - def _domain_map(self, o_t, insample_y): - if isinstance(self.loss, GMM): - weights = o_t[:, :-1] # [B, L + 1] -> [B, L] - kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1] - kernel_width = torch.repeat_interleave( - input=kernel_width, repeats=weights.shape[1], dim=-1 - ) # [B, 1] -> [B, L] - output = torch.cat( - 
[insample_y, kernel_width, weights], dim=-1 - ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] - output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L] - elif isinstance(self.loss, PMM): - weights = o_t # [B, L] -> [B, L] - output = torch.cat( - [insample_y, weights], dim=-1 - ) # [B, L] + [B, L] = [B, 2 * L] - output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] - elif isinstance(self.loss, NBMM): - weights = torch.ones_like(o_t) # [B, L] -> [B, L] - output = torch.cat( - [insample_y, o_t, weights], dim=-1 - ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] - output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * - - else: - raise NotImplementedError - - return output - - # Override BaseWindows method - def training_step(self, batch, batch_idx): - - # Only train one-step ahead - self.h = 1 - self.quantiles = self.loss.quantiles - - # Create and normalize windows [Ws, L+H, C] - y_idx = batch["y_idx"] - windows = self._create_windows(batch, step="train") - original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, y_idx]) - windows = self._normalization(windows=windows, y_idx=y_idx) - - # Parse windows - ( - insample_y, - insample_mask, - outsample_y, - outsample_mask, - _, - futr_exog, - stat_exog, - ) = self._parse_windows(batch, windows) - - windows_batch = dict( - insample_y=insample_y, # [Ws, L] - insample_mask=insample_mask, # [Ws, L] - futr_exog=futr_exog, # [Ws, L+H] - hist_exog=None, - stat_exog=stat_exog, # [Ws, 1] - y_idx=y_idx, # [Ws, 1] - ) - - # Model Predictions - output = self.train_forward(windows_batch) - - _, y_loc, y_scale = self._inv_normalization( - y_hat=outsample_y, temporal_cols=batch["temporal_cols"], y_idx=y_idx - ) - # outsample_y = original_insample_y - outsample_y = original_outsample_y - distr_args = self.loss.scale_decouple(output=output, loc=y_loc, scale=y_scale) - loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask) - - if torch.isnan(loss): - print("Model Parameters", self.hparams) - print("insample_y", 
torch.isnan(insample_y).sum()) - print("outsample_y", torch.isnan(outsample_y).sum()) - print("output", torch.isnan(output).sum()) - raise Exception("Loss is NaN, training stopped.") - - self.log("train_loss", loss, prog_bar=True, on_epoch=True) - self.train_trajectories.append((self.global_step, float(loss))) - - self.h = self.h_backup - - return loss - - # Override BaseWindows method - def validation_step(self, batch, batch_idx): - - self.h = self.h_backup - self.quantiles = self.valid_loss.quantiles - - if self.val_size == 0: - return np.nan - - # TODO: Hack to compute number of windows - windows = self._create_windows(batch, step="val") - n_windows = len(windows["temporal"]) - y_idx = batch["y_idx"] - - # Number of windows in batch - windows_batch_size = self.inference_windows_batch_size - if windows_batch_size < 0: - windows_batch_size = n_windows - n_batches = int(np.ceil(n_windows / windows_batch_size)) - - valid_losses = [] - batch_sizes = [] - for i in range(n_batches): - # Create and normalize windows [Ws, L+H, C] - w_idxs = np.arange( - i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) - ) - windows = self._create_windows(batch, step="val", w_idxs=w_idxs) - original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, 0]) - windows = self._normalization(windows=windows, y_idx=y_idx) - - # Parse windows - ( - insample_y, - insample_mask, - _, - outsample_mask, - _, - futr_exog, - stat_exog, - ) = self._parse_windows(batch, windows) - - windows_batch = dict( - insample_y=insample_y, # [Ws, L] - insample_mask=insample_mask, # [Ws, L] - futr_exog=futr_exog, # [Ws, L+H] - hist_exog=None, # [Ws, L] - stat_exog=stat_exog, - y_idx=y_idx, - ) # [Ws, 1] - - # Model Predictions - output_batch = self(windows_batch) - # Monte Carlo already returns y_hat with mean and quantiles - output_batch = output_batch[:, :, 1:] # Remove mean - valid_loss_batch = self.valid_loss( - y=original_outsample_y, y_hat=output_batch, mask=outsample_mask - ) - 
valid_losses.append(valid_loss_batch) - batch_sizes.append(len(output_batch)) - - valid_loss = torch.stack(valid_losses) - batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device) - valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes) - - if torch.isnan(valid_loss): - raise Exception("Loss is NaN, training stopped.") - - self.log("valid_loss", valid_loss, prog_bar=True, on_epoch=True) - self.validation_step_outputs.append(valid_loss) - return valid_loss - # Override BaseWindows method - def predict_step(self, batch, batch_idx): - - self.h == self.h_backup - self.quantiles = self.loss.quantiles - - # TODO: Hack to compute number of windows - windows = self._create_windows(batch, step="predict") - n_windows = len(windows["temporal"]) - y_idx = batch["y_idx"] - - # Number of windows in batch - windows_batch_size = self.inference_windows_batch_size - if windows_batch_size < 0: - windows_batch_size = n_windows - n_batches = int(np.ceil(n_windows / windows_batch_size)) - - y_hats = [] - for i in range(n_batches): - # Create and normalize windows [Ws, L+H, C] - w_idxs = np.arange( - i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) - ) - windows = self._create_windows(batch, step="predict", w_idxs=w_idxs) - windows = self._normalization(windows=windows, y_idx=y_idx) - - # Parse windows - insample_y, insample_mask, _, _, _, futr_exog, stat_exog = ( - self._parse_windows(batch, windows) - ) - windows_batch = dict( - insample_y=insample_y, # [Ws, L] - insample_mask=insample_mask, # [Ws, L] - futr_exog=futr_exog, # [Ws, L+H] - stat_exog=stat_exog, - y_idx=y_idx, - ) - - # Model Predictions - y_hat = self(windows_batch) - # Monte Carlo already returns y_hat with mean and quantiles - y_hats.append(y_hat) - y_hat = torch.cat(y_hats, dim=0) - return y_hat - - def train_forward(self, windows_batch): + def forward(self, windows_batch): # Parse windows_batch - x_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + x = 
windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + hist_exog = windows_batch["hist_exog"] # [B, L, X] futr_exog = windows_batch["futr_exog"] # [B, L + h, F] stat_exog = windows_batch["stat_exog"] # [B, S] - batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len + batch_size, seq_len = x.shape[:2] # B = batch_size, L = seq_len + insample_y = windows_batch["insample_y"].unsqueeze(-1) - # Concatenate x_t with future exogenous + # Concatenate x_t with future exogenous of input if self.futr_exog_size > 0: - futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F] - x_t = torch.cat( - (x_t, futr_exog_t), dim=2 + x = torch.cat( + (x, futr_exog[:, :seq_len]), dim=2 ) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] - x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)] + # Concatenate x_t with historic exogenous + if self.hist_exog_size > 0: + x = torch.cat( + (x, hist_exog), dim=2 + ) # [B, L, 1 + F] + [B, L, X] -> [B, L, 1 + F + X] - # Concatenate x_t with static exogenous - if self.stat_exog_size > 0: - x_t = torch.cat( - (x_t, stat_exog), dim=1 - ) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S] - - # Run through DeepNPTSNetwork - h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size] - o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1] - - output = self._domain_map( - o_t, windows_batch["insample_y"] - ) # [B, L + 1], [B, L] -> [B, 3 * L] - output = self.loss.domain_map( - output - ) # [B, 3 * L] -> ([B, L], [B, L], [B, L]) - - return output - - def forward(self, windows_batch): - # Parse windows_batch - insample_y_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] - futr_exog = windows_batch["futr_exog"] # [B, L + h, F] - stat_exog = windows_batch["stat_exog"] # [B, S] - y_idx = windows_batch["y_idx"] + x = x.reshape(batch_size, -1) # [B, L, 1 + F + X] -> [B, L * (1 + F + X)] - batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len - device = insample_y_t.device - dtype = 
insample_y_t.dtype - - # Repeat insample_y for trajectory samples - insample_y_t = torch.repeat_interleave( - input=insample_y_t, repeats=self.trajectory_samples, dim=0 - ) # [B, L, 1] -> [B * n_samples, L, 1] - - # Input x_t is insample_y at time t - x_t = insample_y_t - - # Repeat futr_exog if available for trajectory samples and add to x_t - if self.futr_exog_size > 0: - futr_exog = torch.repeat_interleave( - input=futr_exog, repeats=self.trajectory_samples, dim=0 - ) # [B, L + h, F] -> [B * n_samples, L + h, F] - x_t = torch.cat( - (x_t, futr_exog[:, :seq_len]), dim=2 - ) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] - - x_t = x_t.reshape( - batch_size * self.trajectory_samples, -1 - ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] - - # Repeat stat_exog if available for trajectory samples and add to x_t + # Concatenate x with static exogenous if self.stat_exog_size > 0: - stat_exog = torch.repeat_interleave( - input=stat_exog, repeats=self.trajectory_samples, dim=0 - ) # [B, S] -> [B * n_samples, S] - x_t = torch.cat( - (x_t, stat_exog), dim=1 - ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] - - # Scales for inverse normalization - y_scale = self.scaler.x_scale[:, :, y_idx] - y_loc = self.scaler.x_shift[:, :, y_idx] - y_scale = torch.repeat_interleave( - input=y_scale, repeats=self.trajectory_samples, dim=0 - ) - y_loc = torch.repeat_interleave( - input=y_loc, repeats=self.trajectory_samples, dim=0 - ) - # Create forecasts tensor - forecasts = torch.zeros( - (batch_size, self.h, len(self.quantiles) + 1), device=device, dtype=dtype - ) + x = torch.cat( + (x, stat_exog), dim=1 + ) # [B, L * (1 + F + X)] + [B, S] -> [B, L * (1 + F + X) + S] - # Recursive predictions - for t in range(self.h): - # Run input throught DeepNPTSNetwork - h_t = self.deepnptsnetwork( - x_t - ) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size] - o_t = self.output_layer( - h_t - ) # [B * n_samples, 
hidden_size] -> [B * n_samples, L (+ 1)] - output = self._domain_map( - o_t, insample_y_t.squeeze(-1) - ) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L] - output = self.loss.domain_map( - output - ) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L]) - - # Inverse normalization - distr_args = self.loss.scale_decouple( - output=output, loc=y_loc, scale=y_scale - ) - - # Sample and create probabilistic outputs - samples_t_flat, _, _ = self.loss.sample( - distr_args=distr_args, num_samples=1 - ) - - samples_t_flat = samples_t_flat.squeeze() - samples_t = samples_t_flat.reshape( - batch_size, self.trajectory_samples - ) # [B * n_samples] -> [B, n_samples] - - samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] - quantiles_t = torch.quantile( - input=samples_t, q=self.quantiles, dim=-1 - ) # [B, n_samples] -> [Q, B] - forecasts[:, t, 0] = samples_t_mean - forecasts[:, t, 1:] = quantiles_t.permute(1, 0) - - insample_y_t_next = self.scaler.scaler( - samples_t_flat, y_loc.squeeze(), y_scale.squeeze() - ) # [B * n_samples] -> [B * n_samples] - insample_y_t_next = insample_y_t_next.unsqueeze(-1).unsqueeze( - -1 - ) # [B * n_samples] -> [B * n_samples, 1, 1] - - # Update insample_y_t - insample_y_t = torch.cat( - [insample_y_t[:, 1:], insample_y_t_next], dim=1 - ) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1] + # Concatenate x_t with future exogenous of horizon + if self.futr_exog_size > 0: + futr_exog = futr_exog[:, seq_len:] # [B, L + h, F] -> [B, h, F] + futr_exog = futr_exog.reshape( + batch_size, -1 + ) # [B, L + h, F] -> [B, h * F] + x = torch.cat( + (x, futr_exog), dim=1 + ) # [B, L * (1 + F + X) + S] + [B, h * F] -> [B, L * (1 + F + X) + S + h * F] - # Update input - x_t = insample_y_t - # Concatenate x_t with future exogenous - if self.futr_exog_size > 0: - x_t = torch.cat( - (x_t, futr_exog[:, t : seq_len + t]), dim=2 - ) # [B * n_samples, L, 1] + [B * 
n_samples, L, F] -> [B * n_samples, L, 1 + F] + # Run through DeepNPTSNetwork + weights = self.deepnptsnetwork( + x + ) # [B, L * (1 + F + X) + S + h * F] -> [B, L * h] - x_t = x_t.reshape( - batch_size * self.trajectory_samples, -1 - ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] + # Apply softmax for weighted input predictions + weights = weights.reshape(batch_size, seq_len, -1) # [B, L * h] -> [B, L, h] + x = ( + F.softmax(weights, dim=1) * insample_y + ) # [B, L, h] * [B, L, 1] = [B, L, h] + output = torch.sum(x, dim=1).unsqueeze(-1) # [B, L, h] -> [B, h, 1] - # Concatenate x_t with static exogenous - if self.stat_exog_size > 0: - x_t = torch.cat( - (x_t, stat_exog), dim=1 - ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] + forecast = self.loss.domain_map(output) # [B, h, 1] -> [B, h, 1] - return forecasts + return forecast From 7bfe7bcac0284801af9cbd1a20d4ea1d898b8da9 Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Mon, 6 May 2024 09:40:12 +0200 Subject: [PATCH 03/11] update_model_files --- nbs/models.deepnpts.ipynb | 567 +++++++++++++++++++++++++++++- nbs/models.ipynb | 139 ++++++++ neuralforecast/_modidx.py | 5 + neuralforecast/auto.py | 103 +++++- neuralforecast/models/__init__.py | 5 +- neuralforecast/models/deepnpts.py | 7 +- 6 files changed, 800 insertions(+), 26 deletions(-) diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index c1852c18a..7b6cac9e0 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -32,7 +32,7 @@ ":::{.callout-warning collapse=\"false\"}\n", "#### Losses\n", "\n", - "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast, rather than a sampled distributional output. 
Consequently, DeepNPTS only supports point losses as training loss.\n", + "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast. Therefore, it only supports point losses.\n", "\n", ":::" ] @@ -160,7 +160,7 @@ " batch_size: int = 32,\n", " valid_batch_size: Optional[int] = None,\n", " windows_batch_size: int = 1024,\n", - " inference_windows_batch_size: int = -1,\n", + " inference_windows_batch_size: int = 1024,\n", " start_padding_enabled = False,\n", " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", @@ -177,6 +177,9 @@ " if not isinstance(loss, losses.BasePointLoss):\n", " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function.') \n", " \n", + " if not isinstance(valid_loss, losses.BasePointLoss):\n", + " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as valid loss function.') \n", + " \n", " # Inherit BaseWindows class\n", " super(DeepNPTS, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -275,7 +278,139 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L18){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.1, n_layers:int=2, futr_exog_list=None,\n", + "> hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=MAE(), valid_loss=MAE(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> 
inference_windows_batch_size:int=1024,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n", + "`input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + "`dropout`: float=0.1, dropout.
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n", + "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L18){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.1, n_layers:int=2, futr_exog_list=None,\n", + "> hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=MAE(), valid_loss=MAE(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=1024,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n", + "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + "`dropout`: float=0.1, dropout.
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n", + "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DeepNPTS, title_level=3)" ] @@ -284,7 +419,73 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" ] @@ -293,7 +494,53 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" ] @@ -323,7 +570,315 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Seed set to 1\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a683239fc3e5435aad7174b0d136376d", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Sanity Checking: | | 0/? [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| eval: false\n", "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds Date: Thu, 18 Apr 2024 15:00:51 +0200 Subject: [PATCH 04/11] deepnpts_firststab --- nbs/common.scalers.ipynb | 4 +- nbs/losses.pytorch.ipynb | 1712 +++++++++++++++++++++++++++-- nbs/models.deepnpts.ipynb | 1137 +++++++++++++++++++ neuralforecast/_modidx.py | 18 + neuralforecast/common/_scalers.py | 4 +- neuralforecast/core.py | 3 + neuralforecast/losses/pytorch.py | 110 +- neuralforecast/models/__init__.py | 4 +- neuralforecast/models/deepnpts.py | 557 ++++++++++ 9 files changed, 3369 insertions(+), 180 deletions(-) create mode 100644 nbs/models.deepnpts.ipynb create mode 100644 neuralforecast/models/deepnpts.py diff --git a/nbs/common.scalers.ipynb b/nbs/common.scalers.ipynb index c06fa0da2..921d5adaf 100644 --- a/nbs/common.scalers.ipynb +++ b/nbs/common.scalers.ipynb @@ -567,8 +567,8 @@ " shape = list(x.shape)\n", " shape[dim] = 1\n", "\n", - " x_shift = torch.zeros(shape)\n", - " x_scale = torch.ones(shape)\n", + " x_shift = torch.zeros(shape, device=x.device)\n", + " x_scale = torch.ones(shape, device=x.device)\n", "\n", " return 
x_shift, x_scale" ] diff --git a/nbs/losses.pytorch.ipynb b/nbs/losses.pytorch.ipynb index 55cd837b3..36adfaabd 100644 --- a/nbs/losses.pytorch.ipynb +++ b/nbs/losses.pytorch.ipynb @@ -244,7 +244,61 @@ "execution_count": null, "id": "1d004cd0", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__init__\n", + "\n", + "> MAE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Error\n", + "\n", + "Calculates Mean Absolute Error between\n", + "`y` and `y_hat`. MAE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the\n", + "deviation of the prediction and the true\n", + "value at a given time and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__init__\n", + "\n", + "> MAE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Error\n", + "\n", + "Calculates Mean Absolute Error between\n", + "`y` and `y_hat`. MAE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the\n", + "deviation of the prediction and the true\n", + "value at a given time and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAE, name='MAE.__init__', title_level=3)" ] @@ -254,7 +308,51 @@ "execution_count": null, "id": "0a20a273", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__call__\n", + "\n", + "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mae`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAE.__call__\n", + "\n", + "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mae`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAE.__call__, name='MAE.__call__', title_level=3)" ] @@ -328,7 +426,61 @@ "execution_count": null, "id": "e8c65b82", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__init__\n", + "\n", + "> MSE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Squared Error\n", + "\n", + "Calculates Mean Squared Error between\n", + "`y` and `y_hat`. MSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the \n", + "squared deviation of the prediction and the true\n", + "value at a given time, and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__init__\n", + "\n", + "> MSE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Squared Error\n", + "\n", + "Calculates Mean Squared Error between\n", + "`y` and `y_hat`. MSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the \n", + "squared deviation of the prediction and the true\n", + "value at a given time, and averages these devations\n", + "over the length of the series.\n", + "\n", + "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MSE, name='MSE.__init__', title_level=3)" ] @@ -338,7 +490,51 @@ "execution_count": null, "id": "b0126a7f", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__call__\n", + "\n", + "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mse`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MSE.__call__\n", + "\n", + "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mse`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MSE.__call__, name='MSE.__call__', title_level=3)" ] @@ -416,7 +612,67 @@ "execution_count": null, "id": "d961d383", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__init__\n", + "\n", + "> RMSE.__init__ (horizon_weight=None)\n", + "\n", + "Root Mean Squared Error\n", + "\n", + "Calculates Root Mean Squared Error between\n", + "`y` and `y_hat`. RMSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the squared deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "Finally the RMSE will be in the same scale\n", + "as the original time series so its comparison with other\n", + "series is possible only if they share a common scale. \n", + "RMSE has a direct connection to the L2 norm.\n", + "\n", + "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__init__\n", + "\n", + "> RMSE.__init__ (horizon_weight=None)\n", + "\n", + "Root Mean Squared Error\n", + "\n", + "Calculates Root Mean Squared Error between\n", + "`y` and `y_hat`. RMSE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the squared deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "Finally the RMSE will be in the same scale\n", + "as the original time series so its comparison with other\n", + "series is possible only if they share a common scale. \n", + "RMSE has a direct connection to the L2 norm.\n", + "\n", + "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(RMSE, name='RMSE.__init__', title_level=3)" ] @@ -426,7 +682,51 @@ "execution_count": null, "id": "d398d3e3", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__call__\n", + "\n", + "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`rmse`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### RMSE.__call__\n", + "\n", + "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`rmse`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(RMSE.__call__, name='RMSE.__call__', title_level=3)" ] @@ -517,7 +817,69 @@ "execution_count": null, "id": "174e8042", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__init__\n", + "\n", + "> MAPE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Percentage Error\n", + "\n", + "Calculates Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. MAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the percentual deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "The closer to zero an observed value is, the higher penalty MAPE loss\n", + "assigns to the corresponding error.\n", + "\n", + "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__init__\n", + "\n", + "> MAPE.__init__ (horizon_weight=None)\n", + "\n", + "Mean Absolute Percentage Error\n", + "\n", + "Calculates Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. MAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the percentual deviation\n", + "of the prediction and the observed value at a given time and\n", + "averages these devations over the length of the series.\n", + "The closer to zero an observed value is, the higher penalty MAPE loss\n", + "assigns to the corresponding error.\n", + "\n", + "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAPE, name='MAPE.__init__', title_level=3)" ] @@ -527,7 +889,51 @@ "execution_count": null, "id": "da63f136", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__call__\n", + "\n", + "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mape`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MAPE.__call__\n", + "\n", + "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mape`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MAPE.__call__, name='MAPE.__call__', title_level=3)" ] @@ -609,7 +1015,73 @@ "execution_count": null, "id": "dee99fb8", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__init__\n", + "\n", + "> SMAPE.__init__ (horizon_weight=None)\n", + "\n", + "Symmetric Mean Absolute Percentage Error\n", + "\n", + "Calculates Symmetric Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. SMAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the relative deviation\n", + "of the prediction and the observed value scaled by the sum of the\n", + "absolute values for the prediction and observed value at a\n", + "given time, then averages these devations over the length\n", + "of the series. This allows the SMAPE to have bounds between\n", + "0% and 200% which is desireble compared to normal MAPE that\n", + "may be undetermined when the target is zero.\n", + "\n", + "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__init__\n", + "\n", + "> SMAPE.__init__ (horizon_weight=None)\n", + "\n", + "Symmetric Mean Absolute Percentage Error\n", + "\n", + "Calculates Symmetric Mean Absolute Percentage Error between\n", + "`y` and `y_hat`. SMAPE measures the relative prediction\n", + "accuracy of a forecasting method by calculating the relative deviation\n", + "of the prediction and the observed value scaled by the sum of the\n", + "absolute values for the prediction and observed value at a\n", + "given time, then averages these devations over the length\n", + "of the series. This allows the SMAPE to have bounds between\n", + "0% and 200% which is desireble compared to normal MAPE that\n", + "may be undetermined when the target is zero.\n", + "\n", + "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", + "\n", + "**Parameters:**
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(SMAPE, name='SMAPE.__init__', title_level=3)" ] @@ -619,7 +1091,51 @@ "execution_count": null, "id": "db62a845", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__call__\n", + "\n", + "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`smape`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### SMAPE.__call__\n", + "\n", + "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`smape`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(SMAPE.__call__, name='SMAPE.__call__', title_level=3)" ] @@ -706,7 +1222,71 @@ "execution_count": null, "id": "b6a4cf21", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__init__\n", + "\n", + "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", + "\n", + "Mean Absolute Scaled Error \n", + "Calculates the Mean Absolute Scaled Error between\n", + "`y` and `y_hat`. MASE measures the relative prediction\n", + "accuracy of a forecasting method by comparinng the mean absolute errors\n", + "of the prediction and the observed value against the mean\n", + "absolute errors of the seasonal naive model.\n", + "The MASE partially composed the Overall Weighted Average (OWA), \n", + "used in the M4 Competition.\n", + "\n", + "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", + "\n", + "**Parameters:**
\n", + "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__init__\n", + "\n", + "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", + "\n", + "Mean Absolute Scaled Error \n", + "Calculates the Mean Absolute Scaled Error between\n", + "`y` and `y_hat`. MASE measures the relative prediction\n", + "accuracy of a forecasting method by comparinng the mean absolute errors\n", + "of the prediction and the observed value against the mean\n", + "absolute errors of the seasonal naive model.\n", + "The MASE partially composed the Overall Weighted Average (OWA), \n", + "used in the M4 Competition.\n", + "\n", + "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", + "\n", + "**Parameters:**
\n", + "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MASE, name='MASE.__init__', title_level=3)" ] @@ -716,7 +1296,53 @@ "execution_count": null, "id": "32a2c11b", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__call__\n", + "\n", + "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mase`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MASE.__call__\n", + "\n", + "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mase`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MASE.__call__, name='MASE.__call__', title_level=3)" ] @@ -803,7 +1429,69 @@ "execution_count": null, "id": "edeb6f9a", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__init__\n", + "\n", + "> relMSE.__init__ (y_train, horizon_weight=None)\n", + "\n", + "Relative Mean Squared Error\n", + "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", + "as an alternative to percentage errors, to avoid measure unstability.\n", + "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", + "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", + "\n", + "**Parameters:**
\n", + "`y_train`: numpy array, Training values.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "- [Hyndman, R. J and Koehler, A. B. (2006).\n", + " \"Another look at measures of forecast accuracy\",\n", + " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", + " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__init__\n", + "\n", + "> relMSE.__init__ (y_train, horizon_weight=None)\n", + "\n", + "Relative Mean Squared Error\n", + "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", + "as an alternative to percentage errors, to avoid measure unstability.\n", + "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", + "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", + "\n", + "**Parameters:**
\n", + "`y_train`: numpy array, Training values.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "- [Hyndman, R. J and Koehler, A. B. (2006).\n", + " \"Another look at measures of forecast accuracy\",\n", + " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", + "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", + " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(relMSE, name='relMSE.__init__', title_level=3)" ] @@ -813,7 +1501,53 @@ "execution_count": null, "id": "a317b5c5", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__call__\n", + "\n", + "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`relMSE`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### relMSE.__call__\n", + "\n", + "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor (batch_size, output_size), Actual values.
\n", + "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", + "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`relMSE`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(relMSE.__call__, name='relMSE.__call__', title_level=3)" ] @@ -898,7 +1632,67 @@ "execution_count": null, "id": "70bd46d9", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__init__\n", + "\n", + "> QuantileLoss.__init__ (q, horizon_weight=None)\n", + "\n", + "Quantile Loss\n", + "\n", + "Computes the quantile loss between `y` and `y_hat`.\n", + "QL measures the deviation of a quantile forecast.\n", + "By weighting the absolute deviation in a non symmetric way, the\n", + "loss pays more attention to under or over estimation.\n", + "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", + "\n", + "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", + "\n", + "**Parameters:**
\n", + "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__init__\n", + "\n", + "> QuantileLoss.__init__ (q, horizon_weight=None)\n", + "\n", + "Quantile Loss\n", + "\n", + "Computes the quantile loss between `y` and `y_hat`.\n", + "QL measures the deviation of a quantile forecast.\n", + "By weighting the absolute deviation in a non symmetric way, the\n", + "loss pays more attention to under or over estimation.\n", + "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", + "\n", + "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", + "\n", + "**Parameters:**
\n", + "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(QuantileLoss, name='QuantileLoss.__init__', title_level=3)" ] @@ -908,7 +1702,51 @@ "execution_count": null, "id": "0b1588e9", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__call__\n", + "\n", + "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`quantile_loss`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### QuantileLoss.__call__\n", + "\n", + "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies datapoints to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`quantile_loss`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(QuantileLoss.__call__, name='QuantileLoss.__call__', title_level=3)" ] @@ -1080,7 +1918,87 @@ "execution_count": null, "id": "8f42ec82", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__init__\n", + "\n", + "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", + "\n", + "Multi-Quantile loss\n", + "\n", + "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", + "MQL calculates the average multi-quantile Loss for\n", + "a given set of quantiles, based on the absolute \n", + "difference between predicted quantiles and observed values.\n", + "\n", + "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", + "\n", + "The limit behavior of MQL allows to measure the accuracy \n", + "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", + "the continuous ranked probability score (CRPS). This can be achieved \n", + "through a numerical integration technique, that discretizes the quantiles \n", + "and treats the CRPS integral with a left Riemann approximation, averaging over \n", + "uniformly distanced quantiles. \n", + "\n", + "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", + "\n", + "**Parameters:**
\n", + "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", + "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", + "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__init__\n", + "\n", + "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", + "\n", + "Multi-Quantile loss\n", + "\n", + "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", + "MQL calculates the average multi-quantile Loss for\n", + "a given set of quantiles, based on the absolute \n", + "difference between predicted quantiles and observed values.\n", + "\n", + "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", + "\n", + "The limit behavior of MQL allows to measure the accuracy \n", + "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", + "the continuous ranked probability score (CRPS). This can be achieved \n", + "through a numerical integration technique, that discretizes the quantiles \n", + "and treats the CRPS integral with a left Riemann approximation, averaging over \n", + "uniformly distanced quantiles. \n", + "\n", + "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", + "\n", + "**Parameters:**
\n", + "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", + "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", + "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", + "\n", + "**References:**
\n", + "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", + "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MQLoss, name='MQLoss.__init__', title_level=3)" ] @@ -1090,7 +2008,51 @@ "execution_count": null, "id": "bac2237a", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__call__\n", + "\n", + "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mqloss`: tensor (single value)." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### MQLoss.__call__\n", + "\n", + "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "**Parameters:**
\n", + "`y`: tensor, Actual values.
\n", + "`y_hat`: tensor, Predicted values.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns:**
\n", + "`mqloss`: tensor (single value)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(MQLoss.__call__, name='MQLoss.__call__', title_level=3)" ] @@ -1109,7 +2071,17 @@ "execution_count": null, "id": "da37f2ef", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check MQLoss' stored quantiles\n", @@ -1654,7 +2626,99 @@ "execution_count": null, "id": "a462101b", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__init__\n", + "\n", + "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> **distribution_kwargs)\n", + "\n", + "DistributionLoss\n", + "\n", + "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", + "interact with NeuralForecast models modularly. It shares the negative \n", + "log-likelihood as the optimization objective and a sample method to \n", + "generate empirically the quantiles defined by the `level` list.\n", + "\n", + "Additionally, it implements a distribution transformation that factorizes the\n", + "scale-dependent likelihood parameters into a base scale and a multiplier \n", + "efficiently learnable within the network's non-linearities operating ranges.\n", + "\n", + "Available distributions:
\n", + "- Poisson
\n", + "- Normal
\n", + "- StudentT
\n", + "- NegativeBinomial
\n", + "- Tweedie
\n", + "- Bernoulli (Temporal Classifiers)\n", + "\n", + "**Parameters:**
\n", + "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", + "\n", + "**References:**
\n", + "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", + "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", + " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__init__\n", + "\n", + "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> **distribution_kwargs)\n", + "\n", + "DistributionLoss\n", + "\n", + "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", + "interact with NeuralForecast models modularly. It shares the negative \n", + "log-likelihood as the optimization objective and a sample method to \n", + "generate empirically the quantiles defined by the `level` list.\n", + "\n", + "Additionally, it implements a distribution transformation that factorizes the\n", + "scale-dependent likelihood parameters into a base scale and a multiplier \n", + "efficiently learnable within the network's non-linearities operating ranges.\n", + "\n", + "Available distributions:
\n", + "- Poisson
\n", + "- Normal
\n", + "- StudentT
\n", + "- NegativeBinomial
\n", + "- Tweedie
\n", + "- Bernoulli (Temporal Classifiers)\n", + "\n", + "**Parameters:**
\n", + "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", + "\n", + "**References:**
\n", + "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", + "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", + " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DistributionLoss, name='DistributionLoss.__init__', title_level=3)" ] @@ -1664,7 +2728,65 @@ "execution_count": null, "id": "d8c367f8", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.sample\n", + "\n", + "> DistributionLoss.sample (distr_args:torch.Tensor,\n", + "> num_samples:Optional[int]=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.sample\n", + "\n", + "> DistributionLoss.sample (distr_args:torch.Tensor,\n", + "> num_samples:Optional[int]=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DistributionLoss.sample, name='DistributionLoss.sample', title_level=3)" ] @@ -1674,7 +2796,75 @@ "execution_count": null, "id": "04e32679", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__call__\n", + "\n", + "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Computes the negative log-likelihood objective function. \n", + "To estimate the following predictive distribution:\n", + "\n", + "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", + "\n", + "where $\\theta$ represents the distributions parameters. It aditionally \n", + "summarizes the objective signal using a weighted average using the `mask` tensor. \n", + "\n", + "**Parameters**
\n", + "`y`: tensor, Actual values.
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns**
\n", + "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DistributionLoss.__call__\n", + "\n", + "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Computes the negative log-likelihood objective function. \n", + "To estimate the following predictive distribution:\n", + "\n", + "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", + "\n", + "where $\\theta$ represents the distributions parameters. It aditionally \n", + "summarizes the objective signal using a weighted average using the `mask` tensor. \n", + "\n", + "**Parameters**
\n", + "`y`: tensor, Actual values.
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", + "\n", + "**Returns**
\n", + "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DistributionLoss.__call__, name='DistributionLoss.__call__', title_level=3)" ] @@ -1684,7 +2874,17 @@ "execution_count": null, "id": "14a7e381", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check DistributionLoss' stored quantiles\n", @@ -1764,35 +2964,42 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " self.param_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", + " lambda_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", + " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", + " self.param_names = [i for j in zip(lambda_names, weight_names) for i in j]\n", " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = n_components\n", + " self.outputsize_multiplier = 2 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " return (output,)#, weights\n", - " \n", - " def scale_decouple(self, \n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None):\n", - " \"\"\" Scale Decouple\n", + " lambdas, weights = output.chunk(2, dim=-1)\n", + " return (lambdas, weights)\n", + "\n", + " def scale_decouple(\n", + " self,\n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " ):\n", + " \"\"\"Scale Decouple\n", "\n", " Stabilizes 
model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " lambdas = output[0]\n", + " lambdas, weights = output\n", + " weights = F.softmax(weights, dim=-1)\n", + "\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(lambdas.size(dim=0), 1, -1)\n", " scale = scale.view(lambdas.size(dim=0), 1, -1)\n", " lambdas = (lambdas * scale) + loc\n", " lambdas = F.softplus(lambdas)\n", - " return (lambdas,)\n", + " return (lambdas, weights)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -1814,15 +3021,10 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", "\n", - " lambdas = distr_args[0]\n", + " lambdas, weights = distr_args\n", " B, H, K = lambdas.size()\n", " Q = len(self.quantiles)\n", "\n", - " # Sample K ~ Mult(weights)\n", - " # shared across B, H\n", - " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", - " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", - "\n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " lambdas = lambdas.flatten() \n", @@ -1860,7 +3062,7 @@ " \n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", " if mask is None: \n", " mask = (y > 0) * 1\n", @@ -1868,11 +3070,9 @@ " mask = mask * ((y > 0) * 1)\n", "\n", " eps = 1e-10\n", - " lambdas = distr_args[0]\n", + " lambdas, weights = distr_args\n", " B, H, K = lambdas.size()\n", "\n", - " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", - "\n", " y = y[:,:,None]\n", " mask = mask[:,:,None]\n", "\n", @@ -1897,7 +3097,7 @@ " return loss\n", "\n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor],\n", + " distr_args: 
Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)\n" @@ -1908,7 +3108,83 @@ "execution_count": null, "id": "62d7daba", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__init__\n", + "\n", + "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> batch_correlation=False, horizon_correlation=False)\n", + "\n", + "Poisson Mixture Mesh\n", + "\n", + "This Poisson Mixture statistical model assumes independence across groups of \n", + "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", + "\n", + "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", + "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", + "\\prod_{\\beta\\in[g_{i}]} \n", + "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", + "\n", + "**Parameters:**
\n", + "`n_components`: int=10, the number of mixture components.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", + "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", + "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", + "\n", + "**References:**
\n", + "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", + "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__init__\n", + "\n", + "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", + "> num_samples=1000, return_params=False,\n", + "> batch_correlation=False, horizon_correlation=False)\n", + "\n", + "Poisson Mixture Mesh\n", + "\n", + "This Poisson Mixture statistical model assumes independence across groups of \n", + "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", + "\n", + "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", + "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", + "\\prod_{\\beta\\in[g_{i}]} \n", + "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", + "\n", + "**Parameters:**
\n", + "`n_components`: int=10, the number of mixture components.
\n", + "`level`: float list [0,100], confidence levels for prediction intervals.
\n", + "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", + "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", + "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", + "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", + "\n", + "**References:**
\n", + "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", + "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", + "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(PMM, name='PMM.__init__', title_level=3)" ] @@ -1918,7 +3194,63 @@ "execution_count": null, "id": "fa8da65c", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.sample\n", + "\n", + "> PMM.sample (distr_args, num_samples=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.sample\n", + "\n", + "> PMM.sample (distr_args, num_samples=None)\n", + "\n", + "Construct the empirical quantiles from the estimated Distribution,\n", + "sampling from it `num_samples` independently.\n", + "\n", + "**Parameters**
\n", + "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", + "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", + " of the resulting distribution.
\n", + "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", + " of the resulting distribution.
\n", + "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", + "\n", + "**Returns**
\n", + "`samples`: tensor, shape [B,H,`num_samples`].
\n", + "`quantiles`: tensor, empirical quantiles defined by `levels`.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(PMM.sample, name='PMM.sample', title_level=3)" ] @@ -1928,7 +3260,39 @@ "execution_count": null, "id": "ba75717c", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__call__\n", + "\n", + "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Call self as a function." + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### PMM.__call__\n", + "\n", + "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", + "> mask:Optional[torch.Tensor]=None)\n", + "\n", + "Call self as a function." 
+ ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(PMM.__call__, name='PMM.__call__', title_level=3)" ] @@ -1947,7 +3311,17 @@ "execution_count": null, "id": "e4a20e21", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -1971,11 +3345,43 @@ "execution_count": null, "id": "a56a2fbe", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "lambdas.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", + "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", + "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9FUlEQVR4nO3de1hU1foH8O8GhuGm4IDcFBHF+/2SijcgA8PUTI+WWoGHvOQtw7TQU4BxpEOllpblJTDL7KamZiqWoh21BOV4ySwLUE8QKQoICiOs3x/+2MdxuM0wwzDM9/M88zzM2muv9a4ZYN5Ze++1JSGEABEREVkUK1MHQERERA2PCQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICYCJ/fDDD3jsscfQpk0bKJVKeHh4ICAgAAsXLtSoFxQUhKCgIKPHI0kSYmNjDdZe27ZtMXr0aIO1V5NDhw5BkiQcOnSoQfrTVVBQECRJgiRJsLKyQrNmzeDv74+JEyfiiy++QEVFhdY+bdu2RUREhE79HD16FLGxsbhx44ZO+93fV+Xr+cUXX+jUTk1KSkoQGxtb5XuUnJwMSZKQlZVlsP6IqHo2pg7Akn399dcYO3YsgoKCkJiYCC8vL+Tk5CAtLQ1bt27Fm2++Kdd99913TRipeejbty+OHTuGrl27mjqUarVr1w4ff/wxAKC4uBiZmZnYsWMHJk6ciGHDhmHXrl1wdnaW62/fvh3NmzfXqY+jR48iLi4OERERcHFxqfN++vSlq5KSEsTFxQGAVkL7yCOP4NixY/Dy8jJqDER0FxMAE0pMTISfnx/27dsHG5v/vRVPPPEEEhMTNeo25g81U1Or1ZAkCc2bN8egQYNMHU6N7O3ttWJ85plnkJSUhL///e+YMWMGPv30U3lbnz59jB7TrVu3YG9v3yB91aRly5Zo2bKlSWMgsiQ8BGBC165dg5ubm8aHfyUrK8235v5DAFlZWZAkCW+88QZWrFgBPz8/ODk5ISAgAMePH9dqb/369ejYsSOUSiW6du2KLVu2ICIiAm3btq01ztzcXMycOROtW7eGra0t/Pz8EBcXhzt37tR5rHv37kXfvn1hb2+Pzp0744MPPtCqc/bsWTz66KNo0aIF7Ozs0Lt3b2zatEmjTuW09ObNm7Fw4UK0atUKSqUSFy9e1DoEUPkaVfe41wcffIBevXrBzs4OKpUKjz32GM6fP69RJyIiAk5OTrh48SJGjRoFJycn+Pj4YOHChSgtLa3za1GVadOmYdSoUfj888+RnZ0tl98/LV9RUYH4+Hh06tQJ9vb2cHFxQc+ePfHWW28BAGJjY7Fo0SIAgJ+fnzzWytek8pDMtm3b0KdPH9jZ2cnfyKs73HD79m1ERUXB09MT9vb2CAwMxKlTpzTqVHeI6t7fsaysLPkDPi4uTo6tss/qDgEY+r1Zu3YtevXqBScnJzRr1gydO3fGkiVLtGInauo4A2BCAQEB2LBhA+bPn4+pU6eib9++UCgUOrXxzjvvoHPnzli1ahUA4OWXX8aoUaOQmZkpTyWvW7cOM2fOxIQJE7By5UoUFBQgLi6uTh9aubm5GDBgAKysrPDKK6+gffv2OHbsGOLj45GVlYWkpKRa2/jPf/6DhQsX4qWXXoKHhwc2bNiAyMhI+Pv7Y/jw4QCACxcuYPDgwXB3d8fbb78NV1dXfPTRR4iIiMCff/6JxYsXa7QZHR2NgIAAvPfee7CysoK7uztyc3M16nh5eeHYsWMaZX/99ReefPJJtGrVSi5LSEjAkiVLMHnyZCQkJODatWuIjY1FQEAATpw4gQ4dOsh11Wo1xo4di8jISCxcuBCHDx/Gq6++CmdnZ7zyyiu1vhY1GTt2LPbs2YMjR47A19e3yjqJiYmIjY3FP/7xDwwfPhxqtRo///yzfLz/mWeeQX5+PlavXo1t27bJ0+n3ziCdPHkS58+fxz/+8Q/
4+fnB0dGxxriWLFmCvn37YsOGDSgoKEBsbCyCgoJw6tQptGvXrs7j8/Lywt69e/Hwww8jMjISzzzzDADU+K3f0O/N1q1bMXv2bMybNw9vvPEGrKyscPHiRfz00091HgdRkyHIZK5evSqGDh0qAAgAQqFQiMGDB4uEhARRVFSkUTcwMFAEBgbKzzMzMwUA0aNHD3Hnzh25/McffxQAxCeffCKEEKK8vFx4enqKgQMHarSXnZ0tFAqF8PX11SgHIGJiYuTnM2fOFE5OTiI7O1uj3htvvCEAiHPnztU4Rl9fX2FnZ6ex/61bt4RKpRIzZ86Uy5544gmhVCrFpUuXNPYPCwsTDg4O4saNG0IIIQ4ePCgAiOHDh2v1Vbnt4MGDVcZSXFwsBgwYILy8vERWVpYQQojr168Le3t7MWrUKI26ly5dEkqlUkyZMkUuCw8PFwDEZ599plF31KhRolOnTjW+DkLcfQ+7detW7fZvvvlGABD/+te/5DJfX18RHh4uPx89erTo3bt3jf28/vrrAoDIzMzU2ubr6yusra3FhQsXqtx2b1+Vr2ffvn1FRUWFXJ6VlSUUCoV45plnNMZ27+9npfDwcI3fsb/++kvrd6xSUlKSRtzGeG/mzp0rXFxctPomskQ8BGBCrq6uOHLkCE6cOIHXXnsNjz76KH755RdER0ejR48euHr1aq1tPPLII7C2tpaf9+zZEwDkaeQLFy4gNzcXkyZN0tivTZs2GDJkSK3t7969G8HBwfD29sadO3fkR1hYGAAgNTW11jZ69+6NNm3ayM/t7OzQsWNHjanu7777DiNGjICPj4/GvhERESgpKdH6Jj9hwoRa+71XeXk5Hn/8cZw/fx579uyRv2EfO3YMt27d0pr69vHxwYMPPohvv/1Wo1ySJIwZM0ajrGfPnhpj0ZcQotY6AwYMwH/+8x/Mnj0b+/btQ2Fhoc799OzZEx07dqxz/SlTpmgcMvH19cXgwYNx8OBBnfvWhTHemwEDBuDGjRuYPHkyvvrqqzr9jRE1VUwAGoH+/fvjxRdfxOeff44//vgDzz//PLKysrROBKyKq6urxnOlUgng7oldwN3zDADAw8NDa9+qyu73559/YteuXVAoFBqPbt26AUCd/oHeH2NlnJUxVsZZ1dnf3t7eGuOopOuZ4rNmzcLevXvxxRdfoHfv3hr9Vteet7e3Vr8ODg6ws7PTGsvt27d1iqcqlR9UlWOuSnR0NN544w0cP34cYWFhcHV1xYgRI5CWllbnfnR97Tw9Passu/+1MTRjvDdPPfUUPvjgA2RnZ2PChAlwd3fHwIEDkZKSYoQREDVuTAAaGYVCgZiYGAB3T4qrr8oP3z///FNr2/3HzKvi5uaG0NBQnDhxospHZGRkvWOsjDMnJ0er/I8//pDjuNf9J/HVJDY2Fhs2bMD69esRGhqq1S+Aavu+v19j2rlzJyRJks+LqIqNjQ2ioqJw8uRJ5Ofn45NPPsHly5cxcuRIlJSU1KkfXV47oOrfk9zcXI3Ezs7OrspzSurzDdtY7820adNw9OhRFBQU4Ouvv4YQAqNHjzbILA6ROWECYEJV/WMDIJ/hXNM3wbrq1KkTPD098dlnn2mUX7p0CUePHq11/9GjR+Ps2bNo3749+vfvr/UwRIwAMGLECHz33XfyB36lDz/8EA4ODnpf3rdx40bExcVh2bJlVZ7hHhAQAHt7e3z00Uca5VeuXJEPSzSEpKQkfPPNN5g8ebLG4ZKauLi44G9/+xvmzJmD/Px8+ez5+2eB6uuTTz7RODyRnZ2No0ePapz137ZtW/zyyy8aScC1a9e0fsd0ic3Y742joyPCwsKwdOlSlJWV4dy5c/Vqj8jc8CoAExo5ciRat26NMWPGoHPnzqioqEBGRgbefPNNODk54bnnnqt3H1ZWVoiLi8PMmTPxt7/9DX//+99x48YNxMXFwcvLS+tyw/stW7YMKSkpGDx4MObPn49OnTrh9u3byMrKwp49e/D
ee++hdevW9Y4zJiZGPt/glVdegUqlwscff4yvv/4aiYmJGovj1NWxY8cwa9YsDBkyBCEhIVqXRw4aNAguLi54+eWXsWTJEjz99NOYPHkyrl27hri4ONjZ2cmzMYZy69YtOY5bt27h999/x44dO7B7924EBgbivffeq3H/MWPGoHv37ujfvz9atmyJ7OxsrFq1Cr6+vvIZ8T169AAAvPXWWwgPD4dCoUCnTp3QrFkzvWLOy8vDY489hunTp6OgoAAxMTGws7NDdHS0XOepp57C+++/jyeffBLTp0/HtWvXkJiYqLWwULNmzeDr64uvvvoKI0aMgEqlgpubW5WXoxrjvZk+fTrs7e0xZMgQeHl5ITc3FwkJCXB2dsYDDzygc3tEZs3EJyFatE8//VRMmTJFdOjQQTg5OQmFQiHatGkjnnrqKfHTTz9p1K3uKoDXX39dq11UcZb1unXrhL+/v7C1tRUdO3YUH3zwgXj00UdFnz59at33r7/+EvPnzxd+fn5CoVAIlUol+vXrJ5YuXSpu3rxZ4xh9fX3FI488olVe1VnjZ86cEWPGjBHOzs7C1tZW9OrVSyQlJWnUqTwz/fPPP9dq8/6rACrPKq/uca8NGzaInj17CltbW+Hs7CweffRRrSscwsPDhaOjo1a/MTExWu1VJTAwUKN/R0dH0a5dO/G3v/1NfP7556K8vFxrn/vPzH/zzTfF4MGDhZubm7C1tRVt2rQRkZGR8lUNlaKjo4W3t7ewsrLSeE2qez+q6qvy9dy8ebOYP3++aNmypVAqlWLYsGEiLS1Na/9NmzaJLl26CDs7O9G1a1fx6aefal0FIIQQBw4cEH369BFKpVIAkPu8/yqASoZ8bzZt2iSCg4OFh4eHsLW1Fd7e3mLSpEni9OnTVb4mRE2ZJEQdTj2mJufGjRvo2LEjxo0bh3Xr1pk6HCIiamA8BGABcnNz8c9//hPBwcFwdXVFdnY2Vq5ciaKiIoMcZiAiIvPDBMACKJVKZGVlYfbs2cjPz5dPqnvvvffky/mIiMiy8BAAERGRBeJlgERERBaICQAREZEF4jkAuHuL1T/++APNmjXTeZU0IiJ9CSFQVFQEb2/vWtfkIDI0JgC4u6zo/TehISJqKJcvXzbIglpEumACAMgrpF2+fFlr5bLqqNVq7N+/H6GhoVAoFMYMz2Sa+hib+vgAjrGxKywshI+Pj96rNBLVBxMA/O/mKM2bN9cpAXBwcEDz5s3N7p9OXTX1MTb18QEco7ngoUcyBR50IiIiskBMAIiIiCwQEwAiIiILZNJzABISErBt2zb8/PPPsLe3x+DBg/Gvf/0LnTp1kusIIRAXF4d169bh+vXrGDhwIN555x2NJWxLS0vxwgsv4JNPPsGtW7cwYsQIvPvuuzyrlojMXkVFBcrKykwdBpkBhUIBa2vrOtc3aQKQmpqKOXPm4IEHHsCdO3ewdOlShIaG4qeffoKjoyMAIDExEStWrEBycjI6duyI+Ph4hISE4MKFC/KZswsWLMCuXbuwdetWuLq6YuHChRg9ejTS09N1ejGIiBqTsrIyZGZmoqKiwtShkJlwcXGBp6dnnU4sNWkCsHfvXo3nSUlJcHd3R3p6OoYPHw4hBFatWoWlS5di/PjxAIBNmzbBw8MDW7ZswcyZM1FQUICNGzdi8+bNeOihhwAAH330EXx8fHDgwAGMHDmywcdFRFRfQgjk5OTA2toaPj4+XCiIaiSEQElJCfLy8gAAXl5ete7TqC4DLCgoAACoVCoAQGZmJnJzcxEaGirXUSqVCAwMxNGjRzFz5kykp6dDrVZr1PH29kb37t1x9OjRKhOA0tJSlJaWys8LCwsB3L2cSK1W1ynWynp1rW+OmvoYm/r4AI6xsasp5jt37qCkpATe3t5wcHBowKjIXNnb2wMA8vLy4O7uXusMeKNJAIQQiIqKwtChQ9G9e3cAd+9jDwAeHh4adT08PJCdnS3XsbW1RYsWLbTqVO5/v4SEBMT
FxWmV79+/X+c/tJSUFJ3qm6OmPsamPj6AY2ysSkpKqt1WXl4OALC1tW2ocKgJqPwMU6vV5pMAzJ07F6dPn8b333+vte3+YxlCiFqPb9RUJzo6GlFRUfLzytW4QkNDdVoIKCUlBSEhIWaz+Mg7By/qVF8S5Wh7+zdk2bXH7Ac71b6DmTHH91BXHGPjVjn7WBMuEkS60OX3pVEkAPPmzcPOnTtx+PBhjTP3PT09Adz9ln/v8Yy8vDx5VsDT0xNlZWW4fv26xixAXl4eBg8eXGV/SqUSSqVSq1yhUOj8D0SffUxFSPqdECkka7MZoz7M6T3UF8fYOJlbvNS0mPSsEiEE5s6di23btuG7776Dn5+fxnY/Pz94enpqTO2VlZUhNTVV/nDv168fFAqFRp2cnBycPXu22gSAiIjI0pl0BmDOnDnYsmULvvrqKzRr1kw+Zu/s7Ax7e3tIkoQFCxZg+fLl6NChAzp06IDly5fDwcEBU6ZMketGRkZi4cKFcHV1hUqlwgsvvIAePXrIVwUQERGRJpMmAGvXrgUABAUFaZQnJSUhIiICALB48WLcunULs2fPlhcC2r9/v8bds1auXAkbGxtMmjRJXggoOTmZawAQUZOzMuWXBu3v+ZCODdpfXQQFBaF3795YtWqVqUMxayY/BFDVo/LDH7h7QkNsbCxycnJw+/ZtpKamylcJVLKzs8Pq1atx7do1lJSUYNeuXfDx8Wng0RAREQBERERg3LhxWuWHDh2CJEm4ceNGvdrftm0bXn311Xq1YQ4OHz6MMWPGwNvbG5IkYceOHQZtnytLEBGRWahcElmlUmnMApuboKAgJCcn11qvuLgYvXr1wpo1a4wSBxMAIiIyidLSUsyfPx/u7u6ws7PD0KFDceLECXl7UFAQ5s6di6ioKLi5uSEkJEQuX7BgAQAgKysLkiRpPSoPLdfWR2V78+fPx+LFi6FSqeDp6YnY2NgaYx87dmyV/UqShJ07dxrk9QkLC0N8fLy8Eq6hMQEgIiKTWLx4Mb788kts2rQJJ0+ehL+/P0aOHIn8/Hy5zqZNm2BjY4N///vfeP/997Xa8PHxQU5Ojvw4deoUXF1dMXz48Dr3UdmPo6MjfvjhByQmJmLZsmU1Li6VlJSEnJwc/PrrrwCAPXv2yDGMGjXKEC+P0TWKdQCIiKhp2b17N5ycnDTKKlc3BO5Ob69duxbJyckICwsDAKxfvx4pKSnYuHEjFi1aBADw9/dHYmJitf1YW1vLa8bcvn0b48aNQ0BAAGJjY+vcBwD07NkTMTExAIAOHTpgzZo1+Pbbb+VZh/u5uroCAI4dOwZJkjB06FCzOyzBGQAiIjK44OBgZGRkaDw2bNggb//tt9+gVqsxZMgQuUyhUGDAgAE4f/68XNa/f/869xkZGYmioiJs2bIFVlZWde4DuJsA3MvLy0u+sU5NTp8+jbZt29b44b98+XI4OTnJjyNHjmDWrFlaZQ2NMwBERGRwjo6O8Pf31yi7cuWK/LMQAkDtS71X3hq+NvHx8di7dy9+/PFH+cO4rn0A2qsySpJUp9swnz59Wit5uN+sWbMwadIk+fnUqVMxYcIEjWP7rVq1qrUvQ+MMABERNTh/f3/Y2tpq3P9FrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp5rvlaJSqeDv7y8/7O3t4e7urlXW0DgDQEREDc7R0RHPPvssFi1aBJVKhTZt2iAxMRElJSWIjIyscztnz57F008/jRdffBHdunWTV5S1tbWFSqUySB81qaioQHZ2Nq5cuYJWrVoZ9OZNN2/exMWL/7uJW2ZmJjIyMuSx1BcTACIiM9IYV+bT12uvvYaKigo89dRTKCoqQv/+/bFv3z6t27vXJC0tDSUlJYiPj0d8fLxcHhgYiEOHDhmkj5rMnz8fM2bMQOfOnVFYWGjQBCAtLQ3BwcHy88q72IaHh9dpHYHaSKLyIIkFKywshLOzMwoKCnS6HfC
ePXswatQos7mjl65LiEqiHH63fkGmfUcsCDXMdFljYo7voa44xsatpv89t2/fRmZmJvz8/GBnZ2eiCMnc6PJ7w3MAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQFwJ0ER0XZSnUlNaBYyIiEyHMwBEREQWiDMARETm5GBCw/YXHN2w/dVBUFAQevfujVWrVpk6FLPGGQAiIjKoiIgIjBs3Tqv80KFDkCQJN27cqFf727Ztw6uvvlqvNsxBQkICHnjgATRr1gzu7u4YN24cLly4YLD2mQAQEZFZKCsrAwCoVCo0a9bMxNHoLygoqE5380tNTcWcOXNw/PhxpKSk4M6dOwgNDUVxcbFB4mACQEREJlFaWor58+fD3d0ddnZ2GDp0KE6cOCFvDwoKwty5cxEVFQU3NzeEhITI5QsWLAAAZGVlQZIkrUdQUFCd+qhsb/78+Vi8eDFUKhU8PT0RGxtbY+xjx46tsl9JkrBz506DvD579+5FREQEunXrhl69eiEpKQmXLl1Cenq6QdpnAkBERCaxePFifPnll9i0aRNOnjwJf39/jBw5Evn5+XKdTZs2wcbGBv/+97/x/vvva7Xh4+ODnJwc+XHq1Cm4urpi+PDhde6jsh9HR0f88MMPSExMxLJly5CSklJt7ElJScjJycGvv/4KANizZ48cw6hRowzx8mgpKCgAcHcGxBB4EiARERnc7t274eTkpFFWXl4u/1xcXIy1a9ciOTkZYWFhAID169cjJSUFGzduxKJFiwAA/v7+SExMrLYfa2treHp6AgBu376NcePGISAgALGxsXXuAwB69uyJmJgYAECHDh2wZs0afPvtt/Ksw/1cXV0BAMeOHYMkSRg6dKhRD0sIIRAVFYWhQ4eie/fuBmmTMwBERGRwwcHByMjI0Hhs2LBB3v7bb79BrVZjyJAhcplCocCAAQNw/vx5uax///517jMyMhJFRUXYsmULrKys6twHcDcBuJeXlxfy8vJq7fP06dNo27ZtjR/+y5cvh5OTk/w4cuQIZs2apVVWk7lz5+L06dP45JNPao2prjgDQEREBufo6Ah/f3+NsitXrsg/CyEAAJIkadQRQmiUOTo61qm/+Ph47N27Fz/++KP8YVzXPoC7icG9JElCRUVFrf2ePn1aK3m436xZszBp0iT5+dSpUzFhwgSMHz9eLmvVqlW1+8+bNw87d+7E4cOH0bp161pjqivOABARUYPz9/eHra0tvv/+e7lMrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp0411lGpVPD395cf9vb2cHd31yq7nxACc+fOxbZt2/Ddd9/Bz8/PIDFX4gwAERE1OEdHRzz77LNYtGgRVCoV2rRpg8TERJSUlCAyMrLO7Zw9exZPP/00XnzxRXTr1g25ubkAAFtbW6hUKoP0UZOKigpkZ2fjypUraNWqldbMQn3MmTMHW7ZswVdffYVmzZrJY3N2dq4yYdAVEwAiInPSCFfm09drr72GiooKPPXUUygqKkL//v2xb98+tGjRos5tpKWloaSkBPHx8YiPj5fLAwMDcejQIYP0UZP58+djxowZ6Ny5MwoLCw2aAKxduxYA5EsaKyUlJSEiIqLe7TMBICIig6pukZugoCD5uDwA2NnZ4e2338bbb79dZf1Dhw7VWh4REVHjh2FtfVTXz44dO6qtf6+wsDBcvny5TnVr6q8q975WxsBzAIiIiCwQEwAiIiILxEMAZHQrU37Ra7/nQzoaOBIiIqrEGQAiIiILxBkAavyMdf9zYQWgs3HaJjIQY58IRk1LXRYvqsQEgIioEVIoFJAkCX/99Rdatmxp0MvLqOkRQqCsrAx//fUXrKysYGtrW+s+TACIiBoha2trtG7dGleuXEFWVpapwyEz4eDggDZt2sDKqvYj/EwAiIgaKScnJ3To0AFqtdrUoZAZsLa2ho2NTZ1
ni5gAEBE1YtbW1rC2tjZ1GNQE8SoAIiIiC8QEgIiIyAKZNAE4fPgwxowZA29vb0iSpLX2ckREBCRJ0ngMGjRIo05paSnmzZsHNzc3ODo6YuzYsRr3nCYiIiJtJk0AiouL0atXL6xZs6baOg8//DBycnLkx549ezS2L1iwANu3b8fWrVvx/fff4+bNmxg9ejTKy8uNHT4REZHZMulJgGFhYQgLC6uxjlKphKenZ5XbCgoKsHHjRmzevBkPPfQQAOCjjz6Cj48PDhw4gJEjR1a5X2lpKUpLS+XnhYWFAAC1Wl3ns20r6+l7dq4k9EtQ6nM2sK59VtaXRHmD9ltJ7lMYJ09V/3+7TfkM6/r+npoDcx6jOcZMTYckGskyU5IkYfv27Rg3bpxcFhERgR07dsDW1hYuLi4IDAzEP//5T7i7uwMAvvvuO4wYMQL5+fka93bu1asXxo0bh7i4uCr7io2NrXLbli1b4ODgYNiBERFVo6SkBFOmTEFBQQGaN29u6nDIwjTqywDDwsIwceJE+Pr6IjMzEy+//DIefPBBpKenQ6lUIjc3F7a2thof/gDg4eGB3NzcatuNjo5GVFSU/LywsBA+Pj4IDQ2t8x+hWq1GSkoKQkJCoFAodB7bOwcv6rwPAMwJ9tdthyMr5B9/zMrXadcKyRr5qoFQ5f+AQb7O1VccFlX9NhhgrPeMwZDUwgopNzvq/R6ag/r+npoDcx5j5ewjkSk06gTg8ccfl3/u3r07+vfvD19fX3z99dcYP358tfsJIWpcCEGpVEKpVGqVKxQKnf+B6LMPAAhJv+t6de5L+t+60FZ6TsVbiXIopBrWl64lpnqPtaa+DUDf99CccIyNk7nFS02LWV0G6OXlBV9fX/z6668AAE9PT5SVleH69esa9fLy8uDh4WGKEImIiMyCWSUA165dw+XLl+Hl5QUA6NevHxQKBVJSUuQ6OTk5OHv2LAYPHmyqMImIiBo9kx4CuHnzJi5e/N/x4czMTGRkZEClUkGlUiE2NhYTJkyAl5cXsrKysGTJEri5ueGxxx4DADg7OyMyMhILFy6Eq6srVCoVXnjhBfTo0UO+KoCoVkdWGP0wA4Kjjds+EZGOTJoApKWlITg4WH5eeWJeeHg41q5dizNnzuDDDz/EjRs34OXlheDgYHz66ado1qyZvM/KlSthY2ODSZMm4datWxgxYgSSk5O5djYREVENTJoABAUFoaarEPft21drG3Z2dli9ejVWr15tyNCIiIiaNLM6B4CIiIgMgwkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIEa9c2AyEwcTKhx86BL1/Rs11W//YiIqFZ6JQDJycmYNGkSHBwcDB0PUdNUS5JUb1xqmIh0pNchgOjoaHh6eiIyMhJHjx41dExERERkZHrNAFy5cgVff/01kpOTERwcDD8/P0ybNg3h4eHw9PQ0dIx0j5Upv+hUX+/pdyIiatL0mgGwtrbG2LFjsW3bNly+fBkzZszAxx9/jDZt2mDs2LH46quvUFFh5LurERERkd7qfRWAu7s7hgwZgoCAAFhZWeHMmTOIiIhA+/btcejQIQOESERERIamdwLw559/4o033kC3bt0QFBSEwsJC7N69G5mZmfjjjz8wfvx4hIeHGzJWIiIiMhC9zgEYM2YM9u3bh44dO2L69Ol4+umnoVKp5O329vZYuHAhVq5cabBAiYiIyHD0SgDc3d2RmpqKgICAaut4eXkhMzNT78CIiIjIePQ6BBAYGIi+fftqlZeVleHDDz8EAEiSBF9f3/pFR0REREahVwIwbdo0FBQUaJUXFRVh2rRp9Q6KiIiIjEuvBEAIAUmStMqvXLkCZ2fnegdFRERExqXTOQB9+vSBJEmQJAkjRoyAjc3/di8vL0dmZiYefvhhgwdJREREhqVTAjBu3DgAQEZGBkaOHAknJyd5m62tLdq2bYsJEyYYNEAiIiIyPJ0SgJiYGABA27Z
t8fjjj8POzs4oQREREZFx6XUZIBf4ISIiMm91TgBUKhV++eUXuLm5oUWLFlWeBFgpPz/fIMERERGRcdQ5AVi5ciWaNWsm/1xTAkBERESNW50TgHun/SMiIowRCxERETWQOicAhYWFdW60efPmegVDREREDaPOCYCLi0ut0/6VCwSVl5fXOzAiIiIynjonAAcPHjRmHERERNSA6pwABAYGGjMOIqqPgwlVlwsrAJ2BIysAqaJ+fQRH129/ImpU6pwAnD59Gt27d4eVlRVOnz5dY92ePXvWOzAiIiIynjonAL1790Zubi7c3d3Ru3dvSJIEIYRWPZ4DQERE1PjVOQHIzMxEy5Yt5Z/Jshz7/ZqpQyAiIgOqcwLg6+tb5c9ERERkfvS6FwAAXLhwAatXr8b58+chSRI6d+6MefPmoVOnToaMj4iIiIzASp+dvvjiC3Tv3h3p6eno1asXevbsiZMnT6J79+74/PPPDR0jERERGZheMwCLFy9GdHQ0li1bplEeExODF198ERMnTjRIcERERGQces0A5Obm4umnn9Yqf/LJJ5Gbm1vvoIiIiMi49EoAgoKCcOTIEa3y77//HsOGDat3UERERGRcdT4EsHPnTvnnsWPH4sUXX0R6ejoGDRoEADh+/Dg+//xzxMXFGT5KIiIiMqg6JwDjxo3TKnv33Xfx7rvvapTNmTMHs2bNqndgREREZDx1TgAqKuq5jjgRERE1GnqdA2Aohw8fxpgxY+Dt7Q1JkrBjxw6N7UIIxMbGwtvbG/b29ggKCsK5c+c06pSWlmLevHlwc3ODo6Mjxo4diytXrjTgKIiIiMyP3gsBFRcXIzU1FZcuXUJZWZnGtvnz59e5jV69emHatGmYMGGC1vbExESsWLECycnJ6NixI+Lj4xESEoILFy6gWbNmAIAFCxZg165d2Lp1K1xdXbFw4UKMHj0a6enpsLa21nd4RERETZpeCcCpU6cwatQolJSUoLi4GCqVClevXoWDgwPc3d3rnACEhYUhLCysym1CCKxatQpLly7F+PHjAQCbNm2Ch4cHtmzZgpkzZ6KgoAAbN27E5s2b8dBDDwEAPvroI/j4+ODAgQMYOXKkPsMjIiJq8vRKAJ5//nmMGTMGa9euhYuLC44fPw6FQoEnn3wSzz33nEECy8zMRG5uLkJDQ+UypVKJwMBAHD16FDNnzkR6ejrUarVGHW9vb3Tv3h1Hjx6tNgEoLS1FaWmp/LywsBAAoFaroVar6xRfZb261r+fJBrmjokVkv6zIJX71qeN+lAL4x6hqmzf2P2YkkHHqOfvurHV92/RlMwxZmo69EoAMjIy8P7778Pa2hrW1tYoLS1Fu3btkJiYiPDwcPkbe31ULijk4eGhUe7h4YHs7Gy5jq2tLVq0aKFVp6YFiRISEqq8XHH//v1wcHDQKc6UlBSd6lfy02sv3V11HVzvNvJVAw0Qie72FDVMPyk3OzZMRyZkkDHu2VP/NoxI379FUyopKTF1CGTB9EoAFAoFJEkCcPfD9tKlS+jSpQucnZ1x6dIlgwZY2U8lIYRW2f1qqxMdHY2oqCj5eWFhIXx8fBAaGormzZvXKS61Wo2UlBSEhIRAoVDUaZ97vXPwos776OOBK8l671shWSNfNRCq/B9g1UAzFoYyoK2q1jpqYYWUmx0R4vQLFFLTvMrFoGMcFlV7HROo79+iKVXOPhKZgl4JQJ8+fZCWloaOHTsiODgYr7zyCq5evYrNmzejR48eBgnM09MTwN1v+V5eXnJ5Xl6ePCvg6emJsrIyXL9+XWMWIC8vD4MHV//NV6lUQqlUapUrFAqd/4Hosw8AiAaaVjfEB7eVKDe7BECXDzuFVNFkE4BKBhljI/9w1fdv0ZTMLV5qWvQ6MLh8+XL5Q/nVV1+Fq6srnn32WeTl5WHdunUGCczPzw+enp4a03plZWVITU2VP9z79esHhUKhUScnJwdnz56tMQEgIiKydHrNAPTv31/+uWXLltij57H
Bmzdv4uLF/02FZ2ZmIiMjAyqVCm3atMGCBQuwfPlydOjQAR06dMDy5cvh4OCAKVOmAACcnZ0RGRmJhQsXwtXVFSqVCi+88AJ69OghXxVARERE2vReBwC4O9V+4cIFSJKETp06oWXLljrtn5aWhuDgYPl55XH58PBwJCcnY/Hixbh16xZmz56N69evY+DAgdi/f7+8BgAArFy5EjY2Npg0aRJu3bqFESNGIDk5mWsAEBER1UCvBKCwsBBz5szB1q1bUV5+99iwtbU1Hn/8cbzzzjtwdnauUztBQUEQQlS7XZIkxMbGIjY2tto6dnZ2WL16NVavXq3TGIiIiCyZXucAPPPMM/jhhx+we/du3LhxAwUFBdi9ezfS0tIwffp0Q8dIREREBqbXDMDXX3+Nffv2YejQoXLZyJEjsX79ejz88MMGC46IiIiMQ68ZAFdX1yqn+Z2dnbUW5SEiIqLGR68E4B//+AeioqKQk5Mjl+Xm5mLRokV4+eWXDRYcERERGUedDwH06dNHY3W9X3/9Fb6+vmjTpg0A4NKlS1Aqlfjrr78wc+ZMw0dKREREBlPnBGDcuHFGDIOIiIgaUp0TgJiYGGPGQURERA2oXgsBpaen4/z585AkCV27dkWfPn0MFRcREREZkV4JQF5eHp544gkcOnQILi4uEEKgoKAAwcHB2Lp1q84rAhIREVHD0usqgHnz5qGwsBDnzp1Dfn4+rl+/jrNnz6KwsBDz5883dIxERERkYHrNAOzduxcHDhxAly5d5LKuXbvinXfeQWhoqMGCIyIiIuPQawagoqKiyvtYKxQKVFQ07fuqExERNQV6JQAPPvggnnvuOfzxxx9y2X//+188//zzGDFihMGCIyIiIuPQKwFYs2YNioqK0LZtW7Rv3x7+/v7w8/NDUVER78pHRERkBvQ6B8DHxwcnT55ESkoKfv75Zwgh0LVrVzz00EOGjo+IiIiMQOcE4M6dO7Czs0NGRgZCQkIQEhJijLiIiIjIiHQ+BGBjYwNfX1+Ul5cbIx4iIiJqAHrfDTA6Ohr5+fmGjoeIiIgagF7nALz99tu4ePEivL294evrC0dHR43tJ0+eNEhwREREZBx6JQDjxo2DJEkQQhg6HiIiImoAOiUAJSUlWLRoEXbs2AG1Wo0RI0Zg9erVcHNzM1Z8REREZAQ6nQMQExOD5ORkPPLII5g8eTIOHDiAZ5991lixERERkZHoNAOwbds2bNy4EU888QQAYOrUqRgyZAjKy8thbW1tlACJiIjI8HSaAbh8+TKGDRsmPx8wYABsbGw0lgQmIiKixk+nBKC8vBy2trYaZTY2Nrhz545BgyIiIiLj0ukQgBACERERUCqVctnt27cxa9YsjUsBt23bZrgIm7hBl9aZOgQiIrJAOiUA4eHhWmVPPvmkwYIhIiKihqFTApCUlGSsOIiIiKgB6bUUMBEREZk3JgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIF0uhkQEVmwgwnGbT842rjtE5EGzgAQERFZICYAREREFqhRJwCxsbGQJEnj4enpKW8XQiA2Nhbe3t6wt7dHUFAQzp07Z8KIiYiIzEOjTgAAoFu3bsjJyZEfZ86ckbclJiZixYoVWLNmDU6cOAFPT0+EhISgqKjIhBETERE1fo3+JEAbGxuNb/2VhBBYtWoVli5divHjxwMANm3aBA8PD2zZsgUzZ86sts3S0lKUlpbKzwsLCwEAarUaarW6TnFV1qtr/ftJohwAUCFZ67V/Q6iMrTHHWB21qD23raxTl7rmyqzGqOffUn3/Fk3JHGOmpkMSQghTB1Gd2NhYvP7663B2doZSqcTAgQOxfPlytGvXDr///jvat2+PkydPok+fPvI+jz76KFxcXLBp06Ya242Li9Mq37JlCxwcHIwyFiKi+5WUlGDKlCkoKChA8+bNTR0
OWZhGnQB88803KCkpQceOHfHnn38iPj4eP//8M86dO4cLFy5gyJAh+O9//wtvb295nxkzZiA7Oxv79u2rtt2qZgB8fHxw9erVOv8RqtVqpKSkICQkBAqFQuexvXPwIgDggSvJOu/bUCoka+SrBkKV/wOs/n/GwlwMaKuqtY5aWCHlZkeEOP0ChVTRAFE1PLMa47AovXar79+iKRUWFsLNzY0JAJlEoz4EEBYWJv/co0cPBAQEoH379ti0aRMGDRoEAJAkSWMfIYRW2f2USiWUSqVWuUKh0PkfiD77AID4/2l1c/hgtRLlZhHnvXT5sFNIFY3/w7GezGKM9fzw1vdv0ZTMLV5qWszgwOD/ODo6okePHvj111/l8wJyc3M16uTl5cHDw8MU4REREZkNs0oASktLcf78eXh5ecHPzw+enp5ISUmRt5eVlSE1NRWDBw82YZRERESNX6M+BPDCCy9gzJgxaNOmDfLy8hAfH4/CwkKEh4dDkiQsWLAAy5cvR4cOHdChQwcsX74cDg4OmDJliqlDJyIiatQadQJw5coVTJ48GVevXkXLli0xaNAgHD9+HL6+vgCAxYsX49atW5g9ezauX7+OgQMHYv/+/WjWrJmJIyciImrcGnUCsHXr1hq3S5KE2NhYxMbGNkxARERETYRZnQNAREREhtGoZwDMwTsHL8qX9FHjcez3a7XWqZCsAVfgx6x8jcscA9q5GjM0IqJGgTMAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFsjG1AEQNTbHfr+m974B7VwNGAkRkfFwBoCIiMgCcQaAiBqHgwn67SesAHQGjqwApIrq6wVH69c+URPFGQAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskA2pg6AiKhBHEwwfh/B0cbvg8hAOANARERkgTgDQGRAx36/ptd+Ae1cDRwJEVHNOANARERkgZgAEBERWaAmkwC8++678PPzg52dHfr164cjR46YOiQiIqJGq0kkAJ9++ikWLFiApUuX4tSpUxg2bBjCwsJw6dIlU4dGRETUKDWJBGDFihWIjIzEM888gy5dumDVqlXw8fHB2rVrTR0aERFRo2T2VwGUlZUhPT0dL730kkZ5aGgojh49WuU+paWlKC0tlZ8XFBQAAPLz86FWq+vUr1qtRklJCUrLCyAkaz2jB4pu39F7X2OrkARKSkpQdPsOrES5qcMxuMY0vms3y4zSrlpYoaSkBNekMiikCqP0YWqNaozXdLsKpKioCAAghDBGNEQ1MvsE4OrVqygvL4eHh4dGuYeHB3Jzc6vcJyEhAXFxcVrlfn5+RomRiCxFrF57FRUVwdnZ2bChENXC7BOASpIkaTwXQmiVVYqOjkZUVJT8vKKiAvn5+XB1da12n/sVFhbCx8cHly9fRvPmzfUPvBFr6mNs6uMDOMbGTgiBoqIieHt7mzoUskBmnwC4ubnB2tpa69t+Xl6e1qxAJaVSCaVSqVHm4uKiV//Nmzc3u386umrqY2zq4wM4xsaM3/zJVMz+JEBbW1v069cPKSkpGuUpKSkYPHiwiaIiIiJq3Mx+BgAAoqKi8NRTT6F///4ICAjAunXrcOnSJcyaNcvUoRERETVKTSIBePzxx3Ht2jUsW7YMOTk56N69O/bs2QNfX1+j9alUKhETE6N1KKEpaepjbOrjAzhGIqqeJHj9CRERkcUx+3MAiIiISHdMAIiIiCwQEwAiIiILxASAiIjIAjEB0ENTuvXw4cOHMWbMGHh7e0OSJOzYsUNjuxACsbGx8Pb2hr2
9PYKCgnDu3DnTBKuHhIQEPPDAA2jWrBnc3d0xbtw4XLhwQaOOuY9x7dq16Nmzp7wQTkBAAL755ht5u7mP734JCQmQJAkLFiyQy5raGIkaAhMAHTW1Ww8XFxejV69eWLNmTZXbExMTsWLFCqxZswYnTpyAp6cnQkJC5JuYNHapqamYM2cOjh8/jpSUFNy5cwehoaEoLi6W65j7GFu3bo3XXnsNaWlpSEtLw4MPPohHH31U/gA09/Hd68SJE1i3bh169uypUd6UxkjUYATpZMCAAWLWrFkaZZ07dxYvvfSSiSIyHABi+/bt8vOKigrh6ekpXnvtNbns9u3bwtnZWbz33nsmiLD+8vLyBACRmpoqhGiaYxRCiBYtWogNGzY0qfEVFRWJDh06iJSUFBEYGCiee+45IUTTfQ+JjI0zADqovPVwaGioRnlNtx42Z5mZmcjNzdUYr1KpRGBgoNmOt/LWzyqVCkDTG2N5eTm2bt2K4uJiBAQENKnxzZkzB4888ggeeughjfKmNEaihtQkVgJsKPrceticVY6pqvFmZ2ebIqR6EUIgKioKQ4cORffu3QE0nTGeOXMGAQEBuH37NpycnLB9+3Z07dpV/gA09/Ft3boVJ0+exIkTJ7S2NZX3kKihMQHQgy63Hm4Kmsp4586di9OnT+P777/X2mbuY+zUqRMyMjJw48YNfPnllwgPD0dqaqq83ZzHd/nyZTz33HPYv38/7Ozsqq1nzmMkMgUeAtCBPrceNmeenp4A0CTGO2/ePOzcuRMHDx5E69at5fKmMkZbW1v4+/ujf//+SEhIQK9evfDWW281ifGlp6cjLy8P/fr1g42NDWxsbJCamoq3334bNjY28jjMeYxEpsAEQAeWduthPz8/eHp6aoy3rKwMqampZjNeIQTmzp2Lbdu24bvvvoOfn5/G9qYwxqoIIVBaWtokxjdixAicOXMGGRkZ8qN///6YOnUqMjIy0K5dO7MfI5FJmO78Q/O0detWoVAoxMaNG8VPP/0kFixYIBwdHUVWVpapQ9NLUVGROHXqlDh16pQAIFasWCFOnTolsrOzhRBCvPbaa8LZ2Vls27ZNnDlzRkyePFl4eXmJwsJCE0deN88++6xwdnYWhw4dEjk5OfKjpKRErmPuY4yOjhaHDx8WmZmZ4vTp02LJkiXCyspK7N+/Xwhh/uOryr1XAQjRNMdIZGxMAPTwzjvvCF9fX2Frayv69u0rX1Jmjg4ePCgAaD3Cw8OFEHcvsYqJiRGenp5CqVSK4cOHizNnzpg2aB1UNTYAIikpSa5j7mP8+9//Lv8+tmzZUowYMUL+8BfC/MdXlfsTgKY4RiJj4+2AiYiILBDPASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAygKysLEiShIyMDFOHQkRUJ0wAqEkRQuChhx7CyJEjtba9++67cHZ2xqVLl0wQGRFR48IEgJoUSZKQlJSEH374Ae+//75cnpmZiRdffBFvvfUW2rRpY8IIiYgaByYA1OT4+PjgrbfewgsvvIDMzEwIIRAZGYkRI0YgIiJCq/7kyZPxxBNPaJSp1Wq4ubkhKSkJALB3714MHToULi4ucHV1xejRo/Hbb79VG0NycjJcXFw0ynbs2AFJkjTKdu3ahX79+sHOzg7t2rVDXFwc7ty5I2+PjY1FmzZtoFQq4e3tjfnz5+v4ahARVc3G1AEQGUN4eDi2b9+OadOmYcKECTh79izOnj1bZd2pU6di0qRJuHnzJpycnAAA+/btQ3FxMSZMmAAAKC4uRlRUFHr06IHi4mK88soreOyxx5CRkQErK/3y6H379uHJJ5/E22+/jWHDhuG3337DjBkzAAAxMTH44osvsHLlSmzduhXdunVDbm4u/vOf/+jVFxGRFtPejJDIeP7880/RsmVLYWVlJbZt21ZtvbKyMuHm5iY+/PBDuWzy5Mli4sSJ1e6Tl5c
nAMi3nM3MzBQAxKlTp4QQQiQlJQlnZ2eNfbZv3y7u/ZMbNmyYWL58uUadzZs3Cy8vLyGEEG+++abo2LGjKCsrq9N4iYh0wUMA1GS5u7tjxowZ6NKlCx577LFq6ykUCkycOBEff/wxgLvf9r/66itMnTpVrvPbb79hypQpaNeuHZo3bw4/Pz8AqNcJhenp6Vi2bBmcnJzkx/Tp05GTk4OSkhJMnDgRt27dQrt27TB9+nRs375d4/AAEVF98BAANWk2Njawsan913zq1KkIDAxEXl4eUlJSYGdnh7CwMHn7mDFj4OPjg/Xr18Pb2xsVFRXo3r07ysrKqmzPysoKQgiNMrVarfG8oqICcXFxGD9+vNb+dnZ28PHxwYULF5CSkoIDBw5g9uzZeP3115GamgqFQlGX4RMRVYsJABGAwYMHw8fHB59++im++eYbTJw4Eba2tgCAa9eu4fz583j//fcxbNgwAMD3339fY3stW7ZEUVERiouL4ejoCABaawT07dsXFy5cgL+/f7Xt2NvbY+zYsRg7dizmzJmDzp0748yZM+jbt289RktExASACMDdywenTJmC9957D7/88gsOHjwob2vRogVcXV2xbt06eHl54dKlS3jppZdqbG/gwIFwcHDAkiVLMG/ePPz4449ITk7WqPPKK69g9OjR8PHxwcSJE2FlZYXTp0/jzJkziI+PR3JyMsrLy+W2Nm/eDHt7e/j6+hrjJSAiC8NzAIj+39SpU/HTTz+hVatWGDJkiFxuZWWFrVu3Ij09Hd27d8fzzz+P119/vca2VCoVPvroI+zZswc9evTAJ598gtjYWI06I0eOxO7du5GSkoIHHngAgwYNwooVK+QPeBcXF6xfvx5DhgxBz5498e2332LXrl1wdXU1+NiJyPJI4v4DlURERNTkcQaAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCzQ/wGGQ4CjYVIb5gAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABXk0lEQVR4nO3deXxMV/8H8M9MMjMy2cg+WWSziy20YimxJERRtbZaRFNPn1LlQdVSBC2lpbqg9RShKFWPX7WUhIot0RJJLVEiiUhIkAiTPZOZ8/tjzE1GFjPJbJl836/XvJh779z5nizzyT33nHt5jDEGQgghhJgVvrELIIQQQojuUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwDdQVFQUeDwe97C0tISnpyemTp2Ku3fvctvFxsZy20RFRdW4r4EDB4LH48HHx0dtuY+PD3g8HoKDg2t83c6dO7l9x8bG1llv1Tp4PB4sLCzg6uqKcePG4fr161q0/Pl8fHwwfPhwne6Tx+Phvffee+52qnZW/XpERkaCx+OpbRccHFzr17U2ycnJiIyMxO3bt6utCw8Pr/b9qy9VvaqHUCiEr68vZs2ahcePH+vkPZ6Hx+MhMjKSe676ea+p7XU5cuSI2n6q8vHxQXh4eL1rJITUjAJeR7Zv3474+HjExMRg2rRp+PHHH/HSSy+hqKhIbTtbW1ts3bq12uvT09MRGxsLOzu7Gvdva2uL06dPIzU1tdq6bdu21fq62qxatQrx8fE4efIkPvzwQ8TExKBPnz5qf5Q0ZoGBgYiPj0dgYGCd223atAmbNm3Sat/JyclYvnx5jSG3ZMkSHDx4UKv9Pc/Ro0cRHx+Pw4cPY9SoUfj6668RFhYGY1xl+uWXX0Z8fDwkEolWrzty5AiWL19e47qDBw9iyZIluiiPEFIFBbyOBAQEICgoCAMGDMCyZcswf/58pKen4//+7//UtpswYQLOnj2LlJQUteXbtm2Dh4cH+vTpU+P++/btCw8PD2zbtk1teWpqKk6fPo0JEyZoVW/r1q0RFBSEfv36Yc6cOVi/fj3y8/Nr7V0AgOLiYq3ew5js7OwQFBT03D98OnTogA4dOujsff39/dGtWzed7Q8AunfvjqCgIISEhOCLL77Am2++ifPnzyMuLq7W1+jre+Xs7IygoCCIRCKd7bNbt27w9/fX2f4IIUoU8HoSFBQEAMjIyFBbHhISAi8vL7WgVigU2LFjB6ZMmQI+v+ZvCZ/Px+TJk7Fjxw4oFApu+bZt2+Dl5YXBgwfrtF5V9/ClS5cwduxYtGjRgvsQLi0txcKFC+Hr6wuhUAgPDw/MmDGj1m7jgwcPonPnzmjWrBn8/Pzw1Vdfqa0vLS3F3Llz0bVrV9jb28PBwQG9evXCL7/8Umu93333Hdq0aQORSIQOHTpg7969autr6qKvSU1d9Js3b0aXLl1gY2MDW1tbtGvXDosWLQKg7KIeN24cAGDAgAHVTrvU1EWvUCjw9ddfo2vXrrCyskLz5s0RFBSEQ4cO1VlbbZ79XgUHByMgIACnT59G7969IRaL8dZbbwEApFIp5s2bp/a9mj17drWeJalUimnTpsHR0RE2NjYYOnQobt68We29a+uiP3r0KAYNGgR7e3uIxWK0b98eq1ev5r4mGzduBAC1Uw6qfdTURX/nzh28+eabcHFxgUgkQvv27bFu3Tq1n/3bt2+Dx+Ph888/x/r16+Hr6wsbGxv06tUL58+fV9tfWloaXnvtNbi7u0MkEsHV1RWDBg1CUlKS5l94QhoZS2MXYK5u3boFQHnEUxWfz0d4eDi2bt2Kjz/+GBYWFoiOjkZWVhamTp2KWbNm1brPt956C6
tXr8axY8cQFhYGuVyOHTt2ICIiotY/DBpa7+jRo/Haa6/h3//+N4qKisAYw6hRo3DixAksXLgQL730Ei5fvoxly5YhPj4e8fHxakd3SUlJmD17NiIjI+Hm5obdu3dj1qxZKC8vx7x58wAAZWVlePToEebNmwcPDw+Ul5fj+PHjGD16NLZv347Jkyer1XTo0CGcPHkSK1asgLW1NTZt2oTXX38dlpaWGDt2bIO+Dnv37sX06dMxc+ZMfP755+Dz+bh16xaSk5MBKLuoV61ahUWLFmHjxo3cKYC6jkDDw8Oxa9cuREREYMWKFRAKhbh06ZLW57FVavpeZWdn480338T8+fOxatUq8Pl8FBcXo3///sjKysKiRYvQuXNnXLt2DUuXLsWVK1dw/Phx8Hg87nsaFxeHpUuX4oUXXsC5c+cQFhamUT1bt27FtGnT0L9/f3z77bdwcXHBzZs3cfXqVQDK0xZFRUX4+eefER8fz72utm7+hw8fonfv3igvL8fKlSvh4+OD3377DfPmzUNqamq1UyobN25Eu3btsGHDBu79hg0bhvT0dNjb2wMAhg0bBrlcjrVr16Jly5bIzc1FXFycwcYyEGIUjDTI9u3bGQB2/vx5JpPJWEFBAfvtt9+Ys7Mzs7W1ZTk5OYwxxk6ePMkAsP3797O0tDTG4/HYb7/9xhhjbNy4cSw4OJgxxtjLL7/MvL291d7D29ubvfzyy4wxxvr378/Gjh3LGGPs8OHDjMfjsfT0dLZ//34GgJ08ebLOelV17Nu3j8lkMlZcXMxOnz7NWrVqxSwsLNjff//NGGNs2bJlDABbunSp2uuPHj3KALC1a9eqLd+3bx8DwLZs2aJWN4/HY0lJSWrbhoSEMDs7O1ZUVFRjjRUVFUwmk7GIiAjWrVs3tXUAmJWVFfd1VW3frl071qpVq2rtrPr1ULWpqv79+7P+/ftzz9977z3WvHnzGutSqetrPWXKFLXv3+nTpxkAtnjx4jr3WRNVvTk5OUwmk7H8/Hy2a9cuZmVlxby8vFhJSQnXBgDsxIkTaq9fvXo14/P57MKFC2rLf/75ZwaAHTlyhDHG2O+//84AsC+//FJtu08++YQBYMuWLeOWqX7e09PTGWOMFRQUMDs7O9a3b1+mUChqbcuMGTOqfe1VvL292ZQpU7jnCxYsYADYn3/+qbbdu+++y3g8Hrtx4wZjjLH09HQGgHXq1IlVVFRw2/31118MAPvxxx8ZY4zl5uYyAGzDhg211keIOaIueh0JCgqCQCCAra0thg8fDjc3N/z+++9wdXWttq2vry+Cg4Oxbds25OXl4ZdffuG6VJ/nrbfewqFDh5CXl4etW7diwIAB9Rq1PWHCBAgEAojFYvTr1w9yuRw///wzOnfurLbdmDFj1J7/8ccfAFCtS3XcuHGwtrbGiRMn1JZ37NgRXbp0UVs2ceJESKVSXLp0iVu2f/9+9OnTBzY2NrC0tIRAIMDWrVtrHNk/aNAgta+rhYUFJkyYgFu3biErK0vzL0INXnzxRTx+/Bivv/46fvnlF+Tm5jZof7///jsAYMaMGfXeh5ubGwQCAVq0aIE333wTgYGBOHr0KJo1a8Zt06JFCwwcOFDtdb/99hsCAgLQtWtXVFRUcI8hQ4aonb44efIkAOCNN95Qe/3EiROfW1tcXBykUimmT59ebYZCff3xxx/o0KEDXnzxRbXl4eHhYIxxP4MqL7/8MiwsLLjnqp9h1SkMBwcH+Pv747PPPsP69euRmJio1tVPiLmigNeRnTt34sKFC0hMTMS9e/dw+fLlWgfMAUBERAR+/fVXrF+/HlZWVhp3LY8dOxbNmjXDF198gV9//RURERH1qnfNmjW4cOECLl26hDt37iAtLQ2jRo2qtt2z3ah5eXmwtLSs1pXP4/Hg5uaGvLw8teVubm7V9qlaptr2f//7H8aPHw8PDw/s2rUL8fHxuHDhAt566y2UlpbW+vq69llfkyZNwrZt25CRkYExY8
bAxcUFPXv2RExMTL329/DhQ1hYWNRYs6aOHz+OCxcuICkpCbm5uTh79my1gYE1dXffv38fly9fhkAgUHvY2tqCMcb98aL6njo6Oqq9XpOaHz58CADw9PSsb/OqycvLq7E97u7u3Pqqnq1bdYqopKQEgPJn88SJExgyZAjWrl2LwMBAODs74/3330dBQYHO6ibE1NA5eB1p3749evToofH2o0ePxowZM/Dpp59i2rRpsLKy0uh1YrEYr732GlavXg07OzuMHj26XvX6+flpVO+zR2WOjo6oqKjAw4cP1UKeMYacnBy88MILatvn5ORU26dqmeqDedeuXfD19cW+ffvU3q+srKzGmjTZZ0NMnToVU6dORVFREU6fPo1ly5Zh+PDhuHnzJry9vbXal7OzM+RyOXJycrSeWqbSpUsXODk51blNTUfPTk5OsLKyqjbzoup6oPJ7mpeXp/b1q+nr/CzVz0BDe06qcnR0RHZ2drXl9+7dA4Dnfi1q4u3tzU1PvXnzJn766SdERkaivLwc3377bcMKJsRE0RG8kVhZWWHp0qUYMWIE3n33Xa1e++6772LEiBFYunSpWjetIQwaNAiAMpSrOnDgAIqKirj1KteuXcPff/+ttmzPnj2wtbXlBqipLuJSNaRycnJqHUV/4sQJ3L9/n3sul8uxb98++Pv76/RI0traGmFhYVi8eDHKy8tx7do1ANWPEOuiGqi2efNmndWlqeHDhyM1NRWOjo7o0aNHtYfq1M6AAQMAALt371Z7/Z49e577Hr1794a9vT2+/fbbOufla/M1GzRoEJKTk9VO4QCVF3RS1Vtfbdq0wUcffYROnTpVew9CzAkdwRvRnDlzMGfOHK1f17Vr12rz6w0lJCQEQ4YMwYcffgipVIo+ffpwo+i7deuGSZMmqW3v7u6OkSNHIjIyEhKJBLt27UJMTAzWrFkDsVgMQBlE//vf/zB9+nSMHTsWmZmZWLlyJSQSSbXrBQDKI7iBAwdiyZIl3Cj6f/75p9pUufpQ9ab06dMHEokEOTk5WL16Nezt7bneiYCAAADAli1bYGtri2bNmsHX17fG3oOXXnoJkyZNwscff4z79+9j+PDhEIlESExMhFgsxsyZMxtcc21mz56NAwcOoF+/fvjPf/6Dzp07Q6FQ4M6dO4iOjsbcuXPRs2dPhIaGol+/fpg/fz6KiorQo0cPnDt3Dj/88MNz38PGxgbr1q3D22+/jcGDB2PatGlwdXXFrVu38Pfff+Obb74BAHTq1AmA8tRQWFgYLCws0LlzZwiFwmr7/M9//oOdO3fi5ZdfxooVK+Dt7Y3Dhw9j06ZNePfdd9GmTRutvg6XL1/Ge++9h3HjxqF169YQCoX4448/cPnyZSxYsECrfRHSmFDAE63weDz83//9HyIjI7F9+3Z88skncHJywqRJk7Bq1apqF0Dp2rUrpk6dimXLliElJQXu7u5Yv349/vOf/3DbTJ06FQ8ePMC3336Lbdu2wc/PDwsWLEBWVlaNVz8bOXIkOnbsiI8++gh37tyBv78/du/erfXFfmry0ksvISoqCj/99BPy8/Ph5OSEvn37YufOnVx3tK+vLzZs2IAvv/wSwcHBkMvl2L59e62XW42KikJgYCC2bt2KqKgoWFlZoUOHDtzcen2xtrbGmTNn8Omnn2LLli1IT0+HlZUVWrZsicGDB3NH8Hw+H4cOHcKcOXOwdu1alJeXo0+fPjhy5AjatWv33PeJiIiAu7s71qxZg7fffhuMMfj4+GDKlCncNhMnTsS5c+ewadMmrFixAowxpKen1zhA1NnZGXFxcVi4cCEWLlwIqVQKPz8/rF27tl5/ELu5ucHf3x+bNm1CZmYmeDwe/Pz8sG7dOr3+gUWIsfFYXf1qhBBCCGmU6Bw8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZsjsp8kpFArcu3cPtra2OrtWNiGEPA9jDAUFBXB3d2/w3R4JqQ+zD/
h79+7By8vL2GUQQpqozMxMnV5hkRBNmX3A29raAlD+ktnZ2Wn0GplMhujoaISGhkIgEOizPIOg9pg2c2sPYH5tqk97pFIpvLy8uM8gQgzN7ANe1S1vZ2enVcCLxWLY2dmZzYcTtcd0mVt7APNrU0PaQ6cGibHQiSFCCCHEDFHAE0IIIWaIAp4QQggxQ2Z/Dp4QQkyZXC6HTCYzdhmkkRAIBLCwsNBoWwp4QggxAsYYcnJy8PjxY2OXQhqZ5s2bw83N7bkDOCngCSHECFTh7uLiArFYTKPtyXMxxlBcXIwHDx4AACQSSZ3bU8ATQoiByeVyLtwdHR2NXQ5pRKysrAAADx48gIuLS53d9TTIjhBC6sAYUFys232qzrmLxWLd7pg0Caqfm+eN3aAjeEIIqUFJCZCbCzx6BAiF+nkP6pYn9aHpzw0FPCGEPKVQKAM9NxcoKqpcrq+AJ0SfKOAJIU1eUVHl0bpCYexqCNENOgdPCGmSKiqA+/eBa9eAf/5RBjyFu+kIDg7G7Nmzuec+Pj7YsGGDXt8zNjYWPB4PPB4Po0aN0ut71Ub1/s2bN2/wvijgCSFNilQKpKUBly8DWVlAaamxKyKauHDhAv71r38Z5L1u3LiBqKgorV4THh7OhbPqERQUpLZNWVkZZs6cCScnJ1hbW2PkyJHIyspS2yY7O1tnf8hQFz0hxOyVlwN5ecqj9PJyY1dD6sPZ2dlg7+Xi4lKvI+ihQ4di+/bt3HPhM4M3Zs+ejV9//RV79+6Fo6Mj5s6di+HDhyMhIYGb7ubm5gZ7e/sG1a9CR/CEELPEGJCfD6SkAFeuAPfumW64M8ZQVFRklAdjTOM6g4ODMXPmTMyePRstWrSAq6srtmzZgqKiIkydOhW2trbw9/fH77//rva65ORkDBs2DDY2NnB1dcWkSZOQm5vLrS8qKsLkyZNhY2MDiUSCdevWVXvvZ7vo169fj06dOsHa2hpeXl6YPn06CgsLufVRUVFo3rw5jh07hvbt28PGxgZDhw5Fdna2Ft+Z2ut79hQCAIhEIri5uXEPBwcHbt2TJ0+wdetWrFu3DoMHD0a3bt2wa9cuXLlyBcePH9e6Jk1QwBNCzEppqbLr/fJlZVe8VGrsip6vuLgYNjY2RnkUaznJf8eOHXBycsJff/2FmTNn4t1338W4cePQu3dvXLp0CUOGDMGkSZO4/WZnZ6N///7o2rUrLl68iKNHj+L+/fsYP348t88PPvgAJ0+exMGDBxEdHY3Y2FgkJCTUWQefz8dXX32Fq1evYseOHfjjjz8wf/78al/Xzz//HD/88ANOnz6NO3fuYN68eVq1V5v6YmNj4eLigjZt2mDatGncFecAICEhATKZDKGhodwyd3d3BAQEIC4uTuuaNEFd9ISQRq+26W1E97p06YKPPvoIALBw4UJ8+umncHJywrRp0wAAS5cuxebNm3H58mUEBQVh8+bNCAwMxKpVq7h9bNu2DV5eXrh58ybc3d2xdetW7Ny5EyEhIQCUf0R4enrWWUfVo2dfX1+sXLkS7777LjZt2sQtl8lk+Pbbb+Hv7w8AeO+997BixQqt2ltYWKhRfWFhYRg3bhy8vb2Rnp6OJUuWYODAgUhISIBIJEJOTg6EQiFatGih9jpXV1fk5ORoVZOmKOAJIY2Wanpbfj4glxu7mvoTi8Vq3cuGfm9tdO7cmfu/hYUFHB0d0alTJ26Zq6srAHBHrwkJCTh58iRsbGyq7Ss1NRUlJSUoLy9Hr169uOUODg5o27ZtnXWcPHkSq1atQnJyMqRSKSoqKlBaWoqioiJYW1tzbVOFO6C8dnvVo2pNpKamalTfhAkTuP8HBASgR48e8Pb2xuHDhzF69Oha988Y09sFj4zaRb9582Z07twZdnZ2sLOzQ69evdTO3TDGEBkZCXd3d1hZWSE4OBjXrl0zYsWEEGOrqAAePACSkyuntzXmcAeUU6Osra2N8tA2XAQCQbXaqy5T7U/xdM6hQqHAiB
EjkJSUpPZISUlBv379tBoDoJKRkYFhw4YhICAABw4cQEJCAjZu3AhA/fKtNdWq7fvVpz5A+ceEt7c3UlJSACgHz5WXlyM/P19tuwcPHnB/FOmaUQPe09MTn376KS5evIiLFy9i4MCBeOWVV7gQX7t2LdavX49vvvkGFy5cgJubG0JCQlBQUGDMsgkhRlB1eltmpvJSssT0BQYG4tq1a/Dx8UGrVq3UHtbW1mjVqhUEAgHOnz/PvSY/Px83b96sdZ8XL15ERUUF1q1bh6CgILRp0wb37t3TS/31qQ8A8vLykJmZyd3xrXv37hAIBIiJieG2yc7OxtWrV9G7d2+91G7UgB8xYgSGDRuGNm3aoE2bNvjkk09gY2OD8+fPgzGGDRs2YPHixRg9ejQCAgKwY8cOFBcXY8+ePcYsmxBiIOXlQHY2cPWqcjR8fr5ydDxpPGbMmIFHjx7h9ddfx19//YW0tDRER0fjrbfeglwuh42NDSIiIvDBBx/gxIkTuHr1KsLDw8Hn1x5P/v7+qKiowNdff420tDT88MMP+Pbbb/VSvyb1FRYWYt68eYiPj8ft27cRGxuLESNGwMnJCa+++ioAwN7eHhEREZg7dy5OnDiBxMREvPnmm+jUqRMGDx6sl9pN5hy8XC7H/v37UVRUhF69eiE9PR05OTlqIw5FIhH69++PuLg4vPPOOzXup6ysDGVlZdxz6dMhtDKZ7Ll33lFRbafp9qaO2mPazK09QMPaxJjyaD0vz3RGwMvl2rfHnL6fDeHu7o5z587hww8/xJAhQ1BWVgZvb28MHTqUC8nPPvsMhYWFGDlyJGxtbTF37lw8efKk1n127doV69evx5o1a7Bw4UL069cPq1evxuTJk/XShufVZ2FhgStXrmDnzp14/PgxJBIJBgwYgH379sHW1pbb7osvvoClpSXGjx+PkpISDBo0CFFRUXXe8rUheKy+Jxh05MqVK+jVqxdKS0thY2ODPXv2YNiwYYiLi0OfPn1w9+5duLu7c9v/61//QkZGBo4dO1bj/iIjI7F8+fJqy/fs2UO3ZiSEGExxcTEmTpyIJ0+ewM7OTm1daWkp0tPT4evri2bNmhmpQvKs2NhYDBgwAPn5+c+90E1wcDC6du2ql8vnRkVFYfbs2Xj8+HGN6zX9+TH6EXzbtm2RlJSEx48f48CBA5gyZQpOnTrFrX92AMjzRhwuXLgQc+bM4Z5LpVJ4eXkhNDS02i9ZbWQyGWJiYhASElJtkEZjRO0xbebWHkDzNikUwOPHyqN1U57eZmUlQ2qqdt8jqal0PxCteXp6YsSIEfjxxx8N/t42NjaoqKjQyR9+Rg94oVCIVq1aAQB69OiBCxcu4Msvv8SHH34IAMjJyeEGKQDPH3EoEokgEomqLRcIBFp/eNbnNaaM2mPazK09QO1tKipShvqjR5Uj4Os45Wp0qh5Ubb5H5va9bAp69uzJjXqvaVqfISQlJQGATrrtjR7wz2KMoaysDL6+vnBzc0NMTAy6desGACgvL8epU6ewZs0aI1dJCNGWXF55PXgaAU9MkZWVFXfA+TyxsbF6qUHT99eEUQN+0aJFCAsLg5eXFwoKCrB3717Exsbi6NGj4PF4mD17NlatWoXWrVujdevWWLVqFcRiMSZOnGjMsgkhWigoUIb648d0O1ZCDMmoAX///n1MmjQJ2dnZsLe3R+fOnXH06FHucoDz589HSUkJpk+fjvz8fPTs2RPR0dFqoxIJIaZHNYD8+vXK/xNCDMuoAb9169Y61/N4PERGRiIyMtIwBRFC6o0x4MmTyqN1ACgrM+1z64SYM5M7B08IaVzKypShnpdXebROF6MhxPgo4AkhWlMolFeVy80FjHSPFELIc1DAE0I0VlysDPWq09sIIaaJAp4QUie5vPJe68XFxq7GvG3ZYtj3+9e/tNs+ODiYuxBZYmIiunbtqvuiTJTqAmv29va1XmHO1NDwF0JIjQoKgPR05d3b7tyhcCdK06ZNQ3
Z2NgICAjTaPjY2Fq+88gokEgmsra3RtWtX7N69u9o2PB6v2uOff/5pcL017ZfH4+Gzzz7jtgkODq62/rXXXlPbT3Z2tl4uS6tPdARPCOHIZJUXo6lyzyZCOGKxGG5ubhpvHxcXh86dO+PDDz+Eq6srDh8+jMmTJ8POzg4jRoxQ2/bGjRtqlxR3dnZucL3Z2dlqz3///XdERERgzJgxasunTZuGFStWcM+trKzU1ru5ucHe3r7B9RgSBTwhTZxqeltenvJfGgFPNKW6Octvv/2GRYsW4caNG+jSpQu+//57dOrUCYDygmZVvf/++zh27BgOHjxYLeBdXFyee5OXqoKDg7mehF27dsHCwgLvvvsuVq5cyXWpP/vHyC+//IIBAwbAz89Pbbm2f7g0BtRFT0gTVVYG3L0LXLkCpKYq565TuJP6+OCDD/D555/jwoULcHFxwciRI+u8Xe6TJ0/g4OBQbXm3bt0gkUgwaNAgnDx5UqP33rFjBywtLfHnn3/iq6++whdffIHvv/++xm3v37+Pw4cPIyIiotq63bt3w8nJCR07dsS8efNQUFCg0fubMjqCJ6QJUd29LTdXeY6dEF1YtmwZdwXSHTt2wNPTEwcPHsT48eOrbfvzzz/jwoUL+O6777hlEokEW7ZsQffu3VFWVoYffvgBgwYNQmxsLPr161fne3t5eeGLL74Aj8dD27ZtceXKFXzxxReYNm1atW137NgBW1tbjB49Wm35G2+8wd3/5OrVq1i4cCH+/vtvxMTE1OfLYTIo4AlpAmh6G9GnXr16cf93cHBA27Ztcf369WrbxcbGIjw8HP/973/RsWNHbnnbtm3Rtm1btf1lZmbi888/R79+/XDmzBmEhYVx67/77ju88cYbAICgoCC1W4j36tUL69atg1wur3ZHtm3btuGNN96odivWqn8MBAQEoHXr1ujRowcuXbqEwMBAbb8cJoMCnhAzRdPbiDFVDV0AOHXqFEaMGIH169dj8uTJz319UFAQdu3aBUB5K3HVbVQB1HnL8NqcOXMGN27cwL59+567bWBgIAQCAVJSUijgCSGmo7BQGer5+XT3NmIY58+fR8uWLQEA+fn5uHnzJtq1a8etj42NxfDhw7FmzRr8S8PJ94mJiZBIJADqvo3r+fPnqz1v3bp1taP3rVu3onv37ujSpctz3/vatWuQyWTc+zdWFPCEmAHV9La8PKC01NjVkKZmxYoVcHR0hKurKxYvXgwnJyeMGjUKgDLcX375ZcyaNQtjxoxBTk4OAEAoFHID7TZs2AAfHx907NgR5eXl2LVrFw4cOIADBw48970zMzMxZ84cvPPOO7h06RK+/vprrFu3Tm0bqVSK/fv3V1sOAKmpqdi9ezeGDRsGJycnJCcnY+7cuejWrRv69OnTwK+McVHAE9JIMQZIpcqjdZreZh60vbKcqfj0008xa9YspKSkoEuXLjh06BCEQiEAICoqCsXFxVi9ejVWr17NvaZ///6IjY0FAJSXl2PevHm4e/curKys0LFjRxw+fBjDhg177ntPnjwZJSUlePHFF2FhYYGZM2dW6yXYu3cvGGN4/fXXq71eKBTixIkT+PLLL1FYWAgvLy+8/PLLWLZsWbVegMaGAp6QRqasrPJiNHSvdWIK+vbti6tXr9a4LioqClFRUXW+fv78+Zg/f3693lsgEGDDhg3YvHlzrdv861//qvXUgJeXF3f5XXNDAU9II8BY5d3baHobMaZNmzbh+++/R3x8vLFLMSgbGxtUVFRUG4FvyijgCTFhJSWV09sqKoxdDWnqdu/ejZKSEgBAy5YtERcXZ+SKDEc1ir8xddtTwBNiYlTT2/LygKIiY1dDSCUPDw+158HBwWBGHPyhOodvCLWN4jdlFPCEmIiiIuVV5mh6GyFEFyjgCTGiigrgwQPl/1NSAD7dHYIQoiMU8IQYwZMnldPb6NKxhBB9oIAnxEDKy5Whnpen/D8hhOgTBTwhesRY5d3bpFJjV0
MIaUoo4AnRA5reRggxNgp4QnREoai8extNbyP1kZBg2Pfr3l277YODg7mrviUmJqJr1666L8oIfHx8kJGRAUB5s5zmzZsbtyAdoTG7hDRQURGQkQH8/bfyXwp3Ys6mTZuG7OxsBAQEaLR9aWkpwsPD0alTJ1haWnI3odGFgoICzJ49G97e3rCyskLv3r1x4cIFtW3u37+P8PBwuLu7QywWY+jQoUhJSVHb5sKFCxrd2KaxMWrAr169Gi+88AJsbW3h4uKCUaNG4caNG2rbhIeHg8fjqT2CgoKMVDEhShUVwP37wLVrwD//KI/aae46aQrEYjHc3NxgaalZB7BcLoeVlRXef/99DB48WKe1vP3224iJicEPP/yAK1euIDQ0FIMHD8bdu3cBAIwxjBo1Cmlpafjll1+QmJgIb29vDB48GEVV/hJ3dnbm7mxnTowa8KdOncKMGTNw/vx5xMTEoKKiAqGhoWpfeAAYOnQosrOzuceRI0eMVDFp6qRSIC0NuHwZyMqiW7OSpi02NhY8Hg+HDx9Gly5d0KxZM/Ts2RNXrlzhtrG2tsbmzZsxbdo0uLm5abzv8PBwjBo1CsuXL4eLiwvs7OzwzjvvoPzpFJSSkhIcOHAAa9euRb9+/dCqVStERkbC19eXu/FMSkoKzp8/j82bN+OFF15A27ZtsWnTJhQWFuLHH3/U7RfDBBn1HPzRo0fVnm/fvh0uLi5ISEhAv379uOUikUjjH4yysjKUlZVxz6VPhy7LZDLINLz1lmo7Tbc3ddSehikvV55bf/RIP9PbFAqZ2r/mwNzaJJdr/zNnLr9vmvjggw/w5Zdfws3NDYsWLcLIkSNx8+ZNCASCBu33xIkTaNasGU6ePInbt29j6tSpcHJywieffIKKigrI5fJqN3+xsrLC2bNnAYDLgqrbWFhYQCgU4uzZs3j77bcbVJ+pM6lBdk+ePAGAal0lsbGxcHFxQfPmzdG/f3988skncHFxqXEfq1evxvLly6stj46Ohlgs1qqemJgYrbY3ddQe05aTY17tAcyvTdr8zBUXF+uxEtOybNkyhISEAAB27NgBT09PHDx4EOPHj2/QfoVCIbZt2waxWIyOHTtixYoV+OCDD7By5UrY2tqiV69eWLlyJdq3bw9XV1f8+OOP+PPPP9G6dWsAQLt27eDt7Y2FCxfiu+++g7W1NdavX4+cnBxkZ2c3uN2mzmQCnjGGOXPmoG/fvmqDN8LCwjBu3Dh4e3sjPT0dS5YswcCBA5GQkACRSFRtPwsXLsScOXO451KpFF5eXggNDYWdnZ1GtchkMsTExCAkJKTBf4GaAmqP5lT3Wjfk9DaFQoacnBi4uYWAz2/83x/A/NpkZSVDaqp2P3PSJnThg169enH/d3BwQNu2bXH9+nWNXnvnzh106NCBe75o0SIsWrQIANClSxe1A7NevXqhsLAQmZmZ8Pb2xg8//IC33noLHh4esLCwQGBgICZOnIhLly4BUN4r/sCBA4iIiICDgwMsLCwwePBghIWF6aLZJs9kAv69997D5cuXua4VlQkTJnD/DwgIQI8ePeDt7Y3Dhw9j9OjR1fYjEolqDH6BQKB1GNTnNaaM2lOz2qa3Gfq68Hy+wCzCsCpzaZPqDqHa/MyZ0+9affB4PI22c3d3527FClTvwa1r3/7+/jh16hSKiooglUohkUgwYcIE+Pr6ctt2794dSUlJePLkCcrLy+Hs7IyePXuiR48e2jWoETKJgJ85cyYOHTqE06dPw9PTs85tJRIJvL29q01zIERbRUXKUM/Pp+vBE9IQ58+fR8uWLQEo55HfvHkT7dq10+i1lpaWtd6K9e+//0ZJSQmsrKy497GxsamWE9bW1rC2tkZ+fj6OHTuGtWvXVtuXvb09AOXAu4sXL2LlypUat6+xMmrAM8Ywc+ZMHDx4ELGxsWp/ddUmLy8PmZmZkEgkBqiQmJuKisqj9ZISY1dDiHlYsWIFHB0d4erqisWLF8PJyUltvn
tycjLKy8vx6NEjFBQUcEfsz7tQTnl5OSIiIvDRRx8hIyMDy5Ytw3vvvQf+0+61Y8eOgTGGtm3b4tatW/jggw/Qtm1bTJ06ldvH/v374ezsjJYtW+LKlSuYNWsWRo0ahdDQUF1/GUyOUQN+xowZ2LNnD3755RfY2toiJycHgPIvLSsrKxQWFiIyMhJjxoyBRCLB7du3sWjRIjg5OeHVV181ZumkkZFKlaH++LHy+vCEmCJtryxnKj799FPMmjULKSkp6NKlCw4dOgShUMitHzZsGHelOADo1q0bAOVBXl0GDRqE1q1bo1+/figrK8Nrr72GyMhIbv2TJ0+wcOFCZGVlwcHBAWPGjMEnn3yidnokOzsbc+bMwf379yGRSDB58mQsWbJERy03bUYNeNVcxeDgYLXl27dvR3h4OCwsLHDlyhXs3LkTjx8/hkQiwYABA7Bv3z7Y2toaoWLSmJSXKwfM5ebS3dsI0ae+ffvi6tWrta6/fft2vfe9fPnyGmdGAcD48eOfO1L//fffx/vvv1/v92/MjN5FXxcrKyscO3bMQNUQc8CY+r3WCSG6tWnTJnz//feIj483dik607FjR6SlpRm7DJ0ziUF2hDRUaWnlvdbp7m2E6Mfu3btR8nTwSsuWLREXF2fkinTjyJEj3IWJNJ1O3RhQwJNGS6Go7IIvLDR2NYSYPw8PD7XnwcHBz+2Jra+oqCi97Lcm3t7eBnsvQ6KAJ42O6gJh167RgDlCCKkNBTxpFOTyyqN11cVo5HLDX4yGEF1S0C0IST1o+nNDAU9MWkFB5fQ2+iwk5kIoFILP5+PevXtwdnaGUCjU+MpvpOlijKG8vBwPHz4En89Xm4pYEwp4YnJkssoBc1VuDEiI2eDz+fD19UV2djbu3btn7HJIIyMWi9GyZUvugj+1oYAnJqHq9DaplM6tE9Oij5kZQqEQLVu25G57SogmLCwsYGlpqVGPDwU8Maqyssqj9SZ0+2zSCJSWAllZwN27gK0tUMsdqhuEx+OZ3U2giOmggCcGp1Aob/BC09uIqaka6nl5lcttbIxXEyH1RQFPDKa4WBnqjx7R3duI6agt1Alp7CjgiV6pprfl5VXOXyfE2CjUSVNAAU/0gqa3EVNDoU6aGgp4ojMyWeXFaGh6GzEFFOqkKaOAJw1C09uIqaka6o8e0c8kaboo4Em90PQ2Ykoo1AmpjgKeaEyhUJ5Tz81VnmMnxJhKS5WBnpVFoU5ITRoc8HK5HFeuXIG3tzdatGihi5qIiaHpbcRUqEJddU6dQp2Q2mkd8LNnz0anTp0QEREBuVyO/v37Iy4uDmKxGL/99huCg4P1UCYxNLlcGei5uTS9jRgXhToh9aN1wP/888948803AQC//vor0tPT8c8//2Dnzp1YvHgxzp07p/MiieEUFipDPT+fprcR46FQJ6ThtA743NxcuLm5AQCOHDmCcePGoU2bNoiIiMBXX32l8wKJ/qmmt+XlKT9YCTEGCnVCdEvrgHd1dUVycjIkEgmOHj2KTZs2AQCKi4thYWGh8wKJfjCmnNaWm6uc5kYfpsQYKNQJ0R+tA37q1KkYP348JBIJeDweQkJCAAB//vkn2rVrp/MCiW6VlVVejIamtxFjKC0F7t1Tjn6nUCdEf7QO+MjISAQEBCAzMxPjxo2DSCQCoLxH7YIFC3ReIGk41QforVs0YI4YhyrU795V/nFJoU6I/tVrmtzYsWMBAKVVTthOmTJFNxURnSkpUX6Y5uYqnxcWAny+cWsiTcvt2xTqhBiL1h/3crkcK1euhIeHB2xsbJCWlgYAWLJkCbZu3arzAol25HLg4UPg+nUgORl48ACoqDB2VaSpKC0F0tKAuDjl88uXlT+PFO6EGJ7WAf/JJ58gKioKa9euhVAo5JZ36tQJ33//vVb7Wr16NV544QXY2trCxcUFo0aNwo0bN9S2YYwhMjIS7u7usLKyQnBwMK5du6Zt2WavsFB5tHT5MnDnDnXFE8NRhf
qZM8DvvwNJSZW9RoQQ49E64Hfu3IktW7bgjTfeUBs137lzZ/zzzz9a7evUqVOYMWMGzp8/j5iYGFRUVCA0NBRFRUXcNmvXrsX69evxzTff4MKFC3Bzc0NISAgK6FqpqKgA7t8Hrl0DbtxQDliiuevEEGoKdTpSJ8S0aH0O/u7du2jVqlW15QqFAjIth2UfPXpU7fn27dvh4uKChIQE9OvXD4wxbNiwAYsXL8bo0aMBADt27ICrqyv27NmDd955R9vyzYLq7m00vY0YUlMcKMcYQ25uMm7ePAKJJBPDhg0zdkmEaEzrgO/YsSPOnDkDb29vteX79+9Ht27dGlTMkydPAAAODg4AgPT0dOTk5CA0NJTbRiQScZfHrSngy8rKUFblZuRSqRQAIJPJNP4DRLWdtn+w6FN5ufLSsY8eKf+vDYVCpvZvY0ftMZzSUiAnRxnsVae08XjKR234fJnav41JSUk+0tNPIC0tBqmpMSgoyAIA+Pn54dNPP9V4P6b0+UGaJq0DftmyZZg0aRLu3r0LhUKB//3vf7hx4wZ27tyJ3377rd6FMMYwZ84c9O3bFwEBAQCAnJwcAMqL61Tl6uqKjIyMGvezevVqLF++vNry6OhoiMVirWqKiYnRantTl5ND7TFlptoeoRDw8VE+tBUYaJptqkoulyMlJQWJiYlISkpCSkoKFFXOdQmFQnTo0AGBgYGIjo4Gr66/bKoopoEwxMi0DvgRI0Zg3759WLVqFXg8HpYuXYrAwED8+uuv3EVv6uO9997D5cuXcfbs2Wrrnv2FYozV+ku2cOFCzJkzh3sulUrh5eWF0NBQ2NnZaVSLTCZDTEwMQkJCIBAItGiFbpSWKo+W8vN1MwJeoZAhJycGbm4h4PMN3x5do/boXlkZkJ1d/Ui9vvh8GQIDY3DpUggUCtP7HkmlWUhNjUFaWjTS0/9AaWm+2nonp/bw9w+Fn18IWrZ8Cf7+lnBx0e4zQdV7SIix1Gse/JAhQzBkyBCdFTFz5kwcOnQIp0+fhqenJ7dcdc37nJwcSCQSbvmDBw+qHdWriEQi7uI7VQkEAq3Duj6vqS+FovLubVXGGOp03jqfLzCLQFSh9jRMWVnlFeX0dU5doRCYRMDLZCXIzDyDtLRjSEs7htxc9Zk4zZo1h4/PYPj5DYGf3xDY2XmprWdM2d2uzWeCMQ4OCKmqwfeDbwjGGGbOnImDBw8iNjYWvr6+aut9fX3h5uaGmJgY7vx+eXk5Tp06hTVr1hijZJ0rLFQeMT16RCPgif6pQv3uXfMe9c4YQ17eP08D/Sju3DmFiorKC3PxeHxIJC/Az28o/PyGwN39BfD5Rv04JETntP6J5vP5dZ6DksvlGu9rxowZ2LNnD3755RfY2tpy59zt7e1hZWUFHo+H2bNnY9WqVWjdujVat26NVatWQSwWY+LEidqWbjIqKiqvB093byP61lRCvbT0MdLTjyM9XXmULpVmqq23tfWAn98Q+PoOga/vYFhZORipUkIMQ+uAP3jwoNpzmUyGxMRE7Nixo8bBbXXZvHkzACA4OFht+fbt2xEeHg4AmD9/PkpKSjB9+nTk5+ejZ8+eiI6Ohq2trbalG53q7m2PH5vvhywxDU0h1BUKObKzLyIt7RjS04/h7t3zYKyyG8zCQoSWLftxoe7s3FHjAXKEmAOtA/6VV16ptmzs2LHo2LEj9u3bh4iICI33xTT41OHxeIiMjERkZKQ2ZZqM8nJlqOflaT+9jRBtNIVQLyi4i7S0aKSlHcXt28dRUvJIbb2jY7un59GHomXLfhAItJs5Q4g50dlJp549e2LatGm62l2jxpjyKD03V3nUToi+VA313FzzG8dRUVGKzMyzSEs7irS0Y3j48KraepHIXm1wnL19SyNVSojp0UnAl5SU4Ouvv1YbAd8UlZZWHq3TDV6IvqimtKlGv5tTqCsHx93gut0zMmJRUVFSZQse3N1fgK+vMtA9PHrS4DhCaqH1b0aLFi3Uzm
MxxlBQUACxWIxdu3bptLjGoLbpbYTokjmHemnpE9y+fYIb8S6V3lFbb2MjqTI4LgRisaORKiWkcdE64L/44gu1gOfz+XB2dkbPnj3RokULnRZnyoqKlB+0+fnKW7QSomvmGuoKhRw5OZe4bnfl4LjKXyILCyG8vF7iprA5OwfQ4DhC6kHrgFeNbm+KKioqj9ZLSp6/PSHaMtdQLyzM5i4yk54eg5KSPLX1Dg5tufPoLVv2h1BobaRKCTEfGgX85cuXNd5h586d612MqaLpbUSfVPdGio9Xjn43h1CvqCjD33//jRMnTiM19TgePFD/DBGJ7ODtPYgL9ebNfYxTKCFmTKOA79q1K3g83nOntfF4PK0udGPq7t9XdsHT9Daia+XllZeJffQICAxs3OHOGMOjRylVrhwXC5ms6s1WeJBIulcZHBcECwu6lCsh+qRRwKenp+u7DpOUna3ba8GTpq1qqFftfm+sP2NlZdIqg+OO4cmT22rrW7RoAS+v4fD1DXs6OM7JOIUS0kRpFPDP3vudEKKZ2kK9MWJM8XRwnDLQs7Liqg2O8/TsCz+/IWjVaiCGDctCQsLLJnGzGUKaonpPIE1OTsadO3dQ/kz/9ciRIxtcFCGNmTmFemFhDtLTo5+GejRKSnLV1js4tOa63b29gyEU2gBQ3i6Wx7trjJIJIU9pHfBpaWl49dVXceXKFbXz8qppLOZ0Dp4QTZlLqMvl5cjMPMdNYXvw4G+19UKhLXx8BnJT2Jo3961lT4QQY9M64GfNmgVfX18cP34cfn5++Ouvv5CXl4e5c+fi888/10eNhJgkcwh1xhjy829x3e4ZGSchk6lfscnNrTs32t3DoxcNjiOkkdA64OPj4/HHH3/A2dkZfD4ffD4fffv2xerVq/H+++8jMTFRH3USYhJUoa66oUtjDPWysgJkZPzBjXh//Fh9EK21tSt8fUOfXj0uBNbWLkaqlBDSEFoHvFwuh42N8jybk5MT7t27h7Zt28Lb2xs3btzQeYGEGFtjD3XGFLh/PwlpaceQmnoUd+/GQaGovFkCny+Ap2cf+PkNgb//ULi4dAaP10iH9hNCOFoHfEBAAC5fvgw/Pz/07NkTa9euhVAoxJYtW+Dn56ePGgkxuMYe6oWF97nBcenpMSgufqC2vkULf+48esuWwRCJbI1UqWkTiwEnJ6BVK6CgwNjVEKIdrQP+o48+QtHTu6p8/PHHGD58OF566SU4Ojpi3759Oi+QEENpzKEul5cjKyuOO5d+/776qTKh0Abe3gO5c+ktWvgbqVLTpgp0JyfA2Rmwtq5cTgFPGhutA37IkCHc//38/JCcnIxHjx5Vu8scIY2BTKYM9MYY6vn5qVUGx/2B8vJCtfWurt24QPf07A0LC6GRKjVdVQPdyQl4evaRELOgdcDv2LEDY8eOhbV15c0gHBwcdFoUIfokk1WOfm9MoV5eXoiMjJNITT2K9PRjyM9PVVsvFjvD1zcU/v5D4eMTAhsbVyNVaroo0ElTonXAz5s3D9OnT8eIESPw5ptvYujQobC0rPf1cggxCFWo370LPHjQOEJdOTju7ypXjjsHhULGrefzLbnBcX5+Q+Dq2pUGxz2DAp00ZVonc3Z2No4ePYoff/wRr732GqysrDBu3Di8+eab6N27tz5qJKReGmOoFxU9QHp6zNPBcdEoKrqvtr55cz8u0L29B0AksjNSpabJykp57pwCnZB6BLylpSWGDx+O4cOHo7i4GAcPHsSePXswYMAAeHp6IjU19fk7IURPGluoy+UyZGaewfXrP+DcueXIyVEfHCcQWMPbewA34t3BoZWRKjVNFOiE1K5BfetisRhDhgxBfn4+MjIycP36dV3VRYjGGluo5+enPT1CP4bbt/9Aebn68GxX165PLzKjHBxnaSkyUqWmhwKdEM3VK+BVR+67d+/G8ePH4eXlhddffx379+/XdX2E1Cozs3GEunJwXCwX6o8epaitF4ud0KNHBzRvPg
U+PsNgY+NmpEpNDwU6IfWndcC//vrr+PXXXyEWizFu3DjExsbSuXdiEKoj9Xv3AC8vIDHRNIOdMYYHDy5zl4LNzDxbbXCch0evp+fSh8LdPQAvvHAUFy8Oa/K3VrWyqpyDToFOSMNoHfA8Hg/79u3DkCFDaPQ80buaut/5fGXAm5Kiooe4fTuGu61qUVGO2np7ex/uPLqPz0C1wXE8nuzZ3TUZFOiE6I/WCb1nzx6dvfnp06fx2WefISEhAdnZ2Th48CBGjRrFrQ8PD8eOHTvUXtOzZ0+cP39eZzUQ09MYzqnL5TLcvXue63bPzk4AwLj1AoEY3t4DuHulOzi0pgtBgQKdEEMy6iF4UVERunTpgqlTp2LMmDE1bjN06FBs376dey4U0tW4zJFMBmRnKy8+Y6qh/vjx7SpXjjuBsjKp2noXl87c4Dgvr740OA7KQHdwoEAnxBiMGvBhYWEICwurcxuRSAQ3Nxp0ZI5MPdTLy4tw584ppKUdRVraMTx6dFNtvZWVI3x9Q552vYfCxkZipEpNR7NmyjB3dlY+DwlRnlIhhBiexgGflZUFT09PfdZSo9jYWLi4uKB58+bo378/PvnkE7i41H5/6rKyMpSVlXHPpVLlUZZMJoNMptm5TtV2VQdGNWaqdphCe2Qy4P59ZRf8s6GuaRDw+TK1f3VFOTjuCtLSYpCaGoPMzLOQy8u59TyeBTw9g+DnFwJ//1C4uXUDn29RZQ/1q0df7TGEZs0qR7g7OlbenEWhkCEnxzR+5nRBLle2Q9PPEG23JUQfeIwx9vzNgObNm+Prr7/GpEmT9FMIj1ftHPy+fftgY2MDb29vpKenY8mSJaioqEBCQgJEopq7PyMjI7F8+fJqy/fs2QOxWKyX2knjJZVK8ffffyMxMRFJSUl49OiR2npnZ2d069YN3bp1Q+fOndXuwUBIXYqLizFx4kQ8efIEdnZ0xUFieBoH/KZNm7BgwQKEhIRgy5YtcHR01G0hNQT8s7Kzs+Ht7Y29e/di9OjRNW5T0xG8l5cXcnNzNf4lk8lkiImJgZtbCPj8xj9tSXk0Zdj21HWk3lB8vgyBgTG4dClE62llCkUF7t79C6mp0UhLi8a9e+qD4ywtreDt3R/+/sqjdAeHNnofHNeQ9uib6gjd0VH5r6Z/3xjjZ06frKxkSE2NQUhICAQCzdojlUrh5OREAU+MRuMu+unTpyMsLAwRERHo2LEjtmzZgpEjR+qztmokEgm8vb2RkpJS6zYikajGo3uBQKDxL6YKny8wiw8nFX23R3VO/e5dZbjr+5y6QiHQKBCfPMngBsfdvn0CZWVP1NY7OwdwU9iUg+OacesYUz4MQdP26JPqHLpqpHtDB8WZy++QxdMzMdp8jmj7eUOIrmk1yM7X1xd//PEHvvnmG4wZMwbt27evNhf+0qVLOi2wqry8PGRmZkIiocFMpqKionJKmyFCXRMyWfHTwXHKUM/L+0dtvZWVA3x8Qp5eaCYUtrYeRqrU+FRH6KpQt7U1dkWEEF3RehR9RkYGDhw4AAcHB7zyyisNuthNYWEhbt26xT1PT09HUlISHBwc4ODggMjISIwZMwYSiQS3b9/GokWL4OTkhFdffbXe70karqKicvS7KYQ6YwwPH15Deroy0O/cOQ25vPI0DY/Hh4dHEDcnXSLp8czguKaDAp2QpkOrdP7vf/+LuXPnYvDgwbh69SqcVXNh6unixYsYMGAA93zOnDkAgClTpmDz5s24cuUKdu7cicePH0MikWDAgAHYt28fbOlTyeBUoX73Lp6OjjZuPQUFBbh27Sekpp5Aeno0Cgruqq23s/PiLgXr4zMIzZo1N06hRkaBTkjTpXHADx06FH/99Re++eYbTJ48WSdvHhwcjLrG+B07dkwn70Pqx5RCXaGowL17fz29ctxRZGdfhKJKQZaWzdCyZTB3r3RHx3ZN8spxFOiEEBWNA14ul+Py5ctGmQtPDMeUQl0qzawyOO44Sksfq6
13du4AX1/V4LiXIBBYGadQI6o6D93ZmQKdEFJJ44CPiYnRZx3EiEwl1GWyEmRmnkZq6tGng+Ouq61v1qwFfHwGw99/MF591QK3b082+qhzQ6NAJ4Roim4H10SZQqgzxpCbe527FGxm5mlUVJRy63k8PtzdX+SmsEkkL4DPtwCfL4OT0xHcvm34mg1NJKoMcwp0Qog2KOCbkIoK5ah3Y4Z6SUk+bt8+znW9FxRkqa23tfXkzqP7+AyClZWD4Ys0Igp0QoiuUMCbOdWROo8HHD2qfG5ICoUc2dkXuEC/d+9PMFb5l4WFhQgtW/bnRrw7ObVvUoPjRKLKG7MMHAjQBc8IIbpCAW+Gnu1+B4AePQx3xC6VZiE9PRqpqUefDo7LV1vv6Ngefn5D4O8/FF5e/ZrU4LiajtAVCuXFguhWqoQQXaKANxMVFcowV118Ri6vXKfv23VWVJTizp3T3FF6bu41tfUikf3T26oqu97t7Lz0W5AJqRroTk50hE4IMRwK+EasrlDXJ8YY8vL+4QL9zp1TqKgoqbIF7+ngOGWgu7u/CD6/afyoUaATQkxF0/jUNSPGCvXS0se4ffvE01A/Cqk0U229jY07dx7d13dwkxkcR4FOCDFVFPCNgDFCXTk47uLTK8cdw927f4KxyjdWDo7rBz+/IfD1HQJn545NYnCcUKh+tzUKdEKIqaKAN1HGCPWCgntVrhwXg5KSR2rrHR3bcd3uLVv2h0Ag1n9RRkaBTghprCjgTYihQ72iohSZmWe5bveHD6+qrReJ7OHjM4gLdXt7b/0WZAIo0Akh5oIC3sgMGerKwXE3kJp6FOnpx5CREVttcJxE0oO7cpyHR0+zHxwnFKqfQ7e3N3ZFhBCiG+b96W2iDBnqpaVPcOfOMfz111b8+ecsPHmSobbexkbCnUf39R0MsdhJf8WYAAp0QkhTQQFvIKpQV118Rl+hzpgC2dkJ3Ln0u3fjnxkcJ4SX10tct7uzcyezHhxHgU4Iaaoo4PXIUKFeWJiNtLRopKUdRXp6DEpK8tTWOzi0Rq9ebWBr+y94eg6CUGitn0JMgCrQVaFOgU4Iaaoo4HXMEKFeUVGGzMyzSE9XHqU/eHBZbb1QaAsfn8HcUbqDgwd69DiCixfDzO72qhTohBBSMwp4HdB3qDPG8OhRCjcnPSPjJGSy4ipb8CCRdIev75Cng+OCYGFRNchlui3IiFSj3AEgOBho3tyY1RBCiOmigK8nfYd6WZkUt2//wU1he/Lkttp6a2s3+PmFws9vKHx8BsPa2lm3BZiImo7QVTdnoSlshBBSOwp4Legz1BlTICfnktrgOIWi8t6ufL4AXl59uSlsLi6dzXJwHHW5E0KIblDAP4c+Q72wMAfp6dFPu95jUFz8UG29g0Nrrtvd2zsYQqH53U+UAp0QQvSDAr4OFy8q76uuq1CXy8uRmXmOGxx3/36S2nqh0AY+PoO4UG/Rwk83b2xCKNAJIcQwKODrcO+e8nxvQzx6dIs7j64cHFektt7NLZC7C5uHR69nBsc1flUD3cmJBsURQoihUMDrWFlZATIy/uDOpT9+nKa2Xix24aav+fqGwNraxUiV6gcFOiGEmAYK+AZiTIH795O4QM/KOldtcJynZx8u1F1du4DH4xuxYt2iQCeEENNk1IA/ffo0PvvsMyQkJCA7OxsHDx7EqFGjuPWMMSxfvhxbtmxBfn4+evbsiY0bN6Jjx47GKxpAUdEDpKdHP71pSwyKix+orW/Rwr/K4LgBEIlsjVSp7j0b6Pb2gBkO5ieEkEbPqAFfVFSELl26YOrUqRgzZky19WvXrsX69esRFRWFNm3a4OOPP0ZISAhu3LgBW1vDhaZcXo6srHikpR19OjguUW29QGANH5+B3BS2Fi38DVabvlGgE0JI42TUgA8LC0NYWFiN6xhj2LBhAxYvXozRo0cDAHbs2AFXV1fs2bMH77zzjl5ry87OxsWL3yI19TgyMv5AeXmh2npX125ct7
unZ29YWAj1Wo+hUKATQoh5MNlz8Onp6cjJyUFoaCi3TCQSoX///oiLi6s14MvKylBWVsY9l0qlAACZTAaZ7PmXbM3JycGAAQOQmpqqtlwsdoaf32D4+YXAzy8ENjauz7zSdC8Hy+fL1P6tSigEHByUYe7oqLw6XNVAZ0z5MCUKhUzt38bO3NoDmF+b5HJlOzT5DFHRZltC9MFkAz4nJwcA4OqqHqSurq7IyMio6SUAgNWrV2P58uXVlkdHR0MsFj/3fZXXfX8ECwsLtGvXDt26dUO3bt3g6+sLPl81OC5B84aYkMDAmDrXFxUpH41FTk7d7WlszK09gPm1KSZG8/YUFxc/fyNC9MhkA17l2cuxMsbqvETrwoULMWfOHO65VCqFl5cXQkNDYafhxct//fVX3LlzB//88woUCgHy84H8/PrVb2wCAeDkJIOHRwzE4hDY2wsafZe7QiFDTk4M3NxCwOc3/usGmFt7APNrk5WVDKmpMQgJCYFAoFl7VL2HhBiLyQa8m5sbAOWRvEQi4ZY/ePCg2lF9VSKRCCKRqNpygUCg8S/miy++iNzcXCgUgkZ3e1VloKtfKY4x5UV7mjcXmMWHrQqfT+0xdebSJgsL5b/afI5ouh0h+mKyAe/r6ws3NzfExMSgW7duAIDy8nKcOnUKa9asMXJ1pqOmQH/2CN3UzqETQgjRP6MGfGFhIW7dusU9T09PR1JSEhwcHNCyZUvMnj0bq1atQuvWrdG6dWusWrUKYrEYEydONGLVxqVJoBNCCCFGDfiLFy9iwIAB3HPVufMpU6YgKioK8+fPR0lJCaZPn85d6CY6Otqgc+CNTSBQjm53dqZAJ4QQojmjBnxwcDBYHf3HPB4PkZGRiIyMNFxRRkaBTgghRBdM9hx8U0GBTgghRB8o4A2MAp0Q0yUQACJR9QefD9y8aezqCNEOBbyeVQ101d3WKNAJMQ4er+YAFworg7wmdFE60hhRwOsYBTohxmVhUXOIq4KckKaCAr6BKNAJMbzautJFIsCSPtUIAUABrzVVoKvmoVOgE6J7tXWlq47Ca+tKJ4RUooB/Dgp0QvSDutIJ0S8K+Dr06we0aEGBTkh9CQSAlRV1pRNiDPQrVgc6WiekbrV1pVtYKG9w1LGjMuQJIYZHAU8IqVN9utJpWhkhxkcBTwjh5oFXnRNOXemENG70q0tIE0Cj0glpeijgCTETNCqdEFIVBTwhjciz3efPDmwjhBAVCnhCTEhdXekiEc3qIIRojgKeEAN7titdNaWsQwfA2trY1RFCzAUFPCF6oE1XumpKGZ0nJ4ToEgU8IfVAXemEEFNHAU9ILSwtaz8Sp6NtQoipo4AnTRqNSieEmCsKeGLW+Py6Q5y60gkh5ooCnjR6lpY1d6FTVzohpCmjgCeNQtWjcNW0srZtldPKqCudEEKqo4AnJkGbrnTVtDIrKwp3QgipDQU8MZiautJVD7pnOCGE6BYFPNEpGpVOCCGmwaQDPjIyEsuXL1db5urqipycHCNVRGhUOiGENA4mHfAA0LFjRxw/fpx7bkGHgXpHXemEENL4mXzAW1paws3NTePty8rKUFZWxj2XSqUAAJlMBplqdNZzqLZTKDTb3tSp2lG1PUKh+lSyqtPL6vobSsMvoV6pvj+afj9Nnbm1BzC/NtWnPebSdtJ48RhjzNhF1CYyMhKfffYZ7O3tIRKJ0LNnT6xatQp+fn51vubZbn0A2LNnD8RisT7LJYQQTnFxMSZOnIgnT57Azs7O2OWQJsikA/73339HcXEx2rRpg/v37+Pjjz/GP//8g2vXrsHR0bHG19R0BO/l5YXc3FyNf8lkMhliYmLg5hYCPt90+6SfvVZ61SPyql3pqvaEhIRAYAZ97NQe02dubapPe6RSKZycnCjgidGYdBd9WFgY9/9OnTqhV69e8Pf3x44dOzBnzpwaXyMSiSASiaotFwgEWn/Q8PkCowY8j6cMal2NSq/P18CUUXtMn7m1SZv2mF
O7SeNk0gH/LGtra3Tq1AkpKSnGLkVn+PzaA1wopFHphBBC6qdRBXxZWRmuX7+Ol156ydilaIVGpRNCCDE0kw74efPmYcSIEWjZsiUePHiAjz/+GFKpFFOmTDF2aWp03ZVOCCGENJRJB3xWVhZef/115ObmwtnZGUFBQTh//jy8vb0NXgt1pRNCCGlMTDrg9+7da9T3b9lSebcy6konhBDS2Jh0wBubgwMFOyGEkMaJb+wCCCGEEKJ7FPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwBNCCCFmyOwvdKO63b1UKtX4NTKZDMXFxZBKpWZxy0dqj2kzt/YA5tem+rRH9Zmj+gwixNDMPuALCgoAAF5eXkauhBDSFBUUFMDe3t7YZZAmiMfM/M9LhUKBe/fuwdbWFjwN7wgjlUrh5eWFzMxM2NnZ6blC/aP2mDZzaw9gfm2qT3sYYygoKIC7uzv4fDobSgzP7I/g+Xw+PD096/VaOzs7s/hwUqH2mDZzaw9gfm3Stj105E6Mif6sJIQQQswQBTwhhBBihijgayASibBs2TKIRCJjl6IT1B7TZm7tAcyvTebWHtI0mP0gO0IIIaQpoiN4QgghxAxRwBNCCCFmiAKeEEIIMUMU8IQQQogZarIBv2nTJvj6+qJZs2bo3r07zpw5U+f2p06dQvfu3dGsWTP4+fnh22+/NVClmtGmPf/73/8QEhICZ2dn2NnZoVevXjh27JgBq30+bb8/KufOnYOlpSW6du2q3wK1pG17ysrKsHjxYnh7e0MkEsHf3x/btm0zULXPp217du/ejS5dukAsFkMikWDq1KnIy8szULV1O336NEaMGAF3d3fweDz83//933NfY+qfB4QAAFgTtHfvXiYQCNh///tflpyczGbNmsWsra1ZRkZGjdunpaUxsVjMZs2axZKTk9l///tfJhAI2M8//2zgymumbXtmzZrF1qxZw/766y928+ZNtnDhQiYQCNilS5cMXHnNtG2PyuPHj5mfnx8LDQ1lXbp0MUyxGqhPe0aOHMl69uzJYmJiWHp6Ovvzzz/ZuXPnDFh17bRtz5kzZxifz2dffvklS0tLY2fOnGEdO3Zko0aNMnDlNTty5AhbvHgxO3DgAAPADh48WOf2pv55QIhKkwz4F198kf373/9WW9auXTu2YMGCGrefP38+a9eundqyd955hwUFBemtRm1o256adOjQgS1fvlzXpdVLfdszYcIE9tFHH7Fly5aZVMBr257ff/+d2dvbs7y8PEOUpzVt2/PZZ58xPz8/tWVfffUV8/T01FuN9aVJwJv65wEhKk2ui768vBwJCQkIDQ1VWx4aGoq4uLgaXxMfH19t+yFDhuDixYuQyWR6q1UT9WnPsxQKBQoKCuDg4KCPErVS3/Zs374dqampWLZsmb5L1Ep92nPo0CH06NEDa9euhYeHB9q0aYN58+ahpKTEECXXqT7t6d27N7KysnDkyBEwxnD//n38/PPPePnllw1Rss6Z8ucBIVWZ/c1mnpWbmwu5XA5XV1e15a6ursjJyanxNTk5OTVuX1FRgdzcXEgkEr3V+zz1ac+z1q1bh6KiIowfP14fJWqlPu1JSUnBggULcObMGVhamtaPdH3ak5aWhrNnz6JZs2Y4ePAgcnNzMX36dDx69Mjo5+Hr057evXtj9+7dmDBhAkpLS1FRUYGRI0fi66+/NkTJOmfKnweEVNXkjuBVnr11LGOsztvJ1rR9TcuNRdv2qPz444+IjIzEvn374OLioq/ytKZpe+RyOSZOnIjly5ejTZs2hipPa9p8fxQKBXg8Hnbv3o0XX3wRw4YNw/r16xEVFWUSR/GAdu1JTk7G+++/j6VLlyIhIQFHjx5Feno6/v3vfxuiVL0w9c8DQoAmeATv5OQECwuLakcbDx48qPZXuYqbm1uN21taWsLR0VFvtWqiPu1R2bdvHyIiIrB//34MHjxYn2VqTN
v2FBQU4OLFi0hMTMR7770HQBmQjDFYWloiOjoaAwcONEjtNanP90cikcDDw0PtVqPt27cHYwxZWVlo3bq1XmuuS33as3r1avTp0wcffPABAKBz586wtrbGSy+9hI8//rjRHfGa8ucBIVU1uSN4oVCI7t27IyYmRm15TEwMevfuXeNrevXqVW376Oho9OjRAwKBQG+1aqI+7QGUR+7h4eHYs2ePSZ0L1bY9dnZ2uHLlCpKSkrjHv//9b7Rt2xZJSUno2bOnoUqvUX2+P3369MG9e/dQWFjILbt58yb4fD48PT31Wu/z1Kc9xcXF4PPVP2osLCwAVB75Niam/HlAiBojDe4zKtU0n61bt7Lk5GQ2e/ZsZm1tzW7fvs0YY2zBggVs0qRJ3PaqaTH/+c9/WHJyMtu6datJTYvRtj179uxhlpaWbOPGjSw7O5t7PH782FhNUKNte55laqPotW1PQUEB8/T0ZGPHjmXXrl1jp06dYq1bt2Zvv/22sZqgRtv2bN++nVlaWrJNmzax1NRUdvbsWdajRw/24osvGqsJagoKClhiYiJLTExkANj69etZYmIiN+2vsX0eEKLSJAOeMcY2btzIvL29mVAoZIGBgezUqVPcuilTprD+/furbR8bG8u6devGhEIh8/HxYZs3bzZwxXXTpj39+/dnAKo9pkyZYvjCa6Ht96cqUwt4xrRvz/Xr19ngwYOZlZUV8/T0ZHPmzGHFxcUGrrp22rbnq6++Yh06dGBWVlZMIpGwN954g2VlZRm46pqdPHmyzt+Hxvh5QAhjjNHtYgkhhBAz1OTOwRNCCCFNAQU8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ6QGt2/fBo/HQ1JSkrFLIYSQeqGAJ41WeHg4Ro0aVW15bGwseDweHj9+XO99e3l5ITs7GwEBAfUvkBBCjKjJ3S6WkOcpLy+HUCiEm5ubsUshhJB6oyN4YvYOHDiAjh07QiQSwcfHB+vWrVNb7+Pjg48//hjh4eGwt7fHtGnTqnXRh4eHg8fjVXvExsYCAPLz8zF58mS0aNECYrEYYWFhSElJ4d4jKioKzZs3x7Fjx9C+fXvY2Nhg6NChyM7ONtSXgRDSxFDAE7OWkJCA8ePH47XXXsOVK1cQGRmJJUuWICoqSm27zz77DAEBAUhISMCSJUuq7efLL79EdnY295g1axZcXFzQrl07AMo/AC5evIhDhw4hPj4ejDEMGzYMMpmM20dxcTE+//xz/PDDDzh9+jTu3LmDefPm6bX9hJAmzMh3syOk3qZMmcIsLCyYtbW12qNZs2YMAMvPz2cTJ05kISEhaq/74IMPWIcOHbjn3t7ebNSoUWrbpKenMwAsMTGx2vseOHCAiUQidubMGcYYYzdv3mQA2Llz57htcnNzmZWVFfvpp58YY8p7ogNgt27d4rbZuHEjc3V1bfDXgRBCakJH8KRRGzBgAJKSktQe33//Pbf++vXr6NOnj9pr+vTpg5SUFMjlcm5Zjx49NHq/xMRETJ48GRs3bkTfvn2597C0tETPnj257RwdHdG2bVtcv36dWyYWi+Hv7889l0gkePDggXYNJoQQDdEgO9KoWVtbo1WrVmrLsrKyuP8zxsDj8dTWM8Zq3M/z5OTkYOTIkYiIiEBERESd+6vpvQUCgdp6Ho9X62sJIaSh6AiemLUOHTrg7Nmzasvi4uLQpk0bWFhYaLyf0tJSvPLKK2jXrh3Wr19f7T0qKirw559/csvy8vJw8+ZNtG/fvmENIISQeqIjeGLW5s6dixdeeAErV67EhAkTEB8fj2+++QabNm3Saj/vvPMOMjMzceLECTx8+JBb7uDggNatW+OVV17BtGnT8N1338HW1hYLFiyAh4cHXnnlFV03iRBCNEJH8MSsBQYG4qeffsLevXsREBCApUuXYsWKFQgPD9dqP6dOnUJ2djY6dOgAiUTCPeLi4gAA27
dvR/fu3TF8+HD06tULjDEcOXKkWrc8IYQYCo/RSUBCCCHE7NARPCGEEGKGKOAJIYQQM0QBTwghhJghCnhCCCHEDFHAE0IIIWaIAp4QQggxQxTwhBBCiBmigCeEEELMEAU8IYQQYoYo4AkhhBAzRAFPCCGEmKH/B4YHxYxK7zDVAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| hide\n", - "# Create single mixture and broadcast to N,H,K\n", - "weights = torch.ones((1,3))[None, :, :]\n", + "# Create single mixture and broadcast to N, H, K\n", + "weights = torch.ones((2,3))[None, :, :]\n", "lambdas = torch.Tensor([[5,10,15], [10,20,30]])[None, :, :]\n", "\n", "# Create repetitions for the batch dimension N.\n", @@ -1987,7 +3393,7 @@ "print('lambdas.shape (N,H,K) \\t', lambdas.shape)\n", "\n", "distr = PMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (lambdas,)\n", + "distr_args = (lambdas, weights)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -2092,38 +3498,44 @@ " if self.return_params:\n", " mu_names = [f\"-mu-{i}\" for i in range(1, n_components + 1)]\n", " std_names = [f\"-std-{i}\" for i in range(1, n_components + 1)]\n", - " mu_std_names = [i for j in zip(mu_names, std_names) for i in j]\n", - " self.output_names = self.output_names + mu_std_names\n", + " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", + " self.param_names = [i for j in zip(mu_names, std_names, weight_names) for i in j]\n", + " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 2 * n_components\n", + " self.outputsize_multiplier = 3 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " means, stds = torch.tensor_split(output, 2, dim=-1)\n", - " return (means, stds)\n", + " means, stds, weights = output.chunk(3, dim=-1)\n", + "\n", + " return (means, stds, weights)\n", "\n", - " def scale_decouple(self, \n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " eps: float=0.2):\n", - " \"\"\" 
Scale Decouple\n", + " def scale_decouple(\n", + " self,\n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float = 0.2,\n", + " ):\n", + " \"\"\"Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " means, stds = output\n", + " means, stds, weights = output\n", " stds = F.softplus(stds)\n", + " weights = F.softmax(weights, dim=-1)\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(means.size(dim=0), 1, -1)\n", - " scale = scale.view(means.size(dim=0), 1, -1) \n", + " scale = scale.view(means.size(dim=0), 1, -1)\n", " means = (means * scale) + loc\n", " stds = (stds + eps) * scale\n", - " return (means, stds)\n", + "\n", + " return (means, stds, weights)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -2145,17 +3557,11 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " means, stds = distr_args\n", + " means, stds, weights = distr_args\n", " B, H, K = means.size()\n", " Q = len(self.quantiles)\n", " assert means.shape == stds.shape\n", "\n", - " # Sample K ~ Mult(weights)\n", - " # shared across B, H\n", - " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", - " \n", - " weights = (1/K) * torch.ones_like(means, device=means.device)\n", - " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " means = means.flatten()\n", @@ -2195,17 +3601,15 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " means, stds = distr_args\n", + " means, 
stds, weights = distr_args\n", " B, H, K = means.size()\n", - " \n", - " weights = (1/K) * torch.ones_like(means, device=means.device)\n", - " \n", + " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", " \n", @@ -2228,7 +3632,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -2278,7 +3682,17 @@ "execution_count": null, "id": "8ebe4250", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", + "Parameter containing:\n", + "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" + ] + } + ], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -2302,7 +3716,40 @@ "execution_count": null, "id": "684d2382", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "means.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "stds.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", + "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", + "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfcAAAEyCAYAAADnUJkgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABDOklEQVR4nO3de1wU5f4H8M8gy3KHALkpIApoZt4yFVHBDEzTND12CjU085IXMjrpUTu5GAeMCq0sSy3g1CHL0jIzlUrJfqiB5rHMMBPQVCJvgKCwwvP7w5hclsvusrDL8nm/XvuSeWbmme9+WffLzDwzIwkhBIiIiMhiWJk6ACIiIjIuFnciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgW9xZ08OBBPPjgg/D394dSqYSXlxdCQ0Px9NNPaywXERGBiIiIFo9HkiSoVCqj9delSxeMHTvWaP01Zu/evZAkCXv37m2V7ekrIiICkiRBkiRYWVnByckJQUFBmDx5Mj766CPU1NRordOlSxdMnz5dr+1kZ2dDpVLhypUreq1Xd1u1+fzoo4/06qcxFRUVUKlU9f6O0tLSIEkSCgoKjLY9ImqYtakDsFSff/45HnjgAURERCA5ORk+Pj44f/48cnNzsWnTJrz88svysm+88YYJI20b+vfvj/3796Nnz56mDqVBXbt2xX//+18AQHl5OfLz8/HJJ59g8uTJGDZsGD777DO4uLjIy2/duhXOzs56bSM7Oxvx8fGYPn06XF1ddV7PkG3pq6KiAvHx8QCg9cfq/fffj/3798PHx6dFYyCim1jcW0hycjICAwOxa9cuWFv/leaHH34YycnJGsuac8EyNbVaDUmS4OzsjMGDB5s6nEbZ2dlpxfj4448jNTUVjz32GGbPno0PPvhAntevX78Wj+natWuws7NrlW01pmPHjujYsaNJYyBqT3hYvoVcvHgRHh4eGoW9lpWVZtrrHpYvKCiAJEl46aWXkJKSgsDAQDg6OiI0NBQHDhzQ6m/Dhg0ICQmBUqlEz549kZGRgenTp6NLly5NxllUVIQ5c+agc+fOsLGxQWBgIOLj43Hjxg2d3+vOnTvRv39/2NnZoUePHnjnnXe0lvnxxx8xfvx43HbbbbC1tUXfvn2Rnp6usUztoeJ3330XTz/9NDp16gSlUomTJ09qHZavzVFDr1u988476NOnD2xtbeHm5oYHH3wQx48f11hm+vTpcHR0xMmTJzFmzBg4OjrCz88PTz/9NCorK3XORX1mzJiBMWPGYPPmzSgsLJTb6x4qr6mpQUJCArp37w47Ozu4urqid+/eeOWVVwAAKpUKzzzzDAAgMDBQfq+1Oak9TbJlyxb069cPtra28p50Q6cArl+/jri4OHh7e8POzg7h4eH4/vvvNZZp6LTRrZ+xgoICuXjHx8fLsdVus6HD8sb+3axbtw59+vSBo6MjnJyc0KNHDyxbtkwrdiJLxz33FhIaGoqNGzciNjYWU6ZMQf/+/aFQKPTq4/XXX0ePHj2wZs0aAMC//vUvjBkzBvn5+fLh3fXr12POnDmYNGkSVq9ejZKSEsTHx+tUkIqKijBw4EBYWVnhueeeQ7du3bB//34kJCSgoKAAqampTfbxv//9D08//TT++c9/wsvLCxs3bsTMmTMRFBSE4cOHAwDy8vIwZMgQeHp64tVXX4W7uzvee+89TJ8+Hb///jsWL16s0efSpUsRGhqKN998E1ZWVvD09ERRUZHGMj4+Pti/f79G2x9//IGpU6eiU6dOcltSUhKWLVuGRx55BElJSbh48SJUKhVCQ0ORk5OD4OBgeVm1Wo0HHngAM2fOxNNPP41vvvkGzz//PFxcXPDcc881mYvGPPDAA9ixYwf27duHgICAepdJTk6GSqXCs88+i+HDh0OtVuPnn3+Wz68//vjjuHTpEl577TVs2bJFPsR965Gfw4cP4/jx43j22Wc
RGBgIBweHRuNatmwZ+vfvj40bN6KkpAQqlQoRERH4/vvv0bVrV53fn4+PD3bu3In77rsPM2fOxOOPPw4Aje6tG/t3s2nTJsybNw8LFy7ESy+9BCsrK5w8eRI//fSTzu+DyGIIahEXLlwQQ4cOFQAEAKFQKMSQIUNEUlKSKCsr01g2PDxchIeHy9P5+fkCgLjzzjvFjRs35PbvvvtOABDvv/++EEKI6upq4e3tLQYNGqTRX2FhoVAoFCIgIECjHYBYsWKFPD1nzhzh6OgoCgsLNZZ76aWXBABx7NixRt9jQECAsLW11Vj/2rVrws3NTcyZM0due/jhh4VSqRSnT5/WWH/06NHC3t5eXLlyRQghxJ49ewQAMXz4cK1t1c7bs2dPvbGUl5eLgQMHCh8fH1FQUCCEEOLy5cvCzs5OjBkzRmPZ06dPC6VSKaKjo+W2mJgYAUB8+OGHGsuOGTNGdO/evdE8CHHzd3jHHXc0OP+LL74QAMQLL7wgtwUEBIiYmBh5euzYsaJv376NbufFF18UAER+fr7WvICAANGhQweRl5dX77xbt1Wbz/79+4uamhq5vaCgQCgUCvH4449rvLdbP5+1YmJiND5jf/zxh9ZnrFZqaqpG3C3xu1mwYIFwdXXV2jZRe8TD8i3E3d0d+/btQ05ODlatWoXx48fjxIkTWLp0Ke68805cuHChyT7uv/9+dOjQQZ7u3bs3AMiHdvPy8lBUVISHHnpIYz1/f3+EhYU12f/27dsxYsQI+Pr64saNG/Jr9OjRAICsrKwm++jbty/8/f3laVtbW4SEhGgcfv76668xcuRI+Pn5aaw7ffp0VFRUaO2BT5o0qcnt3qq6uhp///vfcfz4cezYsUPeM96/fz+uXbumdTjaz88P99xzD7766iuNdkmSMG7cOI223r17a7wXQwkhmlxm4MCB+N///od58+Zh165dKC0t1Xs7vXv3RkhIiM7LR0dHa5zGCAgIwJAhQ7Bnzx69t62PlvjdDBw4EFeuXMEjjzyCTz/9VKf/Y0SWisW9hQ0YMABLlizB5s2bce7cOTz11FMoKCjQGlRXH3d3d41ppVIJ4OYgKeDmeX0A8PLy0lq3vra6fv/9d3z22WdQKBQarzvuuAMAdPpyrBtjbZy1MdbGWd8oaV9fX433UUvfEdVz587Fzp078dFHH6Fv374a222oP19fX63t2tvbw9bWVuu9XL9+Xa946lNbhGrfc32WLl2Kl156CQcOHMDo0aPh7u6OkSNHIjc3V+ft6Js7b2/vetvq5sbYWuJ3M23aNLzzzjsoLCzEpEmT4OnpiUGDBiEzM7MF3gGReWNxb0UKhQIrVqwAcHOAWXPVFtbff/9da17dc9T18fDwQFRUFHJycup9zZw5s9kx1sZ5/vx5rfZz587Jcdyq7oC4xqhUKmzcuBEbNmxAVFSU1nYBNLjtutttSdu2bYMkSfI4hPpYW1sjLi4Ohw8fxqVLl/D+++/jzJkzGDVqFCoqKnTajj65A+r/nBQVFWn80WZra1vvGI7m7Bm31O9mxowZyM7ORklJCT7//HMIITB27FijHH0haktY3FtIfV9aAOSRwI3twemqe/fu8Pb2xocffqjRfvr0aWRnZze5/tixY/Hjjz+iW7duGDBggNbLGDECwMiRI/H111/LxbzWf/7zH9jb2xt8idvbb7+N+Ph4rFy5st6R4KGhobCzs8N7772n0f7bb7/JpwpaQ2pqKr744gs88sgjGqcwGuPq6oq//e1vmD9/Pi5duiSPMq979Ka53n//fY1TBoWFhcjOztYYHd+lSxecOHFCo8BfvHhR6zOmT2wt/btxcHDA6NGjsXz5clRVVeHYsWPN6o+oreFo+RYyatQodO7cGePGjUOPHj1QU1ODI0eO4OWXX4ajoyOefPLJZm/DysoK8fHxmDNnDv72t7/hsccew5UrVxAfHw8fHx+tS+7qWrlyJTIzMzFkyBDExsaie/fuuH79OgoKCrBjxw6
8+eab6Ny5c7PjXLFihXx+/7nnnoObmxv++9//4vPPP0dycrLGjV10tX//fsydOxdhYWGIjIzUukRw8ODBcHV1xb/+9S8sW7YMjz76KB555BFcvHgR8fHxsLW1lY+iGMu1a9fkOK5du4ZTp07hk08+wfbt2xEeHo4333yz0fXHjRuHXr16YcCAAejYsSMKCwuxZs0aBAQEyCPH77zzTgDAK6+8gpiYGCgUCnTv3h1OTk4GxVxcXIwHH3wQs2bNQklJCVasWAFbW1ssXbpUXmbatGl46623MHXqVMyaNQsXL15EcnKy1k1xnJycEBAQgE8//RQjR46Em5sbPDw86r0ksyV+N7NmzYKdnR3CwsLg4+ODoqIiJCUlwcXFBXfffbfe/RG1aSYe0GexPvjgAxEdHS2Cg4OFo6OjUCgUwt/fX0ybNk389NNPGss2NFr+xRdf1OoX9YxGXr9+vQgKChI2NjYiJCREvPPOO2L8+PGiX79+Ta77xx9/iNjYWBEYGCgUCoVwc3MTd911l1i+fLm4evVqo+8xICBA3H///Vrt9Y2u/uGHH8S4ceOEi4uLsLGxEX369BGpqakay9SO4N68ebNWn3VHy9eOvm7odauNGzeK3r17CxsbG+Hi4iLGjx+vdSVATEyMcHBw0NruihUrtPqrT3h4uMb2HRwcRNeuXcXf/vY3sXnzZlFdXa21Tt0R7C+//LIYMmSI8PDwEDY2NsLf31/MnDlTHv1fa+nSpcLX11dYWVlp5KSh30d926rN57vvvitiY2NFx44dhVKpFMOGDRO5ubla66enp4vbb79d2Nraip49e4oPPvhAa7S8EEJ8+eWXol+/fkKpVAoA8jbrjpavZczfTXp6uhgxYoTw8vISNjY2wtfXVzz00EPi6NGj9eaEyJJJQugwjJfalCtXriAkJAQTJkzA+vXrTR0OERG1Mh6Wb+OKiorw73//GyNGjIC7uzsKCwuxevVqlJWVGeXQPxERtT0s7m2cUqlEQUEB5s2bh0uXLskD1N588035kjYiImpfeFieiIjIwvBSOCIiIgvD4k5ERGRhLP6ce01NDc6dOwcnJye9795FRGQoIQTKysrg6+vb5D0niIzN4ov7uXPntB5YQkTUWs6cOWOUm0ER6cPii3vtnbvOnDmjdUctapparcbu3bsRFRWl9/PoqWHMa8swp7yWlpbCz8/P4LsHEjWHxRf32kPxzs7OLO4GUKvVsLe3h7Ozs8m/LC0J89oyzDGvPB1IpsATQURERBaGxZ2IiMjCsLgTERFZGIs/505EZM6qq6uhVqtNHQaZOYVCgQ4dOui8PIs7EZEJCCFQVFSEK1eumDoUaiNcXV3h7e2t0yBNFnciIhOoLeyenp6wt7fnqHpqkBACFRUVKC4uBgD4+Pg0uQ6LOxFRK6uurpYLu7u7u6nDoTbAzs4OAFBcXAxPT88mD9FzQB0RUSurPcdub29v4kioLan9vOgyRoN77tSwPUmAsALQA9iXAtyzxNQRtW17kv76uTav1K7xUDzpQ5/PC/fciYiILAyLOxERkYVhcSciIrIwPOdORGQmVmeeaNXtPRUZ0qrb00VERAT69u2LNWvWmDqUNo177kREpLPp06djwoQJWu179+6FJEnNvinPli1b8Pzzzzerj7bgm2++wbhx4+Dr6wtJkvDJJ58YtX8WdyIiMrmqqioAgJubG5ycnEwcjeEiIiKQlpbW5HLl5eXo06cP1q5d2yJxsLgTEZHRVVZWIjY2Fp6enrC1tcXQoUORk5Mjz4+IiMCCBQsQFxcHDw8PREZGyu2LFi0CABQUFECSJK1XRESETtuo7S82NhaLFy+Gm5sbvL29oVKpGo39gQceqHe7kiRh27ZtRsnP6NGjkZCQgIkTJxqlv7pY3ImIyOgWL16Mjz/+GOnp6Th8+DCCgoIwatQoXLp0SV4mPT0d1tbW+L//+z+89dZbWn34+fnh/Pnz8uv777+Hu7s7hg8frvM2arfj4OC
AgwcPIjk5GStXrkRmZmaDsaempuL8+fP45ZdfAAA7duyQYxgzZowx0tPiOKCOiIj0sn37djg6Omq0VVdXyz+Xl5dj3bp1SEtLw+jRowEAGzZsQGZmJt5++20888wzAICgoCAkJyc3uJ0OHTrA29sbAHD9+nVMmDABoaGhUKlUOm8DAHr37o0VK1YAAIKDg7F27Vp89dVX8tGCumpvCbx//35IkoShQ4e2uVMF3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PjID2FpzNGjR9GlS5dGC3tiYiIcHR3l1759+zB37lytttZm0uLepUuXes9pzJ8/H8DNJ+GoVCr4+vrCzs4OEREROHbsmClDJiJq9xwcHBAUFKTx6tSpkzxfCAFA+3apQgiNNgcHB522l5CQgJ07d2Lbtm1yodV1G8DNon8rSZJQU1PT5HaPHj2q9YdBXXPnztX4I2fAgAFYuXKlVltrM2lxz8nJ0TifUnsOZPLkyQCA5ORkpKSkYO3atcjJyYG3tzciIyNRVlZmyrCJiKgRQUFBsLGxwbfffiu3qdVq5Obm4vbbb9err48//hgrV67Ehx9+iG7durXINhpSUFCA7t27N7qMm5ubxh85dnZ28PT01GprbSY9596xY0eN6VWrVqFbt24IDw+HEAJr1qzB8uXL5dGE6enp8PLyQkZGBubMmVNvn5WVlaisrJSnS0tLAdz8pevyJB26hbCCWtz8+08trADmr3nEX39Ly3llTo2qNp/mkFdziMFUHBwc8MQTT+CZZ56Bm5sb/P39kZycjIqKCsycOVPnfn788Uc8+uijWLJkCe644w4UFRUBAGxsbODm5maUbTSmpqYGhYWF+O2339CpUyejPujn6tWrOHnypDydn5+PI0eOyO+lucxmQF1VVRXee+89xMXFQZIknDp1CkVFRYiKipKXUSqVCA8PR3Z2doPFPSkpCfHx8Vrtu3fv5uMV9fbXU8syr4YAO3aYMBZLoP0UuMZG7JLhzCGvFRUVeq9jjneMM9SqVatQU1ODadOmoaysDAMGDMCuXbtw22236dxHbm4uKioqkJCQgISEBLk9PDwce/fuNco2GhMbG4vZs2ejR48eKC0tNWpxz83NxYgRI+TpuLg4AEBMTIxO18k3RRK1Jy5M7MMPP0R0dDROnz4NX19fZGdnIywsDGfPnoWvr6+83OzZs1FYWIhdu3bV2099e+5+fn64cOECnJ2dW/x9WJR9KVALK2ReDUGk4wkohi8ydURt274U+Uc5r5GRWucDyXBqtRqZmZlmkdfS0lJ4eHigpKRE67vn+vXryM/PR2BgIGxtbU0UIbU1+nxuzGbP/e2338bo0aM1Cjmg22CJWymVSiiVSq12hUJh8v/sbY7014AThVTD/DWXpD2Ah5/LlmEOeTX19ql9M4tL4QoLC/Hll1/i8ccfl9tqr22sPcdSq7i4GF5eXq0aHxERUVtiFsU9NTUVnp6euP/+++W2wMBAeHt7a5w7q6qqQlZWFoYMGWKKMImIiNoEkx+Wr6mpQWpqKmJiYmBt/Vc4kiRh0aJFSExMRHBwMIKDg5GYmAh7e3tER0ebMGIiIiLzZvLi/uWXX+L06dN47LHHtOYtXrwY165dw7x583D58mUMGjQIu3fvbnO3ASQiImpNJi/uUVFRaGjAviRJUKlUTT7Bh4iIiP5iFufciYiIyHhY3ImIiCwMizsREZGFYXEnIiKyMCYfUEdERH/ak9S62xuxtHW3p4OIiAj07dsXa9asMXUobRr33ImISGfTp0/HhAkTtNr37t0LSZJw5cqVZvW/ZcsWPP/8883qoy1ISkrC3XffDScnJ3h6emLChAnIy8szWv8s7kREZHJVVVUAbj4fvS3fyyQiIkKnp7plZWVh/vz5OHDgADIzM3Hjxg1ERUWhvLzcKHGwuBMRkdFVVlY
iNjYWnp6esLW1xdChQ5GTkyPPj4iIwIIFCxAXFwcPDw9ERkbK7YsWLQIAFBQUQJIkrVdERIRO26jtLzY2FosXL4abmxu8vb2bvHfKAw88UO92JUnCtm3bjJKfnTt3Yvr06bjjjjvQp08fpKam4vTp0zh06JBR+mdxJyIio1u8eDE+/vhjpKen4/DhwwgKCsKoUaNw6dIleZn09HRYW1vj//7v//DWW29p9eHn54fz58/Lr++//x7u7u4YPny4ztuo3Y6DgwMOHjyI5ORkrFy5UuO5JXWlpqbi/Pnz+OWXXwAAO3bskGMYM2aMMdKjpaSkBMDNIxfGwAF1RESkl+3bt8PR0VGjrbq6Wv65vLwc69atQ1paGkaPHg0A2LBhAzIzM/H222/jmWeeAQAEBQUhOTm5we106NBBfkLo9evXMWHCBISGhkKlUum8DQDo3bs3VqxYAQAIDg7G2rVr8dVXX8lHC+pyd3cHAOzfvx+SJGHo0KEteqpACIG4uDgMHToUvXr1Mkqf3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PiguLi4yW0ePXoUXbp0abSwJyYmwtHRUX7t27cPc+fO1WprzIIFC3D06FG8//77TcakK+65ExGRXhwcHBAUFKTR9ttvv8k/1z4vRJIkjWWEEBptDg4OOm0vISEBO3fuxHfffScXWl23Adws+reSJAk1NTVNbvfo0aNafxjUNXfuXDz00EPy9JQpUzBp0iRMnDhRbuvUqVOD6y9cuBDbtm3DN998g86dOzcZk664505EbdeeJM0XmYWgoCDY2Njg22+/ldvUajVyc3Nx++2369XXxx9/jJUrV+LDDz9Et27dWmQbDSkoKED37t0bXcbNzQ1BQUHyy87ODp6enlptdQkhsGDBAmzZsgVff/01AgMDjRJzLe65ExGRUTk4OOCJJ57AM888Azc3N/j7+yM5ORkVFRWYOXOmzv38+OOPePTRR7FkyRLccccdKCoqAgDY2NjAzc3NKNtoTE1NDQoLC/Hbb7+hU6dOWkcEmmP+/PnIyMjAp59+CicnJ/m9ubi41PvHgL5Y3ImIzIUZ3jHOUKtWrUJNTQ2mTZuGsrIyDBgwALt27cJtt92mcx+5ubmoqKhAQkICEhIS5Pbw8HDs3bvXKNtoTGxsLGbPno0ePXqgtLTUqMV93bp1ACBf1lcrNTUV06dPb3b/kmjoYeoWorS0FC4uLigpKYGzs7Opw2lb9iRBLaywo6wHxjj9DMU9S0wdUdvSyGFiOa9jxmidDyQ91Mmxeug/sGPHDrPIa2PfPdevX0d+fj4CAwNha2trogiprdHnc8Nz7kRERBbG5MX97NmzmDp1Ktzd3WFvb4++fftq3KFHCAGVSgVfX1/Y2dkhIiICx44dM2HERERE5s2kxf3y5csICwuDQqHAF198gZ9++gkvv/wyXF1d5WWSk5ORkpKCtWvXIicnB97e3oiMjERZWZnpAiciIjJjJh1Q98ILL8DPzw+pqalyW5cuXeSfhRBYs2YNli9fLl8zmJ6eDi8vL2RkZGDOnDmtHTIREZHZM2lx37ZtG0aNGoXJkycjKysLnTp1wrx58zBr1iwAQH5+PoqKihAVFSWvo1QqER4ejuzs7HqLe2VlJSorK+Xp0tJSADevf1Sr1S38jiyMsIJa3Dy4oxZWAPOnH9HwgTE5r8xp89TJcW0+zSGvusSgy41UiGrp83kxaXE/deoU1q1bh7i4OCxbtgzfffcdYmNjoVQq8eijj8rX/Xl5eWms5+XlhcLCwnr7TEpKQnx8vFb77t27YW9vb/w3YdF6yD9lXg0BduwwYSxtUY8ml2js4RWkizo5/jOf5pDXioqKBufZ2NjAysoK586dQ8eOHWFjY2PUy6zIsgghUFVVhT/++ANWVlawsbFpch2TXgpnY2ODAQMGIDs7W26
LjY1FTk4O9u/fj+zsbISFheHcuXPw8fGRl5k1axbOnDmDnTt3avVZ3567n58fLly4wEvh9LUvBWphhcyrIYh0PAHF8EWmjqht2ZfS4Cw5r5GRJr9kq02rk2P14IXIzMw0i7yWlpbCw8Ojwctwq6qqcP78+Ub/CCC6lb29PXx8fHQq7ibdc/fx8UHPnj012m6//XZ8/PHHACA/DaioqEijuBcXF2vtzddSKpVQKpVa7QqFwuT/2dsc6a9DQAqphvnTl9T0ITTFgdeguHU5C7qJSauom+M/P6Pm8P+9qe3b2NjA398fN27c0HiiGlF9OnToAGtra52P8Ji0uIeFhSEvL0+j7cSJEwgICAAABAYGwtvbG5mZmejXrx+Am3/tZmVl4YUXXmj1eImIjEmSJLP4Q4Qsj0mL+1NPPYUhQ4YgMTERDz30EL777jusX78e69evB3Dzg79o0SIkJiYiODgYwcHBSExMhL29PaKjo00ZOhERkdkyaXG/++67sXXrVixduhQrV65EYGAg1qxZgylTpsjLLF68GNeuXcO8efNw+fJlDBo0CLt37270+bpERETtmckfHDN27FiMHTu2wfmSJEGlUkGlUrVeUERERG2YyW8/S0RERMZl8j13IqIWVffpfLwigdoB7rkTERFZGBZ3IiIiC8PiTkREZGFY3ImIiCwMizsREZGF4Wh5IrIc+1IA9Lj5rw739ieyVNxzJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZGA6oo5vq3qJTl2V4G08iIrPEPXciIiILw+JORERkYXhYngxX36F8HqonIjI57rkTERFZGJMWd5VKBUmSNF7e3t7yfCEEVCoVfH19YWdnh4iICBw7dsyEERMREZk/gw7Lp6Wl4aGHHoK9vX2zA7jjjjvw5ZdfytMdOnSQf05OTkZKSgrS0tIQEhKChIQEREZGIi8vD05OTs3etimszjyh1fZUZIgJIiEiIktl0J770qVL4e3tjZkzZyI7O7tZAVhbW8Pb21t+dezYEcDNvfY1a9Zg+fLlmDhxInr16oX09HRUVFQgIyOjWdskIiKyZAbtuf/222/4/PPPkZaWhhEjRiAwMBAzZsxATEyMxmF1Xfzyyy/w9fWFUqnEoEGDkJiYiK5duyI/Px9FRUWIioqSl1UqlQgPD0d2djbmzJlTb3+VlZWorKyUp0tLSwEAarUaarXagHdrXJKo1mozh7gg6v87T/1nu7qB+dormMF7MReN5KzBvDJ/+qmTP50+r62UY7P4f03tliSEEM3poLi4GO+99x7S0tLw888/47777sPMmTMxbtw4WFk1XhC++OILVFRUICQkBL///jsSEhLw888/49ixY8jLy0NYWBjOnj0LX19feZ3Zs2ejsLAQu3btqrdPlUqF+Ph4rfaMjAyjnEYgItJFRUUFoqOjUVJSAmdnZ1OHQ+1Ms4s7ABw8eBDvvPMO0tPT4ePjgytXrsDV1RWpqamIiIjQuZ/y8nJ069YNixcvxuDBgxEWFoZz587Bx8dHXmbWrFk4c+YMdu7cWW8f9e25+/n54cKFC2bxH+z1PSe12uaPCDJBJHXsS6m3WS2skHk1BJGOJ6DQ5RGaw+KMHFgb1kBOgUbyyvzpp06Odfq8tlKOS0tL4eHhweJOJmHwde6///473n33XaSmpuLUqVOYMGECtm/fjnvvvRfXrl3Ds88+i5iYGBQWFurcp4ODA+6880788ssvmDBhAgCgqKhIo7gXFxfDy8urwT6USiWUSqVWu0KhgEKh0P0NthAhddBqM4e4mnr2tUKq0a24m8N7MRc65Esrr8yffhrIcaOf11bKsVn8v6Z2y6ABdePGjYOfnx/S0tIwa9YsnD17Fu+//z7uvfdeAICdnR2efvppnDlzRq9+Kysrcfz4cfj4+CAwMBDe3t7IzMyU51dVVSErKwtDhgwxJGwiIqJ2waA9d09PT2RlZSE0NLTBZXx8fJCfn99oP//4xz8wbtw4+Pv7o7i
4GAkJCSgtLUVMTAwkScKiRYuQmJiI4OBgBAcHIzExEfb29oiOjjYkbCIionbBoOIeHh6O/v37a7VXVVVh06ZNePTRRyFJEgICAhrt57fffsMjjzyCCxcuoGPHjhg8eDAOHDggr7d48WJcu3YN8+bNw+XLlzFo0CDs3r27zV7jTkRE1BoMKu4zZszAfffdB09PT432srIyzJgxA48++qhO/WzatKnR+ZIkQaVSQaVSGRImERFRu2TQOXchBCRJ0mr/7bff4OLi0uygiIiIyHB67bn369dPvgf8yJEjYW391+rV1dXIz8/HfffdZ/QgiYiISHd6Fffay9OOHDmCUaNGwdHRUZ5nY2ODLl26YNKkSUYNkIiIiPSjV3FfsWIFAKBLly74+9//Dltb2xYJioiIiAxn0IC6mJgYY8dBRERERqJzcXdzc8OJEyfg4eGB2267rd4BdbUuXbpklOCIiIhIfzoX99WrV8vXl69evbrR4k5ERESmo3Nxv/VQ/PTp01siFiKixu1JMnUERG2CzsW99rnouuATkIiIiExH5+Lu6ura5KH42pvbVFdXNzswIiIiMozOxX3Pnj0tGQeZqe8KLgHuN/8NC3Q1dTiWr77DziOWtn4cRNSm6Vzcw8PDWzIOIiIiMhKdi/vRo0fRq1cvWFlZ4ejRo40u27t372YHRkRERIbRubj37dsXRUVF8PT0RN++fSFJEoQQWsvxnDsREZFp6Vzc8/Pz0bFjR/lnIiIiMk86F/eAgIB6fyYiIiLzYtC95QEgLy8Pr732Go4fPw5JktCjRw8sXLgQ3bt3N2Z87cLqzBPyz09FhpgwEiIisgRWhqz00UcfoVevXjh06BD69OmD3r174/Dhw+jVqxc2b95s7BiJiIhIDwYV98WLF2Pp0qXYv38/UlJSkJKSguzsbCxbtgxLliwxKJCkpCRIkoRFixbJbUIIqFQq+Pr6ws7ODhERETh27JhB/VPz7T91EftPXTR1GDpZnXlCfhERtTcGFfeioiI8+uijWu1Tp05FUVGR3v3l5ORg/fr1WpfQJScnIyUlBWvXrkVOTg68vb0RGRmJsrIyQ8ImIiJqFww65x4REYF9+/YhKChIo/3bb7/FsGHD9Orr6tWrmDJlCjZs2ICEhAS5XQiBNWvWYPny5Zg4cSIAID09HV5eXsjIyMCcOXPq7a+yshKVlZXydO098dVqNdRqtV6xtQRJNH6ZoMliFPX/nVcjddD4FwDUDSx7c6bpcwxo5tnccgr8lcNGcykvbB45NQtN5EunvLZSPs3h+4baL0nUd7F6PbZt2yb/fO7cOTz33HN46KGHMHjwYADAgQMHsHnzZsTHx2Pu3Lk6BxATEwM3NzesXr0aERER6Nu3L9asWYNTp06hW7duOHz4MPr16ycvP378eLi6uiI9Pb3e/lQqFeLj47XaMzIyYG9vr3NcRETNUVFRgejoaJSUlPBhWtTqdC7uVla6HcHX5yY2mzZtwr///W/k5OTA1tZWo7hnZ2cjLCwMZ8+eha+vr7zO7NmzUVhYiF27dtXbZ3177n5+frhw4YJZ/Ad7fc/JRufPHxHU6PwWsy+l3uYDhSW45DYIbpcOwurPveGBXdwa7mdYXEtEp7db82xuOQVu7llmXg1BpOMJKKSaxvsxk5yahUZyCuiY11bKZ2lpKTw8PFjcySR0PixfU9PEF5Cezpw5gyeffBK7d++Gra1tg8vVfRJd7ZPnGqJUKqFUKrXaFQoFFAqF4QEbibjl8HZ9TBZjA1+EtQXdSlTLPzdajMwgx4Bmns0tp7dSSDVNF3czyalZ0CGnQBN5baV8msP3DbVfBg2oM4ZDhw6huLgYd911F6ytrWFtbY2srCy8+uqrsLa2hpeXFwBoDdArLi6W5xEREZE2g29iU15ejqysLJw+fRpVVVUa82JjY5tcf+TIkfjhhx802mbMmIEePXpgyZIl6Nq1K7y9vZGZmSmfc6+qqkJWVhZeeOEFQ8MmIiKyeAY
V9++//x5jxoxBRUUFysvL4ebmhgsXLsDe3h6enp46FXcnJyf06tVLo83BwQHu7u5y+6JFi5CYmIjg4GAEBwcjMTER9vb2iI6ONiRsIiKidsGg4v7UU09h3LhxWLduHVxdXXHgwAEoFApMnToVTz75pNGCW7x4Ma5du4Z58+bh8uXLGDRoEHbv3g0nJyejbYMsH2/vS0TtjUHF/ciRI3jrrbfQoUMHdOjQAZWVlejatSuSk5MRExMjX5eur71792pMS5IElUoFlUplUH9ERETtkUED6hQKhTxi3cvLC6dPnwYAuLi4yD8TERGRaRi0596vXz/k5uYiJCQEI0aMwHPPPYcLFy7g3XffxZ133mnsGNsVHkLWH3NGRKTJoD33xMRE+Pj4AACef/55uLu744knnkBxcTHWr19v1ACJiIhIPwbtuQ8YMED+uWPHjtixY4fRAiIiIqLmMfg6d+DmDWXy8vIgSRK6d++Ojh07GisuMmO3PvY1tKu7SWIwu0e57kkydQSkq/p+VyOWtn4cRC3IoMPypaWlmDZtGjp16oTw8HAMHz4cvr6+mDp1KkpKSowdIxEREenBoOL++OOP4+DBg9i+fTuuXLmCkpISbN++Hbm5uZg1a5axYyQiIiI9GHRY/vPPP8euXbswdOhQuW3UqFHYsGED7rvvPqMFR0RERPozaM/d3d0dLi4uWu0uLi647bbbmh0UERERGc6gPfdnn30WcXFx+M9//iNfEldUVIRnnnkG//rXv4waoCUwu8FfgNagInMYJEdERMahc3Hv16+fxnPUf/nlFwQEBMDf3x8AcPr0aSiVSvzxxx+YM2eO8SMlIiIinehc3CdMmNCCYRAREZGx6FzcV6xY0ZJxEBmFWZ4CISJqZc26ic2hQ4dw/PhxSJKEnj17ol+/fsaKi4iIiAxkUHEvLi7Gww8/jL1798LV1RVCCJSUlGDEiBHYtGkT71RHRERkQgZdCrdw4UKUlpbi2LFjuHTpEi5fvowff/wRpaWliI2NNXaMbcrqzBNt/tDw/lMX5RcREbU9Bu2579y5E19++SVuv/12ua1nz554/fXXERUVZbTgiIiISH8G7bnX1NRAoVBotSsUCtTU1Ojcz7p169C7d284OzvD2dkZoaGh+OKLL+T5QgioVCr4+vrCzs4OEREROHbsmCEhExERtRsGFfd77rkHTz75JM6dOye3nT17Fk899RRGjhypcz+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCTk5ORkpKCtWvXIicnB97e3oiMjERZWZkhYRMREbULBhX3tWvXoqysDF26dEG3bt0QFBSEwMBAlJWV4bXXXtO5n3HjxmHMmDEICQlBSEgI/v3vf8PR0REHDhyAEAJr1qzB8uXLMXHiRPTq1Qvp6emoqKhARkaGIWETERG1Cwadc/fz88Phw4eRmZmJn3/+GUII9OzZE/fee6/BgVRXV2Pz5s0oLy9HaGgo8vPzUVRUpHEOX6lUIjw8HNnZ2Q3eBa+yshKVlZXydGlpKQBArVZDrVYbHJ+uJFEtb69um75aNF6h+XddjdSh3sVq2xuar67TD1ohx4AZ5rRuHpqK48/ltfJX78Ktk9M2oYl86ZVXjRWNn+PW+L4haogkhBD6rHDjxg3Y2triyJEj6NWrV7MD+OGHHxAaGorr16/D0dERGRkZGDNmDLKzsxEWFoazZ8/C19dXXn727NkoLCzErl276u1PpVIhPj5eqz0jIwP29vbNjpeISBcVFRWIjo5GSUkJnJ2dTR0OtTN677lbW1sjICAA1dWG7TnV1b17dxw5cgRXrlzBxx9/jJiYGGRlZcnzb72fPXBzkF3dtlstXboUcXFx8nRpaSn8/PwQFRXVKv/BXt9zskX6nT8iyLgd7kvRmPyu4FK9i9VIHXDJbRDcLh2EVT17ywO7uGk2DIvTWqYlGJpno+exVp18NkUtrJB5NQSRjiegkJoYhNpKOW0TmsizXnm9VQvkuPaoIZE
pGPxUuKVLl+K9996Dm5tb0ys0wsbGBkFBN79wBwwYgJycHLzyyitYsmQJgJtPm6t98hxw8wY6Xl5eDfanVCqhVCq12hUKRb0j/I1NNHD4urmMHnudL776Cnfd+fUto/UF2go5BgzPc4t9BvQpJLdQSDVNF6FWymmboGOedcqrxgrGz3FrfN8QNcSg4v7qq6/i5MmT8PX1RUBAABwcHDTmHz582OCAhBCorKxEYGAgvL29kZmZKd/WtqqqCllZWXjhhRcM7p+IiMjSGVTcJ0yYAEmSoOfpei3Lli3D6NGj4efnh7KyMmzatAl79+7Fzp07IUkSFi1ahMTERAQHByM4OBiJiYmwt7dHdHR0s7ZLRERkyfQq7hUVFXjmmWfwySefQK1WY+TIkXjttdfg4eFh0MZ///13TJs2DefPn4eLiwt69+6NnTt3IjIyEgCwePFiXLt2DfPmzcPly5cxaNAg7N69G05OTgZtj4iIqD3Qq7ivWLECaWlpmDJlCuzs7JCRkYEnnngCmzdvNmjjb7/9dqPzJUmCSqWCSqUyqH8iIqL2SK/ivmXLFrz99tt4+OGHAQBTpkxBWFgYqqur0aFDywwkIyIiIv3odaeHM2fOYNiwYfL0wIEDYW1trXEbWiIiIjItvYp7dXU1bGxsNNqsra1x48YNowZFREREhtPrsLwQAtOnT9e4jvz69euYO3euxuVwW7ZsMV6EREREpBe9intMTIxW29SpU40WDFFjVmeeMHUI1Jr2JJk6AqI2S6/inpqa2lJxEBERkZEY9MhXIiIiMl8G3aGOLMf+UxeNtn5oV/fmhkNEREbAPXciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5kNPtPXeSNZoiIzACLOxERkYVhcSciIrIwLO5EREQWxqTFPSkpCXfffTecnJzg6emJCRMmIC8vT2MZIQRUKhV8fX1hZ2eHiIgIHDt2zEQRExERmT+T3n42KysL8+fPx913340bN25g+fLliIqKwk8//SQ/QjY5ORkpKSlIS0tDSEgIEhISEBkZiby8PDg5OZkyfKrH4NPrgT233IZ2xFLTBWMp6j4djTkloiaYtLjv3LlTYzo1NRWenp44dOgQhg8fDiEE1qxZg+XLl2PixIkAgPT0dHh5eSEjIwNz5swxRdhERERmzaweHFNSUgIAcHNzAwDk5+ejqKgIUVFR8jJKpRLh4eHIzs6ut7hXVlaisrJSni4tLQUAqNVqqNXqlgwfACCJ6hbp1+ixi5tnZGqkDo0uVju/qeVupRa3nO0xYtzGyG2LfQaEfme4anOk1nO9myu1/OfYLBiQG4Pz2gI5bY3vG6KGSEIIYeoggJvn1sePH4/Lly9j3759AIDs7GyEhYXh7Nmz8PX1lZedPXs2CgsLsWvXLq1+VCoV4uPjtdozMjJgb2/fcm+AiOgWFRUViI6ORklJCZydnU0dDrUzZrPnvmDBAhw9ehTffvut1jxJkjSmhRBabbWWLl2KuLg4ebq0tBR+fn6Iiopqlf9gr+852SL9zh8RZPjK+1IanPVdwaVGV62ROuCS2yC4XToIKx33nAd2cftrYlhcwwvqyRi5vTWPtf01K7e1GslxfdTCCplXQxDpeAIKqUa/bRkxp2ZNz5wCzchrC+S09qghkSmYRXFfuHAhtm3bhm+++QadO3eW2729vQEARUVF8PHxkduLi4vh5eVVb19KpRJKpVKrXaFQQKFQGDlybUKPw9f6aFbsjXzJ6VqwrUS1zstqfKkaMefGyO2teaztzyifC30LdG08Uo3+xb0VPsdmwcCcAgbktQVy2hrfN0QNMWlxF0Jg4cKF2Lp1K/bu3YvAwECN+YGBgfD29kZmZib69esHAKiqqkJWVhZeeOEFU4RMJmCWt7StO4Kd2jZekUAWxqTFff78+cjIyMCnn34KJycnFBUVAQBcXFxgZ2cHSZKwaNEiJCYmIjg4GMHBwUhMTIS9vT2io6NNGToREZH
ZMmlxX7duHQAgIiJCoz01NRXTp08HACxevBjXrl3DvHnzcPnyZQwaNAi7d+/mNe5EREQNMPlh+aZIkgSVSgWVStXyAREREVkA3lueiIjIwrC4ExERWRizuBSurTPL0dxERNRucc+diIjIwnDPnaitqe8ae16XTUS34J47ERGRhWFxJyIisjA8LN9O7D910dQhmAUOfiSi9oB77kRERBaGxZ2IiMjCsLgTERFZGBZ3IiIiC8PiTkREZGE4Wp7MEke1ExEZjnvuREREFobFnYiIyMKwuBMREVkYFnciIiILY9Li/s0332DcuHHw9fWFJEn45JNPNOYLIaBSqeDr6ws7OztERETg2LFjpgmWiIiojTDpaPny8nL06dMHM2bMwKRJk7TmJycnIyUlBWlpaQgJCUFCQgIiIyORl5cHJycnE0RsOrWjx5+KDDFxJHpqA48nvXVkfpvLLxFRPUxa3EePHo3Ro0fXO08IgTVr1mD58uWYOHEiACA9PR1eXl7IyMjAnDlz6l2vsrISlZWV8nRpaSkAQK1WQ61WG/kd3CSJ6hbptz4GvQdhhRqpg0Hbq11Pn/XVookDQjq8h9bM6a10zm9T77Gp7fy5fpO50rnDlvlsm5QBuTFaXo2Qz5b6viHShSSEEKYOAgAkScLWrVsxYcIEAMCpU6fQrVs3HD58GP369ZOXGz9+PFxdXZGenl5vPyqVCvHx8VrtGRkZsLe3b5HYiYjqqqioQHR0NEpKSuDs7GzqcKidMdub2BQVFQEAvLy8NNq9vLxQWFjY4HpLly5FXFycPF1aWgo/Pz9ERUW12H+w1/ecbJF+6zN/RJD+K+1LwXcFlwzaXo3UAZfcBsHt0kFY6bg3PbCLW+MLDItrfD5aN6e30jm/+1KatR21sELm1RBEOp6AQqppVl8AdMppm2NAjo2WVyPks/aoIZEpmG1xryVJksa0EEKr7VZKpRJKpVKrXaFQQKFQGD0+ABAGHvI2hEHvQarRuTA3xEpU69xHk1+qOryH1szprXTOrzEKMm7myijFvYU+2ybVjLw0O69GyGdLfd8Q6cJsi7u3tzeAm3vwPj4+cntxcbHW3nx7xYFgZFHqG3xpKm1gIChRY8z2OvfAwEB4e3sjMzNTbquqqkJWVhaGDBliwsiIiIjMm0n33K9evYqTJ/86t5qfn48jR47Azc0N/v7+WLRoERITExEcHIzg4GAkJibC3t4e0dHRJoyaiIjIvJm0uOfm5mLEiBHydO1AuJiYGKSlpWHx4sW4du0a5s2bh8uXL2PQoEHYvXu3WVzjzqeWNWz/qYvyz6Fd3U0YCRFR+2TS4h4REYHGrsSTJAkqlQoqlar1giIiImrjzPacOxERERnGbEfLm6s2cTjenEYdtzG8AoGILAH33ImIiCwMizsREZGF4WF5C3brqHWycHVPxfCGK0TtGvfciYiILAyLOxERkYXhYfk2pqHR+hqjvM3ot8ob2hARtT7uuRMREVkYM9rHMy9t4np2IiKienDPnYiIyMKwuBMREVkYHpa3QG3h+nZzPu0x+PT6mz/suTkAkIMCiait4Z47ERGRhWFxJyIisjA8LE+tb08SBp9u/NTBAf/ZrRRMw9rC6Y02g08qJGpV3HMnIiKyMG2iuL/xxhsIDAyEra0t7rrrLuzbt8/UIREREZktsz8s/8EHH2DRokV44403EBYWhrfeegujR4/GTz/9BH9/f1OH1+rkkdxkEm125Hx9h8X55Dgii2X2e+4pKSmYOXMmHn/8cdx+++1Ys2YN/Pz8sG7dOlOHRkREZJbMes+9qqoKhw4dwj//+U+N9qioKGRnZ9e7TmVlJSorK+XpkpISAMClS5egVqt13nbl1RIDIm55ZddvtOr2aiSBiooKlF2/AStR3ay+Ll6tkn9u6n2YMv+65vjW96MvtbBCRUUFLkpVUEg1BvfToIt1BgPWF2vdZVpSM3KljxbNq575KisrAwAIIYwbB5EOzLq4X7hwAdXV1fD
y8tJo9/LyQlFRUb3rJCUlIT4+Xqs9MDCwRWKklvKKqQNo41RGWob+ojJorbKyMri4uBg3FKImmHVxryVJksa0EEKrrdbSpUsRFxcnT9fU1ODSpUtwd3dvcB1qWGlpKfz8/HDmzBk4OzubOhyLwby2DHPKqxACZWVl8PX1NWkc1D6ZdXH38PBAhw4dtPbSi4uLtfbmaymVSiiVSo02V1fXlgqx3XB2djb5l6UlYl5bhrnklXvsZCpmPaDOxsYGd911FzIzMzXaMzMzMWTIEBNFRUREZN7Mes8dAOLi4jBt2jQMGDAAoaGhWL9+PU6fPo25c+eaOjQiIiKzZPbF/e9//zsuXryIlStX4vz58+jVqxd27NiBgIAAU4fWLiiVSqxYsULrVAc1D/PaMphXopskwes0iIiILIpZn3MnIiIi/bG4ExERWRgWdyIiIgvD4k5ERGRhWNwJAPDNN99g3Lhx8PX1hSRJ+OSTTzTmCyGgUqng6+sLOzs7RERE4NixY6YJto1ISkrC3XffDScnJ3h6emLChAnIy8vTWIZ51d+6devQu3dv+UY1oaGh+OKLL+T5zCkRizv9qby8HH369MHatWvrnZ+cnIyUlBSsXbsWOTk58Pb2RmRkpPxwDNKWlZWF+fPn48CBA8jMzMSNGzcQFRWF8vJyeRnmVX+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCZUyIAgqgOAGLr1q3ydE1NjfD29harVq2S265fvy5cXFzEm2++aYII26bi4mIBQGRlZQkhmFdjuu2228TGjRuZU6I/cc+dmpSfn4+ioiJERUXJbUqlEuHh4Q0+epe01T5+2M3NDQDzagzV1dXYtGkTysvLERoaypwS/YnFnZpU++AefR69S5qEEIiLi8PQoUPRq1cvAMxrc/zwww9wdHSEUqnE3LlzsXXrVvTs2ZM5JfqT2d9+lsyHPo/eJU0LFizA0aNH8e2332rNY1711717dxw5cgRXrlzBxx9/jJiYGGRlZcnzmVNq77jnTk3y9vYGAL0evUt/WbhwIbZt24Y9e/agc+fOcjvzajgbGxsEBQVhwIABSEpKQp8+ffDKK68wp0R/YnGnJgUGBsLb21vj0btVVVXIysrio3cbIYTAggULsGXLFnz99dcIDAzUmM+8Go8QApWVlcwp0Z94WJ4AAFevXsXJkyfl6fz8fBw5cgRubm7w9/fHokWLkJiYiODgYAQHByMxMRH29vaIjo42YdTmbf78+cjIyMCnn34KJycneW/SxcUFdnZ2kCSJeTXAsmXLMHr0aPj5+aGsrAybNm3C3r17sXPnTuaUqJYph+qT+dizZ48AoPWKiYkRQty8bGvFihXC29tbKJVKMXz4cPHDDz+YNmgzV18+AYjU1FR5GeZVf4899pgICAgQNjY2omPHjmLkyJFi9+7d8nzmlEgIPvKViIjIwvCcOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7URMKCgogSRKOHDli6lCIiHTC4k5thhAC9957L0aNGqU174033oCLiwtOnz5tgsiIiMwLizu1GZIkITU1FQcPHsRbb70lt+fn52PJkiV45ZVX4O/vb8IIiYjMA4s7tSl+fn545ZVX8I9//AP5+fkQQmDmzJkYOXIkpk+frrX8I488gocfflijTa1Ww8PDA6mpqQCAnTt3YujQoXB1dYW7uzvGjh2LX3/9tcEY0tLS4OrqqtH2ySefQJIkjbbPPvsMd911F2xtbdG1a1fEx8fjxo0b8nyVSgV/f38olUr4+voiNjZWz2wQEdWPj3ylNicmJgZbt27FjBkzMGnSJPz444/48ccf6112ypQpeOihh3D16lU4OjoCAHbt2oXy8nJMmjQJAFBeXo64uDjceeedKC8vx3PPPYcHH3wQR44cgZWVYX//7tq1C1OnTsWrr76KYcOG4ddff8Xs2bMBACtWrMBHH32E1at
XY9OmTbjjjjtQVFSE//3vfwZti4hIi2kfSkdkmN9//1107NhRWFlZiS1btjS4XFVVlfDw8BD/+c9/5LZHHnlETJ48ucF1iouLBQD5MaH5+fkCgPj++++FEEKkpqYKFxcXjXW2bt0qbv3vNGzYMJGYmKixzLvvvit8fHyEEEK8/PLLIiQkRFRVVen0fomI9MHD8tQmeXp6Yvbs2bj99tvx4IMPNricQqHA5MmT8d///hfAzb30Tz/9FFOmTJGX+fXXXxEdHY2uXbvC2dkZgYGBANCswXmHDh3CypUr4ejoKL9mzZqF8+fPo6KiApMnT8a1a9fQtWtXzJo1C1u3btU4ZE9E1Bw8LE9tlrW1Naytm/4IT5kyBeHh4SguLkZmZiZsbW0xevRoef64cePg5+eHDRs2wNfXFzU1NejVqxeqqqrq7c/KygpCCI02tVqtMV1TU4P4+HhMnDhRa31bW1v4+fkhLy8PmZmZ+PLLLzFv3jy8+OKLyMrKgkKh0OXtExE1iMWdLN6QIUPg5+eHDz74AF988QUmT54MGxsbAMDFixdx/PhxvPXWWxg2bBgA4Ntvv220v44dO6KsrAzl5eVwcHAAAK1r4Pv374+8vDwEBQU12I+dnR0eeOABPPDAA5g/fz569OiBH374Af3792/GuyUiYnGndkCSJERHR+PNN9/EiRMnsGfPHnnebbfdBnd3d6xfvx4+Pj44ffo0/vnPfzba36BBg2Bvb49ly5Zh4cKF+O6775CWlqaxzHPPPYexY8fCz88PkydPhpWVFY4ePYoffvgBCQkJSEtLQ3V1tdzXu+++Czs7OwQEBLRECoioneE5d2oXpkyZgp9++gmdOnVCWFiY3G5lZYVNmzbh0KFD6NWrF5566im8+OKLjfbl5uaG9957Dzt27MCdd96J999/HyqVSmOZUaNGYfv27cjMzMTdd9+NwYMHIyUlRS7erq6u2LBhA8LCwtC7d2989dVX+Oyzz+Du7m70905E7Y8k6p48JCIiojaNe+5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7ERGRhWFxJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZmP8HzQms27H+BcMAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABcFElEQVR4nO3dd1iTV/sH8G8CCSQs2UO2RK2guCp1FRyoqDi6bG2rtNYurVqrba1txVF81WqHVrsc1Mmvtb6v1klVUOuoUqy4QVBR9pA9EvL8/njMA4EASQgkhPtzXbk0z8o5EHLnnOec+/AYhmFACCGEEKPC13cBCCGEEKJ7FOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOB14MqVK5gxYwa6dOkCkUgEkUgEiUSCt956C5cuXVI6NjIyEjweD3w+H6mpqQ2uVVZWBmtra/B4PERERHDb7969Cx6PBx6Ph8jISJXleP3117ljmqMoh+IhFArh4+ODuXPn4tGjR5pUv0mKcn/55Zc6u2ZcXBx4PB5+++23Zo9V1LOukJAQhISEKG1r6ufamEOHDjV6jre3t9LvryVCQkKUflcikQiBgYH4+uuvIZfLdfIaTVH8vOPi4rhtERER8Pb21vhaGzduxLZt2xpsV7xPVO0jhGiHAnwL/fDDD+jXrx8uXLiAuXPn4o8//sDBgwcxb948XLt2DU8++STu3LnT4DxLS0ts3bq1wfZff/0VUqkUAoFA5etZWVlh27ZtDT7YS0tL8euvv8La2lqj8h85cgTnzp3DwYMHMWnSJKxfvx5hYWEwlgzGb7zxBs6dO9fscefOncMbb7yh0bUPHTqEpUuXqty3b98+fPbZZxpdrym+vr44d+4czp07h5iYGHTu3Bnvv/8+Fi1apLPX0MRnn32Gffv2aXxeYwHe1dUV586dw7hx43RQOkIIAJjquwDt2V9//YV3330X48aNw2+//QahUMjtGz58OGbNmoVff/0VIpGowblTpkxBdHQ0li5dCj6/9nvW5s2bMXnyZOzfv1/la06ZMgU///wzjh8/jtDQUG57TEwMampqMGnSJOzYsUPtOvTr1w8ODg4AgNDQUOTn52P79u04e/YsBg8erPKc8vJyiMVitV9Dn9zd3eHu7t7scU899ZROX7dPnz46vZ5IJFIqY1hYGLp3744NGzZgxYoVKr8QMgyDyspKle+/lurSpYtOr2dmZqbz3wEhHR214FsgKioKJiYm+OGHH5SCe13PP/883NzcGmx//fXXkZ6ejtjYWG7b7du3cebMGbz++uuNvma3bt0waNAgbNmyRWn7li1b8Mwzz8DGxkbL2rAUH7L37t0DwHYPBwQE4NSpUxg0aBDEYjFXvvv37+OVV16Bk5MTzMzM8MQTT2Dt2rUqu43lcjm++OILeHp6wtzcHP3798fx48eVjklJScFrr70GiUQCsViMzp07Izw8HElJSSrLWllZifnz58PFxQUikQjBwcFITExUOkZVF70q9bvoy8vLsWDBAvj4+MDc3Bx2dnbo378/du/eDYDtov7uu++4cxWPu3fvAlDdRf/o0SN88MEH8PX1hZmZGZycnDB27FjcvHmz2fLVJxAI0K9fP5SXlyM3N5crx+zZs/H999/jiSeegJmZGaKjowEAycnJmDp1qtLvSlH+um7evIkxY8ZALBbDwcEBb7/9NkpKShocp6qLXi6XY/369ejduzdEIhE6deqEp556ivuy6u3tjWvXriE+Pp77eSmu0VgX/ZkzZzBixAhYWVlBLBZj0KBBOHjwoNIx27ZtA4/Hw8mTJ/HOO+/AwcEB9vb2eOaZZ5CRkaF07IkTJxASEgJ7e3uIRCJ4en
ri2WefRXl5udo/e0LaC2rBa6mmpgYnT55E//794erqqvH5EokEQ4cOxZYtWzB69GgAbJD29vbGiBEjmjx3xowZmDVrFgoLC2Fra4tbt27h7NmzWLFiBfbu3atVfRRSUlIAAI6Ojty2zMxMvPLKK/jwww8RFRUFPp+P3NxcDBo0CNXV1Vi+fDm8vb3xxx9/YMGCBbhz5w42btyodN0NGzbAy8uLu2+8evVqhIWFIT4+HgMHDgQAZGRkwN7eHv/5z3/g6OiIgoICREdHIygoCImJiejWrZvSNT/55BP07dsXP//8M4qKihAZGYmQkBAkJibC19e3RT+H+fPnY/v27VixYgX69OmDsrIyXL16Ffn5+QDYLuqysjL89ttvSrcAGnsvlJSUYMiQIbh79y4++ugjBAUFobS0FKdOnUJmZia6d++ucRnv3LkDU1NT2Nractv++9//4vTp0/j888/h4uICJycnXL9+HYMGDYKnpyfWrl0LFxcXHD16FHPmzEFeXh6WLFkCAMjOzkZwcDAEAgE2btwIZ2dn7Ny5E7Nnz1arPBEREdixYwdmzJiBZcuWQSgU4p9//uG+9Ozbtw/PPfccbGxsuPeHmZlZo9eLj49HaGgoevXqhc2bN8PMzAwbN25EeHg4du/ejSlTpigd/8Ybb2DcuHHYtWsX0tPTsXDhQrzyyis4ceIEAPZLxLhx47i/u06dOuHhw4c4cuQIqqur202vFCFqY4hWsrKyGADMiy++2GCfTCZjpFIp95DL5dy+JUuWMACY3NxcZuvWrYyZmRmTn5/PyGQyxtXVlYmMjGQYhmEsLCyY6dOnc+elpaUxAJg1a9YwJSUljKWlJbNhwwaGYRhm4cKFjI+PDyOXy5lZs2Yx6vxaFeXIyspipFIpU1hYyOzYsYMRiUSMh4cHU1FRwTAMwwQHBzMAmOPHjyud//HHHzMAmAsXLihtf+eddxgej8fcunVLqdxubm7cNRmGYYqLixk7Oztm5MiRjZZRJpMx1dXVjEQiYd5//31u+8mTJxkATN++fZV+tnfv3mUEAgHzxhtvNKhnXcHBwUxwcLDSNgDMkiVLuOcBAQHMpEmTGi0bwzBN/qy9vLyUfn/Lli1jADCxsbFNXlOV4OBgxt/fn3s/ZWRkcD//559/XqkONjY2TEFBgdL5o0ePZtzd3ZmioiKl7bNnz2bMzc254z/66COGx+Mxly9fVjouNDSUAcCcPHmS2zZ9+nTGy8uLe37q1CkGALN48eIm6+Lv79/gZ88wte+TrVu3ctueeuopxsnJiSkpKeG2yWQyJiAggHF3d+d+91u3bmUAMO+++67SNVevXs0AYDIzMxmGYZjffvuNAdCgfoQYK+qibwX9+vWDQCDgHmvXrlV53PPPPw+hUIidO3fi0KFDyMrKUmvktaWlJZ5//nls2bIFMpkMv/zyC1577TW1uqLrc3FxgUAggK2tLV555RX07dsXR44cgbm5OXeMra0thg8frnTeiRMn0KNHDwwYMEBpe0REBBiG4VpNCs8884zSNa2srBAeHo5Tp06hpqYGACCTyRAVFYUePXpAKBTC1NQUQqEQycnJuHHjRoOyT506VanOXl5eGDRoEE6ePKnxz6G+AQMG4PDhw/j4448RFxeHioqKFl3v8OHD6Nq1K0aOHKnV+deuXePeT25ubli7di1efvll/PTTT0rHDR8+XKlFX1lZiePHj2Py5MkQi8WQyWTcY+zYsaisrMT58+cBACdPnoS/vz8CAwOVrjl16lS16gcAs2bN0qp+9ZWVleHChQt47rnnYGlpyW03MTHBq6++igcPHuDWrVtK50yYMEHpea9evQDU3m7q3bs3hEIh3nzzTURHR6ucxUKIMaEuei05ODhAJBJxHx517dq1C+Xl5cjMzGzwoVOXhYUFpkyZgi1btsDLywsjR46El5eXWq8/Y8YMDBkyBF988QVyc3O1npL1559/wsbGBgKBAO7u7rC3t29wjKpu5/z8fJ
XTpBTjDRRd2QouLi4NjnVxcUF1dTVKS0thY2OD+fPn47vvvsNHH32E4OBg2Nrags/n44033lAZYBu75r///ttofdX17bffwt3dHTExMVi1ahXMzc0xevRorFmzBhKJROPr5ebmwtPTU+vydOnSBXv27AGPx4O5uTl8fHxUdinX/13l5+dDJpNh/fr1WL9+vcpr5+Xlccf6+Pg02K/q51xfbm4uTExM1DpWHYWFhWAYRuV7r7H3WP33rqL7X/He6dKlC/7880+sXr0as2bNQllZGXx9fTFnzhzMnTtXJ+UmxJBQgNeSiYkJhg8fjmPHjiEzM1Ppg6hHjx4AwN17bMrrr7+On3/+GVeuXMHOnTvVfv3BgwejW7duWLZsGUJDQ+Hh4aFxHQAgMDCQG0XfGFU9A/b29sjMzGywXTGoqf41s7KyGhyblZUFoVDItdB27NiBadOmISoqSum4vLw8dOrUSeX5qrap+pKiKQsLCyxduhRLly5FdnY215oPDw/XalCco6MjHjx4oHV5FAMTm1P/d2Vra8u1ehtrXSuCur29faM/0+Y4OjqipqYGWVlZWo1JqU/x5U6T95g6hg4diqFDh6KmpgaXLl3C+vXrMW/ePDg7O+PFF19scbkJMSTURd8CixYtQk1NDd5++21IpVKtrjFw4EC8/vrrmDx5MiZPnqzRuZ9++inCw8PxwQcfaPXaLTFixAhcv34d//zzj9L2X375BTweD8OGDVPa/vvvv6OyspJ7XlJSggMHDmDo0KEwMTEBwAan+oOuDh48iIcPH6osw+7du5Xm69+7dw9nz55tkMSmpZydnREREYGXXnoJt27d4kZc128hNiUsLAy3b99ucOuitYnFYgwbNgyJiYno1asX+vfv3+Ch+EI0bNgwXLt2rUEPyK5du5p9nbCwMADApk2bmjzOzMxMrZ+XhYUFgoKC8PvvvysdL5fLsWPHDri7u6Nr167NXqcxJiYmCAoK4mYS1H8fE2IMqAXfAoMHD8Z3332H9957D3379sWbb74Jf39/ruWhGNHeXPKZzZs3a/X6r7zyCl555RWtzm2p999/H7/88gvGjRuHZcuWwcvLCwcPHsTGjRvxzjvvNPjwNTExQWhoKObPnw+5XI5Vq1ahuLhYKVHM+PHjsW3bNnTv3h29evVCQkIC1qxZ0+g89pycHEyePBkzZ85EUVERlixZAnNzc50kfwkKCsL48ePRq1cv2Nra4saNG9i+fTsGDhzIdY337NkTALBq1SqEhYXBxMQEvXr1Ujllct68eYiJicHEiRPx8ccfY8CAAaioqEB8fDzGjx/f4AuRLn3zzTcYMmQIhg4dinfeeQfe3t4oKSlBSkoKDhw4wH3pmDdvHrZs2YJx48ZhxYoV3Ch6dXoshg4dildffRUrVqxAdnY2xo8fDzMzMyQmJkIsFuO9994DwP7M9uzZg5iYGPj6+sLc3Jz7Oda3cuVKhIaGYtiwYViwYAGEQiE2btyIq1evYvfu3RqPOfn+++9x4sQJjBs3Dp6enqisrOSmm2o7NoIQQ0YBvoXefvttDBw4EN988w2++uorZGRkgMfjwd3dHYMGDcLx48cbDFAzBo6Ojjh79iwWLVqERYsWobi4GL6+vli9ejXmz5/f4PjZs2ejsrISc+bMQU5ODvz9/XHw4EGlZDrffPMNBAIBVq5cidLSUvTt2xe///47Pv30U5VliIqKwsWLF/Haa6+huLgYAwYMwJ49e3SShGX48OHYv38/vvrqK5SXl6Nz586YNm0aFi9ezB0zdepU/PXXX9i4cSOWLVsGhmGQlpamcmyClZUVzpw5g8jISPz4449YunQpbG1t8eSTT+LNN99scXmb0qNHD/zzzz9Yvnw5Pv30U+Tk5KBTp06QSCQYO3Ysd5yLiwvi4+Mxd+5cvPPOOxCLxZg8eTI2bNiAiRMnNvs627ZtQ9++fbF582Zs27YNIpEIPXr0wCeffMIds3TpUmRmZmLmzJkoKSmBl5
dXo7eygoODceLECSxZsgQRERGQy+UIDAzE/v37MX78eI1/Dr1798axY8ewZMkSZGVlwdLSEgEBAdi/fz9GjRql8fUIMXQ8hjGSnKSEEEII4dA9eEIIIcQIUYAnhBBCjBAFeEIIIcQIUYAnhBBCjBAFeEIIIcQIGf00OblcjoyMDFhZWWmVq50QQrTBMAxKSkrg5uYGPp/aUqTtGX2Az8jI0DqNKyGEtFR6enqjyZoIaU1GH+CtrKwAsH9kzWWUU5BKpTh27BhGjRoFgUDQmsVrE1Qfw2Zs9QGMr07a1Ke4uBgeHh7cZxAhbc3oA7yiW97a2lqjAC8Wi2FtbW00H05UH8NlbPUBjK9OLakP3Rok+kI3hgghhBAjRAGeEEIIMUIU4AkhhBAjZPT34AkhxJDV1NRAKpXquxiknRAIBDAxMVHrWArwhBCiBwzDICsrC48ePdJ3UUg706lTJ7i4uDQ7gJMCPCGE6IEiuDs5OUEsFtNoe9IshmFQXl6OnJwcAICrq2uTx1OAJ4SQNlZTU8MFd3t7e30Xh7QjIpEIAJCTkwMnJ6cmu+tpkB0hhDSjokK311PccxeLxbq9MOkQFO+b5sZuUAueEEJUkMuBvDwgJwdQc0yTxqhbnmhD3feNXlvwmzZtQq9evbgscwMHDsThw4e5/QzDIDIyEm5ubhCJRAgJCcG1a9f0WGJCiLGTSoGHD4ErV4D0dKCqSt8lIkQ7eg3w7u7u+M9//oNLly7h0qVLGD58OCZOnMgF8dWrV2PdunXYsGEDLl68CBcXF4SGhqKkpESfxSaEGKHyciAtDUhKArKygJoafZeIkJbRaxd9eHi40vMvvvgCmzZtwvnz59GjRw98/fXXWLx4MZ555hkAQHR0NJydnbFr1y689dZbKq9ZVVWFqjpfuYuLiwGw9yrUnWuqOM5Y5qZSfQybsdUHaF91Ki5mu+FLSxs/pqZG8/q0h7obspCQEPTu3Rtff/01AMDb2xvz5s3DvHnzWu014+LiMGzYMADAxIkT8d///rfVXqsxiu53GxubFk+hNJh78DU1Nfj1119RVlaGgQMHIi0tDVlZWRg1ahR3jJmZGYKDg3H27NlGA/zKlSuxdOnSBtuPHTum8YCW2NhYzSph4Kg+hs3Y6gMYX500qU95eXkrlqTjuXjxIiwsLNrktW7dugUnJyeNzomIiEB0dLTStqCgIJw/f557XlVVhQULFmD37t2oqKjAiBEjsHHjRqXlhDMzMxETE4MlS5a0rBIwgACflJSEgQMHorKyEpaWlti3bx969OiBs2fPAgCcnZ2Vjnd2dsa9e/cavd6iRYswf/587rliycZRo0ZptJpcbGwsQkNDjWYlLKqP4TK2+gCGWyeZjB04l5fH/l9dIpEUd+5oVh9F7yHRDUdHxzZ7LScnJ3Tq1Enj88aMGYOtW7dyz4VCodL+efPm4cCBA9izZw/s7e3xwQcfYPz48UhISOCmu7m4uMDGxqZF5VfQe4Dv1q0bLl++jEePHmHv3r2YPn064uPjuf31RwsyDNPkCEIzMzOYmZk12C4QCDT+oNHmHENG9TFsxlYfwHDqVFkJZGcDBQXs6HgA4GswAkkxil6T+mhSb0UCE33QJMlOSEgIevbsCRMTE0RHR0MoFGL58uV4+eWXMXv2bPz2229wcnLChg0bEBYWxp13/fp1LFiwAKdOnYKFhQVGjRqFr776Cg4ODgCAsrIyvPPOO/j9999hZWWFBQsWNHjt+l3069atw9atW5Gamgo7OzuEh4dj9erVsLS0BABs27YN8+bNQ0xMDObNm4f09HQMGTIEW7dubTZBTH2qynfgwAGlWwgAG39cXFxUXqOoqAibN2/G9u3bMXLkSADAjh074OHhgT///BOjR4/WqEzq0Ps8eKFQCD8/P/Tv3x8rV65EYGAgvvnmG+6HlJWVpXR8Tk5Og1Y9IYSoUlICpKQA166xrXZFcDc05eXlsLS01MtD0y8W0dHRcHBwwN9//4
333nsP77zzDp5//nkMGjQI//zzD0aPHo1XX32Vu25mZiaCg4PRu3dvXLp0CUeOHEF2djZeeOEF7poLFy7EyZMnsW/fPhw7dgxxcXFISEhoshx8Ph/ffvstrl69iujoaJw4cQIffvhhg5/rl19+ie3bt+PUqVO4f/++yi8PzVG3fHFxcXByckLXrl0xc+ZMLuMcACQkJEAqlSrddnZzc0NAQADXY61reg/w9TEMg6qqKvj4+MDFxUXpnld1dTXi4+MxaNAgPZaQEGLIGAbIzwdu3ABu3waKivRdIuMSGBiITz/9FBKJBIsWLYJIJIKDgwNmzpwJiUSCzz//HPn5+bhy5QoAdjp03759ERUVhe7du6NPnz7YsmULTp48idu3b6O0tBSbN2/Gl19+idDQUPTs2RPR0dGoaWYaw7x58zBs2DD4+Phg+PDhWL58Of7v//5P6RipVIrvv/8e/fv3R9++fTF79mwcP35co/qqW76wsDDs3LkTJ06cwNq1a3Hx4kUMHz6cG/SdlZUFoVAIW1tbpfOcnZ0bNGR1Ra9d9J988gnCwsLg4eGBkpIS7NmzB3FxcThy5Ah4PB7mzZuHqKgoSCQSSCQSREVFQSwWY+rUqfosNiHEANXUALm57Ij49jaAXSwWo7SpYfyt/Nqa6NWrF/d/ExMT2Nvbo2fPntw2RQ+rovWakJCAkydPcl3ndd25cwcVFRWorq7GwIEDue12dnbo1q1bk+U4efIkoqKicP36dRQXF0Mmk6GyshJlZWXcYDyxWIwuXbpw57i6uiq1qtVx584dtco3ZcoU7v8BAQHo378/vLy8cPDgQW4mmCrN3XZuCb0G+OzsbLz66qvIzMyEjY0NevXqhSNHjiA0NBQA8OGHH6KiogLvvvsuCgsLERQUhGPHjsHKykqfxSaEGJCqKjaoG3IXfHN4PF6bjRBvqfpjC3g8ntI2RbCSP/5lyOVyhIeHY9WqVQ2u5erqiuTkZI3LcO/ePYwdOxZvv/02li9fDjs7O5w5cwYzZsxQmp6oqqwMw2j0Wpoer+Dq6govLy+ufi4uLqiurkZhYaFSKz4nJ6fVeqX1GuA3b97c5H4ej4fIyEhERka2TYEIIe1GaSk7cI5WWzVsffv2xd69e+Ht7Q1T04Yhx8/PDwKBAOfPn4enpycAoLCwELdv30ZwcLDKa166dAkymQxr164F//Foyfrd87qiTfkAID8/H+np6dyAvn79+kEgECA2NpYbf5CZmYmrV69i9erVrVJ2g7sHTwghjWEYoLAQuHkTuHWLgnt7MGvWLBQUFOCll17C33//jdTUVBw7dgyvv/46ampqYGlpiRkzZmDhwoU4fvw4rl69ioiICC5wq9KlSxfIZDKsX78eqamp2L59O77//vtWKb865SstLcWCBQtw7tw53L17F3FxcQgPD4eDgwMmT54MgE1cM2PGDHzwwQc4fvw4EhMT8corr6Bnz57cqHpd0/s0OUIIaU7dhV8oN3z74ubmhr/++gsfffQRRo8ejaqqKnh5eWHMmDFckFyzZg1KS0sxYcIEWFlZ4YMPPkBRE6Mje/fujXXr1mHVqlVYtGgRnn76aaxcuRLTpk1rlTo0Vz4TExMkJSXhl19+waNHj+Dq6ophw4YhJiZG6ZbyV199BVNTU7zwwgtcoptt27Y1ueRrS1CAJ4QYrOrq2vvrlBte/+Li4hpsu3v3boNt9e9bSyQS/P77741e19LSEtu3b8f27du5bQsXLmzydd5//328//77StteffVV7v8RERGIiIhQ2j9p0iSt7qmrKt/Bgwe5/4tEIhw9erTZ65ibm2P9+vVYv369xmXQBgV4QojBKS9n768XFrLd8oS0JXd3d4SHh2P37t1t/tqWlpaQyWQwNzdv8bUowBNCDEZRERvYacFIog9BQUHcqHdV0/rawuXLlwFAJ932FOAJIXoll7OJaXJy2JSyhOiLSCSCn5+fWsequl2hC+q+vjoowBNC9EIqZRPT5O
ZqtvALIUQ9FOAJIW2qooJtrefn0/11QloTBXhCSJsoLmbvr9MqqoS0DQrwhJBWo1j4JTubbbkTQtoOBXhCiM4p7qlfv07z1wnRFwrwhBCdqapiW+u5uexzqRRoIuMoIaQVUYAnhLRY/YVf2uuqbvr2449t+3pvvqnZ8SEhIYiPjwcAJCYmonfv3rovlIFSrJJnY2ODR+1kEQT6bk0I0Qot/NIxzZw5E5mZmQgICFDr+Li4OEycOBGurq6wsLBA7969sXPnzgbH8Hi8Bo+bN2+2uLyqrsvj8bBmzRrumJCQkAb7X3zxRaXrZGZm4uuvv25xedoSteAJIRqpqald+KW6Wt+lIW1NLBbDxcVF7ePPnj2LXr164aOPPoKzszMOHjyIadOmwdraGuHh4UrH3rp1C9bW1txzR0fHFpc3MzNT6fnhw4cxY8YMPPvss0rbZ86ciWXLlnHPRSKR0n4XFxfY2Ni0uDxtiQI8IUQttPALqS8uLg7Dhg3DH3/8gU8++QS3bt1CYGAgfv75Z/Ts2RMA8MknnyidM2fOHBw9ehT79u1rEOCdnJzQqVMntV8/JCSE60nYsWMHTExM8M4772D58uVcl3r9LyP/+9//MGzYMPj6+ipt1/SLS3tAXfSEkCaVlwNpacDVq+x9dgrupL6FCxfiyy+/xMWLF+Hk5IQJEyZAKpU2enxRURHs7OwabO/Tpw9cXV0xYsQInDx5Uq3Xjo6OhqmpKS5cuIBvv/0WX331FX7++WeVx2ZnZ+PgwYOYMWNGg307d+6Eg4MD/P39sWDBApQYwYII1IInhKj06BEb0EtL9V0SYuiWLFmC0NBQAGzAdXd3x759+/DCCy80OPa3337DxYsX8cMPP3DbXF1d8eOPP6Jfv36oqqrC9u3bMWLECMTFxeHpp59u8rU9PDzw1VdfgcfjoVu3bkhKSsJXX32FmTNnNjg2OjoaVlZWeOaZZ5S2v/zyy/Dx8YGLiwuuXr2KRYsW4d9//0VsbKw2Pw6DQQGeEMJRLPySnc1OeSNEHQMHDuT+b2dnh27duuHGjRsNjouLi0NERAR++ukn+Pv7c9u7deuGbt26KV0vPT0dX375JZ5++mmcPn0aYWFh3P4ffvgBL7/8MgDgqaee4rrjFeeuXbsWNTU1DVZk27JlC15++eUGS7HW/TIQEBAAiUSC/v37459//kHfvn01/XEYDArwhBBIpbX312nhF6ILdYMuAMTHxyM8PBzr1q3DtGnTmj3/qaeewo4dOwAA/fv355ZRBQBnZ2eNy3P69GncunULMTExzR7bt29fCAQCJCcnU4AnhLRPFRVsa72ggBZ+Ido7f/48PD09AQCFhYW4ffs2unfvzu2Pi4vD+PHjsWrVKryp5uT7xMREuLq6Amh6Gdfz5883eC6RSBq03jdv3ox+/fohMDCw2de+du0apFIp9/rtFQV4QjogWviF6NKyZctgb28PZ2dnLF68GA4ODpg0aRIANriPGzcOc+fOxbPPPousrCwAgFAo5Abaff311/D29oa/vz+qq6uxY8cO7N27F3v37m32tdPT0zF//ny89dZb+Oeff7B+/XqsXbtW6Zji4mL8+uuvDbYDwJ07d7Bz506MHTsWDg4OuH79Oj744AP06dMHgwcPbuFPRr8owBPSQSgWfsnJoYVfDJWmmeUMxX/+8x/MnTsXycnJCAwMxP79+yEUCgEA27ZtQ3l5OVauXImVK1dy5wQHByMuLg4AUF1djQULFuDhw4cQiUTw9/fHwYMHMXbs2GZfe9q0aaioqMCAAQNgYmKC9957r0EvwZ49e8AwDF566aUG5wuFQhw/fhzffPMNSktL4eHhgXHjxmHJkiUNegHaGwrwhBg5mYzNDZ+by95rJ0TXhgwZgqtXr6rct23bNmzbtq3J8z/88EN8+OGHWr22QCDA119/jU2bNjV6zJtvvtnorQEPDw8u/a6xoQBPiJGqrGRb6/n5lBue6M7GjRvx888/49y5c/ouSp
uytLSETCZrMALfkFGAJ8TIlJSw99eLivRdEmJsdu7ciYrH93c8PT1x9uxZPZeo7ShG8benbnsK8IQYAcXCL9nZbOY5QlpD586dlZ6HhISA0eP0C8U9/LbQ2Ch+Q0YBnpB2jBZ+IYQ0Rq+56FeuXIknn3wSVlZWcHJywqRJk3Dr1i2lYyIiIhos4/fUU0/pqcSEGIbqaiA9HUhKAh48oOBOCGlIrwE+Pj4es2bNwvnz5xEbGwuZTIZRo0ahrKxM6bgxY8YgMzOTexw6dEhPJSZEv8rKgNRUduGXnBxa+IUQ0ji9dtEfOXJE6fnWrVvh5OSEhIQEpQUGzMzM1F7Gr6qqClV1kmgXP87kIZVKm1zdqC7Fceoeb+ioPoZNnfoUFbEBvd53X4Mll0uV/m3vamo0f88Zy/uTtF8GdQ++6PGw3/rLCMbFxXHrBAcHB+OLL76Ak5OTymusXLkSS5cubbD92LFjEIvFGpWnva8kVB/Vx7AZW30AICvLuOqkye+onEY7Ej3jMfocAlkHwzCYOHEiCgsLcfr0aW57TEwMLC0t4eXlhbS0NHz22WeQyWRISEiAmZlZg+uoasF7eHggLy8P1tbWapVFKpUiNjYWoaGhEAgELa+cnlF9DFv9+kil7MC5vLz22wUvl0uRlRULF5dQ8Pnt/3ckEklx545m77ni4mI4ODigqKiowWdPZWUl0tLS4OPj067mVRPDoO77x2Ba8LNnz8aVK1dw5swZpe1Tpkzh/h8QEID+/fvDy8sLBw8ebLCmL8B256sK/AKBQONgoM05hozqY9hqagTIyREoLfzC1+somZbj8wVGEeAVU581ec9p895MSND4lBbp10+z40NCQrisb4mJiejdu7fuC6UH3t7euHfvHgB2sZxOnTrpt0A6YhAfH++99x7279+PkydPwt3dvcljXV1d4eXlheTk5DYqHSGtS7Hgy82bbNY5w+hTI0S1mTNnIjMzEwEBAWodX1lZiYiICPTs2ROmpqbcIjS6UFJSgnnz5sHLywsikQiDBg3CxYsXlY7Jzs5GREQE3NzcIBaLMWbMmAbx4+LFi2otbNPe6DXAMwyD2bNn4/fff8eJEyfg4+PT7Dn5+flIT09v98v4kY6NYdgu+GvX2FHxhLQXYrEYLi4uMDVVrwO4pqYGIpEIc+bMwciRI3ValjfeeAOxsbHYvn07kpKSMGrUKIwcORIPHz4EwMaYSZMmITU1Ff/73/+QmJgILy8vjBw5Umm2lqOjY4OxX8ZArwF+1qxZ2LFjB3bt2gUrKytkZWUhKyuLS4VYWlqKBQsW4Ny5c7h79y7i4uIQHh4OBwcHTJ48WZ9FJ0QrMhmQmQlcuQLcu8fmiyekvYqLiwOPx8PBgwcRGBgIc3NzBAUFISkpiTvGwsICmzZtwsyZM9WeDQWwOVAmTZqEpUuXwsnJCdbW1njrrbdQ/TjpQ0VFBfbu3YvVq1fj6aefhp+fHyIjI+Hj48MtPJOcnIzz589j06ZNePLJJ9GtWzds3LgRpaWl2L17t25/GAZIrwF+06ZNKCoqQkhICFxdXblHTEwMADbnb1JSEiZOnIiuXbti+vTp6Nq1K86dOwcrKyt9Fp0QjVRWsgE9KQnIyGADPSHGYuHChfjyyy9x8eJFODk5YcKECTqZJnj8+HHcuHEDJ0+exO7du7Fv3z5ulpRMJkNNTU2DQWYikYgby6UYcF33GBMTEwiFwgbjvYyRXgfZNTeAXyQS4ejRo21UGkJ0jxZ+IR3BkiVLEBoaCgCIjo6Gu7s79u3bhxdeeKFF1xUKhdiyZQvEYjH8/f2xbNkyLFy4EMuXL4eVlRUGDhyI5cuX44knnoCzszN2796NCxcuQCKRAAC6d+8OLy8vLFq0CD/88AMsLCywbt06ZGVlITMzs8X1NnQGMciOEGPCMEBBAXDjBnD7NgV3YvwGDhzI/d/Ozg7dunXDjRs31Dr3/v37sLS05B5RUVHcvsDAQKX8JQMHDkRpaSnS09
MBANu3bwfDMOjcuTPMzMzw7bffYurUqdyKbwKBAHv37sXt27dhZ2cHsViMuLg4hIWFtatV4bRlMNPkCGnvamqA3Fw24xwlMSMdHY/HU+s4Nzc3bilWoGGis6au3aVLF8THx6OsrAzFxcVwdXXFlClTlAZs9+vXD5cvX0ZRURGqq6vh6OiIoKAg9O/fX7MKtUMU4AlpoaoqNqjn5QFyub5LQ0jbO3/+PDw9PQGw88hv376N7t27q3Wuqalpo0ux/vvvv6ioqIBIJOJex9LSssF0agsLC1hYWKCwsBBHjx7F6tWrG1zLxsYGADvw7tKlS1i+fLna9WuvKMAToqWyMvb++qNHNHeddGzLli2Dvb09nJ2dsXjxYjg4OCjNd79+/Tqqq6tRUFCAkpISrsXeXKKc6upqzJgxA59++inu3buHJUuWYPbs2eA/zgB19OhRMAyDbt26ISUlBQsXLkS3bt3w2muvcdf49ddf4ejoCE9PTyQlJWHu3LmYNGkSRo0apesfg8GhAE+IBhiGDejZ2e1n4RfSfmiaWc5Q/Oc//8HcuXORnJyMwMBA7N+/H0KhkNs/duxYLlMcAPTp0wdA8wOtR4wYAYlEgqeffhpVVVV48cUXERkZye0vKirCokWL8ODBA9jZ2eHZZ5/FF198oZRFMDMzE/Pnz0d2djZcXV0xbdo0fPbZZzqquWGjAE+IGuRytgs+J4ftkieE1BoyZAiuXr3a6P67d+9qfe2lS5eqXEAMAF544YVmR+rPmTMHc+bM0fr12zMaRU9IE6RS4OFDNjFNejoFd0I2btwIS0tLpWQ27Z2/vz/CwsL0XQydoxY8ISqUl7Pd8IWFdH+dEIWdO3dymUY9PT1x9uxZPZdINw4dOsQl5lF31dH2gAI8IXUUFbGBvaRE3yUhxPB07txZ6XlISEiz99G1tW3btla5ripeXl5t9lptiQI86fDkcnYVt5wcyg1PCDEeFOBJhyWTsUE9N5dywxP9kFPiBKIFdd83FOBJh1NZyXbDFxRQYhqiH0KhEHw+HxkZGXB0dIRQKFQ78xvpuBiGQXV1NXJzc8Hn85WmIqpCAZ50GMXFbIudcsMTfePz+fDx8UFmZiYyMjL0XRzSzojFYnh6enIJfxpDAZ4YNcXCL9nZwOPBv4QYBKFQCE9PT27ZU0LUYWJiAlNTU7V6fCjAE6NEC7+Q9oDH40EgEChlXiNEVyjAE6NSVcW21vPz6f46IaRjowBPjEJpae3CL4QQQijAk3aMYdhMc7TwCyGENEQBnrQ7ivFIN27Q/HVCCGkMBXjSblRXs4PmcnJqnzczS4QQQjosCvDE4NVf+IUGzxFCSPMowBOD9egRG9hLS/VdEkIIaX8owBODQgu/EEKIbrQ4wNfU1CApKQleXl6wtbXVRZlIBySVsolpaOEXQgjRDY2HKM2bNw+bN28GwAb34OBg9O3bFx4eHoiLi9N1+YiRq6gA7t4FkpKAzEwK7oQQoisaB/jffvsNgYGBAIADBw4gLS0NN2/exLx587B48WKdF5AYp+JiIDkZuH6d7ZJnGH2XiJCG5HK2VyktTd8lIURzGnfR5+XlwcXFBQBw6NAhPP/88+jatStmzJiBb7/9VucFJMaDFn4h7UF1Nfsezcxk/5VKAS8vwNlZ3yUjRDMat+CdnZ1x/fp11NTU4MiRIxg5ciQAoLy8HCYmJhpda+XKlXjyySdhZWUFJycnTJo0Cbdu3VI6hmEYREZGws3NDSKRCCEhIbh27ZqmxSZ6JJOxH5ZJSWx3PAV3YmhKS9kepVOngEOHgIsXgQcPaKEi0r5pHOBfe+01vPDCCwgICACPx0NoaCgA4MKFC+jevbtG14qPj8esWbNw/vx5xMbGQiaTYdSoUSirk3d09erVWLduHTZs2ICLFy/CxcUFoaGhKCkp0bTopI1VVQH377OBPSODPiyJ4WAYIC+PfW8eO8Y+kpLYbZRngRgLjbvoIyMjERAQgPT0dDz//PMwMzMDwK
5R+/HHH2t0rSNHjig937p1K5ycnJCQkICnn34aDMPg66+/xuLFi/HMM88AAKKjo+Hs7Ixdu3bhrbfe0rT4pA3Qwi/EEEmlyl3v1dX6LhEhrUuraXLPPfccAKCyzkTl6dOnt7gwRUVFAAA7OzsAQFpaGrKysjBq1CjuGDMzMwQHB+Ps2bMqA3xVVRWqqqq458XFxQAAqVQKqZpNSMVx6h5v6NqiPgzDBvScnNbvgpfLpUr/tnfGVh/AcOpUVsYG86wsdvxH3da5ummO5fIapKaeRX7+da7HUh3G8vlB2i+NA3xNTQ2ioqLw/fffIzs7G7dv34avry8+++wzeHt7Y8aMGVoVhGEYzJ8/H0OGDEFAQAAAICsrCwB7378uZ2dn3Lt3T+V1Vq5ciaVLlzbYfuzYMYjFYo3KFBsbq9Hxhs7Y6pOVRfUxdIZQJ3NzwNubfairvLwcly9fxsWLF5GQkIDi4mL07NkTPXr00OgahOiTxgH+iy++QHR0NFavXo2ZM2dy23v27ImvvvpK6wA/e/ZsXLlyBWfOnGmwj8fjKT1nGKbBNoVFixZh/vz53PPi4mJ4eHhg1KhRsLa2VqssUqkUsbGxCA0NhUAg0KAWhqk16lNdzd6vzM+vXd2trcjlUmRlxcLFJRR8fvv//RhbfYC2rZNMxvYcZWe3rOu9sDANyckHkZx8CPfuxSv1PohENrC3t8fIkSMhFArVup6i95AQfdE4wP/yyy/48ccfMWLECLz99tvc9l69euHmzZtaFeK9997D/v37cerUKbi7u3PbFdPxsrKy4Orqym3Pyclp0KpXMDMz48YF1CUQCDQObtqcY8h0UR9Fl+ejR7Vz1/W1ohufLzCagAgYX32A1qtTeTl7Lz0zU/uBcXJ5DR4+PIeUlD+QnHwAeXnXlfbb2Ung5xcOiSQcgwcPgJtbLIRCodp/Q8b02UHaJ40D/MOHD+Hn59dgu1wu1/ieE8MweO+997Bv3z7ExcXBx8dHab+Pjw9cXFwQGxuLPn36AACqq6sRHx+PVatWaVp00gK08AvRt4KC2qCubeO4srIIqalHkZLyB+7cOYSKinxuH49nAg+PoZBIxsPPLxz29l25fSYmdD+dtD8aB3h/f3+cPn0aXl5eStt//fVXLgira9asWdi1axf+97//wcrKirvnbmNjA5FIBB6Ph3nz5iEqKgoSiQQSiQRRUVEQi8WYOnWqpkUnGlIs/JKdzU55I6QtKbreMzPZQXLavgcLC+8gOfkAUlL+wP378ZDLa/Mhm5vbokuXMPj5jYev7xiIRLSeBjEeGgf4JUuW4NVXX8XDhw8hl8vx+++/49atW/jll1/wxx9/aHStTZs2AQBCQkKUtm/duhUREREAgA8//BAVFRV49913UVhYiKCgIBw7dgxWVlaaFp2oSSplP1jz8ig3PGlbFRW1AT0nR9uudxkePDiHlJQDSE7+A/n5N5T229l1g0QSDj+/8fDwGAw+nxbVJMZJ43d2eHg4YmJiEBUVBR6Ph88//xx9+/bFgQMHNJpCArBd9M3h8XiIjIxEZGSkpkUlGqqoYFvrBQWUG560ncLC2qCube4Etuv9CJKTDyA19TAqKgq4fXy+6eOudzao29lJNLo2jwdYWmpXLkL0SauvrqNHj8bo0aN1XRaiJ0VFbGuJBv2StiCTsb1DivvpddJpaKSgIOVxK/0A0tNPq+h6HwuJJBy+vqNhbt5Jo2vz+YCTE+DmBri4AHZ2wO3b2pWTEH2hvqkOimFq769r+wFLiLoqK9kWemYm+2VSm6mVcrkM6el/caPeCwqU162wt+/+uJUeDnf3gRp3vQsE7IIyiqBuSp+OpJ3T+C3M5/MbnYMOsIlwiOGSydjlL3NzKTc8aX23b7NBvbBQu/MrKgqRmnrk8aj3w6isrL0Qn28KT89g+PmNf9z13nB2T3PMzQFXV/bh5KS/KZ+EtAaNA/y+ffuUnkulUiQmJiI6OlplBjliOB48YO9x0mIapD
XU1LBfHBUD5Hr0AG7e1Pz9lp9/mxsgl55+GgxT22gQiezRpcvYx6PeR8Pc3Ebjclpasq10V1fA3l7j0wlpNzQO8BMnTmyw7bnnnoO/vz9iYmK0zmRHWkdJCbuSG8De96QWCtElRde7IqgrZl1o8j6rqZHiwYO/uKlsBQXKN7sdHHpwXe+dOz8FPl+zZakBwNaWDehuboCaCS0Jafd0dpcpKChIKXUt0R+GYbtEs7PZjF/UYie6VFRUez+9sFC7GRcVFQV1Rr0fQWXlI24fny+Ap2cwN+rd1tZX4+vz+WzrXNFS13AZCkKMgk4CfEVFBdavX6+UZpa0PUUXaW4uLYVJdEcur+16z8xkvzRqimEY5Off4gbIPXjwV72udwf4+Y2Fn184fH1HwcxM82a2iQk7SE5xT13NlPGEGC2NA7ytra3SIDuGYVBSUgKxWIwdO3botHBEPdXVbGtd25zchNRXVVW7dnpOjnYDMmUyGdLSTiI5mW2pFxamKO13dAyAn994SCThcHML0qrrXShkR7y7ubGD5GjkOyG1NP5z+Oqrr5QCPJ/Ph6OjI4KCgmBrS2ke25KqhV8I0VZJSe3cdG2THZWX5yM19TBSUvZj3bqDSkum8vkCeHkNexzUx6NTJ58mrtQ4sbj2frqDA5uIhhDSkMYBXpFCluiP4v56WZm+S0LaM8VaA4qgrs37ie16v4nkZDbhzMOHZ8Ewtd1IYrEj/PzGwc8vHD4+oTAz0y7FtLU1G9Dd3IBOnbS6BCEdjloB/sqVK2pfsFevXloXhjROLme74HNyaOEXoj2ptPZeena2dl3vNTXVuH//NJdF7tGjVKX9jo490bXrWEyYYIeiojkAzDV+DR6PzR6nGCRHqWIJ0ZxaAb53797g8XjN5o7n8XiU6EbHFAu/5OZql/2LkNLS2lzv+fnajdMoL8/DnTuHkZx8AGlpR1FVVZvX2MRE+LjrPRwSyXjY2HiBz5eiW7dDuHTJRO3X4/MBR0egc2f2vrq55t8LCCF1qBXg09LSWrscpJ7ycraFpe00JNJxKdIQK1rqJSXaXINBXt51btT7w4fn6nW9O8HPbxwkErbrXSjUrolN6WEJaT1q/TnVX/udtJ6iIjawa/OhTDouqZR93yiSzmgzTZLteo9HcvIfSEk5gEePlL/YOzkF1hn1/iR4PO2yJlF6WELahtbfl69fv4779++jut4nyYQJE1pcqI5GLmdHLdPCL0QTZWW1Xe/aTpEsK8vFnTuHkJLyB1JTj6K6uvabpYmJGby9h8PPLxx+fuNgY+OpdVktLNiud1dX9t46jXwnpPVpHOBTU1MxefJkJCUlKd2XV0ydo3vw6pPJau+vy2TNH086NoZhvwgqgro2y/syDIPc3GvcALmHD88DqL0HZGHhzLXSvb1HQii0aFGZu3en9LCE6IvGAX7u3Lnw8fHBn3/+CV9fX/z999/Iz8/HBx98gC+//LI1ymh0KivZ1npBASWmIU2TyWoTzmjb9S6TVeH+/bjHXe9/oKjortJ+Z+c+XFB3de2ndde7Ij2sovu9sBDo2pW64AnRF40D/Llz53DixAk4OjqCz+eDz+djyJAhWLlyJebMmYPExMTWKKdRKClhP6yLivRdEmLIystrB8jl5mrb9Z6DlJSDSEn5A2lpx1BdXcrtMzU1h7f3CG6ZVWtr7VNMN5YeVi7XfolYQohuaBzga2pqYPl4UqqDgwMyMjLQrVs3eHl54datWzovYHun6FbNydEuhzfpOG7eZIO6Nl8A2a73JG5FtocPL6Bu17ulpSsX0L29R7So653SwxLSPmj8pxkQEIArV67A19cXQUFBWL16NYRCIX788Uf4+mq+6pOxUiz8om0eb2LcZDL2/aHI9R4QANy+rVlrXSarxL17cVxQLy6+r7TfxaUvNzfdxaWv1l3vgHJ6WHt76nYnpD3QOMB/+umnKHuc03LFihUYP348hg4dCnt7e8TExOi8gO1NVRX7gU0Lv5D6Ki
tr08LWTVykSbAsLc3GnTsHHyeciYVUWptflu16H/l4mdVxsLLq3KLyUnpYQto3jQP86NGjuf/7+vri+vXrKCgoaLDKXEdTWlq78AshCo8e1QZ1bd4bDMMgJ+dfbm56RsbfSvstLd3qjHofDoGgZQufK9LDurlRelhC2juNA3x0dDSee+45WFjU3sOzs7PTaaHaC4ZhP7Rp4ReioLg1oxj1XlGh+TVkskrcvXsCKSl/PO56T1fa7+ranwvqzs59WvTFWpEeVpHzndLDEmI8NA7wCxYswLvvvovw8HC88sorGDNmDEw72Cibmho2FWh2tnbTlohxqayszSCXna3dmgEFBQVITNyC5OTDj7vea0dkmpqK4OMT+niQ3DhYWbm1qLymprWD5Jyd2XSxhBDjo3FkzszMxJEjR7B79268+OKLEIlEeP755/HKK69g0KBBrVFGg1FdXXt/nfL5dGxFRbWt9IICzc9nGAbZ2ZeRnHwAd+4cQEbGJaX9VladuQFyXl7DIRCIWlReM7PaQXKUHpaQjkHjAG9qaorx48dj/PjxKC8vx759+7Br1y4MGzYM7u7uuHPnTmuUU69o4Rcilyt3vWsz5VEqrcC9eye4Ue8lJQ+V9ru59Yef3wT4+Y2Hs3PvFo9psbCo7Xq3t6f0sIR0NC3qWxeLxRg9ejQKCwtx79493LhxQ1flMgjFxWxXPC380jFVVdUmnMnJ0S6dcElJxuOEMweQlvYnZLLam/ICgRg+PqGQSMbiueeEuHPnZcjlLesv79SpNqjb2LToUoSQdk6rAK9oue/cuRN//vknPDw88NJLL+HXX3/Vdfn0KjWVujI7muLi2lHv2vTYMAyDrKx/uGVWs7ISlPZbW3twA+S8vIbB1NQcfL4UtraHtCovjwc4ONR2v4tbNoieEGJENA7wL730Eg4cOACxWIznn38ecXFxWt97P3XqFNasWYOEhARkZmZi3759mDRpErc/IiIC0dHRSucEBQXh/PnzWr0eIfXJ5eyYCkXXuzazIaTScty9e/xx1/tBlJZm1NnLg5vbgMdz08fDyalXi7ve66aHdXFh768TQkh9Ggd4Ho+HmJgYjB49usWj58vKyhAYGIjXXnsNzz77rMpjxowZg61bt3LPhYpk14RoqbqaHVORkaF9psGSkofc3PS7d49DJqtd51cgsICPzyhIJOHo0mUsLC2dW1xmSg9LCNGUxh8Tu3bt0tmLh4WFISwsrMljzMzM4OLiorPXJB1TSUnt/fT8fG263uXIyvoHycnsMqvZ2cqLKllbe0IiCYdEEg5Pz2CYmrZ8QrlYzAb1zp0pPSwhRHMG3w6Ii4uDk5MTOnXqhODgYHzxxRdwcnJq9PiqqipUVVVxz4sfL5otlUohVbOppjhOLjeOJPKKenSk+ihWM1PMTS+tXUwNPJ56I8qrq8tw9+4J3L59ECkph1FamllnLw+dOw+ARDIOXbuOg6NjQL2ud/V/1ny+lPvX2rq2+71+etj2lPrY2N5zNTVsPdT9DNH0WEJaA49hDGPiF4/Ha3APPiYmBpaWlvDy8kJaWho+++wzyGQyJCQkwKyRG4+RkZFYunRpg+27du2CmEYgkWbk5uYiISEBFy9eRFJSEqrrZDIyNzdH79698eSTT6Jfv37oRAnaSRPKy8sxdepUFBUVwdraWt/FIR2Q2gH+wYMHcHfXft3oZguiIsDXl5mZCS8vL+zZswfPPPOMymNUteA9PDyQl5en9h+ZVCpFbGwsXFxCwee3/zRfcrkUWVnGWZ+KCgGys2sTzmjaymUYOTIyEpCcfBDJyQeRnf2v0n4bGy9IJOMgkYyDl9fTMDVt+Yg2Pp/tcldkkhMKjev3Axjfe04kkuLOnViEhoZCoGbqv+LiYjg4OFCAJ3qjdhd9QEAA1q9fj1dffbU1y9MkV1dXeHl5ITk5udFjzMzMVLbuBQKB2n+YCny+wCg+nBSMoT4Mw3a9A0B8vABFRZrXp7q6DGlpsUhJYU
e9l5Vl19nLg7v7QPj5saPeHR39lbrete0mNzVlg7mbG3tfve5bUXFNY/j91GcsdTIxYf/V5HNE088bQnRN7QAfFRWFWbNm4b///S9+/PFH2Nvbt2a5VMrPz0d6ejpcXV3b/LWJ/shktbnes7LY5/37a5aAqKjoPjc3/d69k6ipqe3lEQqt4Os7GhJJOHx9w2Bh4aiTctdND+voWBskCCGkLagd4N99912EhYVhxowZ8Pf3x48//ogJEya06MVLS0uRkpLCPU9LS8Ply5dhZ2cHOzs7REZG4tlnn4Wrqyvu3r2LTz75BA4ODpg8eXKLXpcYvvJydhpbVhY7T71uy1md0eRs1/vFx3PTDyAn54rS/k6dfB7neg+Hp+fTMDHRzfRLSg9LCDEUGo2i9/HxwYkTJ7BhwwY8++yzeOKJJxrMhf/nn3/Uvt6lS5cwbNgw7vn8+fMBANOnT8emTZuQlJSEX375BY8ePYKrqyuGDRuGmJgYWFlZaVJs0g4out4VWeQeT37QSHV1KVJTjz1eZvUgystzuH08Hh+dOw+CRDIefn7hcHB4osUJZxQoPSwhxBBpPE3u3r172Lt3L+zs7DBx4sQWJbsJCQlBU2P8jh49qvW1ieGTydhEM4oscnXGRqqtqOget3gL2/VeO+rdzMwavr5j4Oc3Hl26hEEsdtBJuSk9LCGkPdAoOv/000/44IMPMHLkSFy9ehWOjrq5V0k6joqK2lZ6bq7mg9bk8hpkZJzHjRvbcfr0p8jJuaq039a2CzdAztNzqM663k1M2AxyikFylB6WEGLo1A7wY8aMwd9//40NGzZg2rRprVkmYmTqdr0XFWl+flVVCdLSjj1eO/0QystzuX08Hh/u7oO5XO/29t111vWuSA/r6sqOgKf0sISQ9kTtj6yamhpcuXKlVefCE+MgkymvnV5Z2fw59T16lMbler93L04pI5qZmQ2efLIn7O3fgK9vOEQiO52VXSSq7Xp3cKD0sISQ9kvtAB8bG9ua5SDtXGWl8trpNTWanS+X1+Dhw/PcVLa8vGtK+21t/R630sPh5RWEoKBYXLo0tsXrpwOAlRUb0N3cAFvbFl+OEEIMAnU6Eq09elQb1BXJZzRRVVWM1NSjj0e9H0JFRR63j8czgYfHEG7tdHv7btw+Re72lrCzY1vqnTsDlpYtvhwhhBgcCvBEbTU1bNe7IuFMebnm1ygsTOVGvd+/H6/U9W5u3gm+vmGQSMbD13eMTrve+Xy2y13RUjdv+WJvhBBi0CjAkyYput4Vq7Jp1/V+jgvqeXnXlfbb2XXlBsi5uw+GiYnu0ns2lR6WEEKMHQV40kBRUe0AucJCzddOr6wsetz1zo56r6go4PaxXe9D64x676rTslN6WEIIYVGAJ5DLa7veMzO163ovKEjhBsilp5+CXC7j9pmb26JLlzD4+YWjS5cxMDfvpLvCg9LDEkKIKhTgO6iqKuWud5ms+XPqkstlePDgLBfU8/NvKu23t+9ep+t9EPh83b/Vunen9LCEENIYCvAdSGlpbSu9oECbrvdHuHPnyOOu98OorKwdOs/nm8LD42kuqNvZ+em07Dxe7RrqLi7sCP6uXWmeOiGENIYCvBGTy4H8fDag29kBJ05onhq2oCAZyckHHne9nwbD1I6yE4ns0KXLWPj5hcPXd5TOu975fHaQnKsr+1Ckh5XL2QBPCCGkcRTgjUx1NdvlnpnJ/iuVsoHSTs0ZZ3K5DOnpfyElhQ3qBQW3lfbb2z8BiYRdZrVz56d03vUuENQGdEoPSwgh2qOPTyNQWlqb672gQPNWekVFIVJTjyA5+QBSUw+jsvIRt4/PN4WnZ/DjtdPHw9a2i24LD0oPSwghrYECfDvEMLVd71lZQEmJ5tfIz7/FzU1PTz9Tr+vdHl26jIVEEg4fn1EwN9f9KDYrq9qgrm7vAiGEEPVRgG8npFLlrvfq6ubPqUsmk+Hu3Tjcvn0YKSl/oKAgWWm/g4M/N0CO7XrX/Q
RyRXpYNzc2wBNCCGk9FOANmGLUe1YWkJenTdd7Ae7cOYyUlP9h3bqDKK8zwZ3PF8DLK+Tx2unjYGvrq+PSU3pYQgjRJwrwBoRh2Hvoiq734mJNz2eQn3+LGyD34MFfYJjabwVisQO6dBn3uOs9FGZm1jquAaWHJYQQQ0EBXs9kstqu96wszbvea2qkSE8//fh++gEUFt5R2u/o2BMSSRgmTLBDcfFcALpvRguFta10Sg9LCCGGgQK8HpSX145616brvbw8H6mphx+Pej+Cqqrapr6JiRBeXsPg5zcefn7j0amTN/h8Kbp3P4RLl0w0fq3GiMW1QZ3SwxJCiOGhAN9GFF3vmZnadb3n5d143PX+Bx4+PFuv690Jfn7j4Oc3/nHXe+uMYLOxqc353qlTq7wEIYQQHaEA30pkMiAnp3aQXGWlZufX1FTj/v1T3FS2R49SlfY7OfXi5qa7uQ0Aj6f7yeN108O6urKLuhBCCGkfKMDrUEVFba73nBzNu97LynLrdL0fRXV17QR3tut9ODeVzcbGU8elZ/H5gJNTbVBXpIclhBDSvlCAb6FHj2q73jXNj84wDHJzryEl5Q+kpBzAgwfnANSuAGNh4fy46z0cPj4jIRRa6rLoHIGAHfHu6sr+S+lhCSGk/aOPcg3V1LBrpytGvVdUaHa+TFaF+/fjuWVWi4ruKu13du4NP7/xkEjC4erav1W63gF2Trqile7oSOlhCSHE2FCAV0NlpXLXe01N8+fUVVaWgzt3DiE5+Q+kpR1FdXUpt8/ExAze3iMeB/XxsLb20HHpa1lZ1c5Rp/SwhBBi3CjAN+H2bTawFxRodh7b9X6VGyD38OF5KHe9u0AiYaexeXuPhFDYeqPXFOlhAWDYMGqpE0JIR0EBvgk3b6o/UI7teo/jgnpR0T2l/c7OfbgBcq6u/Vqt671uelhXV3alNrkcyMholZcjhBBioPQa4E+dOoU1a9YgISEBmZmZ2LdvHyZNmsTtZxgGS5cuxY8//ojCwkIEBQXhu+++g7+/v/4KXUdpafbjrvcDSEs7Bqm0jNtnamr+uOudzfVube3eauUwMWG73jt3pvSwhBBCWHoN8GVlZQgMDMRrr72GZ599tsH+1atXY926ddi2bRu6du2KFStWIDQ0FLdu3YKVHpYjYxgGOTlXuAFyGRl/o27Xu6WlKzdAztt7BAQCcauVRSisXZnNyYnSwxJCCFGm1wAfFhaGsLAwlfsYhsHXX3+NxYsX45lnngEAREdHw9nZGbt27cJbb72l8ryqqipUVVVxz4sfp42TSqWQSqVqlUtxHJ8vhUxWibt345CcfAjJyYdQXHxf6VhX176QSMZBIhkHF5c+4CnlbFXv9dQlFtdOZ7OzU04P29StBLlcqvRve0f1MXzGVqeaGrYe6n6GaHosIa3BYO/Bp6WlISsrC6NGjeK2mZmZITg4GGfPnm00wK9cuRJLly5tsP3YsWMQi9VrURcWFuLSpUu4eDEK//77r9IXBqFQiMDAQDz55JPo378/7Ljh6FkADqtdv5aoqmJH9GsqKytW94XRI6qP4TO2OsXGql+fusszE6IPBhvgs7KyAADOzs5K252dnXHv3j1VpwAAFi1ahPnz53PPi4uL4eHhgVGjRsHauvnlUTMyMuDt7a20zcqqMySSsZBIxsHbexgEAhEAIDWVfegSjwfY2tYut6rmd5ImyeVSZGXFwsUlFHx++79BT/UxfMZWJ5FIijt3YhEaGgqBmoNcijVddIIQHTPYAK/Aq7dMGcMwDbbVZWZmBjMV+VUFAoFaf5heXl7w9/dHVVUV3Nymws9vEpydeyu9pq5WZFNoq/SwfL7AKD5sFag+hs9Y6qQY46Lu54jiWEL0yWADvIuLCwC2Je+qmMgNICcnp0GrXtfOnj2LkydP4tKlsZDLW+ePlNLDEmL4TE3ZrI+00BJpjww2rPj4+MDFxQWxsbHo06cPAKC6uhrx8fFYtWpVq762SCRqletSel
hCDA+Px/aamZs3fCha7jRejrRHeg3wpaWlSElJ4Z6npaXh8uXLsLOzg6enJ+bNm4eoqChIJBJIJBJERUVBLBZj6tSpeiy1ZqysaqezUXpYQvRHIFAdyIVC5RkphBgLvQb4S5cuYdiwYdxzxeC46dOnY9u2bfjwww9RUVGBd999l0t0c+zYMb3MgdeEYpCcmxsb4AkhbYPHqw3c9YM55YogHY1eA3xISAgYhml0P4/HQ2RkJCIjI9uuUFpQpIdVtNRbqYefEPKYQKA6kLfWAFVC2iODvQdv6BTpYRX31GnALCG6xec3bIUrnlNrnJDmUYDXAKWHJUT36rbG6wZyao0T0jIU4JshFtfeT7e3p8E4hGhDVWtcEcjpizIhrYMCfBOCg9kBc4QQ9Sha4wIBu0Rxly6ApSXb+0UIaVsU4JtgY6PvEhBiePh81aPUzc1rcztIpcCVK+wsEhqfQoh+UIAnhKgkFKoO5NQaJ6R9oABPSAemaI2rCuSUaZGQ9o0CPCEdgKI1Xj+YU2ucEONFAZ4QI1G3NV4/kFNrnJCOhwI8Ie1M/da44kGD2QghdVGAJ8QAmZg0Pm+cWuOEEHVQgCdEj8zM2DXHAcDdnZ0zTq1xQoguUIAnpJWZmKgepa5ojUulwK1b7IJFFNgJIbpCAZ4QHVEE8PqBnII2IUQfKMATogFFa1zV3HFap4AQYkgowBNSD4/X+Lxxao0TQtoLCvCkw6rfGq8bzKk1Tghp7yjAE6OmqjWueJjSu58QYsToI44YBVPTxueNU2ucENIRUYAn7QaPVztvPCMD8PConTdOrXFCCFFGH4vE4JiaNj5vnMdj543fvAnY29OgN0IIaQwFeKIXita4qulm1BonhJCWo49S0qoUrfH6gZzujRNCSOuiAE9arH5rvG4wp9Y4IYToB338ErXVb40rHkIhtcYJIcTQUIAnShprjZubs4lhCCGEtA8U4DsogUB1IKfWOCGEGAcK8EaMx6vNn56RAXh61s4bp9Y4IYQYN76+C9CUyMhI8Hg8pYeLi4u+i2VwBALAygpwdATc3QE/PyAgAOjbF+jRA/D2Zo+zswMsLCi4E0JIR2DwLXh/f3/8+eef3HOTDhqd+PzG54130B8JIYSQJhh8gDc1NdWo1V5VVYWqqirueXFxMQBAKpVCKpWqdQ3FcXK5esfrUt1743X/FQpVHy+Xs4+mKOqjbv0NHdXH8BlbnbSpj7HUnbRfPIZhGH0XojGRkZFYs2YNbGxsYGZmhqCgIERFRcHX17fJc5YuXdpg+65duyAWi1uzuIQQwikvL8fUqVNRVFQEa2trfReHdEAGHeAPHz6M8vJydO3aFdnZ2VixYgVu3ryJa9euwd7eXuU5qlrwHh4eyMvLU/uPTCqVIjY2Fi4uoeDztU92rmlrvLUo6hMaGgqBESRvp/oYPmOrkzb1KS4uhoODAwV4ojcG3UUfFhbG/b9nz54YOHAgunTpgujoaMyfP1/lOWZmZjAzM2uwXSAQaPxBw+cLmg3w9e+N133wDWwIozY/A0NG9TF8xlYnTepjTPUm7ZNBB/j6LCws0LNnTyQnJ7f5awuFqlc4a+vWOCGEEKKOdhXgq6qqcOPGDQwdOrRNXs/Lq3beuKG1xgkhhJCmGHTYWrBgAeLj45GWloYLFy7gueeeQ3FxMaZPn94mr29rC4jFFNwJIYS0Pwbdgn/w4AFeeukl5OXlwdHREU899RTOnz8PLy8vfReNEEIIMWgGHeD37Nmj7yIQQggh7RJ1PhNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQMepqcLijW0lEsG6sOqVSK8vJyFBcXG0U+aaqPYTO2+gDGVydt6qP4zDHg9byIkTP6AF9SUgIA8PDw0HNJCCEdUUlJCWxsbPRdDNIBGfRysbogl8uRkZEBKysr8Hg8tc5RLDGbnp5uFMs8Un0Mm7HVBzC+OmlTH4ZhUFJSAjc3N/Ap3z
XRA6NvwfP5fLi7u2t1rrW1tVF8OClQfQybsdUHML46aVofarkTfaKvlYQQQogRogBPCCGEGCEK8CqYmZlhyZIlMDMz03dRdILqY9iMrT6A8dXJ2OpDOgajH2RHCCGEdETUgieEEEKMEAV4QgghxAhRgCeEEEKMEAV4QgghxAh12AC/ceNG+Pj4wNzcHP369cPp06ebPD4+Ph79+vWDubk5fH198f3337dRSdWjSX1+//13hIaGwtHREdbW1hg4cCCOHj3ahqVtnqa/H4W//voLpqam6N27d+sWUEOa1qeqqgqLFy+Gl5cXzMzM0KVLF2zZsqWNSts8Teuzc+dOBAYGQiwWw9XVFa+99hry8/PbqLRNO3XqFMLDw+Hm5gYej4f//ve/zZ5j6J8HhAAAmA5oz549jEAgYH766Sfm+vXrzNy5cxkLCwvm3r17Ko9PTU1lxGIxM3fuXOb69evMTz/9xAgEAua3335r45Krpml95s6dy6xatYr5+++/mdu3bzOLFi1iBAIB888//7RxyVXTtD4Kjx49Ynx9fZlRo0YxgYGBbVNYNWhTnwkTJjBBQUFMbGwsk5aWxly4cIH566+/2rDUjdO0PqdPn2b4fD7zzTffMKmpqczp06cZf39/ZtKkSW1cctUOHTrELF68mNm7dy8DgNm3b1+Txxv65wEhCh0ywA8YMIB5++23lbZ1796d+fjjj1Ue/+GHHzLdu3dX2vbWW28xTz31VKuVUROa1keVHj16MEuXLtV10bSibX2mTJnCfPrpp8ySJUsMKsBrWp/Dhw8zNjY2TH5+flsUT2Oa1mfNmjWMr6+v0rZvv/2WcXd3b7UyakudAG/onweEKHS4Lvrq6mokJCRg1KhRSttHjRqFs2fPqjzn3LlzDY4fPXo0Ll26BKlU2mplVYc29alPLpejpKQEdnZ2rVFEjWhbn61bt+LOnTtYsmRJaxdRI9rUZ//+/ejfvz9Wr16Nzp07o2vXrliwYAEqKiraoshN0qY+gwYNwoMHD3Do0CEwDIPs7Gz89ttvGDduXFsUWecM+fOAkLqMfrGZ+vLy8lBTUwNnZ2el7c7OzsjKylJ5TlZWlsrjZTIZ8vLy4Orq2mrlbY429alv7dq1KCsrwwsvvNAaRdSINvVJTk7Gxx9/jNOnT8PU1LDe0trUJzU1FWfOnIG5uTn27duHvLw8vPvuuygoKND7fXht6jNo0CDs3LkTU6ZMQWVlJWQyGSZMmID169e3RZF1zpA/Dwipq8O14BXqLx3LMEyTy8mqOl7Vdn3RtD4Ku3fvRmRkJGJiYuDk5NRaxdOYuvWpqanB1KlTsXTpUnTt2rWtiqcxTX4/crkcPB4PO3fuxIABAzB27FisW7cO27ZtM4hWPKBZfa5fv445c+bg888/R0JCAo4cOYK0tDS8/fbbbVHUVmHonweEAB2wBe/g4AATE5MGrY2cnJwG38oVXFxcVB5vamoKe3v7ViurOrSpj0JMTAxmzJiBX3/9FSNHjmzNYqpN0/qUlJTg0qVLSExMxOzZswGwAZJhGJiamuLYsWMYPnx4m5RdFW1+P66urujcubPSUqNPPPEEGIbBgwcPIJFIWrXMTdGmPitXrsTgwYOxcOFCAECvXr1gYWGBoUOHYsWKFe2uxWvInweE1NXhWvBCoRD9+vVDbGys0vbY2FgMGjRI5TkDBw5scPyxY8fQv39/CASCViurOrSpD8C23CMiIrBr1y6DuheqaX2sra2RlJSEy5cvc4+3334b3bp1w+XLlxEUFNRWRVdJm9/P4MGDkZGRgdLSUm7b7du3wefz4e7u3qrlbY429SkvLwefr/xRY2JiAqC25dueGPLnASFK9DS4T68U03w2b97MXL9+nZk3bx5jYWHB3L17l2EYhvn444+ZV199lTteMS3m/fffZ65fv85s3rzZoKbFaFqfXbt2Maampsx3333HZGZmco9Hjx7pqwpKNK1PfYY2il7T+pSUlDDu7u7Mc889x1
y7do2Jj49nJBIJ88Ybb+irCko0rc/WrVsZU1NTZuPGjcydO3eYM2fOMP3792cGDBigryooKSkpYRITE5nExEQGALNu3TomMTGRm/bX3j4PCFHokAGeYRjmu+++Y7y8vBihUMj07duXiY+P5/ZNnz6dCQ4OVjo+Li6O6dOnDyMUChlvb29m06ZNbVzipmlSn+DgYAZAg8f06dPbvuCN0PT3U5ehBXiG0bw+N27cYEaOHMmIRCLG3d2dmT9/PlNeXt7GpW6cpvX59ttvmR49ejAikYhxdXVlXn75ZebBgwdtXGrVTp482eTfQ3v8PCCEYRiGloslhBBCjFCHuwdPCCGEdAQU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEBXu3r0LHo+Hy5cv67sohBCiFQrwpN2KiIjApEmTGmyPi4sDj8fDo0ePtL62h4cHMjMzERAQoH0BCSFEjzrccrGENKe6uhpCoRAuLi76LgohhGiNWvDE6O3duxf+/v4wMzODt7c31q5dq7Tf29sbK1asQEREBGxsbDBz5swGXfQRERHg8XgNHnFxcQCAwsJCTJs2Dba2thCLxQgLC0NycjL3Gtu2bUOnTp1w9OhRPPHEE7C0tMSYMWOQmZnZVj8GQkgHQwGeGLWEhAS88MILePHFF5GUlITIyEh89tln2LZtm9Jxa9asQUBAABISEvDZZ581uM4333yDzMxM7jF37lw4OTmhe/fuANgvAJcuXcL+/ftx7tw5MAyDsWPHQiqVctcoLy/Hl19+ie3bt+PUqVO4f/8+FixY0Kr1J4R0YHpezY4QrU2fPp0xMTFhLCwslB7m5uYMAKawsJCZOnUqExoaqnTewoULmR49enDPvby8mEmTJikdk5aWxgBgEhMTG7zu3r17GTMzM+b06dMMwzDM7du3GQDMX3/9xR2Tl5fHiEQi5v/+7/8YhmHXRAfApKSkcMd89913jLOzc4t/DoQQogq14Em7NmzYMFy+fFnp8fPPP3P7b9y4gcGDByudM3jwYCQnJ6Ompobb1r9/f7VeLzExEdOmTcN3332HIUOGcK9hamqKoKAg7jh7e3t069YNN27c4LaJxWJ06dKFe+7q6oqcnBzNKkwIIWqiQXakXbOwsICfn5/StgcPHnD/ZxgGPB5PaT/DMCqv05ysrCxMmDABM2bMwIwZM5q8nqrXFggESvt5PF6j5xJCSEtRC54YtR49euDMmTNK286ePYuuXbvCxMRE7etUVlZi4sSJ6N69O9atW9fgNWQyGS5cuMBty8/Px+3bt/HEE0+0rAKEEKIlasETo/bBBx/gySefxPLlyzFlyhScO3cOGzZswMaNGzW6zltvvYX09HQcP34cubm53HY7OztIJBJMnDgRM2fOxA8//AArKyt8/PHH6Ny5MyZOnKjrKhFCiFqoBU+MWt++ffF///d/2LNnDwICAvD5559j2bJliIiI0Og68fHxyMzMRI8ePeDq6so9zp49CwDYunUr+vXrh/Hjx2PgwIFgGAaHDh1q0C1PCCFthcfQTUBCCCHE6FALnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFC/w9tmPyLlQOVXwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -2319,7 +3766,7 @@ "print('stds.shape (N,H,K) \\t', stds.shape)\n", "\n", "distr = GMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (means, stds)\n", + "distr_args = (means, stds, weights)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -2419,47 +3866,55 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " total_count_names = [f\"-total_count-{i}\" for i in range(1, n_components + 1)]\n", + " total_count_names = [\n", + " f\"-total_count-{i}\" for i in range(1, n_components + 1)\n", + " ]\n", " probs_names = [f\"-probs-{i}\" for i in range(1, n_components + 1)]\n", - " param_names = [i for j in zip(total_count_names, probs_names) for i in j]\n", - " self.output_names = self.output_names + param_names\n", + " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", + " self.param_names = [i for j in zip(total_count_names, probs_names, weight_names) for i in j]\n", + " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", - " self.output_names.insert(0, \"\") \n", + " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 2 * n_components\n", + " self.outputsize_multiplier = 3 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " mu, alpha = torch.tensor_split(output, 2, dim=-1)\n", - " return (mu, alpha)\n", + " mu, alpha, weights = output.chunk(3, dim=-1)\n", "\n", - " def scale_decouple(self, \n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " eps: float=0.2):\n", - " \"\"\" Scale Decouple\n", + " 
return mu, alpha, weights\n", + "\n", + " def scale_decouple(\n", + " self,\n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float = 1e-6,\n", + " ):\n", + " \"\"\"Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", " # Efficient NBinomial parametrization\n", - " mu, alpha = output\n", - " mu = F.softplus(mu) + 1e-8\n", - " alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts\n", + " mu, alpha, weights = output\n", + " mu = F.softplus(mu) + eps\n", + " alpha = F.softplus(alpha) + eps # alpha = 1/total_counts\n", + " weights = F.softmax(weights, dim=-1)\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(mu.size(dim=0), 1, -1)\n", " mu *= loc\n", - " alpha /= (loc + 1.)\n", + " alpha /= loc + 1.0\n", "\n", " # mu = total_count * (probs/(1-probs))\n", " # => probs = mu / (total_count + mu)\n", " # => probs = mu / [total_count * (1 + mu * (1/total_count))]\n", " total_count = 1.0 / alpha\n", - " probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 \n", - " return (total_count, probs)\n", + " probs = (mu * alpha / (1.0 + mu * alpha))\n", + " probs = torch.clamp(probs, eps, 1 - eps)\n", + " return (total_count, probs, weights)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -2481,16 +3936,10 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " total_count, probs = distr_args\n", + " total_count, probs, weights = distr_args\n", " B, H, K = total_count.size()\n", " Q = len(self.quantiles)\n", " assert total_count.shape == probs.shape\n", - "\n", - " # Sample K ~ Mult(weights)\n", - " # shared across B, H\n", - " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", - " \n", - " weights = (1/K) * 
torch.ones_like(probs, device=probs.device)\n", " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", @@ -2533,17 +3982,15 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " total_count, probs = distr_args\n", + " total_count, probs, weights = distr_args\n", " B, H, K = total_count.size()\n", " \n", - " weights = (1/K) * torch.ones_like(probs, device=probs.device)\n", - " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", "\n", @@ -2567,7 +4014,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -2608,7 +4055,40 @@ "execution_count": null, "id": "b67e2931", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "counts.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "probs.shape (N,H,K) \t torch.Size([2, 2, 3])\n", + "samples.shape (N,H,num_samples) torch.Size([2, 2, 2000])\n", + "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", + "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABHt0lEQVR4nO3deVxU1f8/8NcFhmEVBGRTQEzUFHfTxAX8KJi7aZo7+DHTXJAwNbMSzKSw1FzSNAM/mdmn0jIzlUpNv5gLSi750SxATYkUBRSFEc7vD3/cHIdlGAZmez0fj3nonHvuOe8zA8x7zj33XkkIIUBEREQWxcrQARAREVHdYwJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJgYEeOHMHTTz8Nf39/KJVKeHl5oWvXrpg9e7ZavbCwMISFhdV6PJIkIS4uTm/tNW7cGAMHDtRbe5XZv38/JEnC/v3766S/6goLC4MkSZAkCVZWVnB2dkbTpk0xYsQIfPHFFygtLdXYp3HjxoiKiqpWP6mpqYiLi8OtW7eqtd+jfZW9nl988UW12qlMYWEh4uLiyn2PkpOTIUkSMjMz9dYfEVXMxtABWLJvv/0WgwcPRlhYGBITE+Hj44Nr167h+PHj2Lp1K95991257vvvv2/ASE1Dhw4dcPjwYbRs2dLQoVSoSZMm+OSTTwAAd+7cQUZGBr766iuMGDECPXr0wDfffAMXFxe5/vbt21GvXr1q9ZGamor4+HhERUXB1dVV6/106au6CgsLER8fDwAaCe2AAQNw+PBh+Pj41GoMRPQAEwADSkxMRGBgIPbs2QMbm3/eilGjRiExMVGtrjF/qBmaSqWCJEmoV68ennzySUOHUyl7e3uNGJ977jkkJSXh3//+N55//nl89tln8rb27dvXekx3796Fvb19nfRVmQYNGqBBgwYGjYHIkvAQgAHduHEDHh4eah/+Zays1N+aRw8BZGZmQpIkvPPOO1i2bBkCAwPh5OSErl274ueff9Zob8OGDWjWrBmUSiVatmyJLVu2ICoqCo0bN64yzuzsbEyZMgWNGjWCra0tAgMDER8fj/v372s91t27d6NDhw6wt7dHixYt8NFHH2nUOXPmDIYMGYL69evDzs4O7dq1w6ZNm9TqlE1Lf/zxx5g9ezYaNmwIpVKJixcvahwCKHuNKno87KOPPkLbtm1hZ2cHNzc3PP300zh37pxanaioKDg5OeHixYvo378/nJyc4Ofnh9mzZ6OoqEjr16I8EydORP/+/fH5558jKytLLn90Wr60tBSLFy9G8+bNYW9vD1dXV7Rp0wbvvfceACAuLg5z5swBAAQGBspjLXtNyg7JbNu2De3bt4ednZ38jbyiww337t1DbGwsvL29YW9vj9DQUJw8eVKtTkWHqB7+GcvMzJQ/4OPj4+XYyvqs6BCAvt+btWvXom3btnBycoKzszNatGiBV155RSN2InPHGQAD6tq1Kz788ENER0dj7Nix6NChAxQKRbXaWLNmDVq0aIEVK1YAAF577TX0798fGRkZ8lTy+vXrMWXKFAwfPhzLly9HXl4e4uPjtfrQys7ORufOnWFlZYXXX38djz32GA4fPozFixcjMzMTSUlJVbbxyy+/YPbs2Xj55Zfh5eWFDz/8EJMmTULTpk3Rs2dPAMD58+cREhICT09PrFy5Eu7u7ti8eTOioqLw119/Ye7cuWptzp8/H127dsW6detgZWUFT09PZGdnq9Xx8fHB4cOH1cr+/vtvjBs3Dg0bNpTLEhIS8Morr2D06NFISEjAjRs3EBcXh65du+LYsWMICgqS66pUKgwePBiTJk3C7Nmz8dNPP+GNN96Ai4sLXn/99Spfi8oMHjwYu3btwsGDBxEQEFBuncTERMTFxeHVV19Fz549oVKp8L///U8+3v/cc88hNzcXq1atwrZt2+Tp9IdnkE6cOIFz587h1Vd
fRWBgIBwdHSuN65VXXkGHDh3w4YcfIi8vD3FxcQgLC8PJkyfRpEkTrcfn4+OD3bt346mnnsKkSZPw3HPPAUCl3/r1/d5s3boV06ZNw8yZM/HOO+/AysoKFy9exK+//qr1OIjMhiCDuX79uujevbsAIAAIhUIhQkJCREJCgigoKFCrGxoaKkJDQ+XnGRkZAoBo3bq1uH//vlx+9OhRAUB8+umnQgghSkpKhLe3t+jSpYtae1lZWUKhUIiAgAC1cgBi4cKF8vMpU6YIJycnkZWVpVbvnXfeEQDE2bNnKx1jQECAsLOzU9v/7t27ws3NTUyZMkUuGzVqlFAqleLSpUtq+/fr1084ODiIW7duCSGE2LdvnwAgevbsqdFX2bZ9+/aVG8udO3dE586dhY+Pj8jMzBRCCHHz5k1hb28v+vfvr1b30qVLQqlUijFjxshlkZGRAoD473//q1a3f//+onnz5pW+DkI8eA9btWpV4fbvvvtOABBvv/22XBYQECAiIyPl5wMHDhTt2rWrtJ+lS5cKACIjI0NjW0BAgLC2thbnz58vd9vDfZW9nh06dBClpaVyeWZmplAoFOK5555TG9vDP59lIiMj1X7G/v77b42fsTJJSUlqcdfGezNjxgzh6uqq0TeRJeIhAANyd3fHwYMHcezYMbz11lsYMmQILly4gPnz56N169a4fv16lW0MGDAA1tbW8vM2bdoAgDyNfP78eWRnZ2PkyJFq+/n7+6Nbt25Vtr9z50706tULvr6+uH//vvzo168fAODAgQNVttGuXTv4+/vLz+3s7NCsWTO1qe4ff/wRvXv3hp+fn9q+UVFRKCws1PgmP3z48Cr7fVhJSQmeffZZnDt3Drt27ZK/YR8+fBh3797VmPr28/PDv/71L/zwww9q5ZIkYdCgQWplbdq0URuLroQQVdbp3LkzfvnlF0ybNg179uxBfn5+tftp06YNmjVrpnX9MWPGqB0yCQgIQEhICPbt21ftvqujNt6bzp0749atWxg9ejS+/vprrX7HiMwVEwAj0KlTJ8ybNw+ff/45rl69ihdffBGZmZkaCwHL4+7urvZcqVQCeLCwC3iwzgAAvLy8NPYtr+xRf/31F7755hsoFAq1R6tWrQBAqz+gj8ZYFmdZjGVxlrf629fXV20cZaq7Unzq1KnYvXs3vvjiC7Rr106t34ra8/X11ejXwcEBdnZ2GmO5d+9eteIpT9kHVdmYyzN//ny88847+Pnnn9GvXz+4u7ujd+/eOH78uNb9VPe18/b2Lrfs0ddG32rjvRk/fjw++ugjZGVlYfjw4fD09ESXLl2QkpJSCyMgMm5MAIyMQqHAwoULATxYFFdTZR++f/31l8a2R4+Zl8fDwwMRERE4duxYuY9JkybVOMayOK9du6ZRfvXqVTmOhz26iK8ycXFx+PDDD7FhwwZERERo9Augwr4f7bc27dixA5IkyesiymNjY4PY2FicOHECubm5+PTTT3H58mX07dsXhYWFWvVTndcOKP/nJDs7Wy2xs7OzK3dNSU2+YdfWezNx4kSkpqYiLy8P3377LYQQGDhwoF5mcYhMCRMAAyrvDxsAeYVzZd8EtdW8eXN4e3vjv//9r1r5pUuXkJqaWuX+AwcOxJkzZ/DYY4+hU6dOGg99xAgAvXv3xo8//ih/4Jf5z3/+AwcHB51P79u4cSPi4+OxaNGicle4d+3aFfb29ti8ebNa+ZUrV+TDEnUhKSkJ3333HUaPHq12uKQyrq6ueOaZZzB9+nTk5ubKq+cfnQWqqU8//VTt8ERWVhZSU1PVVv03btwYFy5cUEsCbty4ofEzVp3Yavu9cXR0RL9+/bBgwQIUFxfj7NmzNWqPyNTwLAAD6tu3Lxo1aoRBgwahRYsWKC0tRXp6Ot599104OTlh1qxZNe7DysoK8fHxmDJlCp555hn8+9//xq1btxAfHw8fHx+N0w0ftWjRIqSkpCAkJATR0dFo3rw57t27h8zMTOzatQv
r1q1Do0aNahznwoUL5fUGr7/+Otzc3PDJJ5/g22+/RWJiotrFcbR1+PBhTJ06Fd26dUN4eLjG6ZFPPvkkXF1d8dprr+GVV17BhAkTMHr0aNy4cQPx8fGws7OTZ2P05e7du3Icd+/exR9//IGvvvoKO3fuRGhoKNatW1fp/oMGDUJwcDA6deqEBg0aICsrCytWrEBAQIC8Ir5169YAgPfeew+RkZFQKBRo3rw5nJ2ddYo5JycHTz/9NCZPnoy8vDwsXLgQdnZ2mD9/vlxn/Pjx+OCDDzBu3DhMnjwZN27cQGJiosaFhZydnREQEICvv/4avXv3hpubGzw8PMo9HbU23pvJkyfD3t4e3bp1g4+PD7Kzs5GQkAAXFxc88cQT1W6PyKQZeBGiRfvss8/EmDFjRFBQkHBychIKhUL4+/uL8ePHi19//VWtbkVnASxdulSjXZSzynr9+vWiadOmwtbWVjRr1kx89NFHYsiQIaJ9+/ZV7vv333+L6OhoERgYKBQKhXBzcxMdO3YUCxYsELdv3650jAEBAWLAgAEa5eWtGj99+rQYNGiQcHFxEba2tqJt27YiKSlJrU7ZyvTPP/9co81HzwIoW1Ve0eNhH374oWjTpo2wtbUVLi4uYsiQIRpnOERGRgpHR0eNfhcuXKjRXnlCQ0PV+nd0dBRNmjQRzzzzjPj8889FSUmJxj6Prsx/9913RUhIiPDw8BC2trbC399fTJo0ST6rocz8+fOFr6+vsLKyUntNKno/yuur7PX8+OOPRXR0tGjQoIFQKpWiR48e4vjx4xr7b9q0STz++OPCzs5OtGzZUnz22WcaZwEIIcT3338v2rdvL5RKpQAg9/noWQBl9PnebNq0SfTq1Ut4eXkJW1tb4evrK0aOHClOnTpV7mtCZM4kIbRYekxm59atW2jWrBmGDh2K9evXGzocIiKqYzwEYAGys7Px5ptvolevXnB3d0dWVhaWL1+OgoICvRxmICIi08MEwAIolUpkZmZi2rRpyM3NlRfVrVu3Tj6dj4iILAsPARAREVkgngZIRERkgZgAEBERWSCuAcCDW6xevXoVzs7O1b5KGhGRroQQKCgogK+vb5XX5CDSNyYAeHBZ0UdvQkNEVFcuX76slwtqEVUHEwBAvkLa5cuXNa5cVhGVSoW9e/ciIiICCoWiNsOrcxybaTLXsZnruAAgPz8ffn5+Ol+lkagmmADgn5uj1KtXr1oJgIODA+rVq2d2f5Q4NtNkrmMz13E9jIceyRB40ImIiMgCMQEgIiKyQEwAiIiILBDXABARGbHS0lIUFxcbOgwyAQqFAtbW1lrXZwJARGSkiouLkZGRgdLSUkOHQibC1dUV3t7eWi0sZQJARGSEhBC4du0arK2t4efnxwsFUaWEECgsLEROTg4AwMfHp8p9mAAQERmh+/fvo7CwEL6+vnBwcDB0OGQC7O3tAQA5OTnw9PSs8nAAU0oiIiNUUlICALC1tTVwJGRKypJFlUpVZV3OAJiA5SkXyi1/MbxZHUdCRHWNFwmi6qjOzwtnAIiIiCwQEwAiIiILxASAiIjIAnENgAkrb20A1wUQmbeK1gTVFmP8mxIWFoZ27dphxYoVhg7FpHEGgIiI9CoqKgpDhw7VKN+/fz8kScKtW7dq1P62bdvwxhtv1KgNU/DTTz9h0KBB8PX1hSRJ+Oqrr/TaPmcAjExdZ/dERKaiuLgYtra2cHNzM3QoNRIWFoaoqChERUVVWu/OnTto27YtJk6ciOHDh+s9Ds4AEBGRQRQVFSE6Ohqenp6ws7ND9+7dcezYMXl7WFgYZsyYgdjYWHh4eCA8PFwuj4mJAQBkZmZCkiSNR1hYmFZ9lLUXHR2NuXPnws3NDd7e3oiLi6s09sGDB5fbryRJ2LFjh15en379+mHx4sUYNmyYXtp7FBMAIiIyiLlz5+LLL7/Epk2bcOLECTRt2hR9+/ZFbm6uXGfTpk2wsbHB//3f/+GDDz7QaMPPzw/Xrl2
THydPnoS7uzt69uypdR9l/Tg6OuLIkSNITEzEokWLkJKSUmHsSUlJuHbtGn777TcAwK5du+QY+vfvr4+Xp9YZNAGozvGNKVOmQJIkjUUfRUVFmDlzJjw8PODo6IjBgwfjypUrtRs4ERFVaufOnXByclJ79OvXT95+584drF27FkuXLkW/fv3QsmVLbNiwAfb29ti4caNcr2nTpkhMTETz5s3RokULjX6sra3h7e0Nb29vuLq6YurUqejatSvi4uK07gMA2rRpg4ULFyIoKAgTJkxAp06d8MMPP1Q4Pnd3d3h7e+Pvv/+GJEno3r27HIeNjWkcXTdoAlB2fGP16tWV1vvqq69w5MgR+Pr6amyLiYnB9u3bsXXrVhw6dAi3b9/GwIED5ctoEhFR3evVqxfS09PVHh9++KG8/ffff4dKpUK3bt3kMoVCgc6dO+PcuXNyWadOnbTuc9KkSSgoKMCWLVtgZWWldR/AgwTgYT4+PvKNdSpz6tQpNG7cGM7OzhXWWbJkiVoidPDgQUydOlWjrK4ZNE3p16+fWkZYnj///BMzZszAnj17MGDAALVteXl52LhxIz7++GP06dMHALB582b4+fnh+++/R9++fWstdnO1POUCJFGCQABr9l2EkKyN8jQgIjJujo6OaNq0qVrZw7OzQggAmpeuFUKolTk6OmrV3+LFi7F7924cPXpU/jDWtg/gQWLwMEmStLoN86lTpzSSh0dNnToVI0eOlJ+PHTsWw4cPVzu237Bhwyr70jejnqcoLS3F+PHjMWfOHLRq1Upje1paGlQqFSIiIuQyX19fBAcHIzU1tcIEoKioCEVFRfLz/Px8AA9unqDNDRTK6j78r75IomYzFzWNRxIlcgxl/+p7jIZUW++bMTDXsZnruADzHJO2mjZtCltbWxw6dAhjxowB8OD1OH78uLzAT1tffvklFi1ahO+++w6PPfZYrfRRkczMTAQHB1dax83NTe3MBXt7e3h6emokSHXNqBOAt99+GzY2NoiOji53e3Z2NmxtbVG/fn21ci8vL2RnZ1fYbkJCAuLj4zXK9+7dW+3bbla2SEQXgTXcf9eump1G+HD/je/9rpc2jZG+3zdjYq5jM8dxFRYWGjoEg3F0dMQLL7yAOXPmwM3NDf7+/khMTERhYSEmTZqkdTtnzpzBhAkTMG/ePLRq1Ur+2192uqA++qhMaWkpsrKycOXKFTRs2FCvN2+6ffs2Ll68KD/PyMhAenq6PJaaMtoEIC0tDe+99x5OnDhR7Re0vOmdh82fPx+xsbHy8/z8fPj5+SEiIgL16tXTqg+VSoWUlBSEh4drTB3VxJp9F6uuVInpvWqWUa7ZdxGSKEHje78j0+4xCMm6xm0ak9p634yBuY7NXMcF/DP7WB3mdEjurbfekmd6CwoK0KlTJ+zZs0fjS11ljh8/jsLCQixevBiLFy+Wy0NDQ7F//3699FGZ6OhoPP/882jRogXy8/P1mgAcP34cvXr1kp+XfW5FRkYiOTm5xu0bbQJw8OBB5OTkqGU5JSUlmD17NlasWIHMzEx4e3ujuLgYN2/eVHszc3JyEBISUmHbSqUSSqVSo1yhUFT7D4wu+1RGSNY12r+msTzcv5CsISRrs/ujC+j/fTMm5jo2cxyXuY2nTEUfTmFhYfJxeQCws7PDypUrsXLlynLr79+/v8ryqi6oU1UfFfWj7VX3+vXrh8uXL2tVt7L+yvPo66VvRnsdgPHjx+PUqVNqK0h9fX0xZ84c7NmzBwDQsWNHKBQKtanBa9eu4cyZM5UmAERERJbOoDMAVR3fcHd3V6uvUCjg7e2N5s2bAwBcXFwwadIkzJ49G+7u7nBzc8NLL72E1q1by2cFEBERkSaDJgD6OL6xfPly2NjYYOTIkbh79y569+6N5ORkWFvXbCqdiIjInBk0Aaju8Y3MzEyNMjs7O6xatQqrVq3SY2RERETmzWjXABAREVHtYQJARERkgZgAEBERWSCjvQ4A6WZ
5iuZV+8zpwiFERKQfnAEgIiKyQEwAiIiILBAPARARmZJ9CXXbX6/5ddufFsLCwtCuXTusWLHC0KGYNM4AEBGRXkVFRWHo0KEa5fv374ckSbh161aN2t+2bRveeOONGrVhChISEvDEE0/A2dkZnp6eGDp0KM6fP6+39pkAEBGRSSguLgYAuLm5wdnZ2cDR6C4sLEyrq90eOHAA06dPx88//4yUlBTcv38fERERuHPnjl7iYAJAREQGUVRUhOjoaHh6esLOzg7du3fHsWPH5O1hYWGYMWMGYmNj4eHhgfDwcLk8JiYGwIMrxEqSpPEICwvTqo+y9qKjozF37ly4ubnB29sbcXFxlcY+ePDgcvuVJAk7duzQy+uze/duREVFoVWrVmjbti2SkpJw6dIlpKWl6aV9JgBERGQQc+fOxZdffolNmzbhxIkTaNq0Kfr27Yvc3Fy5zqZNm2BjY4P/+7//wwcffKDRhp+fH65duyY/Tp48CXd3d/Ts2VPrPsr6cXR0xJEjR5CYmIhFixap3Wn2UUlJSbh27Rp+++03AMCuXbvkGPr376+Pl0dDXl4egAczIPrARYAWrOyaAU9eWi+XPQmgVLLGdfcQPHElGVaiBNjnXu5CIF5zgIgqsnPnTjg5OamVlZSUyP+/c+cO1q5di+TkZPTr1w8AsGHDBqSkpGDjxo2YM2cOAKBp06ZITEyssB9ra2t4e3sDAO7du4ehQ4eia9euiIuL07oPAGjTpg0WLlwIAAgKCsLq1avxww8/yLMOjyq7W+3hw4chSRK6d+9eq4clhBCIjY1F9+7dERwcrJc2OQNARER616tXL6Snp6s9PvzwQ3n777//DpVKhW7dusllCoUCnTt3xrlz5+SyTp06ad3npEmTUFBQgC1btsDKykrrPoAHCcDDfHx8kJOTU2Wfp06dQuPGjSv98F+yZAmcnJzkx8GDBzF16lSNssrMmDEDp06dwqefflplTNriDAAREemdo6MjmjZtqlZ25coV+f9ld4KVJEmtjhBCrczR0VGr/hYvXozdu3fj6NGj8oextn0ADxKDh0mShNLS0ir7PXXqlEby8KipU6di5MiR8vOxY8di+PDhGDZsmFzWsGHDCvefOXMmduzYgZ9++gmNGjWqMiZtcQaAiIjqXNOmTWFra4tDhw7JZSqVCsePH8fjjz9erba+/PJLLFq0CP/973/x2GOP1UofFcnMzETz5s0rrePm5oamTZvKD3t7e3h6emqUPUoIgRkzZmDbtm348ccfERgYqJeYy3AGgIiI6pyjoyNeeOEFzJkzB25ubvD390diYiIKCwsxadIkrds5c+YMJkyYgHnz5qFVq1bIzs4GANja2sLNzU0vfVSmtLQUWVlZuHLlCho2bKgxs1AT06dPx5YtW/D111/D2dlZHpuLi0u5CUN1MQEgIjIlRnhlPl299dZbKC0txfjx41FQUIBOnTphz549qF+/vtZtHD9+HIWFhVi8eDEWL14sl4eGhmL//v166aMy0dHReP7559GiRQvk5+frNQFYu3YtAMinNJZJSkpCVFRUjdtnAkBERHpV0UVuwsLC5OPyAGBnZ4eVK1di5cqV5dbfv39/leVRUVGVfhhW1UdF/Xz11VcV1n9Yv379cPnyZa3qVtZfeR5+rWqDQdcA/PTTTxg0aBB8fX0hSZLaC65SqTBv3jy0bt0ajo6O8PX1xYQJE3D16lW1NoqKijBz5kx4eHjA0dERgwcPVltoQkRERJoMmgDcuXMHbdu2xerVqzW2FRYW4sSJE3jttddw4sQJbNu2DRcuXMDgwYPV6sXExGD79u3YunUrDh06hNu3b2PgwIFq55sSERGROoMeAujXr598cYZHubi4aFyFadWqVejcuTMuXboEf39/5OXlYePGjfj444/Rp08fAMDmzZvh5+eH77//Hn379i237aKiIhQVFcnP8/PzATyYdVCpVFrFXlZP2/rakoT+E5eKYizrq1SyVisve172r0p
YAeW0UV6sK/ae0yib3qupRpmh1Nb7ZgzMdWzmOi7APMdEpsOk1gDk5eVBkiS4uroCANLS0qBSqRARESHX8fX1RXBwMFJTUytMABISEhAfH69RvnfvXjg4OFQrpsouFakL/Z7k8cCuXZpX7Hu4r+vuIeVuz3Xr8mD/AgC7dlW4v679G5K+3zdjYq5jM8dxFRYWGjoEsmAmkwDcu3cPL7/8MsaMGYN69eoBALKzs2Fra6uxmtPLy0s+XaI88+fPR2xsrPw8Pz8ffn5+iIiIkNuuikqlQkpKCsLDwzUuIFETa/Zd1FtbZSr6Bl7W1xNXktXKSyVr5Lp1gVvuEViJEnRu7Ab0iK1wf137N4Taet+MgbmOzVzHBfwz+1iZ2l4IRuZFm4sXlTGJBEClUmHUqFEoLS3F+++/X2X98q7y9DClUgmlUqlRrlAoqv0HRpd9KiMemY7Xh4riK+vLqoLDDlaiBFaiBMcz/sbPJRmaFbSM1Rj/aOv7fTMm5jo2cxxXZeNRKBSQJAl///03GjRooNfTy8j8CCFQXFyMv//+G1ZWVrC1ta1yH6NPAFQqFUaOHImMjAz8+OOPat/Qvb29UVxcjJs3b6rNAuTk5CAkpPxpbSIiU2BtbY1GjRrhypUryMzMNHQ4ZCIcHBzg7+8PK6uq1/gbdQJQ9uH/22+/Yd++ffLdl8p07NgRCoUCKSkp8nWWr127hjNnzlR69ygiIlPg5OSEoKAgLhYkrVhbW8PGxkbr2SKDJgC3b9/GxYv/HEfOyMhAeno63Nzc4Ovri2eeeQYnTpzAzp07UVJSIh/Xd3Nzg62tLVxcXDBp0iTMnj0b7u7ucHNzw0svvYTWrVvLZwUQ1bl9CVXXMaOruVHtsra2hrW1/g8NEhk0ATh+/Dh69eolPy9bmBcZGYm4uDjs2LEDANCuXTu1/fbt2ydfGnH58uWwsbHByJEjcffuXfTu3RvJyckW8wvz5KX1WtR6p9bjICIi02LQBODRy0I+SpvVr3Z2dli1ahVWrVqlz9CIiIjMGm8HTEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGM+kJApCcVnJf+5KUbWjdR1emGP/s/X62QylXV+fM8d56ISG84A0BERGSBOANgAQ7/of03fSIisgycASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJABr0S4E8//YSlS5ciLS0N165dw/bt2zF06FB5uxAC8fHxWL9+PW7evIkuXbpgzZo1aNWqlVynqKgIL730Ej799FPcvXsXvXv3xvvvv49GjRoZYETVszzlgqFDMD9V3U+g+0t1EwcRkZEzaAJw584dtG3bFhMnTsTw4cM1ticmJmLZsmVITk5Gs2bNsHjxYoSHh+P8+fNwdnYGAMTExOCbb77B1q1b4e7ujtmzZ2PgwIFIS0uDtbV1XQ+JalNVH+7mRJux8uZIRFQDOh0CSE5ORmFhYY0779evHxYvXoxhw4ZpbBNCYMWKFViwYAGGDRuG4OBgbNq0CYWFhdiyZQsAIC8vDxs3bsS7776LPn36oH379ti8eTNOnz6N77//vsbxERERmSudZgDmz5+P6OhojBgxApMmTUJISIi+40JGRgays7MREREhlymVSoSGhiI1NRVTpkxBWloaVCqVWh1fX18EBwcjNTUVffv2LbftoqIiFBUVyc/z8/MBACqVCiqVSqv4yuppW788kijRed8ypZL+ZznK2qxO25WNRevXSNT+khR9vG9V0mYcVfWvQxt1MjYDMNdxAeY5JjIdOiUAV65cwbfffovk5GT06tULgYGBmDhxIiIjI+Ht7a2XwLKzswEAXl5eauVeXl7IysqS69ja2qJ+/foadcr2L09CQgLi4+M1yvfu3QsHB4dqxZmSklKt+g8L1HnPf1x313/yVSbXrYvWdQPvVryeYdcubdc6tNC6P539//erJu9b1bQ
Yx65dtdZG7Y7NcMxxXPqYSSXSlU4JgLW1NQYPHozBgwcjJycHmzdvRnJyMl577TU89dRTmDRpEgYNGgQrq5p/o5MkSe25EEKj7FFV1Zk/fz5iY2Pl5/n5+fDz80NERATq1aunVVwqlQopKSkIDw+HQqHQap9Hrdl3Uaf9HvbEleQat/GoUskauW5d4JZ7BFZazlIcaxRV4bbpvZpq1/HBZdrVqwHVkzNr/L5VSZtx9IitfLsObejjZ9IYmeu4gH9mH4kMocaLAD09PdGtWzecP38eFy5cwOnTpxEVFQVXV1ckJSUhLCxMp3bLZhKys7Ph4+Mjl+fk5MizAt7e3iguLsbNmzfVZgFycnIqPSyhVCqhVCo1yhUKRbX/wOiyTxmhh+l7bT+gdW1b2/YrG4vWr49Uql29mvj/sdTkfauSNuOoqu8atFGrYzMgcxyXuY2HTIvOX9H/+usvvPPOO2jVqhXCwsKQn5+PnTt3IiMjA1evXsWwYcMQGRmpc2CBgYHw9vZWm/YrLi7GgQMH5A/3jh07QqFQqNW5du0azpw5UyvrEoiIiMyFTjMAgwYNwp49e9CsWTNMnjwZEyZMgJubm7zd3t4es2fPxvLlyytt5/bt27h48Z9p8IyMDKSnp8PNzQ3+/v6IiYnBkiVLEBQUhKCgICxZsgQODg4YM2YMAMDFxQWTJk3C7Nmz4e7uDjc3N7z00kto3bo1+vTpo8vQiIiILIJOCYCnpycOHDiArl27VljHx8cHGRkZlbZz/Phx9OrVS35edlw+MjISycnJmDt3Lu7evYtp06bJFwLau3evfA0AAFi+fDlsbGwwcuRI+UJAycnJvAYAERFRJXRKAEJDQ9GhQweN8uLiYmzduhUTJkyAJEkICAiotJ2wsDAIISrcLkkS4uLiEBcXV2EdOzs7rFq1CqtWrdI6fiIiIkun0xqAiRMnIi8vT6O8oKAAEydOrHFQREREVLt0SgAqOs3uypUrcHFxqXFQREREVLuqdQigffv2kCQJkiShd+/esLH5Z/eSkhJkZGTgqaee0nuQREREpF/VSgDK7tSXnp6Ovn37wsnJSd5ma2uLxo0bl3tTHyKjcXAZgBYP/q3oXHtTucnOozcMElZQG5upjIOIDKJaCcDChQsBAI0bN8azzz4LOzu7WgmKiIiIapdOZwHU5AI/REREZHhaJwBubm64cOECPDw8UL9+/UqvtZ+bm6uX4IiIiKh2aJ0ALF++XL4Az/Lly6u8IQ+RmkePVwM4/McNjbKuTdzrIhoiIoundQLw8LR/VFRUbcRCREREdUTrBKA6t63U9pa6REREZBhaJwCurq5VTvuXXSCopKT2blFLxunJS+sr3riP0/pERMZG6wRg3759tRkHERER1SGtE4DQ0NDajIOIiIjqkNYJwKlTpxAcHAwrKyucOnWq0rpt2rSpcWBEBlPOGQtqeIU9IjIDWicA7dq1Q3Z2Njw9PdGuXTtIklTurXy5BoCIiMj4aZ0AZGRkoEGDBvL/iYiIyHRpnQAEBASU+38ii1PVIQJjwUMZRFQJne4FAADnz5/HqlWrcO7cOUiShBYtWmDmzJlo3ry5PuMjIiKiWmCly05ffPEFgoODkZaWhrZt26JNmzY4ceIEgoOD8fnnn+stuPv37+PVV19FYGAg7O3t0aRJEyxatAilpf/cxlUIgbi4OPj6+sLe3h5hYWE4e/as3mIgIiIyRzrNAMydOxfz58/HokWL1MoXLlyIefPmYcSIEXoJ7u2338a6deuwadMmtGrVCsePH8fEiRPh4uKCWbNmAQASExOxbNkyJCcno1mzZli8eDHCw8Nx/vx5+d4FREREpE6nBCA7OxsTJkzQKB83bhyWLl1a46DKHD58GEOGDMGAAQMAAI0bN8ann36K48ePA3jw7X/FihVYsGABhg0bBgDYtGkTvLy8sGXLFkyZMkVvsVDd4A2CiIjqhk4JQFhYGA4ePIimTZuqlR86dAg9evTQS2AA0L1
7d6xbtw4XLlxAs2bN8Msvv+DQoUNYsWIFgAdnI2RnZyMiIkLeR6lUIjQ0FKmpqRUmAEVFRSgqKpKfl93nQKVSQaVSaRVbWT1t65dHEjU/XbJUsq5xGxW1qa+2VaL8I03atl/R/jWJRZ9t6hZIFT83OsRX7bHV4Ge3Lunjd81YmeOYyHRonQDs2LFD/v/gwYMxb948pKWl4cknnwQA/Pzzz/j8888RHx+vt+DmzZuHvLw8tGjRAtbW1igpKcGbb76J0aNHA3gwEwEAXl5eavt5eXkhKyurwnYTEhLKjXPv3r1wcHCoVowpKSnVqv+wQJ33/Md19xA9tFK+XLcuemlnV0EFG7T8Yl/h/jWQcruZ/hutjl27qqjQQuemtR5blTEYl5r8rhmrwsJCQ4dAFkwS5V3NpxxWVtp9q9DnhYC2bt2KOXPmYOnSpWjVqhXS09MRExODZcuWITIyEqmpqejWrRuuXr0KHx8feb/Jkyfj8uXL2L17d7ntljcD4Ofnh+vXr2t9J0OVSoWUlBSEh4dDoVDoNL41+y7qtN/DnriSXOM2HlUqWSPXrQvcco/ASg+zFJ0bu5VbfjQzt0b760IlrJByuxnCnS5AIZVWvUNt6RFb+faDy6rdZLXHVlUMRkIfv2vGKj8/Hx4eHsjLy+NdVKnOaT0D8PDK+7oyZ84cvPzyyxg1ahQAoHXr1sjKykJCQgIiIyPh7e0N4MFMwMMJQE5OjsaswMOUSiWUSqVGuUKhqPYfGF32KSP0MMWujw/oytrWR/sVfRhp23ZtfFArpFLDJgBV/czUIDatx2ZiH6Y1+V0zVuY2HjItBj4QWrnCwkKNmQdra2s5GQkMDIS3t7fa1GBxcTEOHDiAkJDamxonIiIydTpfCOjOnTs4cOAALl26hOLiYrVt0dHRNQ4MAAYNGoQ333wT/v7+aNWqFU6ePIlly5bh3//+N4AHhxtiYmKwZMkSBAUFISgoCEuWLIGDgwPGjBmjlxiIiIjMkU4JwMmTJ9G/f38UFhbizp07cHNzw/Xr1+Hg4ABPT0+9JQCrVq3Ca6+9hmnTpiEnJwe+vr6YMmUKXn/9dbnO3LlzcffuXUybNg03b95Ely5dsHfvXl4DgIiIqBI6JQAvvvgiBg0ahLVr18LV1RU///wzFAoFxo0bJ1+gRx+cnZ2xYsUK+bS/8kiShLi4OMTFxemtXyIiInOn0xqA9PR0zJ49G9bW1rC2tkZRURH8/PyQmJiIV155Rd8xEhERkZ7plAAoFApIkgTgwTn3ly5dAgC4uLjI/yciIiLjpdMhgPbt2+P48eNo1qwZevXqhddffx3Xr1/Hxx9/jNatW+s7RiIiItIznRKAJUuWoKDgweXZ3njjDURGRuKFF15A06ZNkZSUpNcAiXh/ACIi/dMpAejUqZP8/wYNGmCXiV1SlIgA7Euouk6v+bUfBxEZhM7XAQAeXHHv/PnzkCQJzZs3R4MGDfQVF5F50+bDl4ioFum0CDA/Px/jx49Hw4YNERoaip49e8LX1xfjxo1DXl6evmMkIiIiPdMpAXjuuedw5MgR7Ny5E7du3UJeXh527tyJ48ePY/LkyfqOkYiIiPRMp0MA3377Lfbs2YPu3bvLZX379sWGDRvw1FNP6S04IiIiqh06zQC4u7vDxcVFo9zFxQX169evcVBERERUu3RKAF599VXExsbi2rVrcll2djbmzJmD1157TW/BERERUe3Q+hBA+/bt5av/AcBvv/2GgIAA+Pv7AwAuXboEpVKJv//+G1OmTNF/pERERKQ3WicAQ4cOrcUwiIiIqC5pnQAsXLiwNuMgIiKiOlSjCwGlpaXh3LlzkCQJLVu2RPv27fUVFxEREdUinRKAnJwcjBo1Cvv374erqyuEEMjLy0OvXr2wdetWXhGQiIjIyOl0FsDMmTORn5+Ps2fPIjc3Fzdv3sSZM2eQn5+P6OhofcdIREREeqZTArB7926sXbsWjz/+uFzWsmVLrFmzBt99953
eggOAP//8E+PGjYO7uzscHBzQrl07pKWlyduFEIiLi4Ovry/s7e0RFhaGs2fP6jUGIiIic6NTAlBaWgqFQqFRrlAoUFpaWuOgyty8eRPdunWDQqHAd999h19//RXvvvsuXF1d5TqJiYlYtmwZVq9ejWPHjsHb2xvh4eHy7YqJiIhIk04JwL/+9S/MmjULV69elcv+/PNPvPjii+jdu7fegnv77bfh5+eHpKQkdO7cGY0bN0bv3r3x2GOPAXjw7X/FihVYsGABhg0bhuDgYGzatAmFhYXYsmWL3uIgIiIyNzotAly9ejWGDBmCxo0bw8/PD5Ik4dKlS2jdujU2b96st+B27NiBvn37YsSIEThw4AAaNmyIadOmyTccysjIQHZ2NiIiIuR9lEolQkNDkZqaWuEFiYqKilBUVCQ/z8/PBwCoVCqoVCqtYiurp239NfsuapRJ5dSrrlLJWg+tlN+mvtpWifLzzJq0X1Gb2u6n6/7GrFbGpuXPd22q7u+aKTHHMZHpkIQQQtedU1JS8L///Q9CCLRs2RJ9+vTRZ2yws7MDAMTGxmLEiBE4evQoYmJi8MEHH2DChAlITU1Ft27d8Oeff8LX11fe7/nnn0dWVhb27NlTbrtxcXGIj4/XKN+yZQscHBz0OgYioooUFhZizJgxyMvLQ7169QwdDlmYaicA9+/fh52dHdLT0xEcHFxbcQEAbG1t0alTJ6Smpspl0dHROHbsGA4fPiwnAFevXoWPj49cZ/Lkybh8+TJ2795dbrvlzQD4+fnh+vXrWv8SqlQqpKSkIDw8vNz1EI8qbwZAH564kqz3Nksla+S6dYFb7hFYiZIat9e5sVu55Uczc/XeZlVUwgopt5sh3OkCFJL+1qsYg1oZW49Y/bRTA9X9XTMl+fn58PDwYAJABlHtQwA2NjYICAhASUnNPxiq4uPjg5YtW6qVPf744/jyyy8BAN7e3gAe3Ijo4QQgJycHXl5eFbarVCqhVCo1yhUKRbX/wGi7j6iFqXoAevmArqxtfbRf0YdRTdqu6QecQio1uwSgjF7HZkQfuLr8fho7cxsPmRad7wY4f/585Obq/g1OG926dcP58+fVyi5cuICAgAAAQGBgILy9vZGSkiJvLy4uxoEDBxASElKrsREREZkynRYBrly5EhcvXoSvry8CAgLg6Oiotv3EiRN6Ce7FF19ESEgIlixZgpEjR+Lo0aNYv3491q9fDwCQJAkxMTFYsmQJgoKCEBQUhCVLlsDBwQFjxozRSwxERETmSKcEYOjQoZAkCTVYP6iVJ554Atu3b8f8+fOxaNEiBAYGYsWKFRg7dqxcZ+7cubh79y6mTZuGmzdvokuXLti7dy+cnZ1rNTYiIiJTVq0EoLCwEHPmzMFXX30FlUqF3r17Y9WqVfDw8Kit+DBw4EAMHDiwwu2SJCEuLg5xcXG1FgMREZG5qdYagIULFyI5ORkDBgzA6NGj8f333+OFF16ordiIiIiollRrBmDbtm3YuHEjRo0aBQAYO3YsunXrhpKSElhb184qdyIiItK/as0AXL58GT169JCfd+7cGTY2NmqXBCYiIiLjV60EoKSkBLa2tmplNjY2uH//vl6DIiIiotpVrUMAQghERUWpXUTn3r17mDp1qtqpgNu2bdNfhERERKR31UoAIiMjNcrGjRunt2CIiIioblQrAUhKSqqtOIiq5fAfN8ot79rEvY4jISIyTeZ3T1QiIiKqEhMAIiIiC8QEgIiIyALpdC8Aouqo6Hg9mYB9CZVv7zW/buIgIr3jDAAREZEF4gyAkXvy0npDh0BERGaIMwBEREQWiAkAERGRBWICQEREZIG4BoDM3sNnIZRK1gAvFkhExBkAIiIiS2RSCUBCQgIkSUJMTIxcJoRAXFwcfH19YW9vj7CwMJw9e9ZwQRIREZkAk0kAjh07hvXr16NNmzZq5YmJiVi2bBlWr16NY8eOwdvbG+Hh4SgoKDBQpERERMbPJBKA27dvY+z
YsdiwYQPq168vlwshsGLFCixYsADDhg1DcHAwNm3ahMLCQmzZssWAERMRERk3k1gEOH36dAwYMAB9+vTB4sWL5fKMjAxkZ2cjIiJCLlMqlQgNDUVqaiqmTJlSbntFRUUoKiqSn+fn5wMAVCoVVCqVVjGV1dO2viRKtKr3qFLJWqf9aqKsT0P0XVMqoZnTPjyOsv+XV8/UlY2pTsem5c9/zbqo3u+aKTHHMZHpMPoEYOvWrThx4gSOHTumsS07OxsA4OXlpVbu5eWFrKysCttMSEhAfHy8RvnevXvh4OBQrfhSUlK0qhdYrVb/cd09RMc9ay7XrYvB+tbVrvKO/JSz6j/ldrNaj8VQ6nRsu3bVWVfa/q6ZksLCQkOHQBbMqBOAy5cvY9asWdi7dy/s7OwqrCdJktpzIYRG2cPmz5+P2NhY+Xl+fj78/PwQERGBevXqaRWbSqVCSkoKwsPDoVAoqqy/Zt9Frdp91BNXknXaryZKJWvkunWBW+4RWOk4c2EonRu7aZQdzcyV/182tnCnC1BIpXUZWq1TCSuk3G5mfGPrEVt1nUpU93fNlJTNPhIZglEnAGlpacjJyUHHjh3lspKSEvz0009YvXo1zp8/D+DBTICPj49cJycnR2NW4GFKpRJKpVKjXKFQVPsPjLb7CB2n0w35AWwlSkwuATie8bdGWXkT4gqp1Lg+JPXI6Mampw9tXX4/jZ25jYdMi1EfCO3duzdOnz6N9PR0+dGpUyeMHTsW6enpaNKkCby9vdWmBouLi3HgwAGEhBhu6pyIiMjYGfUMgLOzM4KDg9XKHB0d4e7uLpfHxMRgyZIlCAoKQlBQEJYsWQIHBweMGTPGECETERGZBKNOALQxd+5c3L17F9OmTcPNmzfRpUsX7N27F87OzoYOjYiIyGiZXAKwf/9+teeSJCEuLg5xcXEGiYeIiMgUGfUaACIiIqodTACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgskMldCtjYLU+5YOgQiIiIqsQZACIiIgvEBICIiMgC8RAAERnWvoTKtwsrAC3qJBQiS8IZACIiIgvEGQAiql1VfcMnIoPgDAAREZEFYgJARERkgYz6EEBCQgK2bduG//3vf7C3t0dISAjefvttNG/eXK4jhEB8fDzWr1+PmzdvokuXLlizZg1atWplwMi19+Sl9YYOgYiILJBRzwAcOHAA06dPx88//4yUlBTcv38fERERuHPnjlwnMTERy5Ytw+rVq3Hs2DF4e3sjPDwcBQUFBoyciIjIuBn1DMDu3bvVniclJcHT0xNpaWno2bMnhBBYsWIFFixYgGHDhgEANm3aBC8vL2zZsgVTpkwpt92ioiIUFRXJz/Pz8wEAKpUKKpVKq9jK6j1aXxIl2g3u/yuVrKtVvy6UxWSMsdVU2ZhUwqhzX52UjcncxiaPS8vfTVNijmMi0yEJIYShg9DWxYsXERQUhNOnTyM4OBh//PEHHnvsMZw4cQLt27eX6w0ZMgSurq7YtGlTue3ExcUhPj5eo3zLli1wcHCotfiJiB5WWFiIMWPGIC8vD/Xq1TN0OGRhTCYBEEJgyJAhuHnzJg4ePAgASE1NRbdu3fDnn3/C19dXrvv8888jKysLe/bsKbet8mYA/Pz8cP36da1/CVUqFVJSUhAeHg6FQiGXr9l3sVrjeuJKcrXq14VSyRq5bl3glnsEVtWc0TB2ZWMLd7oAhVRq6HD0SiWskHK7mdmNTR7XI79r5iA/Px8eHh5MAMggjPoQwMNmzJiBU6dO4dChQxrbJElSey6E0Ch7mFKphFKp1ChXKBTV/gPz6D6imtPmxvwBayVKjDq+mlBIpWb1Ifkwcx2bLr+fxs7cxkOmxSQSgJkzZ2LHjh346aef0KhRI7nc29sbAJCdnQ0fHx+5PCcnB15eXnUeJ5mOo5m55SY3XZu4GyAaIqK6Z9SrhYQQmDF
jBrZt24Yff/wRgYGBatsDAwPh7e2NlJQUuay4uBgHDhxASEhIXYdLRERkMox6BmD69OnYsmULvv76azg7OyM7OxsA4OLiAnt7e0iShJiYGCxZsgRBQUEICgrCkiVL4ODggDFjxhg4eiIiIuNl1AnA2rVrAQBhYWFq5UlJSYiKigIAzJ07F3fv3sW0adPkCwHt3bsXzs7OdRwtERGR6TDqBECbExQkSUJcXBzi4uJqPyAiIiIzYdRrAIiIiKh2MAEgIiKyQEZ9CICISHZwGVDR9Q16za/bWIjMAGcAiIiILBBnAIgecviPGxplvDgQEZkjJgBEVWBSQETmiIcAiIiILBATACIiIgvEBICIiMgCcQ0AEZm+fQmVb+dpgkQaOANARERkgZgAEBERWSAeAiDSQXmnBlaEpwwSkTHiDAAREZEF4gwAUS3jhYTMRFULDQEuNiSTwgSghtbsuwghWZe77clL6+s4GiIiIu0wASAi88dv70QazGYNwPvvv4/AwEDY2dmhY8eOOHjwoKFDIiIiMlpmkQB89tlniImJwYIFC3Dy5En06NED/fr1w6VLlwwdGhERkVEyi0MAy5Ytw6RJk/Dcc88BAFasWIE9e/Zg7dq1SEjQYuqPqI5V5zTC8nARIRHVlMknAMXFxUhLS8PLL7+sVh4REYHU1NRy9ykqKkJRUZH8PC8vDwCQm5sLlUqlVb8qlQqFhYUoKsmrcBFgwb37WrVlbEolgcLCQhTcuw8rUWLocPTKXMZ243axRplKWKGwsBA3pGIopFIDRFU76mxcN6pIysp5zavdxiMKCgoAAEKIau1HpA8mnwBcv34dJSUl8PLyUiv38vJCdnZ2ufskJCQgPj5eozwwMLBWYiQiUxBnsDYKCgrg4uKih/6JtGfyCUAZSZLUngshNMrKzJ8/H7GxsfLz0tJS5Obmwt3dvcJ9HpWfnw8/Pz9cvnwZ9erV0z1wI8SxmSZzHZu5jgt48HeqoKAAvr6+hg6FLJDJJwAeHh6wtrbW+Lafk5OjMStQRqlUQqlUqpW5urrq1H+9evXM7o9SGY7NNJnr2Mx1XPzmT4Zi8mcB2NraomPHjkhJSVErT0lJQUhIiIGiIiIiMm4mPwMAALGxsRg/fjw6deqErl27Yv369bh06RKmTp1q6NCIiIiMklkkAM8++yxu3LiBRYsW4dq1awgODsauXbsQEBBQa30qlUosXLhQ41CCOeDYTJO5js1cx0VkaJLg+SdEREQWx+TXABAREVH1MQEgIiKyQEwAiIiILBATACIiIgvEBEAH5nDr4YSEBDzxxBNwdnaGp6cnhg4divPnz6vVEUIgLi4Ovr6+sLe3R1hYGM6ePWugiHWXkJAASZIQExMjl5ny2P7880+MGzcO7u7ucHBwQLt27ZCWliZvN9Wx3b9/H6+++ioCAwNhb2+PJk2aYNGiRSgt/ef6/6Y6NiKjJKhatm7dKhQKhdiwYYP49ddfxaxZs4Sjo6PIysoydGjV0rdvX5GUlCTOnDkj0tPTxYABA4S/v7+4ffu2XOett94Szs7O4ssvvxSnT58Wzz77rPDx8RH5+fkGjLx6jh49Kho3bizatGkjZs2aJZeb6thyc3NFQECAiIqKEkeOHBEZGRni+++/FxcvXpTrmOrYFi9eLNzd3cXOnTtFRkaG+Pzzz4WTk5NYsWKFXMdUx0ZkjJgAVFPnzp3F1KlT1cpatGghXn75ZQNFpB85OTkCgDhw4IAQQojS0lLh7e0t3nrrLbnOvXv3hIuLi1i3bp2hwqyWgoICERQUJFJSUkRoaKicAJjy2ObNmye6d+9e4XZTHtuAAQPEv//9b7WyYcOGiXHjxgkhTHtsRMaIhwCqoezWwxEREWrlld162FSU3RLZzc0NAJCRkYHs7Gy1sSqVSoSGhprMWKdPn44BAwagT58+auWmPLYdO3agU6dOGDFiBDw9PdG+fXts2LBB3m7KY+vevTt++OEHXLhwAQDwyy+/4NChQ+j
fvz8A0x4bkTEyiysB1hVdbj1sCoQQiI2NRffu3REcHAwA8njKG2tWVladx1hdW7duxYkTJ3Ds2DGNbaY8tj/++ANr165FbGwsXnnlFRw9ehTR0dFQKpWYMGGCSY9t3rx5yMvLQ4sWLWBtbY2SkhK8+eabGD16NADTft+IjBETAB1U59bDpmDGjBk4deoUDh06pLHNFMd6+fJlzJo1C3v37oWdnV2F9UxxbKWlpejUqROWLFkCAGjfvj3Onj2LtWvXYsKECXI9UxzbZ599hs2bN2PLli1o1aoV0tPTERMTA19fX0RGRsr1THFsRMaIhwCqQZdbDxu7mTNnYseOHdi3bx8aNWokl3t7ewOASY41LS0NOTk56NixI2xsbGBjY4MDBw5g5cqVsLGxkeM3xbH5+PigZcuWamWPP/44Ll26BMC037c5c+bg5ZdfxqhRo9C6dWuMHz8eL774IhISEgCY9tiIjBETgGowp1sPCyEwY8YMbNu2DT/++CMCAwPVtgcGBsLb21ttrMXFxThw4IDRj7V37944ffo00tPT5UenTp0wduxYpKeno0mTJiY7tm7dummcrnnhwgX5xlem/L4VFhbCykr9T5K1tbV8GqApj43IKBlwAaJJKjsNcOPGjeLXX38VMTExwtHRUWRmZho6tGp54YUXhIuLi9i/f7+4du2a/CgsLJTrvPXWW8LFxUVs27ZNnD59WowePdpkT7l6+CwAIUx3bEePHhU2NjbizTffFL/99pv45JNPhIODg9i8ebNcx1THFhkZKRo2bCifBrht2zbh4eEh5s6dK9cx1bERGSMmADpYs2aNCAgIELa2tqJDhw7yqXOmBEC5j6SkJLlOaWmpWLhwofD29hZKpVL07NlTnD592nBB18CjCYApj+2bb74RwcHBQqlUihYtWoj169erbTfVseXn54tZs2YJf39/YWdnJ5o0aSIWLFggioqK5DqmOjYiY8TbARMREVkgrgEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEg0oPMzExIkoT09HRDh0JEpBUmAGRWhBDo06cP+vbtq7Ht/fffh4uLi3znPCIiS8YEgMyKJElISkrCkSNH8MEHH8jlGRkZmDdvHt577z34+/sbMEIiIuPABIDMjp+fH9577z289NJLyMjIgBACkyZNQu/evREVFaVRf/To0Rg1apRamUqlgoeHB5KSkgAAu3fvRvfu3eHq6gp3d3cMHDgQv//+e4UxJCcnw9XVVa3sq6++giRJamXffPMNOnbsCDs7OzRp0gTx8fG4f/++vD0uLg7+/v5QKpXw9fVFdHR0NV8NIqLy2Rg6AKLaEBkZie3bt2PixIkYPnw4zpw5gzNnzpRbd+zYsRg5ciRu374NJycnAMCePXtw584dDB8+HABw584dxMbGonXr1rhz5w5ef/11PP3000hPT9e4h7229uzZg3HjxmHlypXo0aMHfv/9dzz//PMAgIULF+KLL77A8uXLsXXrVrRq1QrZ2dn45ZdfdOqLiEiDYW9GSFR7/vrrL9GgQQNhZWUltm3bVmG94uJi4eHhIf7zn//IZaNHjxYjRoyocJ+cnBwBQL4VbUZGhgAgTp48KYQQIikpSbi4uKjts337dvHwr1yPHj3EkiVL1Op8/PHHwsfHRwghxLvvviuaNWsmiouLtRovEVF18BAAmS1PT088//zzePzxx/H0009XWE+hUGDEiBH45JNPADz4tv/1119j7Nixcp3ff/8dY8aMQZMmTVCvXj0EBgYCQI0WFKalpWHRokVwcnKSH5MnT8a1a9dQWFiIESNG4O7du2jSpAkmT56M7du3qx0eICKqCR4CILNmY2MDG5uqf8zHjh2L0NBQ5OTkICUlBXZ2dujXr5+8fdCgQfDz88OGDRvg6+uL0tJSBAcHo7i4uNz2rKysIIRQK1OpVGrPS0tLER8fj2HDhmnsb2dnBz8
/P5w/fx4pKSn4/vvvMW3aNCxduhQHDhyAQqHQZvhERBViAkAEICQkBH5+fvjss8/w3XffYcSIEbC1tQUA3LhxA+fOncMHH3yAHj16AAAOHTpUaXsNGjRAQUEB7ty5A0dHRwDQuEZAhw4dcP78eTRt2rTCduzt7TF48GAMHjwY06dPR4sWLXD69Gl06NChBqMlImICQATgwemDY8aMwbp163DhwgXs27dP3la/fn24u7tj/fr18PHxwaVLl/Dyyy9X2l6XLl3g4OCAV155BTNnzsTRo0eRnJysVuf111/HwIED4efnhxEjRsDKygqnTp3C6dOnsXjxYiQnJ6OkpERu6+OPP4a9vT0CAgJq4yUgIgvDNQBE/9/YsWPx66+/omHDhujWrZtcbmVlha1btyItLQ3BwcF48cUXsXTp0krbcnNzw+bNm7Fr1y60bt0an376KeLi4tTq9O3bFzt37kRKSgqeeOIJPPnkk1i2bJn8Ae/q6ooNGzagW7duaNOmDX744Qd88803cHd31/vYicjySOLRA5VERERk9jgDQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQW6P8BNNz4zl/QBbAAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABb6ElEQVR4nO3deVxU1fsH8M8MDMMMm7IPsiui4r7hmpiCS25tVlZKmlmWaVaWaYlLmJZmm5ZlYi5lZfb1Vy6gCWm4oGIumJosgjAgCLLDLOf3xzQXLusM2yw879drXjD33rnzHGaYZ85yzxEwxhgIIYQQYlaEhg6AEEIIIS2PEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEnwzRUVFQSAQwNraGmlpabX2h4SEoGfPnrxtvr6+EAgE3M3a2hpdunTB4sWLkZubyzs2IiICAoEAQqEQycnJtc5fUlICe3t7CAQChIeHNxpvzee2tbVFcHAwvvvuO/0K3ght3DXL0xzh4eGwtbXV6VhfX1/e3yM1NRUCgQBRUVHcNu1rl5qaqlcckZGR+PXXX2ttj42NhUAgQGxsrF7nq4s2Xu1NKBTCyckJEydOxKlTp5p9fl2Eh4fD19eXt00gECAiIkKv82RmZiIiIgIXL16stU/7PiGEtDxK8C2koqICy5cv1/n44cOH49SpUzh16hQOHTqEefPm4auvvsL48ePrPN7W1hbbt2+vtf2nn36CQqGASCRq0nNrk9ysWbOwZcsWnc9h7Pbv34933323wWMeeughnDp1CjKZTK9z15fg+/fvj1OnTqF///56na8hCxYswKlTp3DixAmsXbsWf//9N0aPHo3ExMQWew59nDp1Cs8//7xej8nMzMTKlSvrTPDPP/98m31hIaS9sTR0AOZi/Pjx2LNnD9544w306dOn0eM7dOiAIUOGcPdHjx6NoqIirF69Gjdu3EDXrl15xz/xxBPYsWMHVq5cCaGw6nvZtm3b8PDDD+PAgQM6x1rzuceOHQsfHx9s3LgRL730Up2PUalUUCqVEIvFOj+PIfXr16/RY1xcXODi4tJiz2lvb8/7u7YEb29v7pzDhw9Hly5dMGbMGGzevBlff/11nY8pKyuDtbV1q9SMW7p8np6e8PT0bNFzEkI0qAbfQpYsWQInJye89dZbTT6Hg4MDANRZG589ezbS09MRExPDbbtx4wZOnjyJ2bNnN/k5AU3CDwwM5LoYtM3D69evx5o1a+Dn5wexWIzjx48DAA4cOIChQ4dCKpXCzs4OoaGh9dbC0tPT8cgjj8De3h4ODg545plncPfuXd4xe/fuRVhYGGQyGSQSCbp37463334bJSUldZ7z6tWrGDNmDGxsbODi4oJXXnkFpaWlvGNqNtHXpa4m+sTEREyaNAmurq4Qi8Xw8PDAQw89hIyMDACaJuqSkhLs2LGDaz4PCQkBUH8T/ZkzZzB58mQ4OTnB2toanTt3xqJFixqMrT7aBKt9rbRliI6OxuzZs+Hi4gKpVIqKigoAmr/t0KFDYWNjA1tbW4wbN67O2n9UVBQCAwMhFovRvXv3erts6mqiv3PnDl544QV4eXnBysoKHh4eeOyxx5CdnY3Y2FgMGjQIAPDcc89xfzPtOepqoler1Vi/fj26desGsVgMV1dXzJw5k3sNtLTdXwkJCRg5ciSkUin8/f3xwQcfQK1W8863Zs0aBAYGQiKRoEOHDujduzc++eQTHf/qhJgmSvAtxM7ODsuXL8eRI0fwxx9/NHo8YwxKpRJKpRLFxcU4fvw4Nm3ahOHDh8PPz6/W8QEBARg5ciS+/fZbbtu3334LX19fjBkzplmxKxQKpKWl1arNfvrpp/
jjjz/w0Ucf4dChQ+jWrRv27NmDqVOnwt7eHt9//z22bduG/Px8hISE4OTJk7XO/fDDD6NLly74+eefERERgV9//RXjxo2DQqHgjrl58yYmTpyIbdu24fDhw1i0aBF+/PFHTJ48uc5YJ06ciDFjxuDXX3/FK6+8gq+++gpPPPFEs/4GgGY8Q2hoKLKzs/HFF18gJiYGmzZtgre3N4qKigBomqglEgnXF37q1Cls3ry53nMeOXIEI0eOxO3bt7Fx40YcOnQIy5cvR3Z2dpNi/PfffwGg1ms1e/ZsiEQi7Ny5Ez///DNEIhEiIyPx1FNPoUePHvjxxx+xc+dOFBUVYeTIkUhKSuIeGxUVheeeew7du3fHvn37sHz5cqxevVqn9/GdO3cwaNAg7N+/H4sXL8ahQ4ewadMmODg4ID8/H/379+e6lpYvX879zRpq5n/ppZfw1ltvITQ0FAcOHMDq1atx+PBhDBs2rNaYDrlcjqeffhrPPPMMDhw4gAkTJmDp0qXYtWsXd8z69esRERGBp556Cr///jv27t2LOXPmoKCgoNHyEWLSGGmW7du3MwAsISGBVVRUMH9/fzZw4ECmVqsZY4yNGjWKBQUF8R7j4+PDANS6DR48mGVlZfGOXbFiBQPA7t69y7Zv387EYjHLy8tjSqWSyWQyFhERwRhjzMbGhs2aNavReH18fNjEiROZQqFgCoWCpaSksFmzZjEA7M0332SMMZaSksIAsM6dO7PKykrusSqVinl4eLBevXoxlUrFbS8qKmKurq5s2LBhteJ+7bXXeM+/e/duBoDt2rWrzvjUajVTKBQsLi6OAWB///03t08b5yeffMJ7zPvvv88AsJMnT/LKWf3voS3T9u3buW3a1y4lJYUxxti5c+cYAPbrr782+Des7299/PhxBoAdP36c29a5c2fWuXNnVlZW1uA5a9LGu27dOqZQKFh5eTk7f/48GzRoEAPAfv/9d14ZZs6cyXv87du3maWlJVuwYAFve1FREXN3d2fTp09njFW9pv379+fes4wxlpqaykQiEfPx8eE9HgBbsWIFd3/27NlMJBKxpKSkesuSkJBQ62+vpX2faF27do0BYPPnz+cdd+bMGQaAvfPOO9y2UaNGMQDszJkzvGN79OjBxo0bx92fNGkS69u3b73xEWKuqAbfgqysrLBmzRqcO3cOP/74Y4PHjhgxAgkJCUhISMBff/2Fbdu24e7du3jwwQfrHXn++OOPw8rKCrt378bBgwchl8t1Gjlf08GDByESiSASieDn54cff/wRCxYswJo1a3jHTZkyhdddcP36dWRmZuLZZ5/ljQOwtbXFo48+itOnT9dqKn/66ad596dPnw5LS0uuuR8AkpOTMWPGDLi7u8PCwgIikQijRo0CAFy7dq1W/DXPOWPGDADgnbMpunTpgo4dO+Ktt97Cl19+yavlNsWNGzdw69YtzJkzB9bW1k06x1tvvQWRSARra2sMGDAAt2/fxldffYWJEyfyjnv00Ud5948cOQKlUomZM2dyLUVKpRLW1tYYNWoU142gfU1nzJjBayr38fHBsGHDGo3v0KFDGD16NLp3796k8tWkfQ1rvq8HDx6M7t2749ixY7zt7u7uGDx4MG9b7969eVe0DB48GH///Tfmz5+PI0eOoLCwsEViJcTY0SC7Fvbkk0/io48+wrJly/DII4/Ue5yDgwMGDhzI3R82bBh69OiBoUOHYsOGDVi7dm2tx9jY2OCJJ57At99+Cx8fH25wnL5GjBiBjz/+GAKBAFKpFJ07d4aVlVWt42qOLs/Ly6tzOwB4eHhArVYjPz8fUqmU2+7u7s47ztLSEk5OTty5iouLMXLkSFhbW2PNmjXo2rUrpFIp13dfVlZW5+Or0z6H9pxN5eDggLi4OLz//vt45513kJ+fD5lMhrlz52L58uV6XakAgBtr0JxBZAsXLsQzzzwDoVCIDh06wM/Pr87BczVfE20XgLb/uybtFzTt36zm66
Td1tglhHfv3m3RQXKNvcdqXopa870AAGKxmPe+Wbp0KWxsbLBr1y58+eWXsLCwwAMPPIB169bx/gcJMTeU4FuYQCDAunXrEBoaiq1bt+r12N69ewMA/v7773qPmT17Nr755htcunQJu3fvblKMNb9c1KdmItF+mGZlZdU6NjMzE0KhEB07duRtl8vl6NSpE3dfqVQiLy+PO9cff/yBzMxMxMbGcrV2APX2j9Z8vPY5qsfXHL169cIPP/wAxhguXbqEqKgorFq1ChKJBG+//bZe59L2k9ccHKYPT0/PJr1Wzs7OAICff/65wS+B2r+Z9m9YXV3banJxcWlW+eqLJysrq9YXh8zMTK5c+rC0tMTixYuxePFiFBQU4OjRo3jnnXcwbtw4pKen876QEmJOqIm+FYwdOxahoaFYtWoViouLdX6c9jphV1fXeo8ZOnQoZs+ejYcffhgPP/xwc0PVS2BgIDp16oQ9e/aAMcZtLykpwb59+7iR9dXV/BLy448/QqlUciPPtYmp5uV3X331Vb1x1Dznnj17AIA7Z0sQCATo06cPPv74Y3To0AEXLlzg9tWsIdana9eu6Ny5M7799ltuVHtbGTduHCwtLXHr1i0MHDiwzhugeU1lMhm+//573mualpaG+Pj4Rp9nwoQJOH78OK5fv17vMdrXVpe/2YMPPggAvEFyAJCQkIBr1641e0Bphw4d8Nhjj+Hll1/GvXv39J7kiBBTQjX4VrJu3ToMGDAAOTk5CAoKqrW/oKAAp0+fBqAZGX7t2jVERkZCLBbj5ZdfbvDc27Zta5WYGyMUCrF+/Xo8/fTTmDRpEubNm4eKigp8+OGHKCgowAcffFDrMb/88gssLS0RGhqKq1ev4t1330WfPn0wffp0AJquiY4dO+LFF1/EihUrIBKJsHv37npbMaysrLBhwwYUFxdj0KBBiI+Px5o1azBhwgSMGDGiWeX77bffsHnzZkybNg3+/v5gjOGXX35BQUEBQkNDueN69eqF2NhY/N///R9kMhns7OwQGBhY5zm/+OILTJ48GUOGDMFrr70Gb29v3L59G0eOHGlyC4wufH19sWrVKixbtgzJyckYP348OnbsiOzsbJw9exY2NjbcnAqrV6/G888/j4cffhhz585FQUEBIiIi6my2r2nVqlU4dOgQHnjgAbzzzjvo1asXCgoKcPjwYSxevBjdunVD586dIZFIsHv3bnTv3h22trbw8PCAh4dHrfMFBgbihRdewGeffQahUIgJEyYgNTUV7777Lry8vPDaa6/p/beYPHkyevbsiYEDB8LFxQVpaWnYtGkTfHx8EBAQoPf5CDEZhh3jZ/qqj6KvacaMGQxAo6PoLSwsmLe3N3vsscdYYmIi79jqo+gbos8o+oceeqjBY7QjuD/88MM69//6668sODiYWVtbMxsbGzZmzBj2119/1Rn3+fPn2eTJk5mtrS2zs7NjTz31FMvOzuYdGx8fz4YOHcqkUilzcXFhzz//PLtw4UKtkdezZs1iNjY27NKlSywkJIRJJBLm6OjIXnrpJVZcXFyrnPqOov/nn3/YU089xTp37swkEglzcHBggwcPZlFRUbxzX7x4kQ0fPpxJpVIGgI0aNYoxVvcoesYYO3XqFJswYQJzcHBgYrGYde7cudbVBTU19hrULENd7z/GNK/V6NGjmb29PROLxczHx4c99thj7OjRo7zjvvnmGxYQEMCsrKxY165d2bfffstmzZrV6Ch6xhhLT09ns2fPZu7u7kwkEjEPDw82ffp03uv8/fffs27dujGRSMQ7R81R9IxpRvavW7eOde3alYlEIubs7MyeeeYZlp6ezjuuritUGGO14t6wYQMbNmwYc3Z2ZlZWVszb25vNmTOHpaam1vk3I8RcCBir1i5HCCGEELNAffCEEEKIGaIETwghhJghSvCEEEKIGaIETwghhJghSvCEEEKIGTL76+DVajUyMzNhZ2fXKutjE0JIXRhjKCoqgoeHB2
/tBkLaitkn+MzMTHh5eRk6DEJIO5Went6i8/UToiuzT/B2dnYANP9k9vb2Oj1GoVAgOjoaYWFhei8wYoyoPMbN3MoDmF+ZmlKewsJCeHl5cZ9BhLQ1s0/w2mZ5e3t7vRK8VCqFvb292Xw4UXmMl7mVBzC/MjWnPNQ1SAyFOoYIIYQQM0QJnhBCCDFDlOAJIYQQM2T2ffCEEGLMVCoVFAqFocMgJkIkEsHCwkKnYynBE0KIATDGIJfLUVBQYOhQiInp0KED3N3dGx3ASQmeEEIMQJvcXV1dIZVKabQ9aRRjDKWlpcjJyQEAyGSyBo+nBE8IIW1MpVJxyd3JycnQ4RATIpFIAAA5OTlwdXVtsLmeBtkRQkgDGAPKylr2nNo+d6lU2rInJu2C9n3T2NgNqsETQkgdlEogNxe4exewbKVPSmqWJ02h6/uGEjwhhFRTVgbk5AD37gFqtWZbayV4QloTvW0JIe0eY8D9+5rEXlRk6GgIaRnUB08IabdUKkAuB65cAW7douRuTEJCQrBo0SLuvq+vLzZt2tSqzxkbGwuBQACBQIBp06a16nPVR/v8HTp0aPa5KMETQtqdsjIgLQ24dAm4cweorDR0RKQxCQkJeOGFF9rkua5fv46oqCi9HhMeHs4lZ+1tyJAhvGMqKiqwYMECODs7w8bGBlOmTEFGRgbvmKysrBb7IkMJnhDSbhQUADduAElJmgF02j52YvxcXFza7KoDV1fXJtWgx48fj6ysLO528OBB3v5FixZh//79+OGHH3Dy5EkUFxdj0qRJUKlU3DHu7u5wcHBobhEAUIInhJg5lQrIzgYuXzbeZnjGGEpKSgxyY4zpHGdISAgWLFiARYsWoWPHjnBzc8PWrVtRUlKC5557DnZ2dujcuTMOHTrEe1xSUhImTpwIW1tbuLm54dlnn0Vubi63v6SkBDNnzoStrS1kMhk2bNhQ67lrNtFv3LgRvXr1go2NDby8vDB//nwUFxdz+6OiotChQwccOXIE3bt3h62tLZeA9VVXfDW7EABALBbD3d2duzk6OnL77t+/j23btmHDhg0YO3Ys+vXrh127duHy5cs4evSo3jHpghI8IcQslZcDt29rmuEzMoy7Gb60tBS2trYGuZWWluoV644dO+Ds7IyzZ89iwYIFeOmll/D4449j2LBhuHDhAsaNG4dnn32WO29WVhZGjRqFvn374ty5czh8+DCys7Mxffp07pxvvvkmjh8/jv379yM6OhqxsbE4f/58g3EIhUJ8+umnuHLlCnbs2IE//vgDS5YsqfV3/eijj7Bz5078+eefuH37Nt544w29yqtPfLGxsXB1dUXXrl0xd+5cbsY5ADh//jwUCgXCwsK4bR4eHujZsyfi4+P1jkkXNIqeEGJWCgpoNHxr6tOnD5YvXw4AWLp0KT744AM4Oztj7ty5AID33nsPW7ZswaVLlzBkyBBs2bIF/fv3R2RkJHeOb7/9Fl5eXrhx4wY8PDywbds2fPfddwgNDQWg+RLh6enZYBzVa89+fn5YvXo1XnrpJWzevJnbrlAo8OWXX6Jz584AgFdeeQWrVq3Sq7zFxcU6xTdhwgQ8/vjj8PHxQUpKCt599108+OCDOH/+PMRiMeRyOaysrNCxY0fe49zc3CCXy/WKSVeU4AkhJk+lqpqUpqLC0NHoTyqV8pqX2/q59dG7d2/udwsLCzg5OaFXr17cNjc3NwDgaq/nz5/H8ePHYWtrW+tct27dQllZGSorKzF06FBuu6OjIwIDAxuM4/jx44iMjERSUhIKCwuhVCpRXl6OkpIS2NjYcGXTJndAM3d79Vq1Lm7duqVTfE888QT3e8+ePTFw4ED4+Pjg999/xyOPPFLv+RljrTbhESV4QojJKi/X1Nbz8kx7wJxAIOCSkrETiUS8+wKBgLdNm6zU/70garUakydPxrp162qdSyaT4ebNm3rHkJaWhokTJ+LFF1/E6tWr4ejoiJMnT2LOnDm86VvrilWfMQ
cA9D5eSyaTwcfHhyufu7s7KisrkZ+fz6vF5+TkYNiwYU16jsZQHzwhxOTcvw/cvAlcvaqptZtycjd3/fv3x9WrV+Hr64suXbrwbjY2NujSpQtEIhFOnz7NPSY/Px83btyo95znzp2DUqnEhg0bMGTIEHTt2hWZmZmtEn9T4gOAvLw8pKencyu+DRgwACKRCDExMdwxWVlZuHLlCiV4Qkj7plJpautXrgD//gsUFho6IqKLl19+Gffu3cNTTz2Fs2fPIjk5GdHR0Zg9ezZUKhVsbW0xZ84cvPnmmzh27BiuXLmC8PBwCIX1p6fOnTtDqVTis88+Q3JyMnbu3Ikvv/yyVeLXJb7i4mK88cYbOHXqFFJTUxEbG4vJkyfD2dkZDz/8MADAwcEBc+bMweuvv45jx44hMTERzzzzDHr16oWxY8e2SuzURE8IMWrm0gzfXnl4eOCvv/7CW2+9hXHjxqGiogI+Pj4YP348lyQ//PBDFBcXY8qUKbCzs8Prr7+O+/fv13vOvn37YuPGjVi3bh2WLl2KBx54AGvXrsXMmTNbpQyNxWdhYYHLly/ju+++Q0FBAWQyGUaPHo29e/fCzs6OO+7jjz+GpaUlpk+fjrKyMowZMwZRUVENLvnaHJTgCSFGSTs3PNXUjUdsbGytbampqbW21ey3DggIwC+//FLveW1tbbFz507s3LmT2/bmm282+DyvvfYaXnvtNd62Z599lvs9PDwc4eHhvP3Tpk1rUp96XfH9/vvv3O8SiQRHjhxp9DzW1tb47LPP8Nlnn+kdQ1NQgieEGA2VSlNTz8kxzdHwxDx4enpi8uTJ+P7779v8uW1tbaFUKmFtbd3sc1GCJ4QYXHm5ZrBcXp4myRNiCMHBwdyo97ou62sLFy9eBIAWabY3+CC7O3fu4JlnnoGTkxOkUin69u3LmyGIMYaIiAh4eHhAIpEgJCQEV69eNWDEhJCWUn00fE4OJXdiWBKJhBvh7+7u3uCxsbGxrbK6nfb5/fz8mn0ugyb4/Px8DB8+HCKRCIcOHUJSUhI2bNjAm+R//fr12LhxIz7//HMkJCTA3d0doaGhKKJpqggxSWq1JplfvUqj4QlpTQZtol+3bh28vLywfft2bpuvry/3O2MMmzZtwrJly7iZgHbs2AE3Nzfs2bMH8+bNa+uQCSFNVFFRNRqeauqEtD6DJvgDBw5g3LhxePzxxxEXF4dOnTph/vz53JzGKSkpkMvlvMn5xWIxRo0ahfj4+DoTfEVFBSqqjc4p/K96oFAoeDMcNUR7nK7HGzsqj3Ezt/IA/DIVFWn61025pq5S6f8amdPrSUyTQRN8cnIytmzZgsWLF+Odd97B2bNn8eqrr0IsFmPmzJncBPzauY213NzckJaWVuc5165di5UrV9baHh0drfecy9VnHDIHVB7jZm7lAcyvTPqUR99V2ghpaQZN8Gq1GgMHDuRWGerXrx+uXr2KLVu28CYsqDkRf0OT8y9duhSLFy/m7hcWFsLLywthYWGwt7fXKS6FQoGYmBiEhobWmsvYFFF5jJs5laeyUlNbz81V4M6dGLi7h0IoNO0yAYBEosCtW/q9RoWm3GRBzIJBE7xMJkOPHj1427p37459+/YBADeKUS6Xc/P5AprJ+WvW6rXEYjHEYnGt7SKRSO8Pz6Y8xphReYybKZensFDTv66d3Es7l4hQKDKLBK+9Ykmf18hUX0tiPgya4IcPH47r16/ztt24cQM+Pj4ANGv8uru7IyYmBv369QMAVFZWIi4urs6ViQghbUetrpqUprzc0NGYh61b2/b5XnhBv+NDQkIQFxcHAEhMTETfvn1bPigjpW01dnBwQEFBgWGD0ZFBL5N77bXXcPr0aURGRuLff//Fnj17sHXrVrz88ssANH/QRYsWITIyEvv37+cm+ZdKpZgxY4YhQyek3aqoADIygEuXgNu3Kbm3N3PnzkVWVhZ69uyp0/GxsbGYOnUqZDIZbGxs0LdvX+zevbvWMQ
KBoNbtn3/+aXa8dZ1XIBDgww8/5I4JCQmptf/JJ5/knScrK6tVrntvTQatwQ8aNAj79+/H0qVLsWrVKvj5+WHTpk14+umnuWOWLFmCsrIyzJ8/H/n5+QgODkZ0dDRvAn9CSOsrKtLU1k2k8kJaiVQqbXQSmOri4+PRu3dvvPXWW3Bzc8Pvv/+OmTNnwt7eHpMnT+Yde/36dd5YKRcXl2bHm5WVxbt/6NAhzJkzB48++ihv+9y5c7Fq1SruvkQi4e13d3eHg4NDs+NpSwafqnbSpEmYNGlSvfsFAgEiIiIQERHRdkERQgBUNcPfvQuUlRk6GmJsYmNjMXr0aPz222945513cP36dfTp0wfffPMNevXqBQB45513eI959dVXceTIEezfv79Wgnd1deVNdNaYkJAQriVh165dsLCwwEsvvYTVq1dzTeo1v4z873//w+jRo+Hv78/bru8XF1Ng8KlqCSHGp7KS3wxPyZ005M0338RHH32EhIQEuLq6YsqUKQ3OA3D//n04OjrW2t6vXz/IZDKMGTMGx48f1+m5d+zYAUtLS5w5cwaffvopPv74Y3zzzTd1HpudnY3ff/8dc+bMqbVv9+7dcHZ2RlBQEN544w2zmC3V4DV4Qojx0DbD379fNRKekMasWLECoaGhADQJ19PTE/v378f06dNrHfvzzz8jISEBX331FbdNJpNh69atGDBgACoqKrBz506MGTMGsbGxeOCBBxp8bi8vL3z88ccQCAQIDAzE5cuX8fHHH3MTplW3Y8cO2NnZcTOjaj399NPcoO4rV65g6dKl+Pvvv01+HgdK8IS0c2o1cO+eJrFTTZ00xdChQ7nfHR0dERgYiGvXrtU6LjY2FuHh4fj6668RFBTEbQ8MDERgYCDvfOnp6fjoo4/wwAMP4MSJE5gwYQK3/6uvvuLGag0ZMoQ3L8rQoUOxYcMGqFSqWiuyffvtt3j66adrLcVa/ctAz549ERAQgIEDB+LChQvo37+/vn8Oo0EJnpB2qmpSGkCpNHQ0xNzUnIwsLi4OkydPxsaNG3kTmdVnyJAh2LVrFwBg4MCB3DKqQO3ZTXVx4sQJXL9+HXv37m302P79+0MkEuHmzZuU4AkhpoOa4UlLO336NLy9vQFoVgm9ceMGunXrxu2PjY3FpEmTsG7dOryg48X3iYmJ3ARn2mVc63vumvcDAgJq1d63bduGAQMGoE+fPo0+99WrV6FQKHgTrJkiSvCEtAPUDE9a06pVq+Dk5AQ3NzcsW7YMzs7OmDZtGgBNcn/ooYewcOFCPProo9waI1ZWVtxAu02bNsHX1xdBQUGorKzErl27sG/fPm5W04akp6dj8eLFmDdvHi5cuIDPPvsMGzZs4B1TWFiIn376qdZ2ALh16xZ2796NiRMnwtnZGUlJSXj99dfRr18/DB8+vJl/GcOiBE+IGaNmeNOi78xyxuKDDz7AwoULcfPmTfTp0wcHDhyAlZUVACAqKgqlpaVYu3Yt1q5dyz1m1KhRiI2NBaCZofSNN97AnTt3IJFIEBQUhN9//x0TJ05s9LlnzpyJsrIyDB48GBYWFliwYEGtVoIffvgBjDE89dRTtR5vZWWFY8eO4ZNPPkFxcTG8vLzw0EMPYcWKFbVaAUwNJXhCzFBxcdWkNNQMT1rbiBEjcOXKlTr3RUVFISoqqsHHL1myBEuWLGnSc4tEImzatAlbtmyp95gXXnih3q4BLy8vbvpdc0MJnhAzwVhVMzytVEpay+bNm/HNN9/g1KlThg6lTdna2kKpVNYagW/MKMETYuIUCk1Sp2Z40tp2796Nsv8GcXh7eyM+Pt7AEbUd7Sh+U2q2pwRPiImiZnjS1jp16sS7HxISAmbAN5+2D78t1DeK35hRgifEhFAzPCFEV5TgCTEBCoVmNPzdu9QMTwjRDSV4QoxYSYmmtp6fT83whBD9UIInxMhom+Hv3tUkeEIIaQpK8IQYCW0zfG6u5ndCCGkOSvCEGJh2sFxSEl
BjfQ5CCGkySvCEGABjmn71nBzN4i/abZTg27fz59v2+QYM0O/4kJAQbta3xMRE9O3bt+WDMgBfX1+kpaUB0CyW06FDB8MG1EKEhg6AkPZEoQCysoDLl4GUFOpjJ6Zn7ty5yMrKQs+ePXU6vry8HOHh4ejVqxcsLS25RWhaQlFRERYtWgQfHx9IJBIMGzYMCQkJvGOys7MRHh4ODw8PSKVSjB8/Hjdv3uQdk5CQoNPCNqaGEjwhbaC0VJPQL18GMjOpj52YLqlUCnd3d1ha6tYArFKpIJFI8Oqrr2Ls2LEtGsvzzz+PmJgY7Ny5E5cvX0ZYWBjGjh2LO3fuAAAYY5g2bRqSk5Pxv//9D4mJifDx8cHYsWNRUu3btYuLC7eynTmhBE9IK9GOhv/nH+DaNc3vdKkbMSexsbEQCAT4/fff0adPH1hbWyM4OBiXL1/mjrGxscGWLVswd+5cuLu763zu8PBwTJs2DStXroSrqyvs7e0xb948VFZWAgDKysqwb98+rF+/Hg888AC6dOmCiIgI+Pn5cQvP3Lx5E6dPn8aWLVswaNAgBAYGYvPmzSguLsb333/fsn8MI0QJnpAWplRSMzxpX95880189NFHSEhIgKurK6ZMmQJFCzRTHTt2DNeuXcPx48fx/fffY//+/Vi5ciUAQKlUQqVS1Vr8RSKR4OTJkwCAiooKAOAdY2FhASsrK+4Yc0YJnpAWUloKpKYCly5RMzxpX1asWIHQ0FD06tULO3bsQHZ2Nvbv39/s81pZWeHbb79FUFAQHnroIaxatQqffvop1Go17OzsMHToUKxevRqZmZlQqVTYtWsXzpw5g6ysLABAt27d4OPjg6VLlyI/Px+VlZX44IMPIJfLuWPMGSV4QppBOxr++nVNM3xeHjXDk/Zn6NCh3O+Ojo4IDAzEtWvXdHrs7du3YWtry90iIyO5fX369IFUKuU9T3FxMdLT0wEAO3fuBGMMnTp1glgsxqeffooZM2ZwK76JRCLs27cPN27cgKOjI6RSKWJjYzFhwgSTWhWuqQya4CMiIiAQCHi36n00jDFERETAw8MDEokEISEhuHr1qgEjJkSjejN8crJmZTdCSBWBjtd8enh44OLFi9ztxRdf1PncnTt3RlxcHJf0z549C4VCAT8/P+7YAQMG4OLFiygoKEBWVhYOHz6MvLw83jHmyuA1+KCgIGRlZXG36oMz1q9fj40bN+Lzzz9HQkIC3N3dERoaiiLthcOEtDFtMzyNhiekyunTp7nf8/PzcePGDXTr1k2nx1paWqJLly7crfpo9r///ptbf177PLa2tvD09OSdw8bGBjKZDPn5+Thy5AimTp1a63kcHBzg4uKCmzdv4ty5c3UeY24MPtGNpaVlnSMrGWPYtGkTli1bhkceeQQAsGPHDri5uWHPnj2YN29eW4dK2inGNGuu5+RQTZ2QuqxatQpOTk5wc3PDsmXL4OzszLvePSkpCZWVlbh37x6Kiopw8eJFAGh0opzKykrMmTMHy5cvR1paGlasWIFXXnkFQqGmbnrkyBEwxhAYGIh///0Xb775JgIDA/Hcc89x5/jpp5/g4uICb29vXL58GQsXLsS0adMQFhbW0n8Go2PwBH/z5k14eHhALBYjODgYkZGR8Pf3R0pKCuRyOe9FEIvFGDVqFOLj4+tN8BUVFdzISQAoLCwEACgUCp1HdWqPa4lRoMaAytM0SqWmTz0vD/jvypxWoVYreD/NgbmVSaXS/z3XlPenvjPLGYsPPvgACxcuxM2bN9GnTx8cOHAAVlZW3P6JEydyM8UBQL9+/QBoKnINGTNmDAICAvDAAw+goqICTz75JCIiIrj99+/fx9KlS5GRkQFHR0c8+uijeP/99yESibhjsrKysHjxYmRnZ0Mmk2HmzJl49913W6jkxs2gCT44OBjfffcdunbtiuzsbKxZswbDhg3D1atXIZfLAQBubm68x7i5ufHeKDWtXbuWu4yiuujoaN5gDV3ExM
Todbyxo/IYN7ncvMoDmF+Z9HnPlWoXGWgHRowYgStXrtS7PzU1tcnnXrlyZZ2f6QAwffp0TJ8+vcHHv/rqq3j11Veb/PymzKAJfsKECdzvvXr1wtChQ9G5c2fs2LEDQ4YMAVB7oAZjrMHBG0uXLsXixYu5+4WFhfDy8kJYWBjs7e11ikuhUCAmJgahoaG8b4KmisrTOMaA+/cNs0SrWq2AXB4Dd/dQCIWm//oA5lcmiUSBW7f0e89pWw/NzebNm/HNN9/g1KlThg6lxQQFBSE5OdnQYbQ4gzfRV2djY4NevXrh5s2bXP+NXC6HTCbjjsnJyalVq69OLBZDLBbX2i4SifROBk15jDGj8tSmVGqWZ717t6oZXmigoadCocgskmF15lIm7RVV+rznzOl/TWv37t3coDdvb2/Ex8cbOKKWcfDgQa5LRdeKoCkwqgRfUVGBa9euYeTIkfDz84O7uztiYmK4/prKykrExcVh3bp1Bo6UmLqyMs2guXv3ALXa0NEQYho6derEux8SEtJoP3pTRUVFtcp56+Lj49Nmz9WWDJrg33jjDUyePBne3t7IycnBmjVrUFhYiFmzZkEgEGDRokWIjIxEQEAAAgICEBkZCalUihkzZhgybGKitM3w1ZdoJYQQc2XQBJ+RkYGnnnoKubm5cHFxwZAhQ3D69Gnu29SSJUtQVlaG+fPnIz8/H8HBwYiOjoadnZ0hwyYmRqXSNMFXb4YnxBioqfmINIGu7xuDJvgffvihwf0CgQARERG8yyII0RU1wxNjZWVlBaFQiMzMTLi4uMDKykrnmd9I+8UYQ2VlJe7evQuhUMi7FLEuRtUHT0hL0E5KQ83wxFgJhUL4+fkhKysLmZmZhg6HmBipVApvb29uwp/6UIInZkGlqhoNX22eI0KMlpWVFby9vbllTwnRhYWFBSwtLXVq8aEET0xaebmmtp6XR83wxPQIBAKzu3yVGA9K8MRk/fuvZvEXQgghtVGCJyZD2wyfna25X1xsuElpCCHE2FGCJ0avZjM8NcUTQkjjKMETo6WdlMZMp/QmhJBWRQmeGBWVSlNTz8mh0fCEENIclOCJUaDR8IQQ0rIowRODomZ4QghpHZTgSZujZnhiChjTvE+zsgCRCLC1NXREhOin2QlepVLh8uXL8PHxQceOHVsiJmKmKiqqmuFp4i5ijJRKzXs0MxOQy6sWJ/LxoQRPTI/eCX7RokXo1asX5syZA5VKhVGjRiE+Ph5SqRS//fYbQkJCWiFMYsoKCzUfmvfvGzoSQmorL9ck86wszfuUvnwSc6F3gv/555/xzDPPAAD+7//+DykpKfjnn3/w3XffYdmyZfjrr79aPEhietTqqrnhy8sNHQ0hfMXFmlp6VpZmtUHGDB0RIS1P7wSfm5sLd3d3AMDBgwfx+OOPo2vXrpgzZw4+/fTTFg+QmBZqhifGiDFNIs/K0txopUHSHuid4N3c3JCUlASZTIbDhw9j8+bNAIDS0lJYWFi0eIDENFAzPDE2KpWmBUlbU6cBnaS90TvBP/fcc5g+fTpkMhkEAgFCQ0MBAGfOnEG3bt1aPEBivNTqqtHw1AxPjEFFRVV/enY2tSKR9k3vBB8REYGePXsiPT0djz/+OMRiMQDNGrVvv/12iwdIjE9FhaZmlJtLH6DE8IqLq5re8/KoP50QrSZdJvfYY48BAMqrVdtmzZrVMhERo0XN8MRY5OdXNb3TJEmE1E3vBK9SqRAZGYkvv/wS2dnZuHHjBvz9/fHuu+/C19cXc+bMaY04iYFom+Hv3gXKygwdDWmv1GrNl0u5XJPYqUuIkMbpvZr2+++/j6ioKKxfvx5WVlbc9l69euGbb75p0eCI4VRWAhkZwKVLwO3blNxJ21MogPR04MwZ4LffgPh4IDmZkjshutK7Bv/dd99h69atGDNmDF588UVue+/evfHPP/+0aHCk7RUVaWpKBQWGjoS0R6WlVU3vtPAQIc2jd4K/c+cOunTpUmu7Wq
2GQqFokaBI21KrNdcI5+RQTZ20vYKCqqRO4zsIaTl6J/igoCCcOHECPj4+vO0//fQT+vXr12KBkdZXWVk1KY1SaehoSHuhVmvGdGRlafrUS0sNHREh5knvBL9ixQo8++yzuHPnDtRqNX755Rdcv34d3333HX777bcmB7J27Vq88847WLhwITZt2gQAYIxh5cqV2Lp1K/Lz8xEcHIwvvvgCQUFBTX4eUtUMf/8+XVJE2oZCobkuXZvUqbGPkNan9yC7yZMnY+/evTh48CAEAgHee+89XLt2Df/3f//HTXqjr4SEBGzduhW9e/fmbV+/fj02btyIzz//HAkJCXB3d0doaCiKaJ7JJsnLA5KSgBs3NM2ilNxJa0tNBU6eBH7/HTh7VjNojpI7IW1D7wQPAOPGjUNcXByKi4tRWlqKkydPIiwsrEkBFBcX4+mnn8bXX3/NW26WMYZNmzZh2bJleOSRR9CzZ0/s2LEDpaWl2LNnT5Oeqz2qrNTUmgDNhyv1sZPWdP8+8M8/wJ9/au5fuqRpLaLBcoS0vWavB99cL7/8Mh566CGMHTsWa9as4banpKRALpfzvjiIxWJuedp58+bVeb6KigpUVJt0uvC/WTAUCoXOgwC1x5nyoMGSEk0/5/37gEqlKYdabbrlqU5bDiqP4WkHaMrl/P50oVDB+2nqBAL9PxNM+fODmAe9E7xQKIRAIKh3v0qPuUt/+OEHXLhwAQkJCbX2yeVyAJrFbapzc3NDWlpavedcu3YtVq5cWWt7dHQ0pFKpzrEBQExMjF7HGzu5nMpjzEy5PI6OmltN/fubbpnqos9nQimNHiQGpneC379/P+++QqFAYmIiduzYUWdirU96ejoWLlyI6OhoWFtb13tczS8TjLEGv2AsXboUixcv5u4XFhbCy8sLYWFhsLe31yk2hUKBmJgYhIaGQiQS6fQYQ6qs1PSv1zcaXq1WQC6Pgbt7KIRC4y9PY6g8ba+8vGqQXG5u403uQqEC/fvH4MKFUKjVxlkmfXh6lkMq/RWPPvqozp8JhTSHLjEwvRP81KlTa2177LHHEBQUhL179+o8Ve358+eRk5ODAQMGcNtUKhX+/PNPfP7557h+/ToATU1eJpNxx+Tk5NSq1VcnFou5BXCqE4lEeifrpjymLRUXV01Kox0wJ2xgVIVQKDLaBNIUVJ7WVVhYtYjLvXtNO4daLTLJBM8YQ0FBMlJSYpCSEoP09D/Qt28PPPnkkzp/JhjzZwdpH1qsDz44OBhz587V+fgxY8bg8uXLvG3PPfccunXrhrfeegv+/v5wd3dHTEwMd319ZWUl4uLisG7dupYK2+QwVjUpDbUAkpbEmKYVKCtLM/FMSYmhI2pbpaV5SEv7g0vq9++n8vbfunULjC49ISakRRJ8WVkZPvvsM3h6eur8GDs7O/Ts2ZO3zcbGBk5OTtz2RYsWITIyEgEBAQgICEBkZCSkUilmzJjREmGbFIVCM2ju7l2alIa0HKVS82UxM1MzSK6y0tARtR2lsgIZGX9xCV0uvwCgKoELhSJ06jQUfn6hGDZsNAYNym6we5AQY6N3gu/YsSPvTc4YQ1FREaRSKXbt2tWiwS1ZsgRlZWWYP38+N9FNdHQ07OzsWvR5jFldzfCENEd5uSaZZ2Vp3lt6jIs1aYypkZNzuVqz+wkolfzrRp2dg+DnFwo/v1B4ez8AKytbAICPjwIWFgcNETYhTaZ3gv/44495CV4oFMLFxQXBwcG869ibIjY2lndfIBAgIiICERERzTqvqaFmeNLSioqqmt7z89vPl8XCwgwuoaemHkNpaQ5vv62tDL6+Y/9L6mNhayur50yEmB69E3x4eHgrhEEAaoYnLUf7JVE7SK69TP5YUVGItLRYLqnfu3edt18ksoG39yiulu7s3IOa3YnZ0inBX7p0SecT1pxuljSupERTW29PNSvS8lQqzZdD7cps1eZ7MlsqlQKZmWf/q6EfxZ07p8FYVZ+DQCCETDaIS+
idOg2BhYWVASMmpO3olOD79u0LgUDQ6AhSgUCg10Q37RljmoSek9P+RiuTllNRUdWfnp1t/v3pjDHk5V3/L6HHIC0tFpWV/OaJjh27VOtHD4FE0ryuQ0JMlU4JPiUlpbXjaDe0zfC5ubToBmma4uKqpve8PPNv9SkpyUFq6tH/mt2Poqgog7dfInGCr+8Y+PmFwtd3LDp08DVMoIQYGZ0SfM2134n+qBmeNEf1/nRznyBNoShFevoJrh89J4ffRWhhIYaX1wiulu7m1hcCQZPWzSLErDX5OvikpCTcvn0blTUunJ0yZUqzgzIX1AxPmkrbn65N6uXlho6o9ajVKmRnJ3I19IyMk1Cp+J8rbm59uYTu6TkCIpHEQNESYjr0TvDJycl4+OGHcfnyZV6/vHYkKvXBa5rec3M1H9DUDE90VVlZNUguO9u8r6QoKEjhEnpa2jGUlfHnwrW39/qvyT0Uvr4PwsbG1UCREmK69E7wCxcuhJ+fH44ePQp/f3+cPXsWeXl5eP311/HRRx+1Rowmo7RU88FMzfBEV6WlmoRubQ1ER5tvUi8ry+emgU1NPYr8/Fu8/WKxPXx8RsPXV3M9uqNjV7p8jZBm0jvBnzp1Cn/88QdcXFwgFAohFAoxYsQIrF27Fq+++ioSExNbI06jRc3wRF8FBVWXst2/r1kgaODAxldoMyVKZQUuX76M48dPISXlOLKyzoGxqgIKhZbw8BjCTTDj4TEYQmGLLY1BCEETErxKpYKtrWb6RmdnZ2RmZiIwMBA+Pj7cCnDtgVJZNSkNNcOThqjVVf3pcrl5zk7IGMPdu1e4Gvrt23FQKPgFdXLqziV0b+8QiMXtZ8ppQgxB7wTfs2dPXLp0Cf7+/ggODsb69ethZWWFrVu3wt/fvzViNCqlpZra+r171AxP6qdQaLprtP3p5vglsKgok0voKSlHUVIi5+3v0KEDPD0nwNc3DL6+Y2Fvr/tiVISQ5tM7wS9fvhwl/7VFr1mzBpMmTcLIkSPh5OSEvXv3tniAxoAxTbNqTo7mGmRC6lJWVtX0nptrXk3uAFBRUYTbt+O4a9Jzc5N4+y0tJdw0sP7+ozBxYjrOn3/IJNeDJ8Qc6J3gx40bx/3u7++PpKQk3Lt3r9Yqc+ZAqaTR8KRh9+9XLeJSUGDoaFqWWq1EZmYCl9Dv3DkFtbr6KEABZLKB3AQznp7DYGkpBgAIhQoIBBl1n5gQ0ib0TvA7duzAY489BhsbG26bo6NjiwZlLJKSGj+GtC9qtWb2OG1N3Zz60xljuHfvJpfQ09KOo6LiPu+YDh38uYTu6/sgJBLz/N8nxBzoneDfeOMNzJ8/H5MnT8YzzzyD8ePHw9LSPEe/qtWaEc6kfVMqNYPjtLcaczuZtJKSu0hNPcYl9cLC27z91tYd4es7hltStWNH8x9nQ4i50DszZ2Vl4fDhw/j+++/x5JNPQiKR4PHHH8czzzyDYcOGtUaMhLS58vKqpve7d82nP12hKENGxkmkpGgSenY2/7JWCwsreHoO5xK6u3t/CIUWBoqWENIceid4S0tLTJo0CZMmTUJpaSn279+PPXv2YPTo0fD09MStW7caPwkhRqiwUJPQ5XLNVRLmgDE1srMvcgk9I+MklEr+vLeurr25hO7lNRJWVjb1nI0QYkqa1bYulUoxbtw45OfnIy0tDdeuXWupuAhpdYxp+tO1NXVzmajo/v00LqGnph5DWVkub7+trQc3r7uv7xjY2robKFLjJRAA9vaAk5Pm5uUFZNCYQWJimpTgtTX33bt34+jRo/Dy8sJTTz2Fn376qaXjI6RFKZWayx21NXVz6E8vLy9AWlost0b6vXs3efutrGzh7R3CJXUnp25md8VLc4lEQMeOgLMz4OiouVUfWiSVGi42QppK7wT/1FNP4f/+7/8glUrx+OOPIzY2lvreiVErL9ck86wsTXI39fWQVKpK3LlzhkvomZlnedPACgQW6NQpmGt29/AIhoUFXYtenY
2Npmbu6Kj5aW+vqbUTYk70TvACgQB79+7FuHHjzHb0PDF9RUVVTe+mvvgPYwy5ude4hH77dhwqK/kzLjk6BsLPT5PQvb1DYG3tYKBojY9QCHToUNXc7uioWdyHEHOnd4bes2dPa8RBSLMwphkYp10/vajI0BE1T3GxnLt0LSXlKIqLM3n7JRJnLqH7+o6Fg4O3gSI1PlZW/Kb2jh0BC7oQgLRDVAUnJkul0szzrk3qFRWGjqjpysvL8e+/h5GcfBwpKTG4e/cKb7+lpTW8vEZy/eiurr0hENAkDYCmeV3b1O7kBPy3FhYh7R4leGJSKio0/ekWFsDhw6Y7hbBarYJcfv6/Gno0MjPjoVTyp4F1d+/3Xw09FF5ew2FpSe3KFhZVyVz7U0TDCwipk84JPiMjA56eLbsa1JYtW7BlyxakpqYCAIKCgvDee+9hwoQJADR9jytXrsTWrVuRn5+P4OBgfPHFFwgKCmrROIhxKy6uqqXn5WkGQw0caHqD5fLzb/2X0GOQlvYHyssLePsdHHy4hO7r+yCkUmfDBGpEpNKqpnZnZ01tnWaXJEQ3Oif4nj174rPPPsOzzz7bYk/u6emJDz74AF26dAGgmed+6tSpSExMRFBQENavX4+NGzciKioKXbt2xZo1axAaGorr16/Dzo7WkjZn1fvTCwv5+0xltHNpaR7S0v7gllQtKEjh7ReLHeDj8yA6d34QU6ZYIiNjNhizMlC0hicU8q89d3Sky9MIaQ6dE3xkZCRefvll/Prrr9i6dSucnJya/eSTJ0/m3X///fexZcsWnD59Gj169MCmTZuwbNkyPPLIIwA0XwDc3NywZ88ezJs3r9nPT4yHSqWZElab1MvLG3+MsVEqK5CR8RdXS5fLLwCoGr4vFIrQqdNQrh9dJhsAodASQqECMtlB3LkjMOnR/vqystIMgNMm9I4d+deeE0KaR+d/p/nz52PChAmYM2cOgoKCsHXrVkyZMqXFAlGpVPjpp59QUlKCoUOHIiUlBXK5HGFhYdwxYrEYo0aNQnx8fL0JvqKiAhXVRlsV/lf9UygUUOjYYas9Tq020Q7eGrTlMLbyVFZqrkuXyzU/q3dBN9QMKxQqeD8NhTE1cnIuIzn5GFJSjuH27ZNQKst4x7i49ICf31j4+4+Bt/dIWFlVHwHGACiMpjwtqa4y2dpWJfSOHYG6GuGMdc5/lUpTDl0/Q/Q9lpDWIGBM/zrD559/jtdeew3du3evdS38hQsX9DrX5cuXMXToUJSXl8PW1hZ79uzBxIkTER8fj+HDh+POnTvw8PDgjn/hhReQlpaGI0eO1Hm+iIgIrFy5stb2PXv2QErtfaSZcnNz8ffff+PixYu4dOkS7t/nL6fasWNH9OnTh7uZ61LKpHGlpaWYMWMG7t+/D3t7e0OHQ9ohvRvE0tLSsG/fPjg6OmLq1KnNnuwmMDAQFy9eREFBAfbt24dZs2YhLi6O219zSk3GWIPTbC5duhSLFy/m7hcWFsLLywthYWE6/5MpFArExMTA3T0UQqHpD9FVqxWQyw1Xnvv3Nc3u2dma35tLKFSgf/8YXLgQCrW6dctTUVGItLS4/2rpR5GXd4O3XySygY/PA/DzGwN//zFwdu7BvT+TkzW3xrRleVqLtbWmVq4dEGdnp0BOjvn8D0kkCty6FYPQ0FCIdBy2X1hz8AghbUyv7Pz111/j9ddfx9ixY3HlyhW4uLg0OwArKytukN3AgQORkJCATz75BG+99RYAQC6XQyaTccfn5OTAzc2t3vOJxWKIxeJa20Uikc7/mFpCocgsPpy02qo8anVVf7pcDpSWttbziFo8IapUCmRmnuUGxt25cxqMVQ3XFwiEkMkGcf3onToNgYVF1cA4xpo+a15rlKc1CASa5vXqk8nUvPZc29RuLv9D2oly9Pkc0ffzhpCWpnOCHz9+PM6ePYvPP/8cM2fObLWAGGOoqKiAn58f3N3dERMTg379+gEAKisrERcXh3Xr1r
Xa85OmUSg0NfTMTM1PU+l+ZIwhL+86Nw1sWlosKiv50+B17NiFS+je3iGQSDoaKFrDsLSsSuTa0e2UuwgxfjoneJVKhUuXLrXotfDvvPMOJkyYAC8vLxQVFeGHH35AbGwsDh8+DIFAgEWLFiEyMhIBAQEICAhAZGQkpFIpZsyY0WIxkKYrK9Mk9KwsIDfXeAdI1VRSksObBraoiL8OqETiBF/fMdw0sB06+BomUAORSvmXqjk4mM6liYSQKjon+JiYmBZ/8uzsbDz77LPIysqCg4MDevfujcOHDyM0NBQAsGTJEpSVlWH+/PncRDfR0dF0DbwBFRRomt0zMzW/mwKFohTp6Se4hJ6T8zdvv4WFGF5eI7iE7u7er91MA6tdiKX6ZDK0EAsh5sGgV51u27atwf0CgQARERGIiIhom4BILWq1ZvY4bU29tfrTW5JarUJ2diKX0DMyTkKl4i/87ubWl2t29/QcAZFIYqBo25aVFX/e9g4d6NpzQswV/WuTWpRKTS1de6usbPwxhlZQkMIl9LS0Yygru8fbb2/vxZsG1sbG1UCRti07O/687dT4RUj7QQmeANDMHKddP/3uXePvTy8uLsa1a78gOTkWqakxyM+/xdsvFtvDx2c0fH1D4ec3Fo6OXRu8vNIcWFhUXaqmHeFu1X5nviWk3aME344VFmoSulyumfvdmCmVFbhz5xQ32l0uPw91tW8hQqElPDyG/NfsPhYeHoMhFJr329vauqqp3clJMxiOFmIhhGiZ9ycg4WFMM9pdW1MvKTF0RPVjjOHu3Svc9ei3b8dBoeAPAHB27gZf3zD4+Y2Ft3cIxGLzbX8WCDQJvHr/OU3MSAhpCCV4M6dUaq5LB4AjR4x7EZeiokwuoaekHEVJiZy338bGDb6+Y+HvPxpTpwKpqTNNYmKYphCJNM3t1SeTocFwhBB90EeGGSovr7qULSdHs23gQOMbLFdRUYTbt+O4a9Jzc5N4+y0tJfD2HgU/v7Hw8wuFi0svCAQCCIUKODsfRGqqYeJuLV5eVcnc3p6uPSeENA8leDNRVFTV9J6fz58u1Vj6ZdVqJTIzE7iEfufOKajV1ZaQgwAy2UDuenRPz2GwtKw97bCp0157Xv1StXv3gH79jOe1IoSYPkrwJooxTVLQrp9eVNT4Y9oaYwz37t3kEnpa2nFUVPBXm+nQwZ9L6L6+D0IiMb/V18TiqkvVHB01Te/auc0B479igRBimijBmxCVStPkrk3q1Za9NxolJXeRlvbHf9ekx6Cw8DZvv7V1R/j4PMhNMtOxo7+BIm099vb8a89rLsRCCCFtgRK8kauo0PSna5dbVakaf0xbUijKkJHxF5fQs7MTefstLKzg6Tkcvr6afnR39/4QCi3qOZvpsbDgL8Li5EQLsRBCjAMleCNUXFzVn37vXtOXH20NjKmRnX0RKSmaZveMjJNQKvlD811de3MJ3ctrJKysbAwUbcuTSvnzttNCLIQQY0UJ3khU708vLDR0NHz376dxCT019RjKynJ5+21tPbgmd1/fMbC1dTdQpC1LKKxqbtfeJO1jynpCiBmgBG8gKpVmSlhtUjem69PLywuQlhbLzRp3795N3n4rK1t4e4dwSd3JqZtZTAOrXYhF29TesSNde04IMV308dWGKiv5/elKZeOPaQsqVSXu3DnDJfTMzLNgrGpot0BgAQ+PwVxC9/AIhoWF6Xc029ry5223tzd0RIQQ0nIowbeykpKqWnpennFcEsUYQ27uNS6h374dh8rKYt4xjo6B3AQz3t4hsLZ2MFC0LUMo1NTItU3tjo6ay9cIIcRcUYJvBQUFmgFymZnG05+en5+Py5d3Izn5OFJSjqK4OJO3XyJx5hK6r+9YODh4GyjSlmFtXXvdc5pEhhDSnlCCbwFqNb8/vazM0BEBlZUlSE//87/L16Jx9+5V3n5LS2t4eY3kmt1dXXtDIDDNDCgQaNY5rz5vO117Tghp7yjBN5FCoelHz8zU/F
QoDBuPWq2CXH6eux49IyMeanVVUAKBAG5ufeHnFwZf31B4eQ2HpaW1ASNuOu3At8DAqoRO154TQggfJXg9lJZW1dJzcw3fn56ff4tL6Glpf6C8vIC338HBB35+ofD3H41p01S4ceNJk1x9TSrl953b2Wleg8BAanYnhJD6UIJvREFB1cpsBQWGjaW0NI+bBjY19SgKClJ4+8VihxrTwHbmVl+ztz9ooKj1o12IpfpkMtY1GhoM/cWKEEJMASX4Bhw9qplVzlCUygreNLBy+QUAVdPaCYUidOo0lEvoMtkACIWm9ZJaWfGnee3Qga49J4SQlkAfpQ0oLW3b52NMjZycy1xCT08/AaWSP2LP2TmIS+je3g/Aysq0RpPZ2fETup2doSMihBDzRAnewAoLM7iEnpp6DKWlObz9trYybl53X98xsLPzMFCk+rOw0Fx7Xn0yGSsrQ0dFCCHtAyX4NlZRUchNA5uSEoN7967z9otENvD2HsXV0p2de5jMNLASCf/acwcHGgRHCCGGYtAEv3btWvzyyy/4559/IJFIMGzYMKxbtw6BgYHcMYwxrFy5Elu3bkV+fj6Cg4PxxRdfICgoyICR606lUiAz8yw3MO7OndNgrGrNV4FACJlsEJfQO3UaAgsL46/mCgSaBF49oUulho6KEEKIlkETfFxcHF5++WUMGjQISqUSy5YtQ1hYGJKSkmBjo1lidP369di4cSOioqLQtWtXrFmzBqGhobh+/TrsjLADlzGGvLzrXEJPSzuOysoi3jEdO3ap1o8eAomko4Gi1Z1IpGlurz6ZDA2GI4QQ42XQj+jDhw/z7m/fvh2urq44f/48HnjgATDGsGnTJixbtgyPPPIIAGDHjh1wc3PDnj17MG/ePEOEXUtJSQ5SU4/+1+x+FEVFGbz9EokTfH3HcNPAdujga5hA9WBjw7/23N6e1j0nhBBTYlR1sPv37wMAHB0dAQApKSmQy+UICwvjjhGLxRg1ahTi4+PrTPAVFRWoqKjg7hf+Nxm8QqGAQsfp5rTHCYV1H69QlOL27ZNISfkDKSlHkZ19ibffwkIML6/h8PcfAz+/MXB371tjGti2nfZOW476yiMUVjW3Ozpqauo1rz1nTHMzBtoZ+qrP1GfKzK08gPmVSaXSlEPXzxB9jyWkNRhNgmeMYfHixRgxYgR69uwJAJDL5QAANzc33rFubm5IS0ur8zxr167FypUra22Pjo6GVM9O4v79YwAAKpUKKSkpuHjxIv7++29cu3YNyhprvfr5+aFPnz7o27cvunfvDjG3VJkcAL+lwlC05WkIY8C9e20QTAuQyxsvjykxt/IA5lemmBjdy1Pa1tfZElKD0ST4V155BZcuXcLJkydr7as5ipwxVu/I8qVLl2Lx4sXc/cLCQnh5eSEsLAz2Oi74rVAosGvXLhw4UInk5Fikph5HWRk/69nbe8HPbwz8/B6En9+DsLFxBaBZ4/3yZZ2eps106KBAly4xUKlC4egown/DG0yWWq2AXB4Dd/dQCIWmN/VuTeZWHsD8yiSRKHDrVgxCQ0Mh0nHhg0JjWUqStFtGkeAXLFiAAwcO4M8//4Snpye33d3dHYCmJi+TybjtOTk5tWr1WmKxuFrtuYpIJNLpH1Mul2PEiBG4detWjfPaw8dnNHx9Q+HnNxaOjl15XzKMZfpUC4uqke3anxYWmql2vbxEZvFhqyUUUnmMnbmUycJC81PXzxHtsYQYkkETPGMMCxYswP79+xEbGws/Pz/efj8/P7i7uyMmJgb9+vUDAFRWViIuLg7r1q1rlZjc3NxQUlICCwsLeHgMga9vGPz8xsLDY7BRTgMrlfLnbXdwqD0Yzli+fBBCCGk7Bs1YL7/8Mvbs2YP//e9/sLOz4/rcHRwcIJFIIBAIsGjRIkRGRiIgIAABAQGIjIyEVCrFjBkzWiUmgUCA//3vf0hOTsbVq48a1epr1QfDaUe4SySGjooQQogxMmiC37JlCwAgJCSEt3
379u0IDw8HACxZsgRlZWWYP38+N9FNdHR0q14D369fP2RlZbXa+XVlZVVVO3dy0oxup2vPCSGE6MLgTfSNEQgEiIiIQEREROsHZGC2tvx523UcE0gIIYTUQvVBAxEKNTXy6pPJ1DE2kBBCCGkSSvBtxNqa33feoQMtxEIIIaT1UIJvBQKBZp1zbVO7kxNM/tpzQgghpoUSfAuwtORfe+7oqFmchRBCCDEUSvBNIJXy+87ruvacEEIIMSRK8I0QCjX95dX7z2suxEIIIYQYG0rwDRg+XJPYtdNUEkLaD0tLzVwUIhFNKEVMEyX4Bjg50Uh3QsyNQKBJ2iJRVQKv62f1bjda+ZWYIkrwhBCzIRQ2nLS1iZ2Q9oASPCHEJFhaNl7rpu40QqpQgieEGJRAwO/vru8ndZcRoh9K8ISQViMUaqZgbih5W1rSZaaEtAZK8ISQJrGwaLi2nZkJ9O5Nfd6EGAoleEJILY31dTfWZE6jzgkxPErwhLQjAoFuo8ypyZwQ00cJnhAzYWHReM3bkv7jCWk36N+dEBOgy8QsNMqcEFIdJXhCDEjbZA5o1jyQSOpO4NRkTgjRFyV4QlqJrk3mCgWQmgr4+tKIc0JIy6EET0gT6DIxC82qRggxJErwhFSjXYikrpHl9S1EQgghxogSPGk3aCESQkh7QgmemAVaiIQQQvgowROjVtfa3dppUAMCqkad0yVihBDCZ9CPxT///BOTJ0+Gh4cHBAIBfv31V95+xhgiIiLg4eEBiUSCkJAQXL161TDBkhanXYjEzg5wdATc3QEvL6BzZ6BbN8085v37A716ae77+2v2u7pqHm9jo3k8JXdCCKnNoDX4kpIS9OnTB8899xweffTRWvvXr1+PjRs3IioqCl27dsWaNWsQGhqK69evw87OzgARE11RkzkhhBiWQRP8hAkTMGHChDr3McawadMmLFu2DI888ggAYMeOHXBzc8OePXswb968tgyVVNPchUgIIYS0PqPtg09JSYFcLkdYWBi3TSwWY9SoUYiPj683wVdUVKCiooK7X1hYCABQKBRQ6LjElfY4tdo8lsTSlqOx8giFmuRc/Rrv6v3f2lp5Y5eIqVSaW2vRvj66vp7GztzKA5hfmZpSHnMpOzFdRpvg5XI5AMDNzY233c3NDWlpafU+bu3atVi5cmWt7dHR0ZBKpXrGEKPX8cbO3MoTE0PlMXbmViZ9ylNaWtqKkRDSOKNN8FqCGtVFxlitbdUtXboUixcv5u4XFhbCy8sLYWFhsLe31+k5FQoFYmJi4O4eCqHQ+C+MbqjWralxK3D0aAxCQ0MhMoMLvbWvD5XHeJlbmZpSHm3rISGGYrQJ3t3dHYCmJi+TybjtOTk5tWr11YnFYojF4lrbRSKR3h80QqHIoAm+pdbu1rYUNuVvYMyoPMbP3MqkT3nMqdzENBltgvfz84O7uztiYmLQr18/AEBlZSXi4uKwbt06A0fXfLR2NyGEkNZk0BRSXFyMf//9l7ufkpKCixcvwtHREd7e3li0aBEiIyMREBCAgIAAREZGQiqVYsaMGQaMunG0EAkhhBBDM2iCP3fuHEaPHs3d1/adz5o1C1FRUViyZAnKysowf/585OfnIzg4GNHR0Qa7Br6+hUho7W5CCCHGxqAJPiQkBIyxevcLBAJEREQgIiKi7YKqxtNTMxUqLURCCCHE1FAvbwOcnSmpE0IIMU003xghhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWbI7Ce60c6Up8/SjQqFAqWlpSgsLDSLFaGoPMbN3MoDmF+ZmlIe7WdOQ7N1EtKazD7BFxUVAQC8vLwMHAkhpD0qKiqCg4ODocMg7ZCAmfnXS7VajczMTNjZ2UGg4yowhYWF8PLyQnp6Ouzt7Vs5wtZH5TFu5lYewPzK1JTyMMZQVFQEDw8PCIXUG0rantnX4IVCITw9PZv0WHt7e7
P4cNKi8hg3cysPYH5l0rc8VHMnhkRfKwkhhBAzRAmeEEIIMUOU4OsgFouxYsUKiMViQ4fSIqg8xs3cygOYX5nMrTykfTD7QXaEEEJIe0Q1eEIIIcQMUYInhBBCzBAleEIIIcQMUYInhBBCzFC7TfCbN2+Gn58frK2tMWDAAJw4caLB4+Pi4jBgwABYW1vD398fX375ZRtFqht9yvPLL78gNDQULi4usLe3x9ChQ3HkyJE2jLZx+r4+Wn/99RcsLS3Rt2/f1g1QT/qWp6KiAsuWLYOPjw/EYjE6d+6Mb7/9to2ibZy+5dm9ezf69OkDqVQKmUyG5557Dnl5eW0UbcP+/PNPTJ48GR4eHhAIBPj1118bfYyxfx4QAgBg7dAPP/zARCIR+/rrr1lSUhJbuHAhs7GxYWlpaXUen5yczKRSKVu4cCFLSkpiX3/9NROJROznn39u48jrpm95Fi5cyNatW8fOnj3Lbty4wZYuXcpEIhG7cOFCG0deN33Lo1VQUMD8/f1ZWFgY69OnT9sEq4OmlGfKlCksODiYxcTEsJSUFHbmzBn2119/tWHU9dO3PCdOnGBCoZB98sknLDk5mZ04cYIFBQWxadOmtXHkdTt48CBbtmwZ27dvHwPA9u/f3+Dxxv55QIhWu0zwgwcPZi+++CJvW7du3djbb79d5/FLlixh3bp1422bN28eGzJkSKvFqA99y1OXHj16sJUrV7Z0aE3S1PI88cQTbPny5WzFihVGleD1Lc+hQ4eYg4MDy8vLa4vw9KZveT788EPm7+/P2/bpp58yT0/PVouxqXRJ8Mb+eUCIVrtroq+srMT58+cRFhbG2x4WFob4+Pg6H3Pq1Klax48bNw7nzp2DQqFotVh10ZTy1KRWq1FUVARHR8fWCFEvTS3P9u3bcevWLaxYsaK1Q9RLU8pz4MABDBw4EOvXr0enTp3QtWtXvPHGGygrK2uLkBvUlPIMGzYMGRkZOHjwIBhjyM7Oxs8//4yHHnqoLUJuccb8eUBIdWa/2ExNubm5UKlUcHNz4213c3ODXC6v8zFyubzO45VKJXJzcyGTyVot3sY0pTw1bdiwASUlJZg+fXprhKiXppTn5s2bePvtt3HixAlYWhrXW7op5UlOTsbJkydhbW2N/fv3Izc3F/Pnz8e9e/cM3g/flPIMGzYMu3fvxhNPPIHy8nIolUpMmTIFn332WVuE3OKM+fOAkOraXQ1eq+bSsYyxBpeTrev4urYbir7l0fr+++8RERGBvXv3wtXVtbXC05uu5VGpVJgxYwZWrlyJrl27tlV4etPn9VGr1RAIBNi9ezcGDx6MiRMnYuPGjYiKijKKWjygX3mSkpLw6quv4r333sP58+dx+PBhpKSk4MUXX2yLUFuFsX8eEAK0wxq8s7MzLCwsatU2cnJyan0r13J3d6/zeEtLSzg5ObVarLpoSnm09u7dizlz5uCnn37C2LFjWzNMnelbnqKiIpw7dw6JiYl45ZVXAGgSJGMMlpaWiI6OxoMPPtgmsdelKa+PTCZDp06deEuNdu/eHYwxZGRkICAgoFVjbkhTyrN27VoMHz4cb775JgCgd+/esLGxwciRI7FmzRqTq/Ea8+cBIdW1uxq8lZUVBgwYgJiYGN72mJgYDBs2rM7HDB06tNbx0dHRGDhwIEQiUavFqoumlAfQ1NzDw8OxZ88eo+oL1bc89vb2uHz5Mi5evMjdXnzxRQQGBuLixYsIDg5uq9Dr1JTXZ/jw4cjMzERxcTG37caNGxAKhfD09GzVeBvTlPKUlpZCKOR/1FhYWACoqvmaEmP+PCCEx0CD+wxKe5nPtm3bWFJSElu0aBGzsbFhqampjDHG3n77bfbss89yx2svi3nttddYUlIS27Ztm1FdFqNvefbs2cMsLS3ZF198wbKysrhbQUGBoYrAo295ajK2UfT6lqeoqIh5enqyxx57jF29epXFxcWxgIAA9vzzzxuqCDz6lmf79u3M0tKSbd68md
26dYudPHmSDRw4kA0ePNhQReApKipiiYmJLDExkQFgGzduZImJidxlf6b2eUCIVrtM8Iwx9sUXXzAfHx9mZWXF+vfvz+Li4rh9s2bNYqNGjeIdHxsby/r168esrKyYr68v27JlSxtH3DB9yjNq1CgGoNZt1qxZbR94PfR9faoztgTPmP7luXbtGhs7diyTSCTM09OTLV68mJWWlrZx1PXTtzyffvop69GjB5NIJEwmk7Gnn36aZWRktHHUdTt+/HiD/w+m+HlACGOM0XKxhBBCiBlqd33whBBCSHtACZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCalDamoqBAIBLl68aOhQCCGkSSjBE5MVHh6OadOm1doeGxsLgUCAgoKCJp/by8sLWVlZ6NmzZ9MDJIQQA2p3y8US0pjKykpYWVnB3d3d0KEQQkiTUQ2emL19+/YhKCgIYrEYvr6+2LBhA2+/r68v1qxZg/DwcDg4OGDu3Lm1mujDw8MhEAhq3WJjYwEA+fn5mDlzJjp27AipVIoJEybg5s2b3HNERUWhQ4cOOHLkCLp37w5bW1uMHz8eWVlZbfVnIIS0M5TgiVk7f/48pk+fjieffBKXL19GREQE3n33XURFRfGO+/DDD9GzZ0+cP38e7777bq3zfPLJJ8jKyuJuCxcuhKurK7p16wZA8wXg3LlzOHDgAE6dOgXGGCZOnAiFQsGdo7S0FB999BF27tyJP//8E7dv38Ybb7zRquUnhLRjBl7NjpAmmzVrFrOwsGA2Nja8m7W1NQPA8vPz2YwZM1hoaCjvcW+++Sbr0aMHd9/Hx4dNmzaNd0xKSgoDwBITE2s97759+5hYLGYnTpxgjDF248YNBoD99ddf3DG5ublMIpGwH3/8kTGmWRMdAPv333+5Y7744gvm5ubW7L8DIYTUhWrwxKSNHj0aFy9e5N2++eYbbv+1a9cwfPhw3mOGDx+OmzdvQqVScdsGDhyo0/MlJiZi5syZ+OKLLzBixAjuOSwtLREcHMwd5+TkhMDAQFy7do3bJpVK0blzZ+6+TCZDTk6OfgUmhBAd0SA7YtJsbGzQpUsX3raMjAzud8YYBAIBbz9jrM7zNEYul2PKlCmYM2cO5syZ0+D56npukUjE2y8QCOp9LCGENBfV4IlZ69GjB06ePMnbFh8fj65du8LCwkLn85SXl2Pq1Kno1q0bNm7cWOs5lEolzpw5w23Ly8vDjRs30L179+YVgBBCmohq8MSsvf766xg0aBBWr16NJ554AqdOncLnn3+OzZs363WeefPmIT09HceOHcPdu3e57Y6OjggICMDUqVMxd+5cfPXVV7Czs8Pbb7+NTp06YerUqS1dJEII0QnV4IlZ69+/P3788Uf88MMP6NmzJ9577z2sWrUK4eHhep0nLi4OWVlZ6NGjB2QyGXeLj48HAGzfvh0DBgzApEmTMHToUDDGcPDgwVrN8oQQ0lYEjDoBCSGEELNDNXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDP0/abufTubPN7wAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -2625,7 +4105,7 @@ "print('probs.shape (N,H,K) \\t', probs.shape)\n", "\n", "model = NBMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (counts, probs)\n", + "distr_args = (counts, probs, weights)\n", "samples, sample_mean, quants = model.sample(distr_args, num_samples=2000)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb new file mode 100644 index 000000000..6bafac332 --- /dev/null +++ b/nbs/models.deepnpts.ipynb @@ -0,0 +1,1137 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.deepnpts" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# DeepNPTS" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting.\n", + "\n", + "\n", + "**References**
\n", + "[Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", + "\n", + "\n", + ":::{.callout-warning collapse=\"false\"}\n", + "#### Exogenous Variables, Losses, and Parameters Availability\n", + "\n", + "Given the sampling procedure during inference, DeepNPTS only supports `DistributionLoss` as training loss.\n", + "\n", + "Note that DeepNPTS generates a non-parametric forecast distribution using Monte Carlo. We use this sampling procedure also during validation to make it closer to the inference procedure. Therefore, only the `MQLoss` is available for validation.\n", + "\n", + "Aditionally, Monte Carlo implies that historic exogenous variables are not available for the model.\n", + ":::" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "import numpy as np\n", + "\n", + "import torch\n", + "import torch.nn as nn\n", + "import neuralforecast.losses.pytorch as losses\n", + "from typing import Optional\n", + "from functools import partial\n", + "\n", + "\n", + "from neuralforecast.common._base_windows import BaseWindows\n", + "from neuralforecast.losses.pytorch import MQLoss, GMM, PMM, NBMM\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import logging\n", + "import warnings\n", + "\n", + "from fastcore.test import test_eq\n", + "from nbdev.showdoc import show_doc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "logging.getLogger(\"pytorch_lightning\").setLevel(logging.ERROR)\n", + "warnings.filterwarnings(\"ignore\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
DeepNPTS" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class DeepNPTS(BaseWindows):\n", + " \"\"\" DeepNPTS\n", + "\n", + " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + "\n", + " **Parameters:**
\n", + " `h`: int, Forecast horizon.
\n", + " `input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", + " `hidden_size`: int=32, hidden size of dense layers.
\n", + " `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + " `dropout`: float=0.1, dropout.
\n", + " `n_layers`: int=2, number of dense layers.
\n", + " `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", + " `stat_exog_list`: str list, static exogenous columns.
\n", + " `hist_exog_list`: str list, historic exogenous columns.
\n", + " `futr_exog_list`: str list, future exogenous columns.
\n", + " `exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + " `loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + " `valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + " `max_steps`: int=1000, maximum number of training steps.
\n", + " `learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + " `num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + " `early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + " `val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + " `batch_size`: int=32, number of different series in each batch.
\n", + " `valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + " `windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + " `inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + " `step_size`: int=1, step size between each window of temporal data.
\n", + " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", + " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `alias`: str, optional, Custom name of the model.
\n", + " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", + "\n", + " **References**
\n", + " - [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", + "\n", + " \"\"\"\n", + " # Class attributes\n", + " SAMPLING_TYPE = 'windows'\n", + " \n", + " def __init__(self,\n", + " h,\n", + " input_size: int = -1,\n", + " hidden_size: int = 32,\n", + " batch_norm: bool = True,\n", + " dropout: float = 0.1,\n", + " n_layers: int = 2,\n", + " trajectory_samples: int = 100,\n", + " futr_exog_list = None,\n", + " hist_exog_list = None,\n", + " stat_exog_list = None,\n", + " exclude_insample_y = False,\n", + " loss = GMM(),\n", + " valid_loss = MQLoss(level=[80, 90]),\n", + " max_steps: int = 1000,\n", + " learning_rate: float = 1e-5,\n", + " num_lr_decays: int = 3,\n", + " early_stop_patience_steps: int =-1,\n", + " val_check_steps: int = 100,\n", + " batch_size: int = 32,\n", + " valid_batch_size: Optional[int] = None,\n", + " windows_batch_size: int = 1024,\n", + " inference_windows_batch_size: int = -1,\n", + " start_padding_enabled = False,\n", + " step_size: int = 1,\n", + " scaler_type: str = 'standard',\n", + " random_seed: int = 1,\n", + " num_workers_loader = 0,\n", + " drop_last_loader = False,\n", + " optimizer = None,\n", + " optimizer_kwargs = None,\n", + " **trainer_kwargs):\n", + "\n", + " if hist_exog_list is not None:\n", + " raise Exception('DeepNPTS does not support historical exogenous variables.')\n", + "\n", + " if exclude_insample_y:\n", + " raise Exception('DeepNPTS has no possibility for excluding y.')\n", + " \n", + " supported_losses = (losses.GMM,\n", + " losses.PMM,\n", + " losses.NBMM)\n", + "\n", + " if not isinstance(loss, supported_losses):\n", + " raise Exception('DeepNPTS only supports GMM, PMM or NBMM as loss function.') \n", + " \n", + " if not isinstance(valid_loss, losses.MQLoss):\n", + " raise Exception('DeepNPTS only supports MQLoss as validation loss.')\n", + " \n", + " # Overwrite n_components, it has to be the input_size in DeepNPTS\n", + " loss.n_components = input_size\n", + " \n", + " # Inherit BaseWindows class\n", + " super(DeepNPTS, self).__init__(h=h,\n", + " 
input_size=input_size,\n", + " futr_exog_list=futr_exog_list,\n", + " hist_exog_list=hist_exog_list,\n", + " stat_exog_list=stat_exog_list,\n", + " exclude_insample_y = exclude_insample_y,\n", + " loss=loss,\n", + " valid_loss=valid_loss,\n", + " max_steps=max_steps,\n", + " learning_rate=learning_rate,\n", + " num_lr_decays=num_lr_decays,\n", + " early_stop_patience_steps=early_stop_patience_steps,\n", + " val_check_steps=val_check_steps,\n", + " batch_size=batch_size,\n", + " windows_batch_size=windows_batch_size,\n", + " valid_batch_size=valid_batch_size,\n", + " inference_windows_batch_size=inference_windows_batch_size,\n", + " start_padding_enabled=start_padding_enabled,\n", + " step_size=step_size,\n", + " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", + " drop_last_loader=drop_last_loader,\n", + " random_seed=random_seed,\n", + " optimizer=optimizer,\n", + " optimizer_kwargs=optimizer_kwargs,\n", + " **trainer_kwargs)\n", + "\n", + " self.h = h\n", + " self.h_backup = self.h # Used because h=1 during training\n", + " self.use_softmax = True\n", + " self.hidden_size = hidden_size\n", + " self.dropout = dropout\n", + " self.trajectory_samples = trajectory_samples\n", + "\n", + " self.futr_exog_size = len(self.futr_exog_list)\n", + " self.stat_exog_size = len(self.stat_exog_list)\n", + "\n", + " input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size\n", + " # Create DeepNPTSNetwork\n", + " modules = [] \n", + " for i in range(n_layers):\n", + " modules.append(nn.Linear(input_dim if i == 0 else hidden_size, hidden_size))\n", + " modules.append(nn.ReLU())\n", + " if batch_norm:\n", + " modules.append(nn.BatchNorm1d(hidden_size))\n", + " if dropout > 0.0:\n", + " modules.append(nn.Dropout(dropout))\n", + "\n", + " self.deepnptsnetwork = nn.Sequential(*modules)\n", + " self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07))\n", + "\n", + " # Add output layers for Mixture distribution \n", + " output_modules 
= []\n", + " if dropout > 0.0:\n", + " output_modules.append(nn.Dropout(self.dropout))\n", + " \n", + " if isinstance(loss, GMM):\n", + " output_modules.append(nn.Linear(hidden_size, input_size + 1))\n", + " elif isinstance(loss, PMM):\n", + " output_modules.append(nn.Linear(hidden_size, input_size))\n", + " elif isinstance(loss, NBMM):\n", + " output_modules.append(nn.Linear(hidden_size, input_size))\n", + "\n", + " self.output_layer = nn.Sequential(*output_modules)\n", + " self.output_layer.apply(self._init_weights)\n", + "\n", + "\n", + " @staticmethod\n", + " def _init_weights(module, scale=1.0):\n", + " if type(module) == nn.Linear:\n", + " nn.init.uniform_(module.weight, -scale, scale)\n", + " nn.init.zeros_(module.bias)\n", + "\n", + " def _domain_map(self, o_t, insample_y):\n", + " if isinstance(self.loss, GMM):\n", + " weights = o_t[:, :-1] # [B, L + 1] -> [B, L]\n", + " kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1]\n", + " kernel_width = torch.repeat_interleave(input=kernel_width,\n", + " repeats=weights.shape[1],\n", + " dim=-1) # [B, 1] -> [B, L]\n", + " output = torch.cat([insample_y, kernel_width, weights], dim=-1) # [B, L] + [B, L] + [B, L] = [B, 3 * L]\n", + " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L]\n", + " elif isinstance(self.loss, PMM):\n", + " weights = o_t # [B, L] -> [B, L]\n", + " output = torch.cat([insample_y, weights], dim=-1) # [B, L] + [B, L] = [B, 2 * L]\n", + " output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] \n", + " elif isinstance(self.loss, NBMM):\n", + " weights = torch.ones_like(o_t) # [B, L] -> [B, L]\n", + " output = torch.cat([insample_y, o_t, weights], dim=-1) # [B, L] + [B, L] + [B, L] = [B, 3 * L]\n", + " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * \n", + "\n", + " else:\n", + " raise NotImplementedError\n", + " \n", + " return output\n", + "\n", + " # Override BaseWindows method\n", + " def training_step(self, batch, batch_idx):\n", + " \n", + " # Only train one-step ahead\n", + " 
self.h = 1\n", + " self.quantiles = self.loss.quantiles\n", + "\n", + " # Create and normalize windows [Ws, L+H, C]\n", + " y_idx = batch[\"y_idx\"]\n", + " windows = self._create_windows(batch, step=\"train\")\n", + " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h :, y_idx])\n", + " windows = self._normalization(windows=windows, y_idx=y_idx)\n", + "\n", + " # Parse windows\n", + " (\n", + " insample_y,\n", + " insample_mask,\n", + " outsample_y,\n", + " outsample_mask,\n", + " _,\n", + " futr_exog,\n", + " stat_exog,\n", + " ) = self._parse_windows(batch, windows)\n", + "\n", + " windows_batch = dict(\n", + " insample_y=insample_y, # [Ws, L]\n", + " insample_mask=insample_mask, # [Ws, L]\n", + " futr_exog=futr_exog, # [Ws, L+H]\n", + " hist_exog=None, \n", + " stat_exog=stat_exog, # [Ws, 1]\n", + " y_idx=y_idx # [Ws, 1]\n", + " ) \n", + "\n", + " # Model Predictions\n", + " output = self.train_forward(windows_batch)\n", + "\n", + " _, y_loc, y_scale = self._inv_normalization(\n", + " y_hat=outsample_y, \n", + " temporal_cols=batch[\"temporal_cols\"], \n", + " y_idx=y_idx\n", + " )\n", + " # outsample_y = original_insample_y\n", + " outsample_y = original_outsample_y\n", + " distr_args = self.loss.scale_decouple(\n", + " output=output, loc=y_loc, scale=y_scale\n", + " )\n", + " loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask)\n", + "\n", + " if torch.isnan(loss):\n", + " print(\"Model Parameters\", self.hparams)\n", + " print(\"insample_y\", torch.isnan(insample_y).sum())\n", + " print(\"outsample_y\", torch.isnan(outsample_y).sum())\n", + " print(\"output\", torch.isnan(output).sum())\n", + " raise Exception(\"Loss is NaN, training stopped.\")\n", + "\n", + " self.log(\"train_loss\", loss, prog_bar=True, on_epoch=True)\n", + " self.train_trajectories.append((self.global_step, float(loss)))\n", + "\n", + " self.h = self.h_backup \n", + " \n", + " return loss\n", + "\n", + " # Override BaseWindows method\n", + " def 
validation_step(self, batch, batch_idx):\n", + "\n", + " self.h = self.h_backup\n", + " self.quantiles = self.valid_loss.quantiles\n", + "\n", + " if self.val_size == 0:\n", + " return np.nan\n", + "\n", + " # TODO: Hack to compute number of windows\n", + " windows = self._create_windows(batch, step=\"val\")\n", + " n_windows = len(windows[\"temporal\"])\n", + " y_idx = batch[\"y_idx\"]\n", + "\n", + " # Number of windows in batch\n", + " windows_batch_size = self.inference_windows_batch_size\n", + " if windows_batch_size < 0:\n", + " windows_batch_size = n_windows\n", + " n_batches = int(np.ceil(n_windows / windows_batch_size))\n", + "\n", + " valid_losses = []\n", + " batch_sizes = []\n", + " for i in range(n_batches):\n", + " # Create and normalize windows [Ws, L+H, C]\n", + " w_idxs = np.arange(\n", + " i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows)\n", + " )\n", + " windows = self._create_windows(batch, step=\"val\", w_idxs=w_idxs)\n", + " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h:, 0])\n", + " windows = self._normalization(windows=windows, y_idx=y_idx)\n", + "\n", + " # Parse windows\n", + " (\n", + " insample_y,\n", + " insample_mask,\n", + " _,\n", + " outsample_mask,\n", + " _,\n", + " futr_exog,\n", + " stat_exog,\n", + " ) = self._parse_windows(batch, windows)\n", + " \n", + " windows_batch = dict(\n", + " insample_y=insample_y, # [Ws, L]\n", + " insample_mask=insample_mask, # [Ws, L]\n", + " futr_exog=futr_exog, # [Ws, L+H]\n", + " hist_exog=None, # [Ws, L]\n", + " stat_exog=stat_exog,\n", + " y_idx=y_idx,\n", + " ) # [Ws, 1]\n", + "\n", + " # Model Predictions\n", + " output_batch = self(windows_batch)\n", + " # Monte Carlo already returns y_hat with mean and quantiles\n", + " output_batch = output_batch[:,:, 1:] # Remove mean\n", + " valid_loss_batch = self.valid_loss(y=original_outsample_y, y_hat=output_batch, mask=outsample_mask)\n", + " valid_losses.append(valid_loss_batch)\n", + " 
batch_sizes.append(len(output_batch))\n", + "\n", + " valid_loss = torch.stack(valid_losses)\n", + " batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device)\n", + " valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes)\n", + "\n", + " if torch.isnan(valid_loss):\n", + " raise Exception(\"Loss is NaN, training stopped.\")\n", + "\n", + " self.log(\"valid_loss\", valid_loss, prog_bar=True, on_epoch=True)\n", + " self.validation_step_outputs.append(valid_loss)\n", + " return valid_loss\n", + "\n", + " # Override BaseWindows method\n", + " def predict_step(self, batch, batch_idx):\n", + "\n", + " self.h == self.h_backup\n", + " self.quantiles = self.loss.quantiles\n", + "\n", + " # TODO: Hack to compute number of windows\n", + " windows = self._create_windows(batch, step='predict')\n", + " n_windows = len(windows['temporal'])\n", + " y_idx = batch['y_idx']\n", + "\n", + " # Number of windows in batch\n", + " windows_batch_size = self.inference_windows_batch_size\n", + " if windows_batch_size < 0:\n", + " windows_batch_size = n_windows\n", + " n_batches = int(np.ceil(n_windows/windows_batch_size))\n", + "\n", + " y_hats = []\n", + " for i in range(n_batches):\n", + " # Create and normalize windows [Ws, L+H, C]\n", + " w_idxs = np.arange(i*windows_batch_size, \n", + " min((i+1)*windows_batch_size, n_windows))\n", + " windows = self._create_windows(batch, step='predict', w_idxs=w_idxs)\n", + " windows = self._normalization(windows=windows, y_idx=y_idx)\n", + "\n", + " # Parse windows\n", + " insample_y, insample_mask, _, _, _, futr_exog, stat_exog = self._parse_windows(batch, windows)\n", + " windows_batch = dict(insample_y=insample_y, # [Ws, L]\n", + " insample_mask=insample_mask, # [Ws, L]\n", + " futr_exog=futr_exog, # [Ws, L+H]\n", + " stat_exog=stat_exog,\n", + " y_idx=y_idx)\n", + " \n", + " # Model Predictions\n", + " y_hat = self(windows_batch)\n", + " # Monte Carlo already returns y_hat with mean and quantiles\n", + " 
y_hats.append(y_hat)\n", + " y_hat = torch.cat(y_hats, dim=0)\n", + " return y_hat\n", + "\n", + " def train_forward(self, windows_batch):\n", + " # Parse windows_batch\n", + " x_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", + " stat_exog = windows_batch['stat_exog'] # [B, S]\n", + "\n", + " batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len\n", + "\n", + " # Concatenate x_t with future exogenous\n", + " if self.futr_exog_size > 0: \n", + " futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F]\n", + " x_t = torch.cat((x_t, futr_exog_t), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", + " \n", + " x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)]\n", + "\n", + " # Concatenate x_t with static exogenous\n", + " if self.stat_exog_size > 0:\n", + " x_t = torch.cat((x_t, stat_exog), dim=1) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S]\n", + "\n", + " # Run through DeepNPTSNetwork\n", + " h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size]\n", + " o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1]\n", + "\n", + " output = self._domain_map(o_t, windows_batch['insample_y']) # [B, L + 1], [B, L] -> [B, 3 * L]\n", + " output = self.loss.domain_map(output) # [B, 3 * L] -> ([B, L], [B, L], [B, L])\n", + "\n", + " return output\n", + "\n", + " def forward(self, windows_batch):\n", + " # Parse windows_batch\n", + " insample_y_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", + " stat_exog = windows_batch['stat_exog'] # [B, S]\n", + " y_idx = windows_batch['y_idx']\n", + "\n", + " batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len\n", + " device = insample_y_t.device\n", + " dtype = insample_y_t.dtype\n", + "\n", + " # Repeat insample_y for trajectory samples\n", + " insample_y_t = 
torch.repeat_interleave(input=insample_y_t, \n", + " repeats=self.trajectory_samples, \n", + " dim=0) # [B, L, 1] -> [B * n_samples, L, 1]\n", + " \n", + " # Input x_t is insample_y at time t\n", + " x_t = insample_y_t\n", + "\n", + " # Repeat futr_exog if available for trajectory samples and add to x_t \n", + " if self.futr_exog_size > 0: \n", + " futr_exog = torch.repeat_interleave(input=futr_exog, \n", + " repeats=self.trajectory_samples, \n", + " dim=0) # [B, L + h, F] -> [B * n_samples, L + h, F] \n", + " x_t = torch.cat((x_t, futr_exog[:, :seq_len]), dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", + " \n", + " x_t = x_t.reshape(batch_size * self.trajectory_samples, -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + "\n", + " # Repeat stat_exog if available for trajectory samples and add to x_t\n", + " if self.stat_exog_size > 0:\n", + " stat_exog = torch.repeat_interleave(\n", + " input=stat_exog, \n", + " repeats=self.trajectory_samples, \n", + " dim=0) # [B, S] -> [B * n_samples, S] \n", + " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", + "\n", + " # Scales for inverse normalization\n", + " y_scale = self.scaler.x_scale[:, :, y_idx]\n", + " y_loc = self.scaler.x_shift[:, :, y_idx]\n", + " y_scale = torch.repeat_interleave(input=y_scale, \n", + " repeats=self.trajectory_samples, \n", + " dim=0)\n", + " y_loc = torch.repeat_interleave(input=y_loc, \n", + " repeats=self.trajectory_samples, \n", + " dim=0)\n", + " # Create forecasts tensor\n", + " forecasts = torch.zeros((batch_size, \n", + " self.h,\n", + " len(self.quantiles) + 1), \n", + " device=device, \n", + " dtype=dtype)\n", + " \n", + " # Recursive predictions\n", + " for t in range(self.h):\n", + " # Run input throught DeepNPTSNetwork\n", + " h_t = self.deepnptsnetwork(x_t) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size]\n", + " o_t = 
self.output_layer(h_t) # [B * n_samples, hidden_size] -> [B * n_samples, L (+ 1)]\n", + " output = self._domain_map(o_t, insample_y_t.squeeze(-1)) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L]\n", + " output = self.loss.domain_map(output) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L])\n", + "\n", + " # Inverse normalization\n", + " distr_args = self.loss.scale_decouple(output=output, \n", + " loc=y_loc, \n", + " scale=y_scale)\n", + "\n", + " # Sample and create probabilistic outputs\n", + " samples_t_flat, _, _ = self.loss.sample(distr_args=distr_args, \n", + " num_samples=1)\n", + "\n", + " samples_t_flat = samples_t_flat.squeeze()\n", + " samples_t = samples_t_flat.reshape(batch_size, \n", + " self.trajectory_samples) # [B * n_samples] -> [B, n_samples] \n", + " \n", + " samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] \n", + " quantiles_t = torch.quantile(input=samples_t, \n", + " q=self.quantiles, \n", + " dim=-1) # [B, n_samples] -> [Q, B]\n", + " forecasts[:, t, 0] = samples_t_mean\n", + " forecasts[:, t, 1:] = quantiles_t.permute(1, 0)\n", + "\n", + " insample_y_t_next = self.scaler.scaler(samples_t_flat, \n", + " y_loc.squeeze(), \n", + " y_scale.squeeze()) # [B * n_samples] -> [B * n_samples]\n", + " insample_y_t_next = insample_y_t_next.unsqueeze(-1)\\\n", + " .unsqueeze(-1) # [B * n_samples] -> [B * n_samples, 1, 1]\n", + "\n", + " # Update insample_y_t \n", + " insample_y_t = torch.cat([insample_y_t[:, 1:], \n", + " insample_y_t_next], \n", + " dim=1) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1]\n", + " \n", + " # Update input\n", + " x_t = insample_y_t\n", + " # Concatenate x_t with future exogenous\n", + " if self.futr_exog_size > 0: \n", + " x_t = torch.cat((x_t, \n", + " futr_exog[:, t:seq_len + t]), \n", + " dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", + " \n", + " x_t = 
x_t.reshape(batch_size * self.trajectory_samples\n", + " , -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + "\n", + " # Concatenate x_t with static exogenous\n", + " if self.stat_exog_size > 0:\n", + " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", + " \n", + " return forecasts\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", + "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=-1,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n", + "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + "`dropout`: float=0.1, dropout.
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n", + "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", + "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=-1,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n", + "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + "`dropout`: float=0.1, dropout.
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n", + "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "show_doc(DeepNPTS, title_level=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Usage Example" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from neuralforecast import NeuralForecast\n", + "from neuralforecast.losses.pytorch import MQLoss, DistributionLoss, GMM\n", + "from neuralforecast.tsdataset import TimeSeriesDataset\n", + "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Seed set to 1\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b74158f17d254e4884139ee5c48e5706", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Sanity Checking: | | 0/? 
[00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#| eval: false\n", + "import pandas as pd\n", + "import pytorch_lightning as pl\n", + "import matplotlib.pyplot as plt\n", + "\n", + "from neuralforecast import NeuralForecast\n", + "#from neuralforecast.models import DeepAR\n", + "from neuralforecast.losses.pytorch import DistributionLoss, HuberMQLoss\n", + "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic\n", + "\n", + "#AirPassengersPanel['y'] = AirPassengersPanel['y'] + 10\n", + "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds=AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 12 test\n", + "\n", + "nf = NeuralForecast(\n", + " models=[DeepNPTS(h=12,\n", + " input_size=12,\n", + " trajectory_samples=100,\n", + " loss=GMM(),\n", + " # learning_rate=1e-5,\n", + " n_layers = 2,\n", + " dropout=0.0,\n", + " stat_exog_list=['airline1'],\n", + " futr_exog_list=['trend'],\n", + " max_steps=1000,\n", + " val_check_steps=10,\n", + " early_stop_patience_steps=3,\n", + " scaler_type='robust',\n", + " enable_progress_bar=True),\n", + " ],\n", + " freq='M'\n", + ")\n", + "nf.fit(df=Y_train_df, static_df=AirPassengersStatic, val_size=12)\n", + "Y_hat_df = nf.predict(futr_df=Y_test_df)\n", + "\n", + "# Plot quantile predictions\n", + "Y_hat_df = Y_hat_df.reset_index(drop=False).drop(columns=['unique_id','ds'])\n", + "plot_df = pd.concat([Y_test_df, Y_hat_df], axis=1)\n", + "plot_df = pd.concat([Y_train_df, plot_df])\n", + "\n", + "plot_df = plot_df[plot_df.unique_id=='Airline1'].drop('unique_id', axis=1)\n", + "plt.plot(plot_df['ds'], plot_df['y'], c='black', label='True')\n", + "plt.plot(plot_df['ds'], plot_df['DeepNPTS'], c='red', label='mean')\n", + "plt.plot(plot_df['ds'], plot_df['DeepNPTS-median'], c='blue', label='median')\n", + "plt.fill_between(x=plot_df['ds'][-12:], \n", + " y1=plot_df['DeepNPTS-lo-90'][-12:].values, \n", + " 
y2=plot_df['DeepNPTS-hi-90'][-12:].values,\n", + " alpha=0.4, label='level 90')\n", + "plt.legend()\n", + "plt.grid()\n", + "plt.plot()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/neuralforecast/_modidx.py b/neuralforecast/_modidx.py index 4bcbdabad..275d7598b 100644 --- a/neuralforecast/_modidx.py +++ b/neuralforecast/_modidx.py @@ -512,6 +512,24 @@ 'neuralforecast/models/deepar.py'), 'neuralforecast.models.deepar.DeepAR.validation_step': ( 'models.deepar.html#deepar.validation_step', 'neuralforecast/models/deepar.py')}, + 'neuralforecast.models.deepnpts': { 'neuralforecast.models.deepnpts.DeepNPTS': ( 'models.deepnpts.html#deepnpts', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.__init__': ( 'models.deepnpts.html#deepnpts.__init__', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS._domain_map': ( 'models.deepnpts.html#deepnpts._domain_map', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS._init_weights': ( 'models.deepnpts.html#deepnpts._init_weights', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.forward': ( 'models.deepnpts.html#deepnpts.forward', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.predict_step': ( 'models.deepnpts.html#deepnpts.predict_step', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.train_forward': ( 'models.deepnpts.html#deepnpts.train_forward', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.training_step': ( 
'models.deepnpts.html#deepnpts.training_step', + 'neuralforecast/models/deepnpts.py'), + 'neuralforecast.models.deepnpts.DeepNPTS.validation_step': ( 'models.deepnpts.html#deepnpts.validation_step', + 'neuralforecast/models/deepnpts.py')}, 'neuralforecast.models.dilated_rnn': { 'neuralforecast.models.dilated_rnn.AttentiveLSTMLayer': ( 'models.dilated_rnn.html#attentivelstmlayer', 'neuralforecast/models/dilated_rnn.py'), 'neuralforecast.models.dilated_rnn.AttentiveLSTMLayer.__init__': ( 'models.dilated_rnn.html#attentivelstmlayer.__init__', diff --git a/neuralforecast/common/_scalers.py b/neuralforecast/common/_scalers.py index 15ddb3bd4..bef76f7e9 100644 --- a/neuralforecast/common/_scalers.py +++ b/neuralforecast/common/_scalers.py @@ -313,8 +313,8 @@ def identity_statistics(x, mask, dim=-1, eps=1e-6): shape = list(x.shape) shape[dim] = 1 - x_shift = torch.zeros(shape) - x_scale = torch.ones(shape) + x_shift = torch.zeros(shape, device=x.device) + x_scale = torch.ones(shape, device=x.device) return x_shift, x_scale diff --git a/neuralforecast/core.py b/neuralforecast/core.py index b13338d4c..9919512c0 100644 --- a/neuralforecast/core.py +++ b/neuralforecast/core.py @@ -58,6 +58,7 @@ iTransformer, BiTCN, TiDE, + DeepNPTS, ) # %% ../nbs/core.ipynb 5 @@ -173,6 +174,8 @@ def _insample_times( "autobitcn": BiTCN, "tide": TiDE, "autotide": TiDE, + "deepnpts": DeepNPTS, + "autodeepnpts": DeepNPTS, } # %% ../nbs/core.ipynb 8 diff --git a/neuralforecast/losses/pytorch.py b/neuralforecast/losses/pytorch.py index d7f29c83b..2e5ede2f5 100644 --- a/neuralforecast/losses/pytorch.py +++ b/neuralforecast/losses/pytorch.py @@ -1166,17 +1166,20 @@ def __init__( # If True, predict_step will return Distribution's parameters self.return_params = return_params if self.return_params: - self.param_names = [f"-lambda-{i}" for i in range(1, n_components + 1)] + lambda_names = [f"-lambda-{i}" for i in range(1, n_components + 1)] + weight_names = [f"-weight-{i}" for i in range(1, n_components 
+ 1)] + self.param_names = [i for j in zip(lambda_names, weight_names) for i in j] self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = n_components + self.outputsize_multiplier = 2 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - return (output,) # , weights + lambdas, weights = output.chunk(2, dim=-1) + return (lambdas, weights) def scale_decouple( self, @@ -1190,13 +1193,15 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. """ - lambdas = output[0] + lambdas, weights = output + weights = F.softmax(weights, dim=-1) + if (loc is not None) and (scale is not None): loc = loc.view(lambdas.size(dim=0), 1, -1) scale = scale.view(lambdas.size(dim=0), 1, -1) lambdas = (lambdas * scale) + loc lambdas = F.softplus(lambdas) - return (lambdas,) + return (lambdas, weights) def sample(self, distr_args, num_samples=None): """ @@ -1218,15 +1223,10 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - lambdas = distr_args[0] + lambdas, weights = distr_args B, H, K = lambdas.size() Q = len(self.quantiles) - # Sample K ~ Mult(weights) - # shared across B, H - # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) - weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) - # Avoid loop, vectorize weights = weights.reshape(-1, K) lambdas = lambdas.flatten() @@ -1267,7 +1267,7 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: @@ -1276,11 +1276,9 @@ def neglog_likelihood( mask = mask * ((y > 0) * 1) eps = 1e-10 - lambdas = distr_args[0] + lambdas, weights = 
distr_args B, H, K = lambdas.size() - weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) - y = y[:, :, None] mask = mask[:, :, None] @@ -1307,7 +1305,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1369,18 +1367,22 @@ def __init__( if self.return_params: mu_names = [f"-mu-{i}" for i in range(1, n_components + 1)] std_names = [f"-std-{i}" for i in range(1, n_components + 1)] - mu_std_names = [i for j in zip(mu_names, std_names) for i in j] - self.output_names = self.output_names + mu_std_names + weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] + self.param_names = [ + i for j in zip(mu_names, std_names, weight_names) for i in j + ] + self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 2 * n_components + self.outputsize_multiplier = 3 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - means, stds = torch.tensor_split(output, 2, dim=-1) - return (means, stds) + means, stds, weights = output.chunk(3, dim=-1) + + return (means, stds, weights) def scale_decouple( self, @@ -1395,14 +1397,16 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. 
""" - means, stds = output + means, stds, weights = output stds = F.softplus(stds) + weights = F.softmax(weights, dim=-1) if (loc is not None) and (scale is not None): loc = loc.view(means.size(dim=0), 1, -1) scale = scale.view(means.size(dim=0), 1, -1) means = (means * scale) + loc stds = (stds + eps) * scale - return (means, stds) + + return (means, stds, weights) def sample(self, distr_args, num_samples=None): """ @@ -1424,17 +1428,11 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - means, stds = distr_args + means, stds, weights = distr_args B, H, K = means.size() Q = len(self.quantiles) assert means.shape == stds.shape - # Sample K ~ Mult(weights) - # shared across B, H - # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) - - weights = (1 / K) * torch.ones_like(means, device=means.device) - # Avoid loop, vectorize weights = weights.reshape(-1, K) means = means.flatten() @@ -1475,18 +1473,16 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - means, stds = distr_args + means, stds, weights = distr_args B, H, K = means.size() - weights = (1 / K) * torch.ones_like(means, device=means.device) - y = y[:, :, None] mask = mask[:, :, None] @@ -1514,7 +1510,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1572,25 +1568,29 @@ def __init__( f"-total_count-{i}" for i in range(1, n_components + 1) ] probs_names = [f"-probs-{i}" for i in range(1, n_components + 1)] - param_names = [i for j in zip(total_count_names, probs_names) for i in j] - self.output_names = self.output_names + 
param_names + weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] + self.param_names = [ + i for j in zip(total_count_names, probs_names, weight_names) for i in j + ] + self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 2 * n_components + self.outputsize_multiplier = 3 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - mu, alpha = torch.tensor_split(output, 2, dim=-1) - return (mu, alpha) + mu, alpha, weights = output.chunk(3, dim=-1) + + return mu, alpha, weights def scale_decouple( self, output, loc: Optional[torch.Tensor] = None, scale: Optional[torch.Tensor] = None, - eps: float = 0.2, + eps: float = 1e-6, ): """Scale Decouple @@ -1599,9 +1599,10 @@ def scale_decouple( Also adds domain protection to the distribution parameters. """ # Efficient NBinomial parametrization - mu, alpha = output - mu = F.softplus(mu) + 1e-8 - alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts + mu, alpha, weights = output + mu = F.softplus(mu) + eps + alpha = F.softplus(alpha) + eps # alpha = 1/total_counts + weights = F.softmax(weights, dim=-1) if (loc is not None) and (scale is not None): loc = loc.view(mu.size(dim=0), 1, -1) mu *= loc @@ -1611,8 +1612,9 @@ def scale_decouple( # => probs = mu / (total_count + mu) # => probs = mu / [total_count * (1 + mu * (1/total_count))] total_count = 1.0 / alpha - probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 - return (total_count, probs) + probs = mu * alpha / (1.0 + mu * alpha) + probs = torch.clamp(probs, eps, 1 - eps) + return (total_count, probs, weights) def sample(self, distr_args, num_samples=None): """ @@ -1634,17 +1636,11 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - total_count, probs = distr_args + total_count, probs, weights = distr_args B, H, K = total_count.size() Q = len(self.quantiles) 
assert total_count.shape == probs.shape - # Sample K ~ Mult(weights) - # shared across B, H - # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) - - weights = (1 / K) * torch.ones_like(probs, device=probs.device) - # Avoid loop, vectorize weights = weights.reshape(-1, K) total_count = total_count.flatten() @@ -1686,18 +1682,16 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - total_count, probs = distr_args + total_count, probs, weights = distr_args B, H, K = total_count.size() - weights = (1 / K) * torch.ones_like(probs, device=probs.device) - y = y[:, :, None] mask = mask[:, :, None] @@ -1728,7 +1722,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): diff --git a/neuralforecast/models/__init__.py b/neuralforecast/models/__init__.py index fbca72d6e..ee07166ab 100644 --- a/neuralforecast/models/__init__.py +++ b/neuralforecast/models/__init__.py @@ -2,7 +2,7 @@ 'MLP', 'NHITS', 'NBEATS', 'NBEATSx', 'DLinear', 'NLinear', 'TFT', 'VanillaTransformer', 'Informer', 'Autoformer', 'PatchTST', 'FEDformer', 'StemGNN', 'HINT', 'TimesNet', 'TimeLLM', 'TSMixer', 'TSMixerx', 'MLPMultivariate', - 'iTransformer', 'BiTCN', 'TiDE', + 'iTransformer', 'BiTCN', 'TiDE', 'DeepNPTS' ] from .rnn import RNN @@ -33,4 +33,4 @@ from .itransformer import iTransformer from .bitcn import BiTCN from .tide import TiDE - +from .deepnpts import DeepNPTS \ No newline at end of file diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py new file mode 100644 index 000000000..d4da85974 --- /dev/null +++ b/neuralforecast/models/deepnpts.py @@ -0,0 
+1,557 @@ +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.deepnpts.ipynb. + +# %% auto 0 +__all__ = ['DeepNPTS'] + +# %% ../../nbs/models.deepnpts.ipynb 3 +import numpy as np + +import torch +import torch.nn as nn +import neuralforecast.losses.pytorch as losses +from typing import Optional +from functools import partial + + +from ..common._base_windows import BaseWindows +from ..losses.pytorch import MQLoss, GMM, PMM, NBMM + +# %% ../../nbs/models.deepnpts.ipynb 7 +class DeepNPTS(BaseWindows): + """DeepNPTS + + Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. + + **Parameters:**
+ `h`: int, Forecast horizon.
+    `input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br/>
+ `hidden_size`: int=32, hidden size of dense layers.
+ `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
+ `dropout`: float=0.1, dropout.
+ `n_layers`: int=2, number of dense layers.
+ `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
+ `stat_exog_list`: str list, static exogenous columns.
+    `hist_exog_list`: str list, historic exogenous columns (not supported by DeepNPTS; must be None).<br/>
+ `futr_exog_list`: str list, future exogenous columns.
+ `exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
+ `loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
+    `valid_loss`: PyTorch module=`MQLoss(level=[80, 90])`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).<br/>
+ `max_steps`: int=1000, maximum number of training steps.
+    `learning_rate`: float=1e-5, Learning rate between (0, 1).<br/>
+    `num_lr_decays`: int=3, Number of learning rate decays, evenly distributed across max_steps.<br/>
+ `early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
+ `val_check_steps`: int=100, Number of training steps between every validation loss check.
+ `batch_size`: int=32, number of different series in each batch.
+ `valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
+ `windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
+ `inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
+ `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
+ `step_size`: int=1, step size between each window of temporal data.
+    `scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br/>
+ `random_seed`: int, random_seed for pytorch initializer and numpy generators.
+    `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.<br/>
+ `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `alias`: str, optional, Custom name of the model.
+ `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
+ `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
+    `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br/>
+ + **References**
+ - [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). "Deep Non-Parametric Time Series Forecaster". arXiv.](https://arxiv.org/abs/2312.14657)
+ + """ + + # Class attributes + SAMPLING_TYPE = "windows" + + def __init__( + self, + h, + input_size: int = -1, + hidden_size: int = 32, + batch_norm: bool = True, + dropout: float = 0.1, + n_layers: int = 2, + trajectory_samples: int = 100, + futr_exog_list=None, + hist_exog_list=None, + stat_exog_list=None, + exclude_insample_y=False, + loss=GMM(), + valid_loss=MQLoss(level=[80, 90]), + max_steps: int = 1000, + learning_rate: float = 1e-5, + num_lr_decays: int = 3, + early_stop_patience_steps: int = -1, + val_check_steps: int = 100, + batch_size: int = 32, + valid_batch_size: Optional[int] = None, + windows_batch_size: int = 1024, + inference_windows_batch_size: int = -1, + start_padding_enabled=False, + step_size: int = 1, + scaler_type: str = "standard", + random_seed: int = 1, + num_workers_loader=0, + drop_last_loader=False, + optimizer=None, + optimizer_kwargs=None, + **trainer_kwargs + ): + + if hist_exog_list is not None: + raise Exception("DeepNPTS does not support historical exogenous variables.") + + if exclude_insample_y: + raise Exception("DeepNPTS has no possibility for excluding y.") + + supported_losses = (losses.GMM, losses.PMM, losses.NBMM) + + if not isinstance(loss, supported_losses): + raise Exception("DeepNPTS only supports GMM, PMM or NBMM as loss function.") + + if not isinstance(valid_loss, losses.MQLoss): + raise Exception("DeepNPTS only supports MQLoss as validation loss.") + + # Overwrite n_components, it has to be the input_size in DeepNPTS + loss.n_components = input_size + + # Inherit BaseWindows class + super(DeepNPTS, self).__init__( + h=h, + input_size=input_size, + futr_exog_list=futr_exog_list, + hist_exog_list=hist_exog_list, + stat_exog_list=stat_exog_list, + exclude_insample_y=exclude_insample_y, + loss=loss, + valid_loss=valid_loss, + max_steps=max_steps, + learning_rate=learning_rate, + num_lr_decays=num_lr_decays, + early_stop_patience_steps=early_stop_patience_steps, + val_check_steps=val_check_steps, + 
batch_size=batch_size, + windows_batch_size=windows_batch_size, + valid_batch_size=valid_batch_size, + inference_windows_batch_size=inference_windows_batch_size, + start_padding_enabled=start_padding_enabled, + step_size=step_size, + scaler_type=scaler_type, + num_workers_loader=num_workers_loader, + drop_last_loader=drop_last_loader, + random_seed=random_seed, + optimizer=optimizer, + optimizer_kwargs=optimizer_kwargs, + **trainer_kwargs + ) + + self.h = h + self.h_backup = self.h # Used because h=1 during training + self.use_softmax = True + self.hidden_size = hidden_size + self.dropout = dropout + self.trajectory_samples = trajectory_samples + + self.futr_exog_size = len(self.futr_exog_list) + self.stat_exog_size = len(self.stat_exog_list) + + input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size + # Create DeepNPTSNetwork + modules = [] + for i in range(n_layers): + modules.append(nn.Linear(input_dim if i == 0 else hidden_size, hidden_size)) + modules.append(nn.ReLU()) + if batch_norm: + modules.append(nn.BatchNorm1d(hidden_size)) + if dropout > 0.0: + modules.append(nn.Dropout(dropout)) + + self.deepnptsnetwork = nn.Sequential(*modules) + self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07)) + + # Add output layers for Mixture distribution + output_modules = [] + if dropout > 0.0: + output_modules.append(nn.Dropout(self.dropout)) + + if isinstance(loss, GMM): + output_modules.append(nn.Linear(hidden_size, input_size + 1)) + elif isinstance(loss, PMM): + output_modules.append(nn.Linear(hidden_size, input_size)) + elif isinstance(loss, NBMM): + output_modules.append(nn.Linear(hidden_size, input_size)) + + self.output_layer = nn.Sequential(*output_modules) + self.output_layer.apply(self._init_weights) + + @staticmethod + def _init_weights(module, scale=1.0): + if type(module) == nn.Linear: + nn.init.uniform_(module.weight, -scale, scale) + nn.init.zeros_(module.bias) + + def _domain_map(self, o_t, insample_y): + if 
isinstance(self.loss, GMM): + weights = o_t[:, :-1] # [B, L + 1] -> [B, L] + kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1] + kernel_width = torch.repeat_interleave( + input=kernel_width, repeats=weights.shape[1], dim=-1 + ) # [B, 1] -> [B, L] + output = torch.cat( + [insample_y, kernel_width, weights], dim=-1 + ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] + output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L] + elif isinstance(self.loss, PMM): + weights = o_t # [B, L] -> [B, L] + output = torch.cat( + [insample_y, weights], dim=-1 + ) # [B, L] + [B, L] = [B, 2 * L] + output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] + elif isinstance(self.loss, NBMM): + weights = torch.ones_like(o_t) # [B, L] -> [B, L] + output = torch.cat( + [insample_y, o_t, weights], dim=-1 + ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] + output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * + + else: + raise NotImplementedError + + return output + + # Override BaseWindows method + def training_step(self, batch, batch_idx): + + # Only train one-step ahead + self.h = 1 + self.quantiles = self.loss.quantiles + + # Create and normalize windows [Ws, L+H, C] + y_idx = batch["y_idx"] + windows = self._create_windows(batch, step="train") + original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, y_idx]) + windows = self._normalization(windows=windows, y_idx=y_idx) + + # Parse windows + ( + insample_y, + insample_mask, + outsample_y, + outsample_mask, + _, + futr_exog, + stat_exog, + ) = self._parse_windows(batch, windows) + + windows_batch = dict( + insample_y=insample_y, # [Ws, L] + insample_mask=insample_mask, # [Ws, L] + futr_exog=futr_exog, # [Ws, L+H] + hist_exog=None, + stat_exog=stat_exog, # [Ws, 1] + y_idx=y_idx, # [Ws, 1] + ) + + # Model Predictions + output = self.train_forward(windows_batch) + + _, y_loc, y_scale = self._inv_normalization( + y_hat=outsample_y, temporal_cols=batch["temporal_cols"], y_idx=y_idx + ) + # outsample_y = original_insample_y + outsample_y = 
original_outsample_y + distr_args = self.loss.scale_decouple(output=output, loc=y_loc, scale=y_scale) + loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask) + + if torch.isnan(loss): + print("Model Parameters", self.hparams) + print("insample_y", torch.isnan(insample_y).sum()) + print("outsample_y", torch.isnan(outsample_y).sum()) + print("output", torch.isnan(output).sum()) + raise Exception("Loss is NaN, training stopped.") + + self.log("train_loss", loss, prog_bar=True, on_epoch=True) + self.train_trajectories.append((self.global_step, float(loss))) + + self.h = self.h_backup + + return loss + + # Override BaseWindows method + def validation_step(self, batch, batch_idx): + + self.h = self.h_backup + self.quantiles = self.valid_loss.quantiles + + if self.val_size == 0: + return np.nan + + # TODO: Hack to compute number of windows + windows = self._create_windows(batch, step="val") + n_windows = len(windows["temporal"]) + y_idx = batch["y_idx"] + + # Number of windows in batch + windows_batch_size = self.inference_windows_batch_size + if windows_batch_size < 0: + windows_batch_size = n_windows + n_batches = int(np.ceil(n_windows / windows_batch_size)) + + valid_losses = [] + batch_sizes = [] + for i in range(n_batches): + # Create and normalize windows [Ws, L+H, C] + w_idxs = np.arange( + i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) + ) + windows = self._create_windows(batch, step="val", w_idxs=w_idxs) + original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, 0]) + windows = self._normalization(windows=windows, y_idx=y_idx) + + # Parse windows + ( + insample_y, + insample_mask, + _, + outsample_mask, + _, + futr_exog, + stat_exog, + ) = self._parse_windows(batch, windows) + + windows_batch = dict( + insample_y=insample_y, # [Ws, L] + insample_mask=insample_mask, # [Ws, L] + futr_exog=futr_exog, # [Ws, L+H] + hist_exog=None, # [Ws, L] + stat_exog=stat_exog, + y_idx=y_idx, + ) # [Ws, 1] + + # Model 
Predictions
+            output_batch = self(windows_batch)
+            # Monte Carlo already returns y_hat with mean and quantiles
+            output_batch = output_batch[:, :, 1:]  # Remove mean
+            valid_loss_batch = self.valid_loss(
+                y=original_outsample_y, y_hat=output_batch, mask=outsample_mask
+            )
+            valid_losses.append(valid_loss_batch)
+            batch_sizes.append(len(output_batch))
+
+        valid_loss = torch.stack(valid_losses)
+        batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device)
+        valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes)
+
+        if torch.isnan(valid_loss):
+            raise Exception("Loss is NaN, training stopped.")
+
+        self.log("valid_loss", valid_loss, prog_bar=True, on_epoch=True)
+        self.validation_step_outputs.append(valid_loss)
+        return valid_loss
+
+    # Override BaseWindows method
+    def predict_step(self, batch, batch_idx):
+
+        self.h = self.h_backup
+        self.quantiles = self.loss.quantiles
+
+        # TODO: Hack to compute number of windows
+        windows = self._create_windows(batch, step="predict")
+        n_windows = len(windows["temporal"])
+        y_idx = batch["y_idx"]
+
+        # Number of windows in batch
+        windows_batch_size = self.inference_windows_batch_size
+        if windows_batch_size < 0:
+            windows_batch_size = n_windows
+        n_batches = int(np.ceil(n_windows / windows_batch_size))
+
+        y_hats = []
+        for i in range(n_batches):
+            # Create and normalize windows [Ws, L+H, C]
+            w_idxs = np.arange(
+                i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows)
+            )
+            windows = self._create_windows(batch, step="predict", w_idxs=w_idxs)
+            windows = self._normalization(windows=windows, y_idx=y_idx)
+
+            # Parse windows
+            insample_y, insample_mask, _, _, _, futr_exog, stat_exog = (
+                self._parse_windows(batch, windows)
+            )
+            windows_batch = dict(
+                insample_y=insample_y,  # [Ws, L]
+                insample_mask=insample_mask,  # [Ws, L]
+                futr_exog=futr_exog,  # [Ws, L+H]
+                stat_exog=stat_exog,
+                y_idx=y_idx,
+            )
+
+            # Model Predictions
+            y_hat = self(windows_batch)
+            # Monte Carlo already returns y_hat 
with mean and quantiles + y_hats.append(y_hat) + y_hat = torch.cat(y_hats, dim=0) + return y_hat + + def train_forward(self, windows_batch): + # Parse windows_batch + x_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + futr_exog = windows_batch["futr_exog"] # [B, L + h, F] + stat_exog = windows_batch["stat_exog"] # [B, S] + + batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len + + # Concatenate x_t with future exogenous + if self.futr_exog_size > 0: + futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F] + x_t = torch.cat( + (x_t, futr_exog_t), dim=2 + ) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] + + x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)] + + # Concatenate x_t with static exogenous + if self.stat_exog_size > 0: + x_t = torch.cat( + (x_t, stat_exog), dim=1 + ) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S] + + # Run through DeepNPTSNetwork + h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size] + o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1] + + output = self._domain_map( + o_t, windows_batch["insample_y"] + ) # [B, L + 1], [B, L] -> [B, 3 * L] + output = self.loss.domain_map( + output + ) # [B, 3 * L] -> ([B, L], [B, L], [B, L]) + + return output + + def forward(self, windows_batch): + # Parse windows_batch + insample_y_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + futr_exog = windows_batch["futr_exog"] # [B, L + h, F] + stat_exog = windows_batch["stat_exog"] # [B, S] + y_idx = windows_batch["y_idx"] + + batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len + device = insample_y_t.device + dtype = insample_y_t.dtype + + # Repeat insample_y for trajectory samples + insample_y_t = torch.repeat_interleave( + input=insample_y_t, repeats=self.trajectory_samples, dim=0 + ) # [B, L, 1] -> [B * n_samples, L, 1] + + # Input x_t is insample_y at time t + x_t = insample_y_t + + # Repeat futr_exog if available for trajectory samples and 
add to x_t + if self.futr_exog_size > 0: + futr_exog = torch.repeat_interleave( + input=futr_exog, repeats=self.trajectory_samples, dim=0 + ) # [B, L + h, F] -> [B * n_samples, L + h, F] + x_t = torch.cat( + (x_t, futr_exog[:, :seq_len]), dim=2 + ) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] + + x_t = x_t.reshape( + batch_size * self.trajectory_samples, -1 + ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] + + # Repeat stat_exog if available for trajectory samples and add to x_t + if self.stat_exog_size > 0: + stat_exog = torch.repeat_interleave( + input=stat_exog, repeats=self.trajectory_samples, dim=0 + ) # [B, S] -> [B * n_samples, S] + x_t = torch.cat( + (x_t, stat_exog), dim=1 + ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] + + # Scales for inverse normalization + y_scale = self.scaler.x_scale[:, :, y_idx] + y_loc = self.scaler.x_shift[:, :, y_idx] + y_scale = torch.repeat_interleave( + input=y_scale, repeats=self.trajectory_samples, dim=0 + ) + y_loc = torch.repeat_interleave( + input=y_loc, repeats=self.trajectory_samples, dim=0 + ) + # Create forecasts tensor + forecasts = torch.zeros( + (batch_size, self.h, len(self.quantiles) + 1), device=device, dtype=dtype + ) + + # Recursive predictions + for t in range(self.h): + # Run input throught DeepNPTSNetwork + h_t = self.deepnptsnetwork( + x_t + ) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size] + o_t = self.output_layer( + h_t + ) # [B * n_samples, hidden_size] -> [B * n_samples, L (+ 1)] + output = self._domain_map( + o_t, insample_y_t.squeeze(-1) + ) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L] + output = self.loss.domain_map( + output + ) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L]) + + # Inverse normalization + distr_args = self.loss.scale_decouple( + output=output, loc=y_loc, scale=y_scale + ) + + # Sample and create probabilistic outputs + 
samples_t_flat, _, _ = self.loss.sample( + distr_args=distr_args, num_samples=1 + ) + + samples_t_flat = samples_t_flat.squeeze() + samples_t = samples_t_flat.reshape( + batch_size, self.trajectory_samples + ) # [B * n_samples] -> [B, n_samples] + + samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] + quantiles_t = torch.quantile( + input=samples_t, q=self.quantiles, dim=-1 + ) # [B, n_samples] -> [Q, B] + forecasts[:, t, 0] = samples_t_mean + forecasts[:, t, 1:] = quantiles_t.permute(1, 0) + + insample_y_t_next = self.scaler.scaler( + samples_t_flat, y_loc.squeeze(), y_scale.squeeze() + ) # [B * n_samples] -> [B * n_samples] + insample_y_t_next = insample_y_t_next.unsqueeze(-1).unsqueeze( + -1 + ) # [B * n_samples] -> [B * n_samples, 1, 1] + + # Update insample_y_t + insample_y_t = torch.cat( + [insample_y_t[:, 1:], insample_y_t_next], dim=1 + ) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1] + + # Update input + x_t = insample_y_t + # Concatenate x_t with future exogenous + if self.futr_exog_size > 0: + x_t = torch.cat( + (x_t, futr_exog[:, t : seq_len + t]), dim=2 + ) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] + + x_t = x_t.reshape( + batch_size * self.trajectory_samples, -1 + ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] + + # Concatenate x_t with static exogenous + if self.stat_exog_size > 0: + x_t = torch.cat( + (x_t, stat_exog), dim=1 + ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] + + return forecasts From 54b2f0ae801f73c72acabbb7f2c3a4702f580a66 Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Mon, 22 Apr 2024 23:01:57 +0200 Subject: [PATCH 05/11] deepnpts_simple --- nbs/losses.pytorch.ipynb | 1714 ++--------------------------- nbs/models.deepnpts.ipynb | 869 +-------------- neuralforecast/_modidx.py | 14 +- neuralforecast/losses/pytorch.py | 118 +- neuralforecast/models/__init__.py | 2 +- 
neuralforecast/models/deepnpts.py | 454 +------- 6 files changed, 292 insertions(+), 2879 deletions(-) diff --git a/nbs/losses.pytorch.ipynb b/nbs/losses.pytorch.ipynb index 36adfaabd..387da910d 100644 --- a/nbs/losses.pytorch.ipynb +++ b/nbs/losses.pytorch.ipynb @@ -67,7 +67,7 @@ " Normal, \n", " StudentT, \n", " Poisson,\n", - " NegativeBinomial\n", + " NegativeBinomial,\n", ")\n", "\n", "from torch.distributions import constraints" @@ -244,61 +244,7 @@ "execution_count": null, "id": "1d004cd0", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__init__\n", - "\n", - "> MAE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Error\n", - "\n", - "Calculates Mean Absolute Error between\n", - "`y` and `y_hat`. MAE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the\n", - "deviation of the prediction and the true\n", - "value at a given time and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L85){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__init__\n", - "\n", - "> MAE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Error\n", - "\n", - "Calculates Mean Absolute Error between\n", - "`y` and `y_hat`. MAE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the\n", - "deviation of the prediction and the true\n", - "value at a given time and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} |y_{\\tau} - \\hat{y}_{\\tau}| $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAE, name='MAE.__init__', title_level=3)" ] @@ -308,51 +254,7 @@ "execution_count": null, "id": "0a20a273", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__call__\n", - "\n", - "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mae`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L106){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAE.__call__\n", - "\n", - "> MAE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mae`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAE.__call__, name='MAE.__call__', title_level=3)" ] @@ -426,61 +328,7 @@ "execution_count": null, "id": "e8c65b82", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__init__\n", - "\n", - "> MSE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Squared Error\n", - "\n", - "Calculates Mean Squared Error between\n", - "`y` and `y_hat`. MSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the \n", - "squared deviation of the prediction and the true\n", - "value at a given time, and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L126){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__init__\n", - "\n", - "> MSE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Squared Error\n", - "\n", - "Calculates Mean Squared Error between\n", - "`y` and `y_hat`. MSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the \n", - "squared deviation of the prediction and the true\n", - "value at a given time, and averages these devations\n", - "over the length of the series.\n", - "\n", - "$$ \\mathrm{MSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MSE, name='MSE.__init__', title_level=3)" ] @@ -490,51 +338,7 @@ "execution_count": null, "id": "b0126a7f", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__call__\n", - "\n", - "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mse`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MSE.__call__\n", - "\n", - "> MSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mse`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MSE.__call__, name='MSE.__call__', title_level=3)" ] @@ -612,67 +416,7 @@ "execution_count": null, "id": "d961d383", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__init__\n", - "\n", - "> RMSE.__init__ (horizon_weight=None)\n", - "\n", - "Root Mean Squared Error\n", - "\n", - "Calculates Root Mean Squared Error between\n", - "`y` and `y_hat`. RMSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the squared deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "Finally the RMSE will be in the same scale\n", - "as the original time series so its comparison with other\n", - "series is possible only if they share a common scale. \n", - "RMSE has a direct connection to the L2 norm.\n", - "\n", - "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L167){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__init__\n", - "\n", - "> RMSE.__init__ (horizon_weight=None)\n", - "\n", - "Root Mean Squared Error\n", - "\n", - "Calculates Root Mean Squared Error between\n", - "`y` and `y_hat`. RMSE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the squared deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "Finally the RMSE will be in the same scale\n", - "as the original time series so its comparison with other\n", - "series is possible only if they share a common scale. \n", - "RMSE has a direct connection to the L2 norm.\n", - "\n", - "$$ \\mathrm{RMSE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\sqrt{\\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} (y_{\\tau} - \\hat{y}_{\\tau})^{2}} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(RMSE, name='RMSE.__init__', title_level=3)" ] @@ -682,51 +426,7 @@ "execution_count": null, "id": "d398d3e3", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__call__\n", - "\n", - "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`rmse`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### RMSE.__call__\n", - "\n", - "> RMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`rmse`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(RMSE.__call__, name='RMSE.__call__', title_level=3)" ] @@ -817,69 +517,7 @@ "execution_count": null, "id": "174e8042", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__init__\n", - "\n", - "> MAPE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Percentage Error\n", - "\n", - "Calculates Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. MAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the percentual deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "The closer to zero an observed value is, the higher penalty MAPE loss\n", - "assigns to the corresponding error.\n", - "\n", - "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L212){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__init__\n", - "\n", - "> MAPE.__init__ (horizon_weight=None)\n", - "\n", - "Mean Absolute Percentage Error\n", - "\n", - "Calculates Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. MAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the percentual deviation\n", - "of the prediction and the observed value at a given time and\n", - "averages these devations over the length of the series.\n", - "The closer to zero an observed value is, the higher penalty MAPE loss\n", - "assigns to the corresponding error.\n", - "\n", - "$$ \\mathrm{MAPE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAPE, name='MAPE.__init__', title_level=3)" ] @@ -889,51 +527,7 @@ "execution_count": null, "id": "da63f136", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__call__\n", - "\n", - "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mape`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L237){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MAPE.__call__\n", - "\n", - "> MAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mape`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MAPE.__call__, name='MAPE.__call__', title_level=3)" ] @@ -1015,73 +609,7 @@ "execution_count": null, "id": "dee99fb8", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__init__\n", - "\n", - "> SMAPE.__init__ (horizon_weight=None)\n", - "\n", - "Symmetric Mean Absolute Percentage Error\n", - "\n", - "Calculates Symmetric Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. SMAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the relative deviation\n", - "of the prediction and the observed value scaled by the sum of the\n", - "absolute values for the prediction and observed value at a\n", - "given time, then averages these devations over the length\n", - "of the series. This allows the SMAPE to have bounds between\n", - "0% and 200% which is desireble compared to normal MAPE that\n", - "may be undetermined when the target is zero.\n", - "\n", - "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L259){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__init__\n", - "\n", - "> SMAPE.__init__ (horizon_weight=None)\n", - "\n", - "Symmetric Mean Absolute Percentage Error\n", - "\n", - "Calculates Symmetric Mean Absolute Percentage Error between\n", - "`y` and `y_hat`. SMAPE measures the relative prediction\n", - "accuracy of a forecasting method by calculating the relative deviation\n", - "of the prediction and the observed value scaled by the sum of the\n", - "absolute values for the prediction and observed value at a\n", - "given time, then averages these devations over the length\n", - "of the series. This allows the SMAPE to have bounds between\n", - "0% and 200% which is desireble compared to normal MAPE that\n", - "may be undetermined when the target is zero.\n", - "\n", - "$$ \\mathrm{sMAPE}_{2}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{|y_{\\tau}|+|\\hat{y}_{\\tau}|} $$\n", - "\n", - "**Parameters:**
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Makridakis S., \"Accuracy measures: theoretical and practical concerns\".](https://www.sciencedirect.com/science/article/pii/0169207093900793)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(SMAPE, name='SMAPE.__init__', title_level=3)" ] @@ -1091,51 +619,7 @@ "execution_count": null, "id": "db62a845", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__call__\n", - "\n", - "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`smape`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L286){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### SMAPE.__call__\n", - "\n", - "> SMAPE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`smape`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(SMAPE.__call__, name='SMAPE.__call__', title_level=3)" ] @@ -1222,71 +706,7 @@ "execution_count": null, "id": "b6a4cf21", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__init__\n", - "\n", - "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", - "\n", - "Mean Absolute Scaled Error \n", - "Calculates the Mean Absolute Scaled Error between\n", - "`y` and `y_hat`. MASE measures the relative prediction\n", - "accuracy of a forecasting method by comparinng the mean absolute errors\n", - "of the prediction and the observed value against the mean\n", - "absolute errors of the seasonal naive model.\n", - "The MASE partially composed the Overall Weighted Average (OWA), \n", - "used in the M4 Competition.\n", - "\n", - "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", - "\n", - "**Parameters:**
\n", - "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L308){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__init__\n", - "\n", - "> MASE.__init__ (seasonality:int, horizon_weight=None)\n", - "\n", - "Mean Absolute Scaled Error \n", - "Calculates the Mean Absolute Scaled Error between\n", - "`y` and `y_hat`. MASE measures the relative prediction\n", - "accuracy of a forecasting method by comparinng the mean absolute errors\n", - "of the prediction and the observed value against the mean\n", - "absolute errors of the seasonal naive model.\n", - "The MASE partially composed the Overall Weighted Average (OWA), \n", - "used in the M4 Competition.\n", - "\n", - "$$ \\mathrm{MASE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\frac{|y_{\\tau}-\\hat{y}_{\\tau}|}{\\mathrm{MAE}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{season}_{\\tau})} $$\n", - "\n", - "**Parameters:**
\n", - "`seasonality`: int. Main frequency of the time series; Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Rob J. Hyndman, & Koehler, A. B. \"Another look at measures of forecast accuracy\".](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "[Spyros Makridakis, Evangelos Spiliotis, Vassilios Assimakopoulos, \"The M4 Competition: 100,000 time series and 61 forecasting methods\".](https://www.sciencedirect.com/science/article/pii/S0169207019301128)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MASE, name='MASE.__init__', title_level=3)" ] @@ -1296,53 +716,7 @@ "execution_count": null, "id": "32a2c11b", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__call__\n", - "\n", - "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mase`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L335){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MASE.__call__\n", - "\n", - "> MASE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> y_insample:torch.Tensor, mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mase`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MASE.__call__, name='MASE.__call__', title_level=3)" ] @@ -1429,69 +803,7 @@ "execution_count": null, "id": "edeb6f9a", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__init__\n", - "\n", - "> relMSE.__init__ (y_train, horizon_weight=None)\n", - "\n", - "Relative Mean Squared Error\n", - "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", - "as an alternative to percentage errors, to avoid measure unstability.\n", - "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", - "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", - "\n", - "**Parameters:**
\n", - "`y_train`: numpy array, Training values.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "- [Hyndman, R. J and Koehler, A. B. (2006).\n", - " \"Another look at measures of forecast accuracy\",\n", - " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", - " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L364){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__init__\n", - "\n", - "> relMSE.__init__ (y_train, horizon_weight=None)\n", - "\n", - "Relative Mean Squared Error\n", - "Computes Relative Mean Squared Error (relMSE), as proposed by Hyndman & Koehler (2006)\n", - "as an alternative to percentage errors, to avoid measure unstability.\n", - "$$ \\mathrm{relMSE}(\\mathbf{y}, \\mathbf{\\hat{y}}, \\mathbf{\\hat{y}}^{naive1}) =\n", - "\\frac{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}})}{\\mathrm{MSE}(\\mathbf{y}, \\mathbf{\\hat{y}}^{naive1})} $$\n", - "\n", - "**Parameters:**
\n", - "`y_train`: numpy array, Training values.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "- [Hyndman, R. J and Koehler, A. B. (2006).\n", - " \"Another look at measures of forecast accuracy\",\n", - " International Journal of Forecasting, Volume 22, Issue 4.](https://www.sciencedirect.com/science/article/pii/S0169207006000239)
\n", - "- [Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - " \"Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. \n", - " Submitted to the International Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(relMSE, name='relMSE.__init__', title_level=3)" ] @@ -1501,53 +813,7 @@ "execution_count": null, "id": "a317b5c5", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__call__\n", - "\n", - "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`relMSE`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L391){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### relMSE.__call__\n", - "\n", - "> relMSE.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor (batch_size, output_size), Actual values.
\n", - "`y_hat`: tensor (batch_size, output_size)), Predicted values.
\n", - "`y_insample`: tensor (batch_size, input_size), Actual insample Seasonal Naive predictions.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`relMSE`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(relMSE.__call__, name='relMSE.__call__', title_level=3)" ] @@ -1632,67 +898,7 @@ "execution_count": null, "id": "70bd46d9", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__init__\n", - "\n", - "> QuantileLoss.__init__ (q, horizon_weight=None)\n", - "\n", - "Quantile Loss\n", - "\n", - "Computes the quantile loss between `y` and `y_hat`.\n", - "QL measures the deviation of a quantile forecast.\n", - "By weighting the absolute deviation in a non symmetric way, the\n", - "loss pays more attention to under or over estimation.\n", - "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", - "\n", - "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", - "\n", - "**Parameters:**
\n", - "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L418){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__init__\n", - "\n", - "> QuantileLoss.__init__ (q, horizon_weight=None)\n", - "\n", - "Quantile Loss\n", - "\n", - "Computes the quantile loss between `y` and `y_hat`.\n", - "QL measures the deviation of a quantile forecast.\n", - "By weighting the absolute deviation in a non symmetric way, the\n", - "loss pays more attention to under or over estimation.\n", - "A common value for q is 0.5 for the deviation from the median (Pinball loss).\n", - "\n", - "$$ \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q)}_{\\tau}) = \\frac{1}{H} \\sum^{t+H}_{\\tau=t+1} \\Big( (1-q)\\,( \\hat{y}^{(q)}_{\\tau} - y_{\\tau} )_{+} + q\\,( y_{\\tau} - \\hat{y}^{(q)}_{\\tau} )_{+} \\Big) $$\n", - "\n", - "**Parameters:**
\n", - "`q`: float, between 0 and 1. The slope of the quantile loss, in the context of quantile regression, the q determines the conditional quantile level.
\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(QuantileLoss, name='QuantileLoss.__init__', title_level=3)" ] @@ -1702,51 +908,7 @@ "execution_count": null, "id": "0b1588e9", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__call__\n", - "\n", - "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`quantile_loss`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L445){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### QuantileLoss.__call__\n", - "\n", - "> QuantileLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies datapoints to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`quantile_loss`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(QuantileLoss.__call__, name='QuantileLoss.__call__', title_level=3)" ] @@ -1918,87 +1080,7 @@ "execution_count": null, "id": "8f42ec82", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__init__\n", - "\n", - "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", - "\n", - "Multi-Quantile loss\n", - "\n", - "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", - "MQL calculates the average multi-quantile Loss for\n", - "a given set of quantiles, based on the absolute \n", - "difference between predicted quantiles and observed values.\n", - "\n", - "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", - "\n", - "The limit behavior of MQL allows to measure the accuracy \n", - "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", - "the continuous ranked probability score (CRPS). This can be achieved \n", - "through a numerical integration technique, that discretizes the quantiles \n", - "and treats the CRPS integral with a left Riemann approximation, averaging over \n", - "uniformly distanced quantiles. \n", - "\n", - "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", - "\n", - "**Parameters:**
\n", - "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", - "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", - "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L494){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__init__\n", - "\n", - "> MQLoss.__init__ (level=[80, 90], quantiles=None, horizon_weight=None)\n", - "\n", - "Multi-Quantile loss\n", - "\n", - "Calculates the Multi-Quantile loss (MQL) between `y` and `y_hat`.\n", - "MQL calculates the average multi-quantile Loss for\n", - "a given set of quantiles, based on the absolute \n", - "difference between predicted quantiles and observed values.\n", - "\n", - "$$ \\mathrm{MQL}(\\mathbf{y}_{\\tau},[\\mathbf{\\hat{y}}^{(q_{1})}_{\\tau}, ... ,\\hat{y}^{(q_{n})}_{\\tau}]) = \\frac{1}{n} \\sum_{q_{i}} \\mathrm{QL}(\\mathbf{y}_{\\tau}, \\mathbf{\\hat{y}}^{(q_{i})}_{\\tau}) $$\n", - "\n", - "The limit behavior of MQL allows to measure the accuracy \n", - "of a full predictive distribution $\\mathbf{\\hat{F}}_{\\tau}$ with \n", - "the continuous ranked probability score (CRPS). This can be achieved \n", - "through a numerical integration technique, that discretizes the quantiles \n", - "and treats the CRPS integral with a left Riemann approximation, averaging over \n", - "uniformly distanced quantiles. \n", - "\n", - "$$ \\mathrm{CRPS}(y_{\\tau}, \\mathbf{\\hat{F}}_{\\tau}) = \\int^{1}_{0} \\mathrm{QL}(y_{\\tau}, \\hat{y}^{(q)}_{\\tau}) dq $$\n", - "\n", - "**Parameters:**
\n", - "`level`: int list [0,100]. Probability levels for prediction intervals (Defaults median).\n", - "`quantiles`: float list [0., 1.]. Alternative to level, quantiles to estimate from y distribution.\n", - "`horizon_weight`: Tensor of size h, weight for each timestamp of the forecasting window.
\n", - "\n", - "**References:**
\n", - "[Roger Koenker and Gilbert Bassett, Jr., \"Regression Quantiles\".](https://www.jstor.org/stable/1913643)
\n", - "[James E. Matheson and Robert L. Winkler, \"Scoring Rules for Continuous Probability Distributions\".](https://www.jstor.org/stable/2629907)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MQLoss, name='MQLoss.__init__', title_level=3)" ] @@ -2008,51 +1090,7 @@ "execution_count": null, "id": "bac2237a", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__call__\n", - "\n", - "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mqloss`: tensor (single value)." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L568){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### MQLoss.__call__\n", - "\n", - "> MQLoss.__call__ (y:torch.Tensor, y_hat:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "**Parameters:**
\n", - "`y`: tensor, Actual values.
\n", - "`y_hat`: tensor, Predicted values.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns:**
\n", - "`mqloss`: tensor (single value)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(MQLoss.__call__, name='MQLoss.__call__', title_level=3)" ] @@ -2071,17 +1109,7 @@ "execution_count": null, "id": "da37f2ef", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check MQLoss' stored quantiles\n", @@ -2626,99 +1654,7 @@ "execution_count": null, "id": "a462101b", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__init__\n", - "\n", - "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> **distribution_kwargs)\n", - "\n", - "DistributionLoss\n", - "\n", - "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", - "interact with NeuralForecast models modularly. It shares the negative \n", - "log-likelihood as the optimization objective and a sample method to \n", - "generate empirically the quantiles defined by the `level` list.\n", - "\n", - "Additionally, it implements a distribution transformation that factorizes the\n", - "scale-dependent likelihood parameters into a base scale and a multiplier \n", - "efficiently learnable within the network's non-linearities operating ranges.\n", - "\n", - "Available distributions:
\n", - "- Poisson
\n", - "- Normal
\n", - "- StudentT
\n", - "- NegativeBinomial
\n", - "- Tweedie
\n", - "- Bernoulli (Temporal Classifiers)\n", - "\n", - "**Parameters:**
\n", - "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", - "\n", - "**References:**
\n", - "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", - "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", - " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L913){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__init__\n", - "\n", - "> DistributionLoss.__init__ (distribution, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> **distribution_kwargs)\n", - "\n", - "DistributionLoss\n", - "\n", - "This PyTorch module wraps the `torch.distribution` classes allowing it to \n", - "interact with NeuralForecast models modularly. It shares the negative \n", - "log-likelihood as the optimization objective and a sample method to \n", - "generate empirically the quantiles defined by the `level` list.\n", - "\n", - "Additionally, it implements a distribution transformation that factorizes the\n", - "scale-dependent likelihood parameters into a base scale and a multiplier \n", - "efficiently learnable within the network's non-linearities operating ranges.\n", - "\n", - "Available distributions:
\n", - "- Poisson
\n", - "- Normal
\n", - "- StudentT
\n", - "- NegativeBinomial
\n", - "- Tweedie
\n", - "- Bernoulli (Temporal Classifiers)\n", - "\n", - "**Parameters:**
\n", - "`distribution`: str, identifier of a torch.distributions.Distribution class.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`num_samples`: int=500, number of samples for the empirical quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.

\n", - "\n", - "**References:**
\n", - "- [PyTorch Probability Distributions Package: StudentT.](https://pytorch.org/docs/stable/distributions.html#studentt)
\n", - "- [David Salinas, Valentin Flunkert, Jan Gasthaus, Tim Januschowski (2020).\n", - " \"DeepAR: Probabilistic forecasting with autoregressive recurrent networks\". International Journal of Forecasting.](https://www.sciencedirect.com/science/article/pii/S0169207019301888)
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DistributionLoss, name='DistributionLoss.__init__', title_level=3)" ] @@ -2728,65 +1664,7 @@ "execution_count": null, "id": "d8c367f8", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.sample\n", - "\n", - "> DistributionLoss.sample (distr_args:torch.Tensor,\n", - "> num_samples:Optional[int]=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1040){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.sample\n", - "\n", - "> DistributionLoss.sample (distr_args:torch.Tensor,\n", - "> num_samples:Optional[int]=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrite number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DistributionLoss.sample, name='DistributionLoss.sample', title_level=3)" ] @@ -2796,75 +1674,7 @@ "execution_count": null, "id": "04e32679", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__call__\n", - "\n", - "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Computes the negative log-likelihood objective function. \n", - "To estimate the following predictive distribution:\n", - "\n", - "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", - "\n", - "where $\\theta$ represents the distributions parameters. It aditionally \n", - "summarizes the objective signal using a weighted average using the `mask` tensor. \n", - "\n", - "**Parameters**
\n", - "`y`: tensor, Actual values.
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns**
\n", - "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1083){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DistributionLoss.__call__\n", - "\n", - "> DistributionLoss.__call__ (y:torch.Tensor, distr_args:torch.Tensor,\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Computes the negative log-likelihood objective function. \n", - "To estimate the following predictive distribution:\n", - "\n", - "$$\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta) \\quad \\mathrm{and} \\quad -\\log(\\mathrm{P}(\\mathbf{y}_{\\tau}\\,|\\,\\theta))$$\n", - "\n", - "where $\\theta$ represents the distributions parameters. It aditionally \n", - "summarizes the objective signal using a weighted average using the `mask` tensor. \n", - "\n", - "**Parameters**
\n", - "`y`: tensor, Actual values.
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`mask`: tensor, Specifies date stamps per serie to consider in loss.
\n", - "\n", - "**Returns**
\n", - "`loss`: scalar, weighted loss function against which backpropagation will be performed.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DistributionLoss.__call__, name='DistributionLoss.__call__', title_level=3)" ] @@ -2874,17 +1684,7 @@ "execution_count": null, "id": "14a7e381", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check DistributionLoss' stored quantiles\n", @@ -2964,42 +1764,35 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " lambda_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", - " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", - " self.param_names = [i for j in zip(lambda_names, weight_names) for i in j]\n", + " self.param_names = [f\"-lambda-{i}\" for i in range(1, n_components + 1)]\n", " self.output_names = self.output_names + self.param_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 2 * n_components\n", + " self.outputsize_multiplier = n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " lambdas, weights = output.chunk(2, dim=-1)\n", - " return (lambdas, weights)\n", - "\n", - " def scale_decouple(\n", - " self,\n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " ):\n", - " \"\"\"Scale Decouple\n", + " return (output,)#, weights\n", + " \n", + " def scale_decouple(self, \n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None):\n", + " \"\"\" Scale Decouple\n", 
"\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " lambdas, weights = output\n", - " weights = F.softmax(weights, dim=-1)\n", - "\n", + " lambdas = output[0]\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(lambdas.size(dim=0), 1, -1)\n", " scale = scale.view(lambdas.size(dim=0), 1, -1)\n", " lambdas = (lambdas * scale) + loc\n", " lambdas = F.softplus(lambdas)\n", - " return (lambdas, weights)\n", + " return (lambdas,)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -3021,10 +1814,15 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", "\n", - " lambdas, weights = distr_args\n", + " lambdas = distr_args[0]\n", " B, H, K = lambdas.size()\n", " Q = len(self.quantiles)\n", "\n", + " # Sample K ~ Mult(weights)\n", + " # shared across B, H\n", + " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", + " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", + "\n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " lambdas = lambdas.flatten() \n", @@ -3062,7 +1860,7 @@ " \n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", " if mask is None: \n", " mask = (y > 0) * 1\n", @@ -3070,9 +1868,11 @@ " mask = mask * ((y > 0) * 1)\n", "\n", " eps = 1e-10\n", - " lambdas, weights = distr_args\n", + " lambdas = distr_args[0]\n", " B, H, K = lambdas.size()\n", "\n", + " weights = (1/K) * torch.ones_like(lambdas, device=lambdas.device)\n", + "\n", " y = y[:,:,None]\n", " mask = mask[:,:,None]\n", "\n", @@ -3097,7 +1897,7 @@ " return loss\n", "\n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, 
torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)\n" @@ -3108,83 +1908,7 @@ "execution_count": null, "id": "62d7daba", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__init__\n", - "\n", - "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> batch_correlation=False, horizon_correlation=False)\n", - "\n", - "Poisson Mixture Mesh\n", - "\n", - "This Poisson Mixture statistical model assumes independence across groups of \n", - "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", - "\n", - "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", - "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", - "\\prod_{\\beta\\in[g_{i}]} \n", - "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", - "\n", - "**Parameters:**
\n", - "`n_components`: int=10, the number of mixture components.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", - "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", - "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", - "\n", - "**References:**
\n", - "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", - "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1117){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__init__\n", - "\n", - "> PMM.__init__ (n_components=10, level=[80, 90], quantiles=None,\n", - "> num_samples=1000, return_params=False,\n", - "> batch_correlation=False, horizon_correlation=False)\n", - "\n", - "Poisson Mixture Mesh\n", - "\n", - "This Poisson Mixture statistical model assumes independence across groups of \n", - "data $\\mathcal{G}=\\{[g_{i}]\\}$, and estimates relationships within the group.\n", - "\n", - "$$ \\mathrm{P}\\left(\\mathbf{y}_{[b][t+1:t+H]}\\right) = \n", - "\\prod_{ [g_{i}] \\in \\mathcal{G}} \\mathrm{P} \\left(\\mathbf{y}_{[g_{i}][\\tau]} \\right) =\n", - "\\prod_{\\beta\\in[g_{i}]} \n", - "\\left(\\sum_{k=1}^{K} w_k \\prod_{(\\beta,\\tau) \\in [g_i][t+1:t+H]} \\mathrm{Poisson}(y_{\\beta,\\tau}, \\hat{\\lambda}_{\\beta,\\tau,k}) \\right)$$\n", - "\n", - "**Parameters:**
\n", - "`n_components`: int=10, the number of mixture components.
\n", - "`level`: float list [0,100], confidence levels for prediction intervals.
\n", - "`quantiles`: float list [0,1], alternative to level list, target quantiles.
\n", - "`return_params`: bool=False, wether or not return the Distribution parameters.
\n", - "`batch_correlation`: bool=False, wether or not model batch correlations.
\n", - "`horizon_correlation`: bool=False, wether or not model horizon correlations.
\n", - "\n", - "**References:**
\n", - "[Kin G. Olivares, O. Nganba Meetei, Ruijun Ma, Rohan Reddy, Mengfei Cao, Lee Dicker. \n", - "Probabilistic Hierarchical Forecasting with Deep Poisson Mixtures. Submitted to the International \n", - "Journal Forecasting, Working paper available at arxiv.](https://arxiv.org/pdf/2110.13179.pdf)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(PMM, name='PMM.__init__', title_level=3)" ] @@ -3194,63 +1918,7 @@ "execution_count": null, "id": "fa8da65c", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.sample\n", - "\n", - "> PMM.sample (distr_args, num_samples=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1206){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.sample\n", - "\n", - "> PMM.sample (distr_args, num_samples=None)\n", - "\n", - "Construct the empirical quantiles from the estimated Distribution,\n", - "sampling from it `num_samples` independently.\n", - "\n", - "**Parameters**
\n", - "`distr_args`: Constructor arguments for the underlying Distribution type.
\n", - "`loc`: Optional tensor, of the same shape as the batch_shape + event_shape\n", - " of the resulting distribution.
\n", - "`scale`: Optional tensor, of the same shape as the batch_shape+event_shape \n", - " of the resulting distribution.
\n", - "`num_samples`: int=500, overwrites number of samples for the empirical quantiles.
\n", - "\n", - "**Returns**
\n", - "`samples`: tensor, shape [B,H,`num_samples`].
\n", - "`quantiles`: tensor, empirical quantiles defined by `levels`.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(PMM.sample, name='PMM.sample', title_level=3)" ] @@ -3260,39 +1928,7 @@ "execution_count": null, "id": "ba75717c", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__call__\n", - "\n", - "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Call self as a function." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/losses/pytorch.py#L1305){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### PMM.__call__\n", - "\n", - "> PMM.__call__ (y:torch.Tensor, distr_args:Tuple[torch.Tensor],\n", - "> mask:Optional[torch.Tensor]=None)\n", - "\n", - "Call self as a function." 
- ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(PMM.__call__, name='PMM.__call__', title_level=3)" ] @@ -3311,17 +1947,7 @@ "execution_count": null, "id": "e4a20e21", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -3345,43 +1971,11 @@ "execution_count": null, "id": "a56a2fbe", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "lambdas.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", - "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", - "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9FUlEQVR4nO3de1hU1foH8O8GhuGm4IDcFBHF+/2SijcgA8PUTI+WWoGHvOQtw7TQU4BxpEOllpblJTDL7KamZiqWoh21BOV4ySwLUE8QKQoICiOs3x/+2MdxuM0wwzDM9/M88zzM2muv9a4ZYN5Ze++1JSGEABEREVkUK1MHQERERA2PCQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICYCJ/fDDD3jsscfQpk0bKJVKeHh4ICAgAAsXLtSoFxQUhKCgIKPHI0kSYmNjDdZe27ZtMXr0aIO1V5NDhw5BkiQcOnSoQfrTVVBQECRJgiRJsLKyQrNmzeDv74+JEyfiiy++QEVFhdY+bdu2RUREhE79HD16FLGxsbhx44ZO+93fV+Xr+cUXX+jUTk1KSkoQGxtb5XuUnJwMSZKQlZVlsP6IqHo2pg7Akn399dcYO3YsgoKCkJiYCC8vL+Tk5CAtLQ1bt27Fm2++Kdd99913TRipeejbty+OHTuGrl27mjqUarVr1w4ff/wxAKC4uBiZmZnYsWMHJk6ciGHDhmHXrl1wdnaW62/fvh3NmzfXqY+jR48iLi4OERERcHFxqfN++vSlq5KSEsTFxQGAVkL7yCOP4NixY/Dy8jJqDER0FxMAE0pMTISfnx/27dsHG5v/vRVPPPEEEhMTNeo25g81U1Or1ZAkCc2bN8egQYNMHU6N7O3ttWJ85plnkJSUhL///e+YMWMGPv30U3lbnz59jB7TrVu3YG9v3yB91aRly5Zo2bKlSWMgsiQ8BGBC165dg5ubm8aHfyUrK8235v5DAFlZWZAkCW+88QZWrFgBPz8/ODk5ISAgAMePH9dqb/369ejYsSOUSiW6du2KLVu2ICIiAm3btq01ztzcXMycOROtW7eGra0t/Pz8EBcXhzt37tR5rHv37kXfvn1hb2+Pzp0744MPPtCqc/bsWTz66KNo0aIF7Ozs0Lt3b2zatEmjTuW09ObNm7Fw4UK0atUKSqUSFy9e1DoEUPkaVfe41wcffIBevXrBzs4OKpUKjz32GM6fP69RJyIiAk5OTrh48SJGjRoFJycn+Pj4YOHChSgtLa3za1GVadOmYdSoUfj888+RnZ0tl98/LV9RUYH4+Hh06tQJ9vb2cHFxQc+ePfHWW28BAGJjY7Fo0SIAgJ+fnzzWytek8pDMtm3b0KdPH9jZ2cnfyKs73HD79m1ERUXB09MT9vb2CAwMxKlTpzTqVHeI6t7fsaysLPkDPi4uTo6tss/qDgEY+r1Zu3YtevXqBScnJzRr1gydO3fGkiVLtGInauo4A2BCAQEB2LBhA+bPn4+pU6eib9++UCgUOrXxzjvvoHPnzli1ahUA4OWXX8aoUaOQmZkpTyWvW7cOM2fOxIQJE7By5UoUFBQgLi6uTh9aubm5GDBgAKysrPDKK6+gffv2OHbsGOLj45GVlYWkpKRa2/jPf/6DhQsX4qWXXoKHhwc2bNiAyMhI+Pv7Y/jw4QCACxcuYPDgwXB3d8fbb78NV1dXfPTRR4iIiMCff/6JxYsXa7QZHR2NgIAAvPfee7CysoK7uztyc3M16nh5eeHYsWMaZX/99ReefPJJtGrVSi5LSEjAkiVLMHnyZCQkJODatWuIjY1FQEAATpw4gQ4dOsh11Wo1xo4di8jISCxcuBCHDx/Gq6++CmdnZ7zyyiu1vhY1GTt2LPbs2YMjR47A19e3yjqJiYmIjY3FP/7xDwwfPhxqtRo///yzfLz/mWeeQX5+PlavXo1t27bJ0+n3ziCdPHkS58+fxz/+8Q/
4+fnB0dGxxriWLFmCvn37YsOGDSgoKEBsbCyCgoJw6tQptGvXrs7j8/Lywt69e/Hwww8jMjISzzzzDADU+K3f0O/N1q1bMXv2bMybNw9vvPEGrKyscPHiRfz00091HgdRkyHIZK5evSqGDh0qAAgAQqFQiMGDB4uEhARRVFSkUTcwMFAEBgbKzzMzMwUA0aNHD3Hnzh25/McffxQAxCeffCKEEKK8vFx4enqKgQMHarSXnZ0tFAqF8PX11SgHIGJiYuTnM2fOFE5OTiI7O1uj3htvvCEAiHPnztU4Rl9fX2FnZ6ex/61bt4RKpRIzZ86Uy5544gmhVCrFpUuXNPYPCwsTDg4O4saNG0IIIQ4ePCgAiOHDh2v1Vbnt4MGDVcZSXFwsBgwYILy8vERWVpYQQojr168Le3t7MWrUKI26ly5dEkqlUkyZMkUuCw8PFwDEZ599plF31KhRolOnTjW+DkLcfQ+7detW7fZvvvlGABD/+te/5DJfX18RHh4uPx89erTo3bt3jf28/vrrAoDIzMzU2ubr6yusra3FhQsXqtx2b1+Vr2ffvn1FRUWFXJ6VlSUUCoV45plnNMZ27+9npfDwcI3fsb/++kvrd6xSUlKSRtzGeG/mzp0rXFxctPomskQ8BGBCrq6uOHLkCE6cOIHXXnsNjz76KH755RdER0ejR48euHr1aq1tPPLII7C2tpaf9+zZEwDkaeQLFy4gNzcXkyZN0tivTZs2GDJkSK3t7969G8HBwfD29sadO3fkR1hYGAAgNTW11jZ69+6NNm3ayM/t7OzQsWNHjanu7777DiNGjICPj4/GvhERESgpKdH6Jj9hwoRa+71XeXk5Hn/8cZw/fx579uyRv2EfO3YMt27d0pr69vHxwYMPPohvv/1Wo1ySJIwZM0ajrGfPnhpj0ZcQotY6AwYMwH/+8x/Mnj0b+/btQ2Fhoc799OzZEx07dqxz/SlTpmgcMvH19cXgwYNx8OBBnfvWhTHemwEDBuDGjRuYPHkyvvrqqzr9jRE1VUwAGoH+/fvjxRdfxOeff44//vgDzz//PLKysrROBKyKq6urxnOlUgng7oldwN3zDADAw8NDa9+qyu73559/YteuXVAoFBqPbt26AUCd/oHeH2NlnJUxVsZZ1dnf3t7eGuOopOuZ4rNmzcLevXvxxRdfoHfv3hr9Vteet7e3Vr8ODg6ws7PTGsvt27d1iqcqlR9UlWOuSnR0NN544w0cP34cYWFhcHV1xYgRI5CWllbnfnR97Tw9Passu/+1MTRjvDdPPfUUPvjgA2RnZ2PChAlwd3fHwIEDkZKSYoQREDVuTAAaGYVCgZiYGAB3T4qrr8oP3z///FNr2/3HzKvi5uaG0NBQnDhxospHZGRkvWOsjDMnJ0er/I8//pDjuNf9J/HVJDY2Fhs2bMD69esRGhqq1S+Aavu+v19j2rlzJyRJks+LqIqNjQ2ioqJw8uRJ5Ofn45NPPsHly5cxcuRIlJSU1KkfXV47oOrfk9zcXI3Ezs7OrspzSurzDdtY7820adNw9OhRFBQU4Ouvv4YQAqNHjzbILA6ROWECYEJV/WMDIJ/hXNM3wbrq1KkTPD098dlnn2mUX7p0CUePHq11/9GjR+Ps2bNo3749+vfvr/UwRIwAMGLECHz33XfyB36lDz/8EA4ODnpf3rdx40bExcVh2bJlVZ7hHhAQAHt7e3z00Uca5VeuXJEPSzSEpKQkfPPNN5g8ebLG4ZKauLi44G9/+xvmzJmD/Px8+ez5+2eB6uuTTz7RODyRnZ2No0ePapz137ZtW/zyyy8aScC1a9e0fsd0ic3Y742joyPCwsKwdOlSlJWV4dy5c/Vqj8jc8CoAExo5ciRat26NMWPGoHPnzqioqEBGRgbefPNNODk54bnnnqt3H1ZWVoiLi8PMmTPxt7/9DX//+99x48YNxMXFwcvLS+tyw/stW7YMKSkpGDx4MObPn49OnTrh9u3byMrKwp49e/D
ee++hdevW9Y4zJiZGPt/glVdegUqlwscff4yvv/4aiYmJGovj1NWxY8cwa9YsDBkyBCEhIVqXRw4aNAguLi54+eWXsWTJEjz99NOYPHkyrl27hri4ONjZ2cmzMYZy69YtOY5bt27h999/x44dO7B7924EBgbivffeq3H/MWPGoHv37ujfvz9atmyJ7OxsrFq1Cr6+vvIZ8T169AAAvPXWWwgPD4dCoUCnTp3QrFkzvWLOy8vDY489hunTp6OgoAAxMTGws7NDdHS0XOepp57C+++/jyeffBLTp0/HtWvXkJiYqLWwULNmzeDr64uvvvoKI0aMgEqlgpubW5WXoxrjvZk+fTrs7e0xZMgQeHl5ITc3FwkJCXB2dsYDDzygc3tEZs3EJyFatE8//VRMmTJFdOjQQTg5OQmFQiHatGkjnnrqKfHTTz9p1K3uKoDXX39dq11UcZb1unXrhL+/v7C1tRUdO3YUH3zwgXj00UdFnz59at33r7/+EvPnzxd+fn5CoVAIlUol+vXrJ5YuXSpu3rxZ4xh9fX3FI488olVe1VnjZ86cEWPGjBHOzs7C1tZW9OrVSyQlJWnUqTwz/fPPP9dq8/6rACrPKq/uca8NGzaInj17CltbW+Hs7CweffRRrSscwsPDhaOjo1a/MTExWu1VJTAwUKN/R0dH0a5dO/G3v/1NfP7556K8vFxrn/vPzH/zzTfF4MGDhZubm7C1tRVt2rQRkZGR8lUNlaKjo4W3t7ewsrLSeE2qez+q6qvy9dy8ebOYP3++aNmypVAqlWLYsGEiLS1Na/9NmzaJLl26CDs7O9G1a1fx6aefal0FIIQQBw4cEH369BFKpVIAkPu8/yqASoZ8bzZt2iSCg4OFh4eHsLW1Fd7e3mLSpEni9OnTVb4mRE2ZJEQdTj2mJufGjRvo2LEjxo0bh3Xr1pk6HCIiamA8BGABcnNz8c9//hPBwcFwdXVFdnY2Vq5ciaKiIoMcZiAiIvPDBMACKJVKZGVlYfbs2cjPz5dPqnvvvffky/mIiMiy8BAAERGRBeJlgERERBaICQAREZEF4jkAuHuL1T/++APNmjXTeZU0IiJ9CSFQVFQEb2/vWtfkIDI0JgC4u6zo/TehISJqKJcvXzbIglpEumACAMgrpF2+fFlr5bLqqNVq7N+/H6GhoVAoFMYMz2Sa+hib+vgAjrGxKywshI+Pj96rNBLVBxMA/O/mKM2bN9cpAXBwcEDz5s3N7p9OXTX1MTb18QEco7ngoUcyBR50IiIiskBMAIiIiCwQEwAiIiILZNJzABISErBt2zb8/PPPsLe3x+DBg/Gvf/0LnTp1kusIIRAXF4d169bh+vXrGDhwIN555x2NJWxLS0vxwgsv4JNPPsGtW7cwYsQIvPvuuzyrlojMXkVFBcrKykwdBpkBhUIBa2vrOtc3aQKQmpqKOXPm4IEHHsCdO3ewdOlShIaG4qeffoKjoyMAIDExEStWrEBycjI6duyI+Ph4hISE4MKFC/KZswsWLMCuXbuwdetWuLq6YuHChRg9ejTS09N1ejGIiBqTsrIyZGZmoqKiwtShkJlwcXGBp6dnnU4sNWkCsHfvXo3nSUlJcHd3R3p6OoYPHw4hBFatWoWlS5di/PjxAIBNmzbBw8MDW7ZswcyZM1FQUICNGzdi8+bNeOihhwAAH330EXx8fHDgwAGMHDmywcdFRFRfQgjk5OTA2toaPj4+XCiIaiSEQElJCfLy8gAAXl5ete7TqC4DLCgoAACoVCoAQGZmJnJzcxEaGirXUSqVCAwMxNGjRzFz5kykp6dDrVZr1PH29kb37t1x9OjRKhOA0tJSlJaWys8LCwsB3L2cSK1W1ynWynp1rW+OmvoYm/r4AI6xsasp5jt37qCkpATe3t5wcHBowKjIXNnb2wMA8vLy4O7uXusMeKNJAIQQiIqKwtChQ9G9e3cAd+9jDwAeHh4adT08PJCdnS3XsbW1RYsWLbTqVO5/v4SEBMT
FxWmV79+/X+c/tJSUFJ3qm6OmPsamPj6AY2ysSkpKqt1WXl4OALC1tW2ocKgJqPwMU6vV5pMAzJ07F6dPn8b333+vte3+YxlCiFqPb9RUJzo6GlFRUfLzytW4QkNDdVoIKCUlBSEhIWaz+Mg7By/qVF8S5Wh7+zdk2bXH7Ac71b6DmTHH91BXHGPjVjn7WBMuEkS60OX3pVEkAPPmzcPOnTtx+PBhjTP3PT09Adz9ln/v8Yy8vDx5VsDT0xNlZWW4fv26xixAXl4eBg8eXGV/SqUSSqVSq1yhUOj8D0SffUxFSPqdECkka7MZoz7M6T3UF8fYOJlbvNS0mPSsEiEE5s6di23btuG7776Dn5+fxnY/Pz94enpqTO2VlZUhNTVV/nDv168fFAqFRp2cnBycPXu22gSAiIjI0pl0BmDOnDnYsmULvvrqKzRr1kw+Zu/s7Ax7e3tIkoQFCxZg+fLl6NChAzp06IDly5fDwcEBU6ZMketGRkZi4cKFcHV1hUqlwgsvvIAePXrIVwUQERGRJpMmAGvXrgUABAUFaZQnJSUhIiICALB48WLcunULs2fPlhcC2r9/v8bds1auXAkbGxtMmjRJXggoOTmZawAQUZOzMuWXBu3v+ZCODdpfXQQFBaF3795YtWqVqUMxayY/BFDVo/LDH7h7QkNsbCxycnJw+/ZtpKamylcJVLKzs8Pq1atx7do1lJSUYNeuXfDx8Wng0RAREQBERERg3LhxWuWHDh2CJEm4ceNGvdrftm0bXn311Xq1YQ4OHz6MMWPGwNvbG5IkYceOHQZtnytLEBGRWahcElmlUmnMApuboKAgJCcn11qvuLgYvXr1wpo1a4wSBxMAIiIyidLSUsyfPx/u7u6ws7PD0KFDceLECXl7UFAQ5s6di6ioKLi5uSEkJEQuX7BgAQAgKysLkiRpPSoPLdfWR2V78+fPx+LFi6FSqeDp6YnY2NgaYx87dmyV/UqShJ07dxrk9QkLC0N8fLy8Eq6hMQEgIiKTWLx4Mb788kts2rQJJ0+ehL+/P0aOHIn8/Hy5zqZNm2BjY4N///vfeP/997Xa8PHxQU5Ojvw4deoUXF1dMXz48Dr3UdmPo6MjfvjhByQmJmLZsmU1Li6VlJSEnJwc/PrrrwCAPXv2yDGMGjXKEC+P0TWKdQCIiKhp2b17N5ycnDTKKlc3BO5Ob69duxbJyckICwsDAKxfvx4pKSnYuHEjFi1aBADw9/dHYmJitf1YW1vLa8bcvn0b48aNQ0BAAGJjY+vcBwD07NkTMTExAIAOHTpgzZo1+Pbbb+VZh/u5uroCAI4dOwZJkjB06FCzOyzBGQAiIjK44OBgZGRkaDw2bNggb//tt9+gVqsxZMgQuUyhUGDAgAE4f/68XNa/f/869xkZGYmioiJs2bIFVlZWde4DuJsA3MvLy0u+sU5NTp8+jbZt29b44b98+XI4OTnJjyNHjmDWrFlaZQ2NMwBERGRwjo6O8Pf31yi7cuWK/LMQAkDtS71X3hq+NvHx8di7dy9+/PFH+cO4rn0A2qsySpJUp9swnz59Wit5uN+sWbMwadIk+fnUqVMxYcIEjWP7rVq1qrUvQ+MMABERNTh/f3/Y2tpq3P9FrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp5rvlaJSqeDv7y8/7O3t4e7urlXW0DgDQEREDc7R0RHPPvssFi1aBJVKhTZt2iAxMRElJSWIjIyscztnz57F008/jRdffBHdunWTV5S1tbWFSqUySB81qaioQHZ2Nq5cuYJWrVoZ9OZNN2/exMWL/7uJW2ZmJjIyMuSx1BcTACIiM9IYV+bT12uvvYaKigo89dRTKCoqQv/+/bFv3z6t27vXJC0tDSUlJYiPj0d8fLxcHhgYiEOHDhmkj5rMnz8fM2bMQOfOnVFYWGjQBCAtLQ3BwcHy88q72IaHh9dpHYHaSKLyIIkFKywshLOzMwoKCnS6HfC
ePXswatQos7mjl65LiEqiHH63fkGmfUcsCDXMdFljYo7voa44xsatpv89t2/fRmZmJvz8/GBnZ2eiCMnc6PJ7w3MAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQFwJ0ER0XZSnUlNaBYyIiEyHMwBEREQWiDMARETm5GBCw/YXHN2w/dVBUFAQevfujVWrVpk6FLPGGQAiIjKoiIgIjBs3Tqv80KFDkCQJN27cqFf727Ztw6uvvlqvNsxBQkICHnjgATRr1gzu7u4YN24cLly4YLD2mQAQEZFZKCsrAwCoVCo0a9bMxNHoLygoqE5380tNTcWcOXNw/PhxpKSk4M6dOwgNDUVxcbFB4mACQEREJlFaWor58+fD3d0ddnZ2GDp0KE6cOCFvDwoKwty5cxEVFQU3NzeEhITI5QsWLAAAZGVlQZIkrUdQUFCd+qhsb/78+Vi8eDFUKhU8PT0RGxtbY+xjx46tsl9JkrBz506DvD579+5FREQEunXrhl69eiEpKQmXLl1Cenq6QdpnAkBERCaxePFifPnll9i0aRNOnjwJf39/jBw5Evn5+XKdTZs2wcbGBv/+97/x/vvva7Xh4+ODnJwc+XHq1Cm4urpi+PDhde6jsh9HR0f88MMPSExMxLJly5CSklJt7ElJScjJycGvv/4KANizZ48cw6hRowzx8mgpKCgAcHcGxBB4EiARERnc7t274eTkpFFWXl4u/1xcXIy1a9ciOTkZYWFhAID169cjJSUFGzduxKJFiwAA/v7+SExMrLYfa2treHp6AgBu376NcePGISAgALGxsXXuAwB69uyJmJgYAECHDh2wZs0afPvtt/Ksw/1cXV0BAMeOHYMkSRg6dKhRD0sIIRAVFYWhQ4eie/fuBmmTMwBERGRwwcHByMjI0Hhs2LBB3v7bb79BrVZjyJAhcplCocCAAQNw/vx5uax///517jMyMhJFRUXYsmULrKys6twHcDcBuJeXlxfy8vJq7fP06dNo27ZtjR/+y5cvh5OTk/w4cuQIZs2apVVWk7lz5+L06dP45JNPao2prjgDQEREBufo6Ah/f3+NsitXrsg/CyEAAJIkadQRQmiUOTo61qm/+Ph47N27Fz/++KP8YVzXPoC7icG9JElCRUVFrf2ePn1aK3m436xZszBp0iT5+dSpUzFhwgSMHz9eLmvVqlW1+8+bNw87d+7E4cOH0bp161pjqivOABARUYPz9/eHra0tvv/+e7lMrVYjLS0NXbp00amtL7/8EsuWLcNnn32G9u3bG6WP6mRlZaFTp0411lGpVPD395cf9vb2cHd31yq7nxACc+fOxbZt2/Ddd9/Bz8/PIDFX4gwAERE1OEdHRzz77LNYtGgRVCoV2rRpg8TERJSUlCAyMrLO7Zw9exZPP/00XnzxRXTr1g25ubkAAFtbW6hUKoP0UZOKigpkZ2fjypUraNWqldbMQn3MmTMHW7ZswVdffYVmzZrJY3N2dq4yYdAVEwAiInPSCFfm09drr72GiooKPPXUUygqKkL//v2xb98+tGjRos5tpKWloaSkBPHx8YiPj5fLAwMDcejQIYP0UZP58+djxowZ6Ny5MwoLCw2aAKxduxYA5EsaKyUlJSEiIqLe7TMBICIig6pukZugoCD5uDwA2NnZ4e2338bbb79dZf1Dhw7VWh4REVHjh2FtfVTXz44dO6qtf6+wsDBcvny5TnVr6q8q975WxsBzAIiIiCwQEwAiIiILxEMAZHQrU37Ra7/nQzoaOBIiIqrEGQAiIiILxBkAavyMdf9zYQWgs3HaJjIQY58IRk1LXRYvqsQEgIioEVIoFJAkCX/99Rdatmxp0MvLqOkRQqCsrAx//fUXrKysYGtrW+s+TACIiBoha2trtG7dGleuXEFWVpapwyEz4eDggDZt2sDKqvYj/EwAiIgaKScnJ3To0AFqtdrUoZAZsLa2ho2NTZ1
ni5gAEBE1YtbW1rC2tjZ1GNQE8SoAIiIiC8QEgIiIyAKZNAE4fPgwxowZA29vb0iSpLX2ckREBCRJ0ngMGjRIo05paSnmzZsHNzc3ODo6YuzYsRr3nCYiIiJtJk0AiouL0atXL6xZs6baOg8//DBycnLkx549ezS2L1iwANu3b8fWrVvx/fff4+bNmxg9ejTKy8uNHT4REZHZMulJgGFhYQgLC6uxjlKphKenZ5XbCgoKsHHjRmzevBkPPfQQAOCjjz6Cj48PDhw4gJEjR1a5X2lpKUpLS+XnhYWFAAC1Wl3ns20r6+l7dq4k9EtQ6nM2sK59VtaXRHmD9ltJ7lMYJ09V/3+7TfkM6/r+npoDcx6jOcZMTYckGskyU5IkYfv27Rg3bpxcFhERgR07dsDW1hYuLi4IDAzEP//5T7i7uwMAvvvuO4wYMQL5+fka93bu1asXxo0bh7i4uCr7io2NrXLbli1b4ODgYNiBERFVo6SkBFOmTEFBQQGaN29u6nDIwjTqywDDwsIwceJE+Pr6IjMzEy+//DIefPBBpKenQ6lUIjc3F7a2thof/gDg4eGB3NzcatuNjo5GVFSU/LywsBA+Pj4IDQ2t8x+hWq1GSkoKQkJCoFAodB7bOwcv6rwPAMwJ9tdthyMr5B9/zMrXadcKyRr5qoFQ5f+AQb7O1VccFlX9NhhgrPeMwZDUwgopNzvq/R6ag/r+npoDcx5j5ewjkSk06gTg8ccfl3/u3r07+vfvD19fX3z99dcYP358tfsJIWpcCEGpVEKpVGqVKxQKnf+B6LMPAAhJv+t6de5L+t+60FZ6TsVbiXIopBrWl64lpnqPtaa+DUDf99CccIyNk7nFS02LWV0G6OXlBV9fX/z6668AAE9PT5SVleH69esa9fLy8uDh4WGKEImIiMyCWSUA165dw+XLl+Hl5QUA6NevHxQKBVJSUuQ6OTk5OHv2LAYPHmyqMImIiBo9kx4CuHnzJi5e/N/x4czMTGRkZEClUkGlUiE2NhYTJkyAl5cXsrKysGTJEri5ueGxxx4DADg7OyMyMhILFy6Eq6srVCoVXnjhBfTo0UO+KoCoVkdWGP0wA4Kjjds+EZGOTJoApKWlITg4WH5eeWJeeHg41q5dizNnzuDDDz/EjRs34OXlheDgYHz66ado1qyZvM/KlSthY2ODSZMm4datWxgxYgSSk5O5djYREVENTJoABAUFoaarEPft21drG3Z2dli9ejVWr15tyNCIiIiaNLM6B4CIiIgMgwkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIEa9c2AyEwcTKhx86BL1/Rs11W//YiIqFZ6JQDJycmYNGkSHBwcDB0PUdNUS5JUb1xqmIh0pNchgOjoaHh6eiIyMhJHjx41dExERERkZHrNAFy5cgVff/01kpOTERwcDD8/P0ybNg3h4eHw9PQ0dIx0j5Upv+hUX+/pdyIiatL0mgGwtrbG2LFjsW3bNly+fBkzZszAxx9/jDZt2mDs2LH46quvUFFh5LurERERkd7qfRWAu7s7hgwZgoCAAFhZWeHMmTOIiIhA+/btcejQIQOESERERIamdwLw559/4o033kC3bt0QFBSEwsJC7N69G5mZmfjjjz8wfvx4hIeHGzJWIiIiMhC9zgEYM2YM9u3bh44dO2L69Ol4+umnoVKp5O329vZYuHAhVq5cabBAiYiIyHD0SgDc3d2RmpqKgICAaut4eXkhMzNT78CIiIjIePQ6BBAYGIi+fftqlZeVleHDDz8EAEiSBF9f3/pFR0REREahVwIwbdo0FBQUaJUXFRVh2rRp9Q6KiIiIjEuvBEAIAUmStMqvXLkCZ2fnegdFRERExqXTOQB9+vSBJEmQJAkjRoyAjc3/di8vL0dmZiYefvhhgwdJREREhqVTAjBu3DgAQEZGBkaOHAknJyd5m62tLdq2bYsJEyYYNEAiIiIyPJ0SgJiYGABA27Z
t8fjjj8POzs4oQREREZFx6XUZIBf4ISIiMm91TgBUKhV++eUXuLm5oUWLFlWeBFgpPz/fIMERERGRcdQ5AVi5ciWaNWsm/1xTAkBERESNW50TgHun/SMiIowRCxERETWQOicAhYWFdW60efPmegVDREREDaPOCYCLi0ut0/6VCwSVl5fXOzAiIiIynjonAAcPHjRmHERERNSA6pwABAYGGjMOIqqPgwlVlwsrAJ2BIysAqaJ+fQRH129/ImpU6pwAnD59Gt27d4eVlRVOnz5dY92ePXvWOzAiIiIynjonAL1790Zubi7c3d3Ru3dvSJIEIYRWPZ4DQERE1PjVOQHIzMxEy5Yt5Z/Jshz7/ZqpQyAiIgOqcwLg6+tb5c9ERERkfvS6FwAAXLhwAatXr8b58+chSRI6d+6MefPmoVOnToaMj4iIiIzASp+dvvjiC3Tv3h3p6eno1asXevbsiZMnT6J79+74/PPPDR0jERERGZheMwCLFy9GdHQ0li1bplEeExODF198ERMnTjRIcERERGQces0A5Obm4umnn9Yqf/LJJ5Gbm1vvoIiIiMi49EoAgoKCcOTIEa3y77//HsOGDat3UERERGRcdT4EsHPnTvnnsWPH4sUXX0R6ejoGDRoEADh+/Dg+//xzxMXFGT5KIiIiMqg6JwDjxo3TKnv33Xfx7rvvapTNmTMHs2bNqndgREREZDx1TgAqKuq5jjgRERE1GnqdA2Aohw8fxpgxY+Dt7Q1JkrBjxw6N7UIIxMbGwtvbG/b29ggKCsK5c+c06pSWlmLevHlwc3ODo6Mjxo4diytXrjTgKIiIiMyP3gsBFRcXIzU1FZcuXUJZWZnGtvnz59e5jV69emHatGmYMGGC1vbExESsWLECycnJ6NixI+Lj4xESEoILFy6gWbNmAIAFCxZg165d2Lp1K1xdXbFw4UKMHj0a6enpsLa21nd4RERETZpeCcCpU6cwatQolJSUoLi4GCqVClevXoWDgwPc3d3rnACEhYUhLCysym1CCKxatQpLly7F+PHjAQCbNm2Ch4cHtmzZgpkzZ6KgoAAbN27E5s2b8dBDDwEAPvroI/j4+ODAgQMYOXKkPsMjIiJq8vRKAJ5//nmMGTMGa9euhYuLC44fPw6FQoEnn3wSzz33nEECy8zMRG5uLkJDQ+UypVKJwMBAHD16FDNnzkR6ejrUarVGHW9vb3Tv3h1Hjx6tNgEoLS1FaWmp/LywsBAAoFaroVar6xRfZb261r+fJBrmjokVkv6zIJX71qeN+lAL4x6hqmzf2P2YkkHHqOfvurHV92/RlMwxZmo69EoAMjIy8P7778Pa2hrW1tYoLS1Fu3btkJiYiPDwcPkbe31ULijk4eGhUe7h4YHs7Gy5jq2tLVq0aKFVp6YFiRISEqq8XHH//v1wcHDQKc6UlBSd6lfy02sv3V11HVzvNvJVAw0Qie72FDVMPyk3OzZMRyZkkDHu2VP/NoxI379FUyopKTF1CGTB9EoAFAoFJEkCcPfD9tKlS+jSpQucnZ1x6dIlgwZY2U8lIYRW2f1qqxMdHY2oqCj5eWFhIXx8fBAaGormzZvXKS61Wo2UlBSEhIRAoVDUaZ97vXPwos776OOBK8l671shWSNfNRCq/B9g1UAzFoYyoK2q1jpqYYWUmx0R4vQLFFLTvMrFoGMcFlV7HROo79+iKVXOPhKZgl4JQJ8+fZCWloaOHTsiODgYr7zyCq5evYrNmzejR48eBgnM09MTwN1v+V5eXnJ5Xl6ePCvg6emJsrIyXL9+XWMWIC8vD4MHV//NV6lUQqlUapUrFAqd/4Hosw8AiAaaVjfEB7eVKDe7BECXDzuFVNFkE4BKBhljI/9w1fdv0ZTMLV5qWvQ6MLh8+XL5Q/nVV1+Fq6srnn32WeTl5WHdunUGCczPzw+enp4a03plZWVITU2VP9z79esHhUKhUScnJwdnz56tMQEgIiKydHrNAPTv31/+uWXLltij57H
Bmzdv4uLF/02FZ2ZmIiMjAyqVCm3atMGCBQuwfPlydOjQAR06dMDy5cvh4OCAKVOmAACcnZ0RGRmJhQsXwtXVFSqVCi+88AJ69OghXxVARERE2vReBwC4O9V+4cIFSJKETp06oWXLljrtn5aWhuDgYPl55XH58PBwJCcnY/Hixbh16xZmz56N69evY+DAgdi/f7+8BgAArFy5EjY2Npg0aRJu3bqFESNGIDk5mWsAEBER1UCvBKCwsBBz5szB1q1bUV5+99iwtbU1Hn/8cbzzzjtwdnauUztBQUEQQlS7XZIkxMbGIjY2tto6dnZ2WL16NVavXq3TGIiIiCyZXucAPPPMM/jhhx+we/du3LhxAwUFBdi9ezfS0tIwffp0Q8dIREREBqbXDMDXX3+Nffv2YejQoXLZyJEjsX79ejz88MMGC46IiIiMQ68ZAFdX1yqn+Z2dnbUW5SEiIqLGR68E4B//+AeioqKQk5Mjl+Xm5mLRokV4+eWXDRYcERERGUedDwH06dNHY3W9X3/9Fb6+vmjTpg0A4NKlS1Aqlfjrr78wc+ZMw0dKREREBlPnBGDcuHFGDIOIiIgaUp0TgJiYGGPGQURERA2oXgsBpaen4/z585AkCV27dkWfPn0MFRcREREZkV4JQF5eHp544gkcOnQILi4uEEKgoKAAwcHB2Lp1q84rAhIREVHD0usqgHnz5qGwsBDnzp1Dfn4+rl+/jrNnz6KwsBDz5883dIxERERkYHrNAOzduxcHDhxAly5d5LKuXbvinXfeQWhoqMGCIyIiIuPQawagoqKiyvtYKxQKVFQ07fuqExERNQV6JQAPPvggnnvuOfzxxx9y2X//+188//zzGDFihMGCIyIiIuPQKwFYs2YNioqK0LZtW7Rv3x7+/v7w8/NDUVER78pHRERkBvQ6B8DHxwcnT55ESkoKfv75Zwgh0LVrVzz00EOGjo+IiIiMQOcE4M6dO7Czs0NGRgZCQkIQEhJijLiIiIjIiHQ+BGBjYwNfX1+Ul5cbIx4iIiJqAHrfDTA6Ohr5+fmGjoeIiIgagF7nALz99tu4ePEivL294evrC0dHR43tJ0+eNEhwREREZBx6JQDjxo2DJEkQQhg6HiIiImoAOiUAJSUlWLRoEXbs2AG1Wo0RI0Zg9erVcHNzM1Z8REREZAQ6nQMQExOD5ORkPPLII5g8eTIOHDiAZ5991lixERERkZHoNAOwbds2bNy4EU888QQAYOrUqRgyZAjKy8thbW1tlACJiIjI8HSaAbh8+TKGDRsmPx8wYABsbGw0lgQmIiKixk+nBKC8vBy2trYaZTY2Nrhz545BgyIiIiLj0ukQgBACERERUCqVctnt27cxa9YsjUsBt23bZrgIm7hBl9aZOgQiIrJAOiUA4eHhWmVPPvmkwYIhIiKihqFTApCUlGSsOIiIiKgB6bUUMBEREZk3JgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIF0uhkQEVmwgwnGbT842rjtE5EGzgAQERFZICYAREREFqhRJwCxsbGQJEnj4enpKW8XQiA2Nhbe3t6wt7dHUFAQzp07Z8KIiYiIzEOjTgAAoFu3bsjJyZEfZ86ckbclJiZixYoVWLNmDU6cOAFPT0+EhISgqKjIhBETERE1fo3+JEAbGxuNb/2VhBBYtWoVli5divHjxwMANm3aBA8PD2zZsgUzZ86sts3S0lKUlpbKzwsLCwEAarUaarW6TnFV1qtr/ftJohwAUCFZ67V/Q6iMrTHHWB21qD23raxTl7rmyqzGqOffUn3/Fk3JHGOmpkMSQghTB1Gd2NhYvP7663B2doZSqcTAgQOxfPlytGvXDr///jvat2+PkydPok+fPvI+jz76KFxcXLBp06Ya242Li9Mq37JlCxwcHIwyFiKi+5WUlGDKlCkoKChA8+bNTR0
OWZhGnQB88803KCkpQceOHfHnn38iPj4eP//8M86dO4cLFy5gyJAh+O9//wtvb295nxkzZiA7Oxv79u2rtt2qZgB8fHxw9erVOv8RqtVqpKSkICQkBAqFQuexvXPwIgDggSvJOu/bUCoka+SrBkKV/wOs/n/GwlwMaKuqtY5aWCHlZkeEOP0ChVTRAFE1PLMa47AovXar79+iKRUWFsLNzY0JAJlEoz4EEBYWJv/co0cPBAQEoH379ti0aRMGDRoEAJAkSWMfIYRW2f2USiWUSqVWuUKh0PkfiD77AID4/2l1c/hgtRLlZhHnvXT5sFNIFY3/w7GezGKM9fzw1vdv0ZTMLV5qWszgwOD/ODo6okePHvj111/l8wJyc3M16uTl5cHDw8MU4REREZkNs0oASktLcf78eXh5ecHPzw+enp5ISUmRt5eVlSE1NRWDBw82YZRERESNX6M+BPDCCy9gzJgxaNOmDfLy8hAfH4/CwkKEh4dDkiQsWLAAy5cvR4cOHdChQwcsX74cDg4OmDJliqlDJyIiatQadQJw5coVTJ48GVevXkXLli0xaNAgHD9+HL6+vgCAxYsX49atW5g9ezauX7+OgQMHYv/+/WjWrJmJIyciImrcGnUCsHXr1hq3S5KE2NhYxMbGNkxARERETYRZnQNAREREhtGoZwDMwTsHL8qX9FHjcez3a7XWqZCsAVfgx6x8jcscA9q5GjM0IqJGgTMAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFogJABERkQViAkBERGSBmAAQERFZICYAREREFsjG1AEQNTbHfr+m974B7VwNGAkRkfFwBoCIiMgCcQaAiBqHgwn67SesAHQGjqwApIrq6wVH69c+URPFGQAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskA2pg6AiKhBHEwwfh/B0cbvg8hAOANARERkgTgDQGRAx36/ptd+Ae1cDRwJEVHNOANARERkgZgAEBERWaAmkwC8++678PPzg52dHfr164cjR46YOiQiIqJGq0kkAJ9++ikWLFiApUuX4tSpUxg2bBjCwsJw6dIlU4dGRETUKDWJBGDFihWIjIzEM888gy5dumDVqlXw8fHB2rVrTR0aERFRo2T2VwGUlZUhPT0dL730kkZ5aGgojh49WuU+paWlKC0tlZ8XFBQAAPLz86FWq+vUr1qtRklJCUrLCyAkaz2jB4pu39F7X2OrkARKSkpQdPsOrES5qcMxuMY0vms3y4zSrlpYoaSkBNekMiikCqP0YWqNaozXdLsKpKioCAAghDBGNEQ1MvsE4OrVqygvL4eHh4dGuYeHB3Jzc6vcJyEhAXFxcVrlfn5+RomRiCxFrF57FRUVwdnZ2bChENXC7BOASpIkaTwXQmiVVYqOjkZUVJT8vKKiAvn5+XB1da12n/sVFhbCx8cHly9fRvPmzfUPvBFr6mNs6uMDOMbGTgiBoqIieHt7mzoUskBmnwC4ubnB2tpa69t+Xl6e1qxAJaVSCaVSqVHm4uKiV//Nmzc3u386umrqY2zq4wM4xsaM3/zJVMz+JEBbW1v069cPKSkpGuUpKSkYPHiwiaIiIiJq3Mx+BgAAoqKi8NRTT6F///4ICAjAunXrcOnSJcyaNcvUoRERETVKTSIBePzxx3Ht2jUsW7YMOTk56N69O/bs2QNfX1+j9alUKhETE6N1KKEpaepjbOrjAzhGIqqeJHj9CRERkcUx+3MAiIiISHdMAIiIiCwQEwAiIiILxASAiIjIAjEB0ENTuvXw4cOHMWbMGHh7e0OSJOzYsUNjuxACsbGx8Pb2hr2
9PYKCgnDu3DnTBKuHhIQEPPDAA2jWrBnc3d0xbtw4XLhwQaOOuY9x7dq16Nmzp7wQTkBAAL755ht5u7mP734JCQmQJAkLFiyQy5raGIkaAhMAHTW1Ww8XFxejV69eWLNmTZXbExMTsWLFCqxZswYnTpyAp6cnQkJC5JuYNHapqamYM2cOjh8/jpSUFNy5cwehoaEoLi6W65j7GFu3bo3XXnsNaWlpSEtLw4MPPohHH31U/gA09/Hd68SJE1i3bh169uypUd6UxkjUYATpZMCAAWLWrFkaZZ07dxYvvfSSiSIyHABi+/bt8vOKigrh6ekpXnvtNbns9u3bwtnZWbz33nsmiLD+8vLyBACRmpoqhGiaYxRCiBYtWogNGzY0qfEVFRWJDh06iJSUFBEYGCiee+45IUTTfQ+JjI0zADqovPVwaGioRnlNtx42Z5mZmcjNzdUYr1KpRGBgoNmOt/LWzyqVCkDTG2N5eTm2bt2K4uJiBAQENKnxzZkzB4888ggeeughjfKmNEaihtQkVgJsKPrceticVY6pqvFmZ2ebIqR6EUIgKioKQ4cORffu3QE0nTGeOXMGAQEBuH37NpycnLB9+3Z07dpV/gA09/Ft3boVJ0+exIkTJ7S2NZX3kKihMQHQgy63Hm4Kmsp4586di9OnT+P777/X2mbuY+zUqRMyMjJw48YNfPnllwgPD0dqaqq83ZzHd/nyZTz33HPYv38/7Ozsqq1nzmMkMgUeAtCBPrceNmeenp4A0CTGO2/ePOzcuRMHDx5E69at5fKmMkZbW1v4+/ujf//+SEhIQK9evfDWW281ifGlp6cjLy8P/fr1g42NDWxsbJCamoq3334bNjY28jjMeYxEpsAEQAeWduthPz8/eHp6aoy3rKwMqampZjNeIQTmzp2Lbdu24bvvvoOfn5/G9qYwxqoIIVBaWtokxjdixAicOXMGGRkZ8qN///6YOnUqMjIy0K5dO7MfI5FJmO78Q/O0detWoVAoxMaNG8VPP/0kFixYIBwdHUVWVpapQ9NLUVGROHXqlDh16pQAIFasWCFOnTolsrOzhRBCvPbaa8LZ2Vls27ZNnDlzRkyePFl4eXmJwsJCE0deN88++6xwdnYWhw4dEjk5OfKjpKRErmPuY4yOjhaHDx8WmZmZ4vTp02LJkiXCyspK7N+/Xwhh/uOryr1XAQjRNMdIZGxMAPTwzjvvCF9fX2Frayv69u0rX1Jmjg4ePCgAaD3Cw8OFEHcvsYqJiRGenp5CqVSK4cOHizNnzpg2aB1UNTYAIikpSa5j7mP8+9//Lv8+tmzZUowYMUL+8BfC/MdXlfsTgKY4RiJj4+2AiYiILBDPASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAygKysLEiShIyMDFOHQkRUJ0wAqEkRQuChhx7CyJEjtba9++67cHZ2xqVLl0wQGRFR48IEgJoUSZKQlJSEH374Ae+//75cnpmZiRdffBFvvfUW2rRpY8IIiYgaByYA1OT4+PjgrbfewgsvvIDMzEwIIRAZGYkRI0YgIiJCq/7kyZPxxBNPaJSp1Wq4ubkhKSkJALB3714MHToULi4ucHV1xejRo/Hbb79VG0NycjJcXFw0ynbs2AFJkjTKdu3ahX79+sHOzg7t2rVDXFwc7ty5I2+PjY1FmzZtoFQq4e3tjfnz5+v4ahARVc3G1AEQGUN4eDi2b9+OadOmYcKECTh79izOnj1bZd2pU6di0qRJuHnzJpycnAAA+/btQ3FxMSZMmAAAKC4uRlRUFHr06IHi4mK88soreOyxx5CRkQErK/3y6H379uHJJ5/E22+/jWHDhuG3337DjBkzAAAxMTH44osvsHLlSmzduhXdunVDbm4u/vOf/+jVFxGRFtPejJDIeP7880/RsmVLYWVlJbZt21ZtvbKyMuHm5iY+/PBDuWzy5Mli4sSJ1e6Tl5c
nAMi3nM3MzBQAxKlTp4QQQiQlJQlnZ2eNfbZv3y7u/ZMbNmyYWL58uUadzZs3Cy8vLyGEEG+++abo2LGjKCsrq9N4iYh0wUMA1GS5u7tjxowZ6NKlCx577LFq6ykUCkycOBEff/wxgLvf9r/66itMnTpVrvPbb79hypQpaNeuHZo3bw4/Pz8AqNcJhenp6Vi2bBmcnJzkx/Tp05GTk4OSkhJMnDgRt27dQrt27TB9+nRs375d4/AAEVF98BAANWk2Njawsan913zq1KkIDAxEXl4eUlJSYGdnh7CwMHn7mDFj4OPjg/Xr18Pb2xsVFRXo3r07ysrKqmzPysoKQgiNMrVarfG8oqICcXFxGD9+vNb+dnZ28PHxwYULF5CSkoIDBw5g9uzZeP3115GamgqFQlGX4RMRVYsJABGAwYMHw8fHB59++im++eYbTJw4Eba2tgCAa9eu4fz583j//fcxbNgwAMD3339fY3stW7ZEUVERiouL4ejoCABaawT07dsXFy5cgL+/f7Xt2NvbY+zYsRg7dizmzJmDzp0748yZM+jbt289RktExASACMDdywenTJmC9957D7/88gsOHjwob2vRogVcXV2xbt06eHl54dKlS3jppZdqbG/gwIFwcHDAkiVLMG/ePPz4449ITk7WqPPKK69g9OjR8PHxwcSJE2FlZYXTp0/jzJkziI+PR3JyMsrLy+W2Nm/eDHt7e/j6+hrjJSAiC8NzAIj+39SpU/HTTz+hVatWGDJkiFxuZWWFrVu3Ij09Hd27d8fzzz+P119/vca2VCoVPvroI+zZswc9evTAJ598gtjYWI06I0eOxO7du5GSkoIHHngAgwYNwooVK+QPeBcXF6xfvx5DhgxBz5498e2332LXrl1wdXU1+NiJyPJI4v4DlURERNTkcQaAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCwQEwAiIiILxASAiIjIAjEBICIiskBMAIiIiCzQ/wGGQ4CjYVIb5gAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABXk0lEQVR4nO3deXxMV/8H8M9MMjMy2cg+WWSziy20YimxJERRtbZaRFNPn1LlQdVSBC2lpbqg9RShKFWPX7WUhIot0RJJLVEiiUhIkAiTPZOZ8/tjzE1GFjPJbJl836/XvJh779z5nizzyT33nHt5jDEGQgghhJgVvrELIIQQQojuUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwDdQVFQUeDwe97C0tISnpyemTp2Ku3fvctvFxsZy20RFRdW4r4EDB4LH48HHx0dtuY+PD3g8HoKDg2t83c6dO7l9x8bG1llv1Tp4PB4sLCzg6uqKcePG4fr161q0/Pl8fHwwfPhwne6Tx+Phvffee+52qnZW/XpERkaCx+OpbRccHFzr17U2ycnJiIyMxO3bt6utCw8Pr/b9qy9VvaqHUCiEr68vZs2ahcePH+vkPZ6Hx+MhMjKSe676ea+p7XU5cuSI2n6q8vHxQXh4eL1rJITUjAJeR7Zv3474+HjExMRg2rRp+PHHH/HSSy+hqKhIbTtbW1ts3bq12uvT09MRGxsLOzu7Gvdva2uL06dPIzU1tdq6bdu21fq62qxatQrx8fE4efIkPvzwQ8TExKBPnz5qf5Q0ZoGBgYiPj0dgYGCd223atAmbNm3Sat/JyclYvnx5jSG3ZMkSHDx4UKv9Pc/Ro0cRHx+Pw4cPY9SoUfj6668RFhYGY1xl+uWXX0Z8fDwkEolWrzty5AiWL19e47qDBw9iyZIluiiPEFIFBbyOBAQEICgoCAMGDMCyZcswf/58pKen4//+7//UtpswYQLOnj2LlJQUteXbtm2Dh4cH+vTpU+P++/btCw8PD2zbtk1teWpqKk6fPo0JEyZoVW/r1q0RFBSEfv36Yc6cOVi/fj3y8/Nr7V0AgOLiYq3ew5js7OwQFBT03D98OnTogA4dOujsff39/dGtWzed7Q8AunfvjqCgIISEhOCLL77Am2++ifPnzyMuLq7W1+jre+Xs7IygoCCIRCKd7bNbt27w9/fX2f4IIUoU8HoSFBQEAMjIyFBbHhISAi8vL7WgVigU2LFjB6ZMmQI+v+ZvCZ/Px+TJk7Fjxw4oFApu+bZt2+Dl5YXBgwfrtF5V9/ClS5cwduxYtGjRgvsQLi0txcKFC+Hr6wuhUAgPDw/MmDGj1m7jgwcPonPnzmjWrBn8/Pzw1Vdfqa0vLS3F3Llz0bVrV9jb28PBwQG9evXCL7/8Umu93333Hdq0aQORSIQOHTpg7969autr6qKvSU1d9Js3b0aXLl1gY2MDW1tbtGvXDosWLQKg7KIeN24cAGDAgAHVTrvU1EWvUCjw9ddfo2vXrrCyskLz5s0RFBSEQ4cO1VlbbZ79XgUHByMgIACnT59G7969IRaL8dZbbwEApFIp5s2bp/a9mj17drWeJalUimnTpsHR0RE2NjYYOnQobt68We29a+uiP3r0KAYNGgR7e3uIxWK0b98eq1ev5r4mGzduBAC1Uw6qfdTURX/nzh28+eabcHFxgUgkQvv27bFu3Tq1n/3bt2+Dx+Ph888/x/r16+Hr6wsbGxv06tUL58+fV9tfWloaXnvtNbi7u0MkEsHV1RWDBg1CUlKS5l94QhoZS2MXYK5u3boFQHnEUxWfz0d4eDi2bt2Kjz/+GBYWFoiOjkZWVhamTp2KWbNm1brPt956C6
tXr8axY8cQFhYGuVyOHTt2ICIiotY/DBpa7+jRo/Haa6/h3//+N4qKisAYw6hRo3DixAksXLgQL730Ei5fvoxly5YhPj4e8fHxakd3SUlJmD17NiIjI+Hm5obdu3dj1qxZKC8vx7x58wAAZWVlePToEebNmwcPDw+Ul5fj+PHjGD16NLZv347Jkyer1XTo0CGcPHkSK1asgLW1NTZt2oTXX38dlpaWGDt2bIO+Dnv37sX06dMxc+ZMfP755+Dz+bh16xaSk5MBKLuoV61ahUWLFmHjxo3cKYC6jkDDw8Oxa9cuREREYMWKFRAKhbh06ZLW57FVavpeZWdn480338T8+fOxatUq8Pl8FBcXo3///sjKysKiRYvQuXNnXLt2DUuXLsWVK1dw/Phx8Hg87nsaFxeHpUuX4oUXXsC5c+cQFhamUT1bt27FtGnT0L9/f3z77bdwcXHBzZs3cfXqVQDK0xZFRUX4+eefER8fz72utm7+hw8fonfv3igvL8fKlSvh4+OD3377DfPmzUNqamq1UyobN25Eu3btsGHDBu79hg0bhvT0dNjb2wMAhg0bBrlcjrVr16Jly5bIzc1FXFycwcYyEGIUjDTI9u3bGQB2/vx5JpPJWEFBAfvtt9+Ys7Mzs7W1ZTk5OYwxxk6ePMkAsP3797O0tDTG4/HYb7/9xhhjbNy4cSw4OJgxxtjLL7/MvL291d7D29ubvfzyy4wxxvr378/Gjh3LGGPs8OHDjMfjsfT0dLZ//34GgJ08ebLOelV17Nu3j8lkMlZcXMxOnz7NWrVqxSwsLNjff//NGGNs2bJlDABbunSp2uuPHj3KALC1a9eqLd+3bx8DwLZs2aJWN4/HY0lJSWrbhoSEMDs7O1ZUVFRjjRUVFUwmk7GIiAjWrVs3tXUAmJWVFfd1VW3frl071qpVq2rtrPr1ULWpqv79+7P+/ftzz9977z3WvHnzGutSqetrPWXKFLXv3+nTpxkAtnjx4jr3WRNVvTk5OUwmk7H8/Hy2a9cuZmVlxby8vFhJSQnXBgDsxIkTaq9fvXo14/P57MKFC2rLf/75ZwaAHTlyhDHG2O+//84AsC+//FJtu08++YQBYMuWLeOWqX7e09PTGWOMFRQUMDs7O9a3b1+mUChqbcuMGTOqfe1VvL292ZQpU7jnCxYsYADYn3/+qbbdu+++y3g8Hrtx4wZjjLH09HQGgHXq1IlVVFRw2/31118MAPvxxx8ZY4zl5uYyAGzDhg211keIOaIueh0JCgqCQCCAra0thg8fDjc3N/z+++9wdXWttq2vry+Cg4Oxbds25OXl4ZdffuG6VJ/nrbfewqFDh5CXl4etW7diwIAB9Rq1PWHCBAgEAojFYvTr1w9yuRw///wzOnfurLbdmDFj1J7/8ccfAFCtS3XcuHGwtrbGiRMn1JZ37NgRXbp0UVs2ceJESKVSXLp0iVu2f/9+9OnTBzY2NrC0tIRAIMDWrVtrHNk/aNAgta+rhYUFJkyYgFu3biErK0vzL0INXnzxRTx+/Bivv/46fvnlF+Tm5jZof7///jsAYMaMGfXeh5ubGwQCAVq0aIE333wTgYGBOHr0KJo1a8Zt06JFCwwcOFDtdb/99hsCAgLQtWtXVFRUcI8hQ4aonb44efIkAOCNN95Qe/3EiROfW1tcXBykUimmT59ebYZCff3xxx/o0KEDXnzxRbXl4eHhYIxxP4MqL7/8MiwsLLjnqp9h1SkMBwcH+Pv747PPPsP69euRmJio1tVPiLmigNeRnTt34sKFC0hMTMS9e/dw+fLlWgfMAUBERAR+/fVXrF+/HlZWVhp3LY8dOxbNmjXDF198gV9//RURERH1qnfNmjW4cOECLl26hDt37iAtLQ2jRo2qtt2z3ah5eXmwtLSs1pXP4/Hg5uaGvLw8teVubm7V9qlaptr2f//7H8aPHw8PDw/s2rUL8fHxuHDhAt566y2UlpbW+vq69llfkyZNwrZt25CRkYExY8
bAxcUFPXv2RExMTL329/DhQ1hYWNRYs6aOHz+OCxcuICkpCbm5uTh79my1gYE1dXffv38fly9fhkAgUHvY2tqCMcb98aL6njo6Oqq9XpOaHz58CADw9PSsb/OqycvLq7E97u7u3Pqqnq1bdYqopKQEgPJn88SJExgyZAjWrl2LwMBAODs74/3330dBQYHO6ibE1NA5eB1p3749evToofH2o0ePxowZM/Dpp59i2rRpsLKy0uh1YrEYr732GlavXg07OzuMHj26XvX6+flpVO+zR2WOjo6oqKjAw4cP1UKeMYacnBy88MILatvn5ORU26dqmeqDedeuXfD19cW+ffvU3q+srKzGmjTZZ0NMnToVU6dORVFREU6fPo1ly5Zh+PDhuHnzJry9vbXal7OzM+RyOXJycrSeWqbSpUsXODk51blNTUfPTk5OsLKyqjbzoup6oPJ7mpeXp/b1q+nr/CzVz0BDe06qcnR0RHZ2drXl9+7dA4Dnfi1q4u3tzU1PvXnzJn766SdERkaivLwc3377bcMKJsRE0RG8kVhZWWHp0qUYMWIE3n33Xa1e++6772LEiBFYunSpWjetIQwaNAiAMpSrOnDgAIqKirj1KteuXcPff/+ttmzPnj2wtbXlBqipLuJSNaRycnJqHUV/4sQJ3L9/n3sul8uxb98++Pv76/RI0traGmFhYVi8eDHKy8tx7do1ANWPEOuiGqi2efNmndWlqeHDhyM1NRWOjo7o0aNHtYfq1M6AAQMAALt371Z7/Z49e577Hr1794a9vT2+/fbbOufla/M1GzRoEJKTk9VO4QCVF3RS1Vtfbdq0wUcffYROnTpVew9CzAkdwRvRnDlzMGfOHK1f17Vr12rz6w0lJCQEQ4YMwYcffgipVIo+ffpwo+i7deuGSZMmqW3v7u6OkSNHIjIyEhKJBLt27UJMTAzWrFkDsVgMQBlE//vf/zB9+nSMHTsWmZmZWLlyJSQSSbXrBQDKI7iBAwdiyZIl3Cj6f/75p9pUufpQ9ab06dMHEokEOTk5WL16Nezt7bneiYCAAADAli1bYGtri2bNmsHX17fG3oOXXnoJkyZNwscff4z79+9j+PDhEIlESExMhFgsxsyZMxtcc21mz56NAwcOoF+/fvjPf/6Dzp07Q6FQ4M6dO4iOjsbcuXPRs2dPhIaGol+/fpg/fz6KiorQo0cPnDt3Dj/88MNz38PGxgbr1q3D22+/jcGDB2PatGlwdXXFrVu38Pfff+Obb74BAHTq1AmA8tRQWFgYLCws0LlzZwiFwmr7/M9//oOdO3fi5ZdfxooVK+Dt7Y3Dhw9j06ZNePfdd9GmTRutvg6XL1/Ge++9h3HjxqF169YQCoX4448/cPnyZSxYsECrfRHSmFDAE63weDz83//9HyIjI7F9+3Z88skncHJywqRJk7Bq1apqF0Dp2rUrpk6dimXLliElJQXu7u5Yv349/vOf/3DbTJ06FQ8ePMC3336Lbdu2wc/PDwsWLEBWVlaNVz8bOXIkOnbsiI8++gh37tyBv78/du/erfXFfmry0ksvISoqCj/99BPy8/Ph5OSEvn37YufOnVx3tK+vLzZs2IAvv/wSwcHBkMvl2L59e62XW42KikJgYCC2bt2KqKgoWFlZoUOHDtzcen2xtrbGmTNn8Omnn2LLli1IT0+HlZUVWrZsicGDB3NH8Hw+H4cOHcKcOXOwdu1alJeXo0+fPjhy5AjatWv33PeJiIiAu7s71qxZg7fffhuMMfj4+GDKlCncNhMnTsS5c+ewadMmrFixAowxpKen1zhA1NnZGXFxcVi4cCEWLlwIqVQKPz8/rF27tl5/ELu5ucHf3x+bNm1CZmYmeDwe/Pz8sG7dOr3+gUWIsfFYXf1qhBBCCGmU6Bw8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZsjsp8kpFArcu3cPtra2OrtWNiGEPA9jDAUFBXB3d2/w3R4JqQ+zD/
h79+7By8vL2GUQQpqozMxMnV5hkRBNmX3A29raAlD+ktnZ2Wn0GplMhujoaISGhkIgEOizPIOg9pg2c2sPYH5tqk97pFIpvLy8uM8gQgzN7ANe1S1vZ2enVcCLxWLY2dmZzYcTtcd0mVt7APNrU0PaQ6cGibHQiSFCCCHEDFHAE0IIIWaIAp4QQggxQ2Z/Dp4QQkyZXC6HTCYzdhmkkRAIBLCwsNBoWwp4QggxAsYYcnJy8PjxY2OXQhqZ5s2bw83N7bkDOCngCSHECFTh7uLiArFYTKPtyXMxxlBcXIwHDx4AACQSSZ3bU8ATQoiByeVyLtwdHR2NXQ5pRKysrAAADx48gIuLS53d9TTIjhBC6sAYUFys232qzrmLxWLd7pg0Caqfm+eN3aAjeEIIqUFJCZCbCzx6BAiF+nkP6pYn9aHpzw0FPCGEPKVQKAM9NxcoKqpcrq+AJ0SfKOAJIU1eUVHl0bpCYexqCNENOgdPCGmSKiqA+/eBa9eAf/5RBjyFu+kIDg7G7Nmzuec+Pj7YsGGDXt8zNjYWPB4PPB4Po0aN0ut71Ub1/s2bN2/wvijgCSFNilQKpKUBly8DWVlAaamxKyKauHDhAv71r38Z5L1u3LiBqKgorV4THh7OhbPqERQUpLZNWVkZZs6cCScnJ1hbW2PkyJHIyspS2yY7O1tnf8hQFz0hxOyVlwN5ecqj9PJyY1dD6sPZ2dlg7+Xi4lKvI+ihQ4di+/bt3HPhM4M3Zs+ejV9//RV79+6Fo6Mj5s6di+HDhyMhIYGb7ubm5gZ7e/sG1a9CR/CEELPEGJCfD6SkAFeuAPfumW64M8ZQVFRklAdjTOM6g4ODMXPmTMyePRstWrSAq6srtmzZgqKiIkydOhW2trbw9/fH77//rva65ORkDBs2DDY2NnB1dcWkSZOQm5vLrS8qKsLkyZNhY2MDiUSCdevWVXvvZ7vo169fj06dOsHa2hpeXl6YPn06CgsLufVRUVFo3rw5jh07hvbt28PGxgZDhw5Fdna2Ft+Z2ut79hQCAIhEIri5uXEPBwcHbt2TJ0+wdetWrFu3DoMHD0a3bt2wa9cuXLlyBcePH9e6Jk1QwBNCzEppqbLr/fJlZVe8VGrsip6vuLgYNjY2RnkUaznJf8eOHXBycsJff/2FmTNn4t1338W4cePQu3dvXLp0CUOGDMGkSZO4/WZnZ6N///7o2rUrLl68iKNHj+L+/fsYP348t88PPvgAJ0+exMGDBxEdHY3Y2FgkJCTUWQefz8dXX32Fq1evYseOHfjjjz8wf/78al/Xzz//HD/88ANOnz6NO3fuYN68eVq1V5v6YmNj4eLigjZt2mDatGncFecAICEhATKZDKGhodwyd3d3BAQEIC4uTuuaNEFd9ISQRq+26W1E97p06YKPPvoIALBw4UJ8+umncHJywrRp0wAAS5cuxebNm3H58mUEBQVh8+bNCAwMxKpVq7h9bNu2DV5eXrh58ybc3d2xdetW7Ny5EyEhIQCUf0R4enrWWUfVo2dfX1+sXLkS7777LjZt2sQtl8lk+Pbbb+Hv7w8AeO+997BixQqt2ltYWKhRfWFhYRg3bhy8vb2Rnp6OJUuWYODAgUhISIBIJEJOTg6EQiFatGih9jpXV1fk5ORoVZOmKOAJIY2Wanpbfj4glxu7mvoTi8Vq3cuGfm9tdO7cmfu/hYUFHB0d0alTJ26Zq6srAHBHrwkJCTh58iRsbGyq7Ss1NRUlJSUoLy9Hr169uOUODg5o27ZtnXWcPHkSq1atQnJyMqRSKSoqKlBaWoqioiJYW1tzbVOFO6C8dnvVo2pNpKamalTfhAkTuP8HBASgR48e8Pb2xuHDhzF69Oha988Y09sFj4zaRb9582Z07twZdnZ2sLOzQ69evdTO3TDGEBkZCXd3d1hZWSE4OBjXrl0zYsWEEGOrqAAePACSkyuntzXmcAeUU6Osra2N8tA2XAQCQbXaqy5T7U/xdM6hQqHAiB
EjkJSUpPZISUlBv379tBoDoJKRkYFhw4YhICAABw4cQEJCAjZu3AhA/fKtNdWq7fvVpz5A+ceEt7c3UlJSACgHz5WXlyM/P19tuwcPHnB/FOmaUQPe09MTn376KS5evIiLFy9i4MCBeOWVV7gQX7t2LdavX49vvvkGFy5cgJubG0JCQlBQUGDMsgkhRlB1eltmpvJSssT0BQYG4tq1a/Dx8UGrVq3UHtbW1mjVqhUEAgHOnz/PvSY/Px83b96sdZ8XL15ERUUF1q1bh6CgILRp0wb37t3TS/31qQ8A8vLykJmZyd3xrXv37hAIBIiJieG2yc7OxtWrV9G7d2+91G7UgB8xYgSGDRuGNm3aoE2bNvjkk09gY2OD8+fPgzGGDRs2YPHixRg9ejQCAgKwY8cOFBcXY8+ePcYsmxBiIOXlQHY2cPWqcjR8fr5ydDxpPGbMmIFHjx7h9ddfx19//YW0tDRER0fjrbfeglwuh42NDSIiIvDBBx/gxIkTuHr1KsLDw8Hn1x5P/v7+qKiowNdff420tDT88MMP+Pbbb/VSvyb1FRYWYt68eYiPj8ft27cRGxuLESNGwMnJCa+++ioAwN7eHhEREZg7dy5OnDiBxMREvPnmm+jUqRMGDx6sl9pN5hy8XC7H/v37UVRUhF69eiE9PR05OTlqIw5FIhH69++PuLg4vPPOOzXup6ysDGVlZdxz6dMhtDKZ7Ll33lFRbafp9qaO2mPazK09QMPaxJjyaD0vz3RGwMvl2rfHnL6fDeHu7o5z587hww8/xJAhQ1BWVgZvb28MHTqUC8nPPvsMhYWFGDlyJGxtbTF37lw8efKk1n127doV69evx5o1a7Bw4UL069cPq1evxuTJk/XShufVZ2FhgStXrmDnzp14/PgxJBIJBgwYgH379sHW1pbb7osvvoClpSXGjx+PkpISDBo0CFFRUXXe8rUheKy+Jxh05MqVK+jVqxdKS0thY2ODPXv2YNiwYYiLi0OfPn1w9+5duLu7c9v/61//QkZGBo4dO1bj/iIjI7F8+fJqy/fs2UO3ZiSEGExxcTEmTpyIJ0+ewM7OTm1daWkp0tPT4evri2bNmhmpQvKs2NhYDBgwAPn5+c+90E1wcDC6du2ql8vnRkVFYfbs2Xj8+HGN6zX9+TH6EXzbtm2RlJSEx48f48CBA5gyZQpOnTrFrX92AMjzRhwuXLgQc+bM4Z5LpVJ4eXkhNDS02i9ZbWQyGWJiYhASElJtkEZjRO0xbebWHkDzNikUwOPHyqN1U57eZmUlQ2qqdt8jqal0PxCteXp6YsSIEfjxxx8N/t42NjaoqKjQyR9+Rg94oVCIVq1aAQB69OiBCxcu4Msvv8SHH34IAMjJyeEGKQDPH3EoEokgEomqLRcIBFp/eNbnNaaM2mPazK09QO1tKipShvqjR5Uj4Os45Wp0qh5Ubb5H5va9bAp69uzJjXqvaVqfISQlJQGATrrtjR7wz2KMoaysDL6+vnBzc0NMTAy6desGACgvL8epU6ewZs0aI1dJCNGWXF55PXgaAU9MkZWVFXfA+TyxsbF6qUHT99eEUQN+0aJFCAsLg5eXFwoKCrB3717Exsbi6NGj4PF4mD17NlatWoXWrVujdevWWLVqFcRiMSZOnGjMsgkhWigoUIb648d0O1ZCDMmoAX///n1MmjQJ2dnZsLe3R+fOnXH06FHucoDz589HSUkJpk+fjvz8fPTs2RPR0dFqoxIJIaZHNYD8+vXK/xNCDMuoAb9169Y61/N4PERGRiIyMtIwBRFC6o0x4MmTyqN1ACgrM+1z64SYM5M7B08IaVzKypShnpdXebROF6MhxPgo4AkhWlMolFeVy80FjHSPFELIc1DAE0I0VlysDPWq09sIIaaJAp4QUie5vPJe68XFxq7GvG3ZYtj3+9e/tNs+ODiYuxBZYmIiunbtqvuiTJTqAmv29va1XmHO1NDwF0JIjQoKgPR05d3b7tyhcCdK06ZNQ3
Z2NgICAjTaPjY2Fq+88gokEgmsra3RtWtX7N69u9o2PB6v2uOff/5pcL017ZfH4+Gzzz7jtgkODq62/rXXXlPbT3Z2tl4uS6tPdARPCOHIZJUXo6lyzyZCOGKxGG5ubhpvHxcXh86dO+PDDz+Eq6srDh8+jMmTJ8POzg4jRoxQ2/bGjRtqlxR3dnZucL3Z2dlqz3///XdERERgzJgxasunTZuGFStWcM+trKzU1ru5ucHe3r7B9RgSBTwhTZxqeltenvJfGgFPNKW6Octvv/2GRYsW4caNG+jSpQu+//57dOrUCYDygmZVvf/++zh27BgOHjxYLeBdXFyee5OXqoKDg7mehF27dsHCwgLvvvsuVq5cyXWpP/vHyC+//IIBAwbAz89Pbbm2f7g0BtRFT0gTVVYG3L0LXLkCpKYq565TuJP6+OCDD/D555/jwoULcHFxwciRI+u8Xe6TJ0/g4OBQbXm3bt0gkUgwaNAgnDx5UqP33rFjBywtLfHnn3/iq6++whdffIHvv/++xm3v37+Pw4cPIyIiotq63bt3w8nJCR07dsS8efNQUFCg0fubMjqCJ6QJUd29LTdXeY6dEF1YtmwZdwXSHTt2wNPTEwcPHsT48eOrbfvzzz/jwoUL+O6777hlEokEW7ZsQffu3VFWVoYffvgBgwYNQmxsLPr161fne3t5eeGLL74Aj8dD27ZtceXKFXzxxReYNm1atW137NgBW1tbjB49Wm35G2+8wd3/5OrVq1i4cCH+/vtvxMTE1OfLYTIo4AlpAmh6G9GnXr16cf93cHBA27Ztcf369WrbxcbGIjw8HP/973/RsWNHbnnbtm3Rtm1btf1lZmbi888/R79+/XDmzBmEhYVx67/77ju88cYbAICgoCC1W4j36tUL69atg1wur3ZHtm3btuGNN96odivWqn8MBAQEoHXr1ujRowcuXbqEwMBAbb8cJoMCnhAzRdPbiDFVDV0AOHXqFEaMGIH169dj8uTJz319UFAQdu3aBUB5K3HVbVQB1HnL8NqcOXMGN27cwL59+567bWBgIAQCAVJSUijgCSGmo7BQGer5+XT3NmIY58+fR8uWLQEA+fn5uHnzJtq1a8etj42NxfDhw7FmzRr8S8PJ94mJiZBIJADqvo3r+fPnqz1v3bp1taP3rVu3onv37ujSpctz3/vatWuQyWTc+zdWFPCEmAHV9La8PKC01NjVkKZmxYoVcHR0hKurKxYvXgwnJyeMGjUKgDLcX375ZcyaNQtjxoxBTk4OAEAoFHID7TZs2AAfHx907NgR5eXl2LVrFw4cOIADBw48970zMzMxZ84cvPPOO7h06RK+/vprrFu3Tm0bqVSK/fv3V1sOAKmpqdi9ezeGDRsGJycnJCcnY+7cuejWrRv69OnTwK+McVHAE9JIMQZIpcqjdZreZh60vbKcqfj0008xa9YspKSkoEuXLjh06BCEQiEAICoqCsXFxVi9ejVWr17NvaZ///6IjY0FAJSXl2PevHm4e/curKys0LFjRxw+fBjDhg177ntPnjwZJSUlePHFF2FhYYGZM2dW6yXYu3cvGGN4/fXXq71eKBTixIkT+PLLL1FYWAgvLy+8/PLLWLZsWbVegMaGAp6QRqasrPJiNHSvdWIK+vbti6tXr9a4LioqClFRUXW+fv78+Zg/f3693lsgEGDDhg3YvHlzrdv861//qvXUgJeXF3f5XXNDAU9II8BY5d3baHobMaZNmzbh+++/R3x8vLFLMSgbGxtUVFRUG4FvyijgCTFhJSWV09sqKoxdDWnqdu/ejZKSEgBAy5YtERcXZ+SKDEc1ir8xddtTwBNiYlTT2/LygKIiY1dDSCUPDw+158HBwWBGHPyhOodvCLWN4jdlFPCEmIiiIuVV5mh6GyFEFyjgCTGiigrgwQPl/1NSAD7dHYIQoiMU8IQYwZMnldPb6NKxhBB9oIAnxEDKy5Whnpen/D8hhOgTBTwhesRY5d3bpFJjV0
MIaUoo4AnRA5reRggxNgp4QnREoai8extNbyP1kZBg2Pfr3l277YODg7mrviUmJqJr1666L8oIfHx8kJGRAUB5s5zmzZsbtyAdoTG7hDRQURGQkQH8/bfyXwp3Ys6mTZuG7OxsBAQEaLR9aWkpwsPD0alTJ1haWnI3odGFgoICzJ49G97e3rCyskLv3r1x4cIFtW3u37+P8PBwuLu7QywWY+jQoUhJSVHb5sKFCxrd2KaxMWrAr169Gi+88AJsbW3h4uKCUaNG4caNG2rbhIeHg8fjqT2CgoKMVDEhShUVwP37wLVrwD//KI/aae46aQrEYjHc3NxgaalZB7BcLoeVlRXef/99DB48WKe1vP3224iJicEPP/yAK1euIDQ0FIMHD8bdu3cBAIwxjBo1Cmlpafjll1+QmJgIb29vDB48GEVV/hJ3dnbm7mxnTowa8KdOncKMGTNw/vx5xMTEoKKiAqGhoWpfeAAYOnQosrOzuceRI0eMVDFp6qRSIC0NuHwZyMqiW7OSpi02NhY8Hg+HDx9Gly5d0KxZM/Ts2RNXrlzhtrG2tsbmzZsxbdo0uLm5abzv8PBwjBo1CsuXL4eLiwvs7OzwzjvvoPzpFJSSkhIcOHAAa9euRb9+/dCqVStERkbC19eXu/FMSkoKzp8/j82bN+OFF15A27ZtsWnTJhQWFuLHH3/U7RfDBBn1HPzRo0fVnm/fvh0uLi5ISEhAv379uOUikUjjH4yysjKUlZVxz6VPhy7LZDLINLz1lmo7Tbc3ddSehikvV55bf/RIP9PbFAqZ2r/mwNzaJJdr/zNnLr9vmvjggw/w5Zdfws3NDYsWLcLIkSNx8+ZNCASCBu33xIkTaNasGU6ePInbt29j6tSpcHJywieffIKKigrI5fJqN3+xsrLC2bNnAYDLgqrbWFhYQCgU4uzZs3j77bcbVJ+pM6lBdk+ePAGAal0lsbGxcHFxQfPmzdG/f3988skncHFxqXEfq1evxvLly6stj46Ohlgs1qqemJgYrbY3ddQe05aTY17tAcyvTdr8zBUXF+uxEtOybNkyhISEAAB27NgBT09PHDx4EOPHj2/QfoVCIbZt2waxWIyOHTtixYoV+OCDD7By5UrY2tqiV69eWLlyJdq3bw9XV1f8+OOP+PPPP9G6dWsAQLt27eDt7Y2FCxfiu+++g7W1NdavX4+cnBxkZ2c3uN2mzmQCnjGGOXPmoG/fvmqDN8LCwjBu3Dh4e3sjPT0dS5YswcCBA5GQkACRSFRtPwsXLsScOXO451KpFF5eXggNDYWdnZ1GtchkMsTExCAkJKTBf4GaAmqP5lT3Wjfk9DaFQoacnBi4uYWAz2/83x/A/NpkZSVDaqp2P3PSJnThg169enH/d3BwQNu2bXH9+nWNXnvnzh106NCBe75o0SIsWrQIANClSxe1A7NevXqhsLAQmZmZ8Pb2xg8//IC33noLHh4esLCwQGBgICZOnIhLly4BUN4r/sCBA4iIiICDgwMsLCwwePBghIWF6aLZJs9kAv69997D5cuXua4VlQkTJnD/DwgIQI8ePeDt7Y3Dhw9j9OjR1fYjEolqDH6BQKB1GNTnNaaM2lOz2qa3Gfq68Hy+wCzCsCpzaZPqDqHa/MyZ0+9affB4PI22c3d3527FClTvwa1r3/7+/jh16hSKiooglUohkUgwYcIE+Pr6ctt2794dSUlJePLkCcrLy+Hs7IyePXuiR48e2jWoETKJgJ85cyYOHTqE06dPw9PTs85tJRIJvL29q01zIERbRUXKUM/Pp+vBE9IQ58+fR8uWLQEo55HfvHkT7dq10+i1lpaWtd6K9e+//0ZJSQmsrKy497GxsamWE9bW1rC2tkZ+fj6OHTuGtWvXVtuXvb09AOXAu4sXL2LlypUat6+xMmrAM8Ywc+ZMHDx4ELGxsWp/ddUmLy8PmZmZkEgkBqiQmJuKisqj9ZISY1dDiHlYsWIFHB0d4erqisWLF8PJyUltvn
tycjLKy8vx6NEjFBQUcEfsz7tQTnl5OSIiIvDRRx8hIyMDy5Ytw3vvvQf+0+61Y8eOgTGGtm3b4tatW/jggw/Qtm1bTJ06ldvH/v374ezsjJYtW+LKlSuYNWsWRo0ahdDQUF1/GUyOUQN+xowZ2LNnD3755RfY2toiJycHgPIvLSsrKxQWFiIyMhJjxoyBRCLB7du3sWjRIjg5OeHVV181ZumkkZFKlaH++LHy+vCEmCJtryxnKj799FPMmjULKSkp6NKlCw4dOgShUMitHzZsGHelOADo1q0bAOVBXl0GDRqE1q1bo1+/figrK8Nrr72GyMhIbv2TJ0+wcOFCZGVlwcHBAWPGjMEnn3yidnokOzsbc+bMwf379yGRSDB58mQsWbJERy03bUYNeNVcxeDgYLXl27dvR3h4OCwsLHDlyhXs3LkTjx8/hkQiwYABA7Bv3z7Y2toaoWLSmJSXKwfM5ebS3dsI0ae+ffvi6tWrta6/fft2vfe9fPnyGmdGAcD48eOfO1L//fffx/vvv1/v92/MjN5FXxcrKyscO3bMQNUQc8CY+r3WCSG6tWnTJnz//feIj483dik607FjR6SlpRm7DJ0ziUF2hDRUaWnlvdbp7m2E6Mfu3btR8nTwSsuWLREXF2fkinTjyJEj3IWJNJ1O3RhQwJNGS6Go7IIvLDR2NYSYPw8PD7XnwcHBz+2Jra+oqCi97Lcm3t7eBnsvQ6KAJ42O6gJh167RgDlCCKkNBTxpFOTyyqN11cVo5HLDX4yGEF1S0C0IST1o+nNDAU9MWkFB5fQ2+iwk5kIoFILP5+PevXtwdnaGUCjU+MpvpOlijKG8vBwPHz4En89Xm4pYEwp4YnJkssoBc1VuDEiI2eDz+fD19UV2djbu3btn7HJIIyMWi9GyZUvugj+1oYAnJqHq9DaplM6tE9Oij5kZQqEQLVu25G57SogmLCwsYGlpqVGPDwU8Maqyssqj9SZ0+2zSCJSWAllZwN27gK0tUMsdqhuEx+OZ3U2giOmggCcGp1Aob/BC09uIqaka6nl5lcttbIxXEyH1RQFPDKa4WBnqjx7R3duI6agt1Alp7CjgiV6pprfl5VXOXyfE2CjUSVNAAU/0gqa3EVNDoU6aGgp4ojMyWeXFaGh6GzEFFOqkKaOAJw1C09uIqaka6o8e0c8kaboo4Em90PQ2Ykoo1AmpjgKeaEyhUJ5Tz81VnmMnxJhKS5WBnpVFoU5ITRoc8HK5HFeuXIG3tzdatGihi5qIiaHpbcRUqEJddU6dQp2Q2mkd8LNnz0anTp0QEREBuVyO/v37Iy4uDmKxGL/99huCg4P1UCYxNLlcGei5uTS9jRgXhToh9aN1wP/888948803AQC//vor0tPT8c8//2Dnzp1YvHgxzp07p/MiieEUFipDPT+fprcR46FQJ6ThtA743NxcuLm5AQCOHDmCcePGoU2bNoiIiMBXX32l8wKJ/qmmt+XlKT9YCTEGCnVCdEvrgHd1dUVycjIkEgmOHj2KTZs2AQCKi4thYWGh8wKJfjCmnNaWm6uc5kYfpsQYKNQJ0R+tA37q1KkYP348JBIJeDweQkJCAAB//vkn2rVrp/MCiW6VlVVejIamtxFjKC0F7t1Tjn6nUCdEf7QO+MjISAQEBCAzMxPjxo2DSCQCoLxH7YIFC3ReIGk41QforVs0YI4YhyrU795V/nFJoU6I/tVrmtzYsWMBAKVVTthOmTJFNxURnSkpUX6Y5uYqnxcWAny+cWsiTcvt2xTqhBiL1h/3crkcK1euhIeHB2xsbJCWlgYAWLJkCbZu3arzAol25HLg4UPg+nUgORl48ACoqDB2VaSpKC0F0tKAuDjl88uXlT+PFO6EGJ7WAf/JJ58gKioKa9euhVAo5JZ36tQJ33//vVb7Wr16NV544QXY2trCxcUFo0aNwo0bN9S2YYwhMjIS7u7usLKyQnBwMK5du6Zt2WavsFB5tHT5MnDnDnXFE8NRhf
qZM8DvvwNJSZW9RoQQ49E64Hfu3IktW7bgjTfeUBs137lzZ/zzzz9a7evUqVOYMWMGzp8/j5iYGFRUVCA0NBRFRUXcNmvXrsX69evxzTff4MKFC3Bzc0NISAgK6FqpqKgA7t8Hrl0DbtxQDliiuevEEGoKdTpSJ8S0aH0O/u7du2jVqlW15QqFAjIth2UfPXpU7fn27dvh4uKChIQE9OvXD4wxbNiwAYsXL8bo0aMBADt27ICrqyv27NmDd955R9vyzYLq7m00vY0YUlMcKMcYQ25uMm7ePAKJJBPDhg0zdkmEaEzrgO/YsSPOnDkDb29vteX79+9Ht27dGlTMkydPAAAODg4AgPT0dOTk5CA0NJTbRiQScZfHrSngy8rKUFblZuRSqRQAIJPJNP4DRLWdtn+w6FN5ufLSsY8eKf+vDYVCpvZvY0ftMZzSUiAnRxnsVae08XjKR234fJnav41JSUk+0tNPIC0tBqmpMSgoyAIA+Pn54dNPP9V4P6b0+UGaJq0DftmyZZg0aRLu3r0LhUKB//3vf7hx4wZ27tyJ3377rd6FMMYwZ84c9O3bFwEBAQCAnJwcAMqL61Tl6uqKjIyMGvezevVqLF++vNry6OhoiMVirWqKiYnRantTl5ND7TFlptoeoRDw8VE+tBUYaJptqkoulyMlJQWJiYlISkpCSkoKFFXOdQmFQnTo0AGBgYGIjo4Gr66/bKoopoEwxMi0DvgRI0Zg3759WLVqFXg8HpYuXYrAwED8+uuv3EVv6uO9997D5cuXcfbs2Wrrnv2FYozV+ku2cOFCzJkzh3sulUrh5eWF0NBQ2NnZaVSLTCZDTEwMQkJCIBAItGiFbpSWKo+W8vN1MwJeoZAhJycGbm4h4PMN3x5do/boXlkZkJ1d/Ui9vvh8GQIDY3DpUggUCtP7HkmlWUhNjUFaWjTS0/9AaWm+2nonp/bw9w+Fn18IWrZ8Cf7+lnBx0e4zQdV7SIix1Gse/JAhQzBkyBCdFTFz5kwcOnQIp0+fhqenJ7dcdc37nJwcSCQSbvmDBw+qHdWriEQi7uI7VQkEAq3Duj6vqS+FovLubVXGGOp03jqfLzCLQFSh9jRMWVnlFeX0dU5doRCYRMDLZCXIzDyDtLRjSEs7htxc9Zk4zZo1h4/PYPj5DYGf3xDY2XmprWdM2d2uzWeCMQ4OCKmqwfeDbwjGGGbOnImDBw8iNjYWvr6+aut9fX3h5uaGmJgY7vx+eXk5Tp06hTVr1hijZJ0rLFQeMT16RCPgif6pQv3uXfMe9c4YQ17eP08D/Sju3DmFiorKC3PxeHxIJC/Az28o/PyGwN39BfD5Rv04JETntP6J5vP5dZ6DksvlGu9rxowZ2LNnD3755RfY2tpy59zt7e1hZWUFHo+H2bNnY9WqVWjdujVat26NVatWQSwWY+LEidqWbjIqKiqvB093byP61lRCvbT0MdLTjyM9XXmULpVmqq23tfWAn98Q+PoOga/vYFhZORipUkIMQ+uAP3jwoNpzmUyGxMRE7Nixo8bBbXXZvHkzACA4OFht+fbt2xEeHg4AmD9/PkpKSjB9+nTk5+ejZ8+eiI6Ohq2trbalG53q7m2PH5vvhywxDU0h1BUKObKzLyIt7RjS04/h7t3zYKyyG8zCQoSWLftxoe7s3FHjAXKEmAOtA/6VV16ptmzs2LHo2LEj9u3bh4iICI33xTT41OHxeIiMjERkZKQ2ZZqM8nJlqOflaT+9jRBtNIVQLyi4i7S0aKSlHcXt28dRUvJIbb2jY7un59GHomXLfhAItJs5Q4g50dlJp549e2LatGm62l2jxpjyKD03V3nUToi+VA313FzzG8dRUVGKzMyzSEs7irS0Y3j48KraepHIXm1wnL19SyNVSojp0UnAl5SU4Ouvv1YbAd8UlZZWHq3TDV6IvqimtKlGv5tTqCsHx93gut0zMmJRUVFSZQse3N1fgK+vMtA9PHrS4DhCaqH1b0aLFi3Uzm
MxxlBQUACxWIxdu3bptLjGoLbpbYTokjmHemnpE9y+fYIb8S6V3lFbb2MjqTI4LgRisaORKiWkcdE64L/44gu1gOfz+XB2dkbPnj3RokULnRZnyoqKlB+0+fnKW7QSomvmGuoKhRw5OZe4bnfl4LjKXyILCyG8vF7iprA5OwfQ4DhC6kHrgFeNbm+KKioqj9ZLSp6/PSHaMtdQLyzM5i4yk54eg5KSPLX1Dg5tufPoLVv2h1BobaRKCTEfGgX85cuXNd5h586d612MqaLpbUSfVPdGio9Xjn43h1CvqCjD33//jRMnTiM19TgePFD/DBGJ7ODtPYgL9ebNfYxTKCFmTKOA79q1K3g83nOntfF4PK0udGPq7t9XdsHT9Daia+XllZeJffQICAxs3OHOGMOjRylVrhwXC5ms6s1WeJBIulcZHBcECwu6lCsh+qRRwKenp+u7DpOUna3ba8GTpq1qqFftfm+sP2NlZdIqg+OO4cmT22rrW7RoAS+v4fD1DXs6OM7JOIUS0kRpFPDP3vudEKKZ2kK9MWJM8XRwnDLQs7Liqg2O8/TsCz+/IWjVaiCGDctCQsLLJnGzGUKaonpPIE1OTsadO3dQ/kz/9ciRIxtcFCGNmTmFemFhDtLTo5+GejRKSnLV1js4tOa63b29gyEU2gBQ3i6Wx7trjJIJIU9pHfBpaWl49dVXceXKFbXz8qppLOZ0Dp4QTZlLqMvl5cjMPMdNYXvw4G+19UKhLXx8BnJT2Jo3961lT4QQY9M64GfNmgVfX18cP34cfn5++Ouvv5CXl4e5c+fi888/10eNhJgkcwh1xhjy829x3e4ZGSchk6lfscnNrTs32t3DoxcNjiOkkdA64OPj4/HHH3/A2dkZfD4ffD4fffv2xerVq/H+++8jMTFRH3USYhJUoa66oUtjDPWysgJkZPzBjXh//Fh9EK21tSt8fUOfXj0uBNbWLkaqlBDSEFoHvFwuh42N8jybk5MT7t27h7Zt28Lb2xs3btzQeYGEGFtjD3XGFLh/PwlpaceQmnoUd+/GQaGovFkCny+Ap2cf+PkNgb//ULi4dAaP10iH9hNCOFoHfEBAAC5fvgw/Pz/07NkTa9euhVAoxJYtW+Dn56ePGgkxuMYe6oWF97nBcenpMSgufqC2vkULf+48esuWwRCJbI1UqWkTiwEnJ6BVK6CgwNjVEKIdrQP+o48+QtHTu6p8/PHHGD58OF566SU4Ojpi3759Oi+QEENpzKEul5cjKyuOO5d+/776qTKh0Abe3gO5c+ktWvgbqVLTpgp0JyfA2Rmwtq5cTgFPGhutA37IkCHc//38/JCcnIxHjx5Vu8scIY2BTKYM9MYY6vn5qVUGx/2B8vJCtfWurt24QPf07A0LC6GRKjVdVQPdyQl4evaRELOgdcDv2LEDY8eOhbV15c0gHBwcdFoUIfokk1WOfm9MoV5eXoiMjJNITT2K9PRjyM9PVVsvFjvD1zcU/v5D4eMTAhsbVyNVaroo0ElTonXAz5s3D9OnT8eIESPw5ptvYujQobC0rPf1cggxCFWo370LPHjQOEJdOTju7ypXjjsHhULGrefzLbnBcX5+Q+Dq2pUGxz2DAp00ZVonc3Z2No4ePYoff/wRr732GqysrDBu3Di8+eab6N27tz5qJKReGmOoFxU9QHp6zNPBcdEoKrqvtr55cz8u0L29B0AksjNSpabJykp57pwCnZB6BLylpSWGDx+O4cOHo7i4GAcPHsSePXswYMAAeHp6IjU19fk7IURPGluoy+UyZGaewfXrP+DcueXIyVEfHCcQWMPbewA34t3BoZWRKjVNFOiE1K5BfetisRhDhgxBfn4+MjIycP36dV3VRYjGGluo5+enPT1CP4bbt/9Aebn68GxX165PLzKjHBxnaSkyUqWmhwKdEM3VK+BVR+67d+/G8ePH4eXlhddffx379+/XdX2E1Cozs3GEunJwXCwX6o8epaitF4ud0KNHBzRvPg
U+PsNgY+NmpEpNDwU6IfWndcC//vrr+PXXXyEWizFu3DjExsbSuXdiEKoj9Xv3AC8vIDHRNIOdMYYHDy5zl4LNzDxbbXCch0evp+fSh8LdPQAvvHAUFy8Oa/K3VrWyqpyDToFOSMNoHfA8Hg/79u3DkCFDaPQ80buaut/5fGXAm5Kiooe4fTuGu61qUVGO2np7ex/uPLqPz0C1wXE8nuzZ3TUZFOiE6I/WCb1nzx6dvfnp06fx2WefISEhAdnZ2Th48CBGjRrFrQ8PD8eOHTvUXtOzZ0+cP39eZzUQ09MYzqnL5TLcvXue63bPzk4AwLj1AoEY3t4DuHulOzi0pgtBgQKdEEMy6iF4UVERunTpgqlTp2LMmDE1bjN06FBs376dey4U0tW4zJFMBmRnKy8+Y6qh/vjx7SpXjjuBsjKp2noXl87c4Dgvr740OA7KQHdwoEAnxBiMGvBhYWEICwurcxuRSAQ3Nxp0ZI5MPdTLy4tw584ppKUdRVraMTx6dFNtvZWVI3x9Q552vYfCxkZipEpNR7NmyjB3dlY+DwlRnlIhhBiexgGflZUFT09PfdZSo9jYWLi4uKB58+bo378/PvnkE7i41H5/6rKyMpSVlXHPpVLlUZZMJoNMptm5TtV2VQdGNWaqdphCe2Qy4P59ZRf8s6GuaRDw+TK1f3VFOTjuCtLSYpCaGoPMzLOQy8u59TyeBTw9g+DnFwJ//1C4uXUDn29RZQ/1q0df7TGEZs0qR7g7OlbenEWhkCEnxzR+5nRBLle2Q9PPEG23JUQfeIwx9vzNgObNm+Prr7/GpEmT9FMIj1ftHPy+fftgY2MDb29vpKenY8mSJaioqEBCQgJEopq7PyMjI7F8+fJqy/fs2QOxWKyX2knjJZVK8ffffyMxMRFJSUl49OiR2npnZ2d069YN3bp1Q+fOndXuwUBIXYqLizFx4kQ8efIEdnZ0xUFieBoH/KZNm7BgwQKEhIRgy5YtcHR01G0hNQT8s7Kzs+Ht7Y29e/di9OjRNW5T0xG8l5cXcnNzNf4lk8lkiImJgZtbCPj8xj9tSXk0Zdj21HWk3lB8vgyBgTG4dClE62llCkUF7t79C6mp0UhLi8a9e+qD4ywtreDt3R/+/sqjdAeHNnofHNeQ9uib6gjd0VH5r6Z/3xjjZ06frKxkSE2NQUhICAQCzdojlUrh5OREAU+MRuMu+unTpyMsLAwRERHo2LEjtmzZgpEjR+qztmokEgm8vb2RkpJS6zYikajGo3uBQKDxL6YKny8wiw8nFX23R3VO/e5dZbjr+5y6QiHQKBCfPMngBsfdvn0CZWVP1NY7OwdwU9iUg+OacesYUz4MQdP26JPqHLpqpHtDB8WZy++QxdMzMdp8jmj7eUOIrmk1yM7X1xd//PEHvvnmG4wZMwbt27evNhf+0qVLOi2wqry8PGRmZkIiocFMpqKionJKmyFCXRMyWfHTwXHKUM/L+0dtvZWVA3x8Qp5eaCYUtrYeRqrU+FRH6KpQt7U1dkWEEF3RehR9RkYGDhw4AAcHB7zyyisNuthNYWEhbt26xT1PT09HUlISHBwc4ODggMjISIwZMwYSiQS3b9/GokWL4OTkhFdffbXe70karqKicvS7KYQ6YwwPH15Deroy0O/cOQ25vPI0DY/Hh4dHEDcnXSLp8czguKaDAp2QpkOrdP7vf/+LuXPnYvDgwbh69SqcVXNh6unixYsYMGAA93zOnDkAgClTpmDz5s24cuUKdu7cicePH0MikWDAgAHYt28fbOlTyeBUoX73Lp6OjjZuPQUFBbh27Sekpp5Aeno0Cgruqq23s/PiLgXr4zMIzZo1N06hRkaBTkjTpXHADx06FH/99Re++eYbTJ48WSdvHhwcjLrG+B07dkwn70Pqx5RCXaGowL17fz29ctxRZGdfhKJKQZaWzdCyZTB3r3RHx3ZN8spxFOiEEBWNA14ul+Py5ctGmQtPDMeUQl0qzawyOO44Sksfq6
13du4AX1/V4LiXIBBYGadQI6o6D93ZmQKdEFJJ44CPiYnRZx3EiEwl1GWyEmRmnkZq6tGng+Ouq61v1qwFfHwGw99/MF591QK3b082+qhzQ6NAJ4Roim4H10SZQqgzxpCbe527FGxm5mlUVJRy63k8PtzdX+SmsEkkL4DPtwCfL4OT0xHcvm34mg1NJKoMcwp0Qog2KOCbkIoK5ah3Y4Z6SUk+bt8+znW9FxRkqa23tfXkzqP7+AyClZWD4Ys0Igp0QoiuUMCbOdWROo8HHD2qfG5ICoUc2dkXuEC/d+9PMFb5l4WFhQgtW/bnRrw7ObVvUoPjRKLKG7MMHAjQBc8IIbpCAW+Gnu1+B4AePQx3xC6VZiE9PRqpqUefDo7LV1vv6Ngefn5D4O8/FF5e/ZrU4LiajtAVCuXFguhWqoQQXaKANxMVFcowV118Ri6vXKfv23VWVJTizp3T3FF6bu41tfUikf3T26oqu97t7Lz0W5AJqRroTk50hE4IMRwK+EasrlDXJ8YY8vL+4QL9zp1TqKgoqbIF7+ngOGWgu7u/CD6/afyoUaATQkxF0/jUNSPGCvXS0se4ffvE01A/Cqk0U229jY07dx7d13dwkxkcR4FOCDFVFPCNgDFCXTk47uLTK8cdw927f4KxyjdWDo7rBz+/IfD1HQJn545NYnCcUKh+tzUKdEKIqaKAN1HGCPWCgntVrhwXg5KSR2rrHR3bcd3uLVv2h0Ag1n9RRkaBTghprCjgTYihQ72iohSZmWe5bveHD6+qrReJ7OHjM4gLdXt7b/0WZAIo0Akh5oIC3sgMGerKwXE3kJp6FOnpx5CREVttcJxE0oO7cpyHR0+zHxwnFKqfQ7e3N3ZFhBCiG+b96W2iDBnqpaVPcOfOMfz111b8+ecsPHmSobbexkbCnUf39R0MsdhJf8WYAAp0QkhTQQFvIKpQV118Rl+hzpgC2dkJ3Ln0u3fjnxkcJ4SX10tct7uzcyezHhxHgU4Iaaoo4PXIUKFeWJiNtLRopKUdRXp6DEpK8tTWOzi0Rq9ebWBr+y94eg6CUGitn0JMgCrQVaFOgU4Iaaoo4HXMEKFeUVGGzMyzSE9XHqU/eHBZbb1QaAsfn8HcUbqDgwd69DiCixfDzO72qhTohBBSMwp4HdB3qDPG8OhRCjcnPSPjJGSy4ipb8CCRdIev75Cng+OCYGFRNchlui3IiFSj3AEgOBho3tyY1RBCiOmigK8nfYd6WZkUt2//wU1he/Lkttp6a2s3+PmFws9vKHx8BsPa2lm3BZiImo7QVTdnoSlshBBSOwp4Legz1BlTICfnktrgOIWi8t6ufL4AXl59uSlsLi6dzXJwHHW5E0KIblDAP4c+Q72wMAfp6dFPu95jUFz8UG29g0Nrrtvd2zsYQqH53U+UAp0QQvSDAr4OFy8q76uuq1CXy8uRmXmOGxx3/36S2nqh0AY+PoO4UG/Rwk83b2xCKNAJIcQwKODrcO+e8nxvQzx6dIs7j64cHFektt7NLZC7C5uHR69nBsc1flUD3cmJBsURQoihUMDrWFlZATIy/uDOpT9+nKa2Xix24aav+fqGwNraxUiV6gcFOiGEmAYK+AZiTIH795O4QM/KOldtcJynZx8u1F1du4DH4xuxYt2iQCeEENNk1IA/ffo0PvvsMyQkJCA7OxsHDx7EqFGjuPWMMSxfvhxbtmxBfn4+evbsiY0bN6Jjx47GKxpAUdEDpKdHP71pSwyKix+orW/Rwr/K4LgBEIlsjVSp7j0b6Pb2gBkO5ieEkEbPqAFfVFSELl26YOrUqRgzZky19WvXrsX69esRFRWFNm3a4OOPP0ZISAhu3LgBW1vDhaZcXo6srHikpR19OjguUW29QGANH5+B3BS2Fi38DVabvlGgE0JI42TUgA8LC0NYWFiN6xhj2LBhAxYvXozRo0cDAHbs2AFXV1fs2bMH77zzjl5ry87OxsWL3yI19TgyMv5AeXmh2npX125ct7
unZ29YWAj1Wo+hUKATQoh5MNlz8Onp6cjJyUFoaCi3TCQSoX///oiLi6s14MvKylBWVsY9l0qlAACZTAaZ7PmXbM3JycGAAQOQmpqqtlwsdoaf32D4+YXAzy8ENjauz7zSdC8Hy+fL1P6tSigEHByUYe7oqLw6XNVAZ0z5MCUKhUzt38bO3NoDmF+b5HJlOzT5DFHRZltC9MFkAz4nJwcA4OqqHqSurq7IyMio6SUAgNWrV2P58uXVlkdHR0MsFj/3fZXXfX8ECwsLtGvXDt26dUO3bt3g6+sLPl81OC5B84aYkMDAmDrXFxUpH41FTk7d7WlszK09gPm1KSZG8/YUFxc/fyNC9MhkA17l2cuxMsbqvETrwoULMWfOHO65VCqFl5cXQkNDYafhxct//fVX3LlzB//88woUCgHy84H8/PrVb2wCAeDkJIOHRwzE4hDY2wsafZe7QiFDTk4M3NxCwOc3/usGmFt7APNrk5WVDKmpMQgJCYFAoFl7VL2HhBiLyQa8m5sbAOWRvEQi4ZY/ePCg2lF9VSKRCCKRqNpygUCg8S/miy++iNzcXCgUgkZ3e1VloKtfKY4x5UV7mjcXmMWHrQqfT+0xdebSJgsL5b/afI5ouh0h+mKyAe/r6ws3NzfExMSgW7duAIDy8nKcOnUKa9asMXJ1pqOmQH/2CN3UzqETQgjRP6MGfGFhIW7dusU9T09PR1JSEhwcHNCyZUvMnj0bq1atQuvWrdG6dWusWrUKYrEYEydONGLVxqVJoBNCCCFGDfiLFy9iwIAB3HPVufMpU6YgKioK8+fPR0lJCaZPn85d6CY6Otqgc+CNTSBQjm53dqZAJ4QQojmjBnxwcDBYHf3HPB4PkZGRiIyMNFxRRkaBTgghRBdM9hx8U0GBTgghRB8o4A2MAp0Q0yUQACJR9QefD9y8aezqCNEOBbyeVQ101d3WKNAJMQ4er+YAFworg7wmdFE60hhRwOsYBTohxmVhUXOIq4KckKaCAr6BKNAJMbzautJFIsCSPtUIAUABrzVVoKvmoVOgE6J7tXWlq47Ca+tKJ4RUooB/Dgp0QvSDutIJ0S8K+Dr06we0aEGBTkh9CQSAlRV1pRNiDPQrVgc6WiekbrV1pVtYKG9w1LGjMuQJIYZHAU8IqVN9utJpWhkhxkcBTwjh5oFXnRNOXemENG70q0tIE0Cj0glpeijgCTETNCqdEFIVBTwhjciz3efPDmwjhBAVCnhCTEhdXekiEc3qIIRojgKeEAN7titdNaWsQwfA2trY1RFCzAUFPCF6oE1XumpKGZ0nJ4ToEgU8IfVAXemEEFNHAU9ILSwtaz8Sp6NtQoipo4AnTRqNSieEmCsKeGLW+Py6Q5y60gkh5ooCnjR6lpY1d6FTVzohpCmjgCeNQtWjcNW0srZtldPKqCudEEKqo4AnJkGbrnTVtDIrKwp3QgipDQU8MZiautJVD7pnOCGE6BYFPNEpGpVOCCGmwaQDPjIyEsuXL1db5urqipycHCNVRGhUOiGENA4mHfAA0LFjRxw/fpx7bkGHgXpHXemEENL4mXzAW1paws3NTePty8rKUFZWxj2XSqUAAJlMBplqdNZzqLZTKDTb3tSp2lG1PUKh+lSyqtPL6vobSsMvoV6pvj+afj9Nnbm1BzC/NtWnPebSdtJ48RhjzNhF1CYyMhKfffYZ7O3tIRKJ0LNnT6xatQp+fn51vubZbn0A2LNnD8RisT7LJYQQTnFxMSZOnIgnT57Azs7O2OWQJsikA/73339HcXEx2rRpg/v37+Pjjz/GP//8g2vXrsHR0bHG19R0BO/l5YXc3FyNf8lkMhliYmLg5hYCPt90+6SfvVZ61SPyql3pqvaEhIRAYAZ97NQe02dubapPe6RSKZycnCjgidGYdBd9WFgY9/9OnTqhV69e8Pf3x44dOzBnzpwaXyMSiSASiaotFwgEWn/Q8PkCowY8j6cMal2NSq/P18CUUXtMn7m1SZv2mF
O7SeNk0gH/LGtra3Tq1AkpKSnGLkVn+PzaA1wopFHphBBC6qdRBXxZWRmuX7+Ol156ydilaIVGpRNCCDE0kw74efPmYcSIEWjZsiUePHiAjz/+GFKpFFOmTDF2aWp03ZVOCCGENJRJB3xWVhZef/115ObmwtnZGUFBQTh//jy8vb0NXgt1pRNCCGlMTDrg9+7da9T3b9lSebcy6konhBDS2Jh0wBubgwMFOyGEkMaJb+wCCCGEEKJ7FPCEEEKIGaKAJ4QQQswQBTwhhBBihijgCSGEEDNEAU8IIYSYIQp4QgghxAxRwBNCCCFmyOwvdKO63b1UKtX4NTKZDMXFxZBKpWZxy0dqj2kzt/YA5tem+rRH9Zmj+gwixNDMPuALCgoAAF5eXkauhBDSFBUUFMDe3t7YZZAmiMfM/M9LhUKBe/fuwdbWFjwN7wgjlUrh5eWFzMxM2NnZ6blC/aP2mDZzaw9gfm2qT3sYYygoKIC7uzv4fDobSgzP7I/g+Xw+PD096/VaOzs7s/hwUqH2mDZzaw9gfm3Stj105E6Mif6sJIQQQswQBTwhhBBihijgayASibBs2TKIRCJjl6IT1B7TZm7tAcyvTebWHtI0mP0gO0IIIaQpoiN4QgghxAxRwBNCCCFmiAKeEEIIMUMU8IQQQogZarIBv2nTJvj6+qJZs2bo3r07zpw5U+f2p06dQvfu3dGsWTP4+fnh22+/NVClmtGmPf/73/8QEhICZ2dn2NnZoVevXjh27JgBq30+bb8/KufOnYOlpSW6du2q3wK1pG17ysrKsHjxYnh7e0MkEsHf3x/btm0zULXPp217du/ejS5dukAsFkMikWDq1KnIy8szULV1O336NEaMGAF3d3fweDz83//933NfY+qfB4QAAFgTtHfvXiYQCNh///tflpyczGbNmsWsra1ZRkZGjdunpaUxsVjMZs2axZKTk9l///tfJhAI2M8//2zgymumbXtmzZrF1qxZw/766y928+ZNtnDhQiYQCNilS5cMXHnNtG2PyuPHj5mfnx8LDQ1lXbp0MUyxGqhPe0aOHMl69uzJYmJiWHp6Ovvzzz/ZuXPnDFh17bRtz5kzZxifz2dffvklS0tLY2fOnGEdO3Zko0aNMnDlNTty5AhbvHgxO3DgAAPADh48WOf2pv55QIhKkwz4F198kf373/9WW9auXTu2YMGCGrefP38+a9eundqyd955hwUFBemtRm1o256adOjQgS1fvlzXpdVLfdszYcIE9tFHH7Fly5aZVMBr257ff/+d2dvbs7y8PEOUpzVt2/PZZ58xPz8/tWVfffUV8/T01FuN9aVJwJv65wEhKk2ui768vBwJCQkIDQ1VWx4aGoq4uLgaXxMfH19t+yFDhuDixYuQyWR6q1UT9WnPsxQKBQoKCuDg4KCPErVS3/Zs374dqampWLZsmb5L1Ep92nPo0CH06NEDa9euhYeHB9q0aYN58+ahpKTEECXXqT7t6d27N7KysnDkyBEwxnD//n38/PPPePnllw1Rss6Z8ucBIVWZ/c1mnpWbmwu5XA5XV1e15a6ursjJyanxNTk5OTVuX1FRgdzcXEgkEr3V+zz1ac+z1q1bh6KiIowfP14fJWqlPu1JSUnBggULcObMGVhamtaPdH3ak5aWhrNnz6JZs2Y4ePAgcnNzMX36dDx69Mjo5+Hr057evXtj9+7dmDBhAkpLS1FRUYGRI0fi66+/NkTJOmfKnweEVNXkjuBVnr11LGOsztvJ1rR9TcuNRdv2qPz444+IjIzEvn374OLioq/ytKZpe+RyOSZOnIjly5ejTZs2hipPa9p8fxQKBXg8Hnbv3o0XX3wRw4YNw/r16xEVFWUSR/GAdu1JTk7G+++/j6VLlyIhIQFHjx5Feno6/v3vfxuiVL0w9c8DQoAmeATv5OQECwuLakcbDx48qPZXuYqbm1uN21taWsLR0VFvtWqiPu1R2bdvHyIiIrB//34MHjxYn2VqTN
v2FBQU4OLFi0hMTMR7770HQBmQjDFYWloiOjoaAwcONEjtNanP90cikcDDw0PtVqPt27cHYwxZWVlo3bq1XmuuS33as3r1avTp0wcffPABAKBz586wtrbGSy+9hI8//rjRHfGa8ucBIVU1uSN4oVCI7t27IyYmRm15TEwMevfuXeNrevXqVW376Oho9OjRAwKBQG+1aqI+7QGUR+7h4eHYs2ePSZ0L1bY9dnZ2uHLlCpKSkrjHv//9b7Rt2xZJSUno2bOnoUqvUX2+P3369MG9e/dQWFjILbt58yb4fD48PT31Wu/z1Kc9xcXF4PPVP2osLCwAVB75Niam/HlAiBojDe4zKtU0n61bt7Lk5GQ2e/ZsZm1tzW7fvs0YY2zBggVs0qRJ3PaqaTH/+c9/WHJyMtu6datJTYvRtj179uxhlpaWbOPGjSw7O5t7PH782FhNUKNte55laqPotW1PQUEB8/T0ZGPHjmXXrl1jp06dYq1bt2Zvv/22sZqgRtv2bN++nVlaWrJNmzax1NRUdvbsWdajRw/24osvGqsJagoKClhiYiJLTExkANj69etZYmIiN+2vsX0eEKLSJAOeMcY2btzIvL29mVAoZIGBgezUqVPcuilTprD+/furbR8bG8u6devGhEIh8/HxYZs3bzZwxXXTpj39+/dnAKo9pkyZYvjCa6Ht96cqUwt4xrRvz/Xr19ngwYOZlZUV8/T0ZHPmzGHFxcUGrrp22rbnq6++Yh06dGBWVlZMIpGwN954g2VlZRm46pqdPHmyzt+Hxvh5QAhjjNHtYgkhhBAz1OTOwRNCCCFNAQU8IYQQYoYo4AkhhBAzRAFPCCGEmCEKeEIIIcQMUcATQgghZogCnhBCCDFDFPCEEEKIGaKAJ6QGt2/fBo/HQ1JSkrFLIYSQeqGAJ41WeHg4Ro0aVW15bGwseDweHj9+XO99e3l5ITs7GwEBAfUvkBBCjKjJ3S6WkOcpLy+HUCiEm5ubsUshhJB6oyN4YvYOHDiAjh07QiQSwcfHB+vWrVNb7+Pjg48//hjh4eGwt7fHtGnTqnXRh4eHg8fjVXvExsYCAPLz8zF58mS0aNECYrEYYWFhSElJ4d4jKioKzZs3x7Fjx9C+fXvY2Nhg6NChyM7ONtSXgRDSxFDAE7OWkJCA8ePH47XXXsOVK1cQGRmJJUuWICoqSm27zz77DAEBAUhISMCSJUuq7efLL79EdnY295g1axZcXFzQrl07AMo/AC5evIhDhw4hPj4ejDEMGzYMMpmM20dxcTE+//xz/PDDDzh9+jTu3LmDefPm6bX9hJAmzMh3syOk3qZMmcIsLCyYtbW12qNZs2YMAMvPz2cTJ05kISEhaq/74IMPWIcOHbjn3t7ebNSoUWrbpKenMwAsMTGx2vseOHCAiUQidubMGcYYYzdv3mQA2Llz57htcnNzmZWVFfvpp58YY8p7ogNgt27d4rbZuHEjc3V1bfDXgRBCakJH8KRRGzBgAJKSktQe33//Pbf++vXr6NOnj9pr+vTpg5SUFMjlcm5Zjx49NHq/xMRETJ48GRs3bkTfvn2597C0tETPnj257RwdHdG2bVtcv36dWyYWi+Hv7889l0gkePDggXYNJoQQDdEgO9KoWVtbo1WrVmrLsrKyuP8zxsDj8dTWM8Zq3M/z5OTkYOTIkYiIiEBERESd+6vpvQUCgdp6Ho9X62sJIaSh6AiemLUOHTrg7Nmzasvi4uLQpk0bWFhYaLyf0tJSvPLKK2jXrh3Wr19f7T0qKirw559/csvy8vJw8+ZNtG/fvmENIISQeqIjeGLW5s6dixdeeAErV67EhAkTEB8fj2+++QabNm3Saj/vvPMOMjMzceLECTx8+JBb7uDggNatW+OVV17BtGnT8N1338HW1hYLFiyAh4cHXnnlFV03iRBCNEJH8MSsBQYG4qeffsLevXsREBCApUuXYsWKFQgPD9dqP6dOnUJ2djY6dOgAiUTCPeLi4gAA27
dvR/fu3TF8+HD06tULjDEcOXKkWrc8IYQYCo/RSUBCCCHE7NARPCGEEGKGKOAJIYQQM0QBTwghhJghCnhCCCHEDFHAE0IIIWaIAp4QQggxQxTwhBBCiBmigCeEEELMEAU8IYQQYoYo4AkhhBAzRAFPCCGEmKH/B4YHxYxK7zDVAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| hide\n", - "# Create single mixture and broadcast to N, H, K\n", - "weights = torch.ones((2,3))[None, :, :]\n", + "# Create single mixture and broadcast to N,H,K\n", + "weights = torch.ones((1,3))[None, :, :]\n", "lambdas = torch.Tensor([[5,10,15], [10,20,30]])[None, :, :]\n", "\n", "# Create repetitions for the batch dimension N.\n", @@ -3393,7 +1987,7 @@ "print('lambdas.shape (N,H,K) \\t', lambdas.shape)\n", "\n", "distr = PMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (lambdas, weights)\n", + "distr_args = (lambdas,)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -3498,44 +2092,38 @@ " if self.return_params:\n", " mu_names = [f\"-mu-{i}\" for i in range(1, n_components + 1)]\n", " std_names = [f\"-std-{i}\" for i in range(1, n_components + 1)]\n", - " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", - " self.param_names = [i for j in zip(mu_names, std_names, weight_names) for i in j]\n", - " self.output_names = self.output_names + self.param_names\n", + " mu_std_names = [i for j in zip(mu_names, std_names) for i in j]\n", + " self.output_names = self.output_names + mu_std_names\n", "\n", " # Add first output entry for the sample_mean\n", " self.output_names.insert(0, \"\")\n", "\n", - " self.outputsize_multiplier = 3 * n_components\n", + " self.outputsize_multiplier = 2 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " means, stds, weights = output.chunk(3, dim=-1)\n", - "\n", - " return (means, stds, weights)\n", + " means, stds = torch.tensor_split(output, 2, dim=-1)\n", + " return (means, stds)\n", "\n", - " def scale_decouple(\n", - " self,\n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = None,\n", - " eps: 
float = 0.2,\n", - " ):\n", - " \"\"\"Scale Decouple\n", + " def scale_decouple(self, \n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float=0.2):\n", + " \"\"\" Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", - " means, stds, weights = output\n", + " means, stds = output\n", " stds = F.softplus(stds)\n", - " weights = F.softmax(weights, dim=-1)\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(means.size(dim=0), 1, -1)\n", - " scale = scale.view(means.size(dim=0), 1, -1)\n", + " scale = scale.view(means.size(dim=0), 1, -1) \n", " means = (means * scale) + loc\n", " stds = (stds + eps) * scale\n", - "\n", - " return (means, stds, weights)\n", + " return (means, stds)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -3557,11 +2145,17 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " means, stds, weights = distr_args\n", + " means, stds = distr_args\n", " B, H, K = means.size()\n", " Q = len(self.quantiles)\n", " assert means.shape == stds.shape\n", "\n", + " # Sample K ~ Mult(weights)\n", + " # shared across B, H\n", + " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", + " \n", + " weights = (1/K) * torch.ones_like(means, device=means.device)\n", + " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", " means = means.flatten()\n", @@ -3601,15 +2195,17 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " means, stds, weights 
= distr_args\n", + " means, stds = distr_args\n", " B, H, K = means.size()\n", - " \n", + " \n", + " weights = (1/K) * torch.ones_like(means, device=means.device)\n", + " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", " \n", @@ -3632,7 +2228,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -3682,17 +2278,7 @@ "execution_count": null, "id": "8ebe4250", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['', '-lo-98.0', '-lo-80.0', '-median', '-hi-80.0', '-hi-98.0']\n", - "Parameter containing:\n", - "tensor([0.0100, 0.1000, 0.5000, 0.9000, 0.9900])\n" - ] - } - ], + "outputs": [], "source": [ "# | hide\n", "# Unit tests to check PMM's stored quantiles\n", @@ -3716,40 +2302,7 @@ "execution_count": null, "id": "684d2382", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "means.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "stds.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "samples.shape (N,H,num_samples) torch.Size([2, 2, 1000])\n", - "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", - "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfcAAAEyCAYAAADnUJkgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABDOklEQVR4nO3de1wU5f4H8M8gy3KHALkpIApoZt4yFVHBDEzTND12CjU085IXMjrpUTu5GAeMCq0sSy3g1CHL0jIzlUrJfqiB5rHMMBPQVCJvgKCwwvP7w5hclsvusrDL8nm/XvuSeWbmme9+WffLzDwzIwkhBIiIiMhiWJk6ACIiIjIuFnciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgW9xZ08OBBPPjgg/D394dSqYSXlxdCQ0Px9NNPaywXERGBiIiIFo9HkiSoVCqj9delSxeMHTvWaP01Zu/evZAkCXv37m2V7ekrIiICkiRBkiRYWVnByckJQUFBmDx5Mj766CPU1NRordOlSxdMnz5dr+1kZ2dDpVLhypUreq1Xd1u1+fzoo4/06qcxFRUVUKlU9f6O0tLSIEkSCgoKjLY9ImqYtakDsFSff/45HnjgAURERCA5ORk+Pj44f/48cnNzsWnTJrz88svysm+88YYJI20b+vfvj/3796Nnz56mDqVBXbt2xX//+18AQHl5OfLz8/HJJ59g8uTJGDZsGD777DO4uLjIy2/duhXOzs56bSM7Oxvx8fGYPn06XF1ddV7PkG3pq6KiAvHx8QCg9cfq/fffj/3798PHx6dFYyCim1jcW0hycjICAwOxa9cuWFv/leaHH34YycnJGsuac8EyNbVaDUmS4OzsjMGDB5s6nEbZ2dlpxfj4448jNTUVjz32GGbPno0PPvhAntevX78Wj+natWuws7NrlW01pmPHjujYsaNJYyBqT3hYvoVcvHgRHh4eGoW9lpWVZtrrHpYvKCiAJEl46aWXkJKSgsDAQDg6OiI0NBQHDhzQ6m/Dhg0ICQmBUqlEz549kZGRgenTp6NLly5NxllUVIQ5c+agc+fOsLGxQWBgIOLj43Hjxg2d3+vOnTvRv39/2NnZoUePHnjnnXe0lvnxxx8xfvx43HbbbbC1tUXfvn2Rnp6usUztoeJ3330XTz/9NDp16gSlUomTJ09qHZavzVFDr1u988476NOnD2xtbeHm5oYHH3wQx48f11hm+vTpcHR0xMmTJzFmzBg4OjrCz88PTz/9NCorK3XORX1mzJiBMWPGYPPmzSgsLJTb6x4qr6mpQUJCArp37w47Ozu4urqid+/eeOWVVwAAKpUKzzzzDAAgMDBQfq+1Oak9TbJlyxb069cPtra28p50Q6cArl+/jri4OHh7e8POzg7h4eH4/vvvNZZp6LTRrZ+xgoICuXjHx8fLsdVus6HD8sb+3axbtw59+vSBo6MjnJyc0KNHDyxbtkwrdiJLxz33FhIaGoqNGzciNjYWU6ZMQf/+/aFQKPTq4/XXX0ePHj2wZs0aAMC//vUvjBkzBvn5+fLh3fXr12POnDmYNGkSVq9ejZKSEsTHx+tUkIqKijBw4EBYWVnhueeeQ7du3bB//34kJCSgoKAAqampTfbxv//9D08//TT++c9/wsvLCxs3bsTMmTMRFBSE4cOHAwDy8vIwZMgQeHp64tVXX4W7uzvee+89TJ8+Hb///jsWL16s0efSpUsRGhqKN998E1ZWVvD09ERRUZHGMj4+Pti/f79G2x9//IGpU6eiU6dOcltSUhKWLVuGRx55BElJSbh48SJUKhVCQ0ORk5OD4OBgeVm1Wo0HHngAM2fOxNNPP41vvvkGzz//PFxcXPDcc881mYvGPPDAA9ixYwf27duHgICAepdJTk6GSqXCs88+i+HDh0OtVuPnn3+Wz68//vjjuHTpEl577TVs2bJFPsR965Gfw4cP4/jx43j22Wc
RGBgIBweHRuNatmwZ+vfvj40bN6KkpAQqlQoRERH4/vvv0bVrV53fn4+PD3bu3In77rsPM2fOxOOPPw4Aje6tG/t3s2nTJsybNw8LFy7ESy+9BCsrK5w8eRI//fSTzu+DyGIIahEXLlwQQ4cOFQAEAKFQKMSQIUNEUlKSKCsr01g2PDxchIeHy9P5+fkCgLjzzjvFjRs35PbvvvtOABDvv/++EEKI6upq4e3tLQYNGqTRX2FhoVAoFCIgIECjHYBYsWKFPD1nzhzh6OgoCgsLNZZ76aWXBABx7NixRt9jQECAsLW11Vj/2rVrws3NTcyZM0due/jhh4VSqRSnT5/WWH/06NHC3t5eXLlyRQghxJ49ewQAMXz4cK1t1c7bs2dPvbGUl5eLgQMHCh8fH1FQUCCEEOLy5cvCzs5OjBkzRmPZ06dPC6VSKaKjo+W2mJgYAUB8+OGHGsuOGTNGdO/evdE8CHHzd3jHHXc0OP+LL74QAMQLL7wgtwUEBIiYmBh5euzYsaJv376NbufFF18UAER+fr7WvICAANGhQweRl5dX77xbt1Wbz/79+4uamhq5vaCgQCgUCvH4449rvLdbP5+1YmJiND5jf/zxh9ZnrFZqaqpG3C3xu1mwYIFwdXXV2jZRe8TD8i3E3d0d+/btQ05ODlatWoXx48fjxIkTWLp0Ke68805cuHChyT7uv/9+dOjQQZ7u3bs3AMiHdvPy8lBUVISHHnpIYz1/f3+EhYU12f/27dsxYsQI+Pr64saNG/Jr9OjRAICsrKwm++jbty/8/f3laVtbW4SEhGgcfv76668xcuRI+Pn5aaw7ffp0VFRUaO2BT5o0qcnt3qq6uhp///vfcfz4cezYsUPeM96/fz+uXbumdTjaz88P99xzD7766iuNdkmSMG7cOI223r17a7wXQwkhmlxm4MCB+N///od58+Zh165dKC0t1Xs7vXv3RkhIiM7LR0dHa5zGCAgIwJAhQ7Bnzx69t62PlvjdDBw4EFeuXMEjjzyCTz/9VKf/Y0SWisW9hQ0YMABLlizB5s2bce7cOTz11FMoKCjQGlRXH3d3d41ppVIJ4OYgKeDmeX0A8PLy0lq3vra6fv/9d3z22WdQKBQarzvuuAMAdPpyrBtjbZy1MdbGWd8oaV9fX433UUvfEdVz587Fzp078dFHH6Fv374a222oP19fX63t2tvbw9bWVuu9XL9+Xa946lNbhGrfc32WLl2Kl156CQcOHMDo0aPh7u6OkSNHIjc3V+ft6Js7b2/vetvq5sbYWuJ3M23aNLzzzjsoLCzEpEmT4OnpiUGDBiEzM7MF3gGReWNxb0UKhQIrVqwAcHOAWXPVFtbff/9da17dc9T18fDwQFRUFHJycup9zZw5s9kx1sZ5/vx5rfZz587Jcdyq7oC4xqhUKmzcuBEbNmxAVFSU1nYBNLjtutttSdu2bYMkSfI4hPpYW1sjLi4Ohw8fxqVLl/D+++/jzJkzGDVqFCoqKnTajj65A+r/nBQVFWn80WZra1vvGI7m7Bm31O9mxowZyM7ORklJCT7//HMIITB27FijHH0haktY3FtIfV9aAOSRwI3twemqe/fu8Pb2xocffqjRfvr0aWRnZze5/tixY/Hjjz+iW7duGDBggNbLGDECwMiRI/H111/LxbzWf/7zH9jb2xt8idvbb7+N+Ph4rFy5st6R4KGhobCzs8N7772n0f7bb7/JpwpaQ2pqKr744gs88sgjGqcwGuPq6oq//e1vmD9/Pi5duiSPMq979Ka53n//fY1TBoWFhcjOztYYHd+lSxecOHFCo8BfvHhR6zOmT2wt/btxcHDA6NGjsXz5clRVVeHYsWPN6o+oreFo+RYyatQodO7cGePGjUOPHj1QU1ODI0eO4OWXX4ajoyOefPLJZm/DysoK8fHxmDNnDv72t7/hsccew5UrVxAfHw8fHx+tS+7qWrlyJTIzMzFkyBDExsaie/fuuH79OgoKCrBjxw6
8+eab6Ny5c7PjXLFihXx+/7nnnoObmxv++9//4vPPP0dycrLGjV10tX//fsydOxdhYWGIjIzUukRw8ODBcHV1xb/+9S8sW7YMjz76KB555BFcvHgR8fHxsLW1lY+iGMu1a9fkOK5du4ZTp07hk08+wfbt2xEeHo4333yz0fXHjRuHXr16YcCAAejYsSMKCwuxZs0aBAQEyCPH77zzTgDAK6+8gpiYGCgUCnTv3h1OTk4GxVxcXIwHH3wQs2bNQklJCVasWAFbW1ssXbpUXmbatGl46623MHXqVMyaNQsXL15EcnKy1k1xnJycEBAQgE8//RQjR46Em5sbPDw86r0ksyV+N7NmzYKdnR3CwsLg4+ODoqIiJCUlwcXFBXfffbfe/RG1aSYe0GexPvjgAxEdHS2Cg4OFo6OjUCgUwt/fX0ybNk389NNPGss2NFr+xRdf1OoX9YxGXr9+vQgKChI2NjYiJCREvPPOO2L8+PGiX79+Ta77xx9/iNjYWBEYGCgUCoVwc3MTd911l1i+fLm4evVqo+8xICBA3H///Vrt9Y2u/uGHH8S4ceOEi4uLsLGxEX369BGpqakay9SO4N68ebNWn3VHy9eOvm7odauNGzeK3r17CxsbG+Hi4iLGjx+vdSVATEyMcHBw0NruihUrtPqrT3h4uMb2HRwcRNeuXcXf/vY3sXnzZlFdXa21Tt0R7C+//LIYMmSI8PDwEDY2NsLf31/MnDlTHv1fa+nSpcLX11dYWVlp5KSh30d926rN57vvvitiY2NFx44dhVKpFMOGDRO5ubla66enp4vbb79d2Nraip49e4oPPvhAa7S8EEJ8+eWXol+/fkKpVAoA8jbrjpavZczfTXp6uhgxYoTw8vISNjY2wtfXVzz00EPi6NGj9eaEyJJJQugwjJfalCtXriAkJAQTJkzA+vXrTR0OERG1Mh6Wb+OKiorw73//GyNGjIC7uzsKCwuxevVqlJWVGeXQPxERtT0s7m2cUqlEQUEB5s2bh0uXLskD1N588035kjYiImpfeFieiIjIwvBSOCIiIgvD4k5ERGRhLP6ce01NDc6dOwcnJye9795FRGQoIQTKysrg6+vb5D0niIzN4ov7uXPntB5YQkTUWs6cOWOUm0ER6cPii3vtnbvOnDmjdUctapparcbu3bsRFRWl9/PoqWHMa8swp7yWlpbCz8/P4LsHEjWHxRf32kPxzs7OLO4GUKvVsLe3h7Ozs8m/LC0J89oyzDGvPB1IpsATQURERBaGxZ2IiMjCsLgTERFZGIs/505EZM6qq6uhVqtNHQaZOYVCgQ4dOui8PIs7EZEJCCFQVFSEK1eumDoUaiNcXV3h7e2t0yBNFnciIhOoLeyenp6wt7fnqHpqkBACFRUVKC4uBgD4+Pg0uQ6LOxFRK6uurpYLu7u7u6nDoTbAzs4OAFBcXAxPT88mD9FzQB0RUSurPcdub29v4kioLan9vOgyRoN77tSwPUmAsALQA9iXAtyzxNQRtW17kv76uTav1K7xUDzpQ5/PC/fciYiILAyLOxERkYVhcSciIrIwPOdORGQmVmeeaNXtPRUZ0qrb00VERAT69u2LNWvWmDqUNo177kREpLPp06djwoQJWu179+6FJEnNvinPli1b8Pzzzzerj7bgm2++wbhx4+Dr6wtJkvDJJ58YtX8WdyIiMrmqqioAgJubG5ycnEwcjeEiIiKQlpbW5HLl5eXo06cP1q5d2yJxsLgTEZHRVVZWIjY2Fp6enrC1tcXQoUORk5Mjz4+IiMCCBQsQFxcHDw8PREZGyu2LFi0CABQUFECSJK1XRESETtuo7S82NhaLFy+Gm5sbvL29oVKpGo39gQceqHe7kiRh27ZtRsnP6NGjkZCQgIkTJxqlv7pY3ImIyOgWL16Mjz/+GOnp6Th8+DCCgoIwatQoXLp0SV4mPT0d1tbW+L//+z+89dZbWn34+fnh/Pnz8uv777+Hu7s7hg8frvM2arfj4OC
AgwcPIjk5GStXrkRmZmaDsaempuL8+fP45ZdfAAA7duyQYxgzZowx0tPiOKCOiIj0sn37djg6Omq0VVdXyz+Xl5dj3bp1SEtLw+jRowEAGzZsQGZmJt5++20888wzAICgoCAkJyc3uJ0OHTrA29sbAHD9+nVMmDABoaGhUKlUOm8DAHr37o0VK1YAAIKDg7F27Vp89dVX8tGCumpvCbx//35IkoShQ4e2uVMF3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PjID2FpzNGjR9GlS5dGC3tiYiIcHR3l1759+zB37lytttZm0uLepUuXes9pzJ8/H8DNJ+GoVCr4+vrCzs4OEREROHbsmClDJiJq9xwcHBAUFKTx6tSpkzxfCAFA+3apQgiNNgcHB522l5CQgJ07d2Lbtm1yodV1G8DNon8rSZJQU1PT5HaPHj2q9YdBXXPnztX4I2fAgAFYuXKlVltrM2lxz8nJ0TifUnsOZPLkyQCA5ORkpKSkYO3atcjJyYG3tzciIyNRVlZmyrCJiKgRQUFBsLGxwbfffiu3qdVq5Obm4vbbb9err48//hgrV67Ehx9+iG7durXINhpSUFCA7t27N7qMm5ubxh85dnZ28PT01GprbSY9596xY0eN6VWrVqFbt24IDw+HEAJr1qzB8uXL5dGE6enp8PLyQkZGBubMmVNvn5WVlaisrJSnS0tLAdz8pevyJB26hbCCWtz8+08trADmr3nEX39Ly3llTo2qNp/mkFdziMFUHBwc8MQTT+CZZ56Bm5sb/P39kZycjIqKCsycOVPnfn788Uc8+uijWLJkCe644w4UFRUBAGxsbODm5maUbTSmpqYGhYWF+O2339CpUyejPujn6tWrOHnypDydn5+PI0eOyO+lucxmQF1VVRXee+89xMXFQZIknDp1CkVFRYiKipKXUSqVCA8PR3Z2doPFPSkpCfHx8Vrtu3fv5uMV9fbXU8syr4YAO3aYMBZLoP0UuMZG7JLhzCGvFRUVeq9jjneMM9SqVatQU1ODadOmoaysDAMGDMCuXbtw22236dxHbm4uKioqkJCQgISEBLk9PDwce/fuNco2GhMbG4vZs2ejR48eKC0tNWpxz83NxYgRI+TpuLg4AEBMTIxO18k3RRK1Jy5M7MMPP0R0dDROnz4NX19fZGdnIywsDGfPnoWvr6+83OzZs1FYWIhdu3bV2099e+5+fn64cOECnJ2dW/x9WJR9KVALK2ReDUGk4wkohi8ydURt274U+Uc5r5GRWucDyXBqtRqZmZlmkdfS0lJ4eHigpKRE67vn+vXryM/PR2BgIGxtbU0UIbU1+nxuzGbP/e2338bo0aM1Cjmg22CJWymVSiiVSq12hUJh8v/sbY7014AThVTD/DWXpD2Ah5/LlmEOeTX19ql9M4tL4QoLC/Hll1/i8ccfl9tqr22sPcdSq7i4GF5eXq0aHxERUVtiFsU9NTUVnp6euP/+++W2wMBAeHt7a5w7q6qqQlZWFoYMGWKKMImIiNoEkx+Wr6mpQWpqKmJiYmBt/Vc4kiRh0aJFSExMRHBwMIKDg5GYmAh7e3tER0ebMGIiIiLzZvLi/uWXX+L06dN47LHHtOYtXrwY165dw7x583D58mUMGjQIu3fvbnO3ASQiImpNJi/uUVFRaGjAviRJUKlUTT7Bh4iIiP5iFufciYiIyHhY3ImIiCwMizsREZGFYXEnIiKyMCYfUEdERH/ak9S62xuxtHW3p4OIiAj07dsXa9asMXUobRr33ImISGfTp0/HhAkTtNr37t0LSZJw5cqVZvW/ZcsWPP/8883qoy1ISkrC3XffDScnJ3h6emLChAnIy8szWv8s7kREZHJVVVUAbj4fvS3fyyQiIkKnp7plZWVh/vz5OHDgADIzM3Hjxg1ERUWhvLzcKHGwuBMRkdFVVlY
iNjYWnp6esLW1xdChQ5GTkyPPj4iIwIIFCxAXFwcPDw9ERkbK7YsWLQIAFBQUQJIkrVdERIRO26jtLzY2FosXL4abmxu8vb2bvHfKAw88UO92JUnCtm3bjJKfnTt3Yvr06bjjjjvQp08fpKam4vTp0zh06JBR+mdxJyIio1u8eDE+/vhjpKen4/DhwwgKCsKoUaNw6dIleZn09HRYW1vj//7v//DWW29p9eHn54fz58/Lr++//x7u7u4YPny4ztuo3Y6DgwMOHjyI5ORkrFy5UuO5JXWlpqbi/Pnz+OWXXwAAO3bskGMYM2aMMdKjpaSkBMDNIxfGwAF1RESkl+3bt8PR0VGjrbq6Wv65vLwc69atQ1paGkaPHg0A2LBhAzIzM/H222/jmWeeAQAEBQUhOTm5we106NBBfkLo9evXMWHCBISGhkKlUum8DQDo3bs3VqxYAQAIDg7G2rVr8dVXX8lHC+pyd3cHAOzfvx+SJGHo0KEteqpACIG4uDgMHToUvXr1Mkqf3HMnIiK9jBgxAkeOHNF4bdy4UZ7/66+/Qq1WIywsTG5TKBQYOHAgjh8/LrcNGDBA523OnDkTZWVlyMjIgJWVlc7bAG4W91v5+PiguLi4yW0ePXoUXbp0abSwJyYmwtHRUX7t27cPc+fO1WprzIIFC3D06FG8//77TcakK+65ExGRXhwcHBAUFKTR9ttvv8k/1z4vRJIkjWWEEBptDg4OOm0vISEBO3fuxHfffScXWl23Adws+reSJAk1NTVNbvfo0aNafxjUNXfuXDz00EPy9JQpUzBp0iRMnDhRbuvUqVOD6y9cuBDbtm3DN998g86dOzcZk664505EbdeeJM0XmYWgoCDY2Njg22+/ldvUajVyc3Nx++2369XXxx9/jJUrV+LDDz9Et27dWmQbDSkoKED37t0bXcbNzQ1BQUHyy87ODp6enlptdQkhsGDBAmzZsgVff/01AgMDjRJzLe65ExGRUTk4OOCJJ57AM888Azc3N/j7+yM5ORkVFRWYOXOmzv38+OOPePTRR7FkyRLccccdKCoqAgDY2NjAzc3NKNtoTE1NDQoLC/Hbb7+hU6dOWkcEmmP+/PnIyMjAp59+CicnJ/m9ubi41PvHgL5Y3ImIzIUZ3jHOUKtWrUJNTQ2mTZuGsrIyDBgwALt27cJtt92mcx+5ubmoqKhAQkICEhIS5Pbw8HDs3bvXKNtoTGxsLGbPno0ePXqgtLTUqMV93bp1ACBf1lcrNTUV06dPb3b/kmjoYeoWorS0FC4uLigpKYGzs7Opw2lb9iRBLaywo6wHxjj9DMU9S0wdUdvSyGFiOa9jxmidDyQ91Mmxeug/sGPHDrPIa2PfPdevX0d+fj4CAwNha2trogiprdHnc8Nz7kRERBbG5MX97NmzmDp1Ktzd3WFvb4++fftq3KFHCAGVSgVfX1/Y2dkhIiICx44dM2HERERE5s2kxf3y5csICwuDQqHAF198gZ9++gkvv/wyXF1d5WWSk5ORkpKCtWvXIicnB97e3oiMjERZWZnpAiciIjJjJh1Q98ILL8DPzw+pqalyW5cuXeSfhRBYs2YNli9fLl8zmJ6eDi8vL2RkZGDOnDmtHTIREZHZM2lx37ZtG0aNGoXJkycjKysLnTp1wrx58zBr1iwAQH5+PoqKihAVFSWvo1QqER4ejuzs7HqLe2VlJSorK+Xp0tJSADevf1Sr1S38jiyMsIJa3Dy4oxZWAPOnH9HwgTE5r8xp89TJcW0+zSGvusSgy41UiGrp83kxaXE/deoU1q1bh7i4OCxbtgzfffcdYmNjoVQq8eijj8rX/Xl5eWms5+XlhcLCwnr7TEpKQnx8vFb77t27YW9vb/w3YdF6yD9lXg0BduwwYSxtUY8ml2js4RWkizo5/jOf5pDXioqKBufZ2NjAysoK586dQ8eOHWFjY2PUy6zIsgghUFVVhT/++ANWVlawsbFpch2TXgpnY2ODAQMGIDs7W26
LjY1FTk4O9u/fj+zsbISFheHcuXPw8fGRl5k1axbOnDmDnTt3avVZ3567n58fLly4wEvh9LUvBWphhcyrIYh0PAHF8EWmjqht2ZfS4Cw5r5GRJr9kq02rk2P14IXIzMw0i7yWlpbCw8Ojwctwq6qqcP78+Ub/CCC6lb29PXx8fHQq7ibdc/fx8UHPnj012m6//XZ8/PHHACA/DaioqEijuBcXF2vtzddSKpVQKpVa7QqFwuT/2dsc6a9DQAqphvnTl9T0ITTFgdeguHU5C7qJSauom+M/P6Pm8P+9qe3b2NjA398fN27c0HiiGlF9OnToAGtra52P8Ji0uIeFhSEvL0+j7cSJEwgICAAABAYGwtvbG5mZmejXrx+Am3/tZmVl4YUXXmj1eImIjEmSJLP4Q4Qsj0mL+1NPPYUhQ4YgMTERDz30EL777jusX78e69evB3Dzg79o0SIkJiYiODgYwcHBSExMhL29PaKjo00ZOhERkdkyaXG/++67sXXrVixduhQrV65EYGAg1qxZgylTpsjLLF68GNeuXcO8efNw+fJlDBo0CLt37270+bpERETtmckfHDN27FiMHTu2wfmSJEGlUkGlUrVeUERERG2YyW8/S0RERMZl8j13IqIWVffpfLwigdoB7rkTERFZGBZ3IiIiC8PiTkREZGFY3ImIiCwMizsREZGF4Wh5IrIc+1IA9Lj5rw739ieyVNxzJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZGA6oo5vq3qJTl2V4G08iIrPEPXciIiILw+JORERkYXhYngxX36F8HqonIjI57rkTERFZGJMWd5VKBUmSNF7e3t7yfCEEVCoVfH19YWdnh4iICBw7dsyEERMREZk/gw7Lp6Wl4aGHHoK9vX2zA7jjjjvw5ZdfytMdOnSQf05OTkZKSgrS0tIQEhKChIQEREZGIi8vD05OTs3etimszjyh1fZUZIgJIiEiIktl0J770qVL4e3tjZkzZyI7O7tZAVhbW8Pb21t+dezYEcDNvfY1a9Zg+fLlmDhxInr16oX09HRUVFQgIyOjWdskIiKyZAbtuf/222/4/PPPkZaWhhEjRiAwMBAzZsxATEyMxmF1Xfzyyy/w9fWFUqnEoEGDkJiYiK5duyI/Px9FRUWIioqSl1UqlQgPD0d2djbmzJlTb3+VlZWorKyUp0tLSwEAarUaarXagHdrXJKo1mozh7gg6v87T/1nu7qB+dormMF7MReN5KzBvDJ/+qmTP50+r62UY7P4f03tliSEEM3poLi4GO+99x7S0tLw888/47777sPMmTMxbtw4WFk1XhC++OILVFRUICQkBL///jsSEhLw888/49ixY8jLy0NYWBjOnj0LX19feZ3Zs2ejsLAQu3btqrdPlUqF+Ph4rfaMjAyjnEYgItJFRUUFoqOjUVJSAmdnZ1OHQ+1Ms4s7ABw8eBDvvPMO0tPT4ePjgytXrsDV1RWpqamIiIjQuZ/y8nJ069YNixcvxuDBgxEWFoZz587Bx8dHXmbWrFk4c+YMdu7cWW8f9e25+/n54cKFC2bxH+z1PSe12uaPCDJBJHXsS6m3WS2skHk1BJGOJ6DQ5RGaw+KMHFgb1kBOgUbyyvzpp06Odfq8tlKOS0tL4eHhweJOJmHwde6///473n33XaSmpuLUqVOYMGECtm/fjnvvvRfXrl3Ds88+i5iYGBQWFurcp4ODA+6880788ssvmDBhAgCgqKhIo7gXFxfDy8urwT6USiWUSqVWu0KhgEKh0P0NthAhddBqM4e4mnr2tUKq0a24m8N7MRc65Esrr8yffhrIcaOf11bKsVn8v6Z2y6ABdePGjYOfnx/S0tIwa9YsnD17Fu+//z7uvfdeAICdnR2efvppnDlzRq9+Kysrcfz4cfj4+CAwMBDe3t7IzMyU51dVVSErKwtDhgwxJGwiIqJ2waA9d09PT2RlZSE0NLTBZXx8fJCfn99oP//4xz8wbtw4+Pv7o7i
4GAkJCSgtLUVMTAwkScKiRYuQmJiI4OBgBAcHIzExEfb29oiOjjYkbCIionbBoOIeHh6O/v37a7VXVVVh06ZNePTRRyFJEgICAhrt57fffsMjjzyCCxcuoGPHjhg8eDAOHDggr7d48WJcu3YN8+bNw+XLlzFo0CDs3r27zV7jTkRE1BoMKu4zZszAfffdB09PT432srIyzJgxA48++qhO/WzatKnR+ZIkQaVSQaVSGRImERFRu2TQOXchBCRJ0mr/7bff4OLi0uygiIiIyHB67bn369dPvgf8yJEjYW391+rV1dXIz8/HfffdZ/QgiYiISHd6Fffay9OOHDmCUaNGwdHRUZ5nY2ODLl26YNKkSUYNkIiIiPSjV3FfsWIFAKBLly74+9//Dltb2xYJioiIiAxn0IC6mJgYY8dBRERERqJzcXdzc8OJEyfg4eGB2267rd4BdbUuXbpklOCIiIhIfzoX99WrV8vXl69evbrR4k5ERESmo3Nxv/VQ/PTp01siFiKixu1JMnUERG2CzsW99rnouuATkIiIiExH5+Lu6ura5KH42pvbVFdXNzswIiIiMozOxX3Pnj0tGQeZqe8KLgHuN/8NC3Q1dTiWr77DziOWtn4cRNSm6Vzcw8PDWzIOIiIiMhKdi/vRo0fRq1cvWFlZ4ejRo40u27t372YHRkRERIbRubj37dsXRUVF8PT0RN++fSFJEoQQWsvxnDsREZFp6Vzc8/Pz0bFjR/lnIiIiMk86F/eAgIB6fyYiIiLzYtC95QEgLy8Pr732Go4fPw5JktCjRw8sXLgQ3bt3N2Z87cLqzBPyz09FhpgwEiIisgRWhqz00UcfoVevXjh06BD69OmD3r174/Dhw+jVqxc2b95s7BiJiIhIDwYV98WLF2Pp0qXYv38/UlJSkJKSguzsbCxbtgxLliwxKJCkpCRIkoRFixbJbUIIqFQq+Pr6ws7ODhERETh27JhB/VPz7T91EftPXTR1GDpZnXlCfhERtTcGFfeioiI8+uijWu1Tp05FUVGR3v3l5ORg/fr1WpfQJScnIyUlBWvXrkVOTg68vb0RGRmJsrIyQ8ImIiJqFww65x4REYF9+/YhKChIo/3bb7/FsGHD9Orr6tWrmDJlCjZs2ICEhAS5XQiBNWvWYPny5Zg4cSIAID09HV5eXsjIyMCcOXPq7a+yshKVlZXydO098dVqNdRqtV6xtQRJNH6ZoMliFPX/nVcjddD4FwDUDSx7c6bpcwxo5tnccgr8lcNGcykvbB45NQtN5EunvLZSPs3h+4baL0nUd7F6PbZt2yb/fO7cOTz33HN46KGHMHjwYADAgQMHsHnzZsTHx2Pu3Lk6BxATEwM3NzesXr0aERER6Nu3L9asWYNTp06hW7duOHz4MPr16ycvP378eLi6uiI9Pb3e/lQqFeLj47XaMzIyYG9vr3NcRETNUVFRgejoaJSUlPBhWtTqdC7uVla6HcHX5yY2mzZtwr///W/k5OTA1tZWo7hnZ2cjLCwMZ8+eha+vr7zO7NmzUVhYiF27dtXbZ3177n5+frhw4YJZ/Ad7fc/JRufPHxHU6PwWsy+l3uYDhSW45DYIbpcOwurPveGBXdwa7mdYXEtEp7db82xuOQVu7llmXg1BpOMJKKSaxvsxk5yahUZyCuiY11bKZ2lpKTw8PFjcySR0PixfU9PEF5Cezpw5gyeffBK7d++Gra1tg8vVfRJd7ZPnGqJUKqFUKrXaFQoFFAqF4QEbibjl8HZ9TBZjA1+EtQXdSlTLPzdajMwgx4Bmns0tp7dSSDVNF3czyalZ0CGnQBN5baV8msP3DbVfBg2oM4ZDhw6huLgYd911F6ytrWFtbY2srCy8+uqrsLa2hpeXFwBoDdArLi6W5xEREZE2g29iU15ejqysLJw+fRpVVVUa82JjY5tcf+TIkfjhhx802mbMmIEePXpgyZIl6Nq1K7y9vZGZmSmfc6+qqkJWVhZeeOEFQ8MmIiKyeAY
V9++//x5jxoxBRUUFysvL4ebmhgsXLsDe3h6enp46FXcnJyf06tVLo83BwQHu7u5y+6JFi5CYmIjg4GAEBwcjMTER9vb2iI6ONiRsIiKidsGg4v7UU09h3LhxWLduHVxdXXHgwAEoFApMnToVTz75pNGCW7x4Ma5du4Z58+bh8uXLGDRoEHbv3g0nJyejbYMsH2/vS0TtjUHF/ciRI3jrrbfQoUMHdOjQAZWVlejatSuSk5MRExMjX5eur71792pMS5IElUoFlUplUH9ERETtkUED6hQKhTxi3cvLC6dPnwYAuLi4yD8TERGRaRi0596vXz/k5uYiJCQEI0aMwHPPPYcLFy7g3XffxZ133mnsGNsVHkLWH3NGRKTJoD33xMRE+Pj4AACef/55uLu744knnkBxcTHWr19v1ACJiIhIPwbtuQ8YMED+uWPHjtixY4fRAiIiIqLmMfg6d+DmDWXy8vIgSRK6d++Ojh07GisuMmO3PvY1tKu7SWIwu0e57kkydQSkq/p+VyOWtn4cRC3IoMPypaWlmDZtGjp16oTw8HAMHz4cvr6+mDp1KkpKSowdIxEREenBoOL++OOP4+DBg9i+fTuuXLmCkpISbN++Hbm5uZg1a5axYyQiIiI9GHRY/vPPP8euXbswdOhQuW3UqFHYsGED7rvvPqMFR0RERPozaM/d3d0dLi4uWu0uLi647bbbmh0UERERGc6gPfdnn30WcXFx+M9//iNfEldUVIRnnnkG//rXv4waoCUwu8FfgNagInMYJEdERMahc3Hv16+fxnPUf/nlFwQEBMDf3x8AcPr0aSiVSvzxxx+YM2eO8SMlIiIinehc3CdMmNCCYRAREZGx6FzcV6xY0ZJxEBmFWZ4CISJqZc26ic2hQ4dw/PhxSJKEnj17ol+/fsaKi4iIiAxkUHEvLi7Gww8/jL1798LV1RVCCJSUlGDEiBHYtGkT71RHRERkQgZdCrdw4UKUlpbi2LFjuHTpEi5fvowff/wRpaWliI2NNXaMbcrqzBNt/tDw/lMX5RcREbU9Bu2579y5E19++SVuv/12ua1nz554/fXXERUVZbTgiIiISH8G7bnX1NRAoVBotSsUCtTU1Ojcz7p169C7d284OzvD2dkZoaGh+OKLL+T5QgioVCr4+vrCzs4OEREROHbsmCEhExERtRsGFfd77rkHTz75JM6dOye3nT17Fk899RRGjhypcz+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCTk5ORkpKCtWvXIicnB97e3oiMjERZWZkhYRMREbULBhX3tWvXoqysDF26dEG3bt0QFBSEwMBAlJWV4bXXXtO5n3HjxmHMmDEICQlBSEgI/v3vf8PR0REHDhyAEAJr1qzB8uXLMXHiRPTq1Qvp6emoqKhARkaGIWETERG1Cwadc/fz88Phw4eRmZmJn3/+GUII9OzZE/fee6/BgVRXV2Pz5s0oLy9HaGgo8vPzUVRUpHEOX6lUIjw8HNnZ2Q3eBa+yshKVlZXydGlpKQBArVZDrVYbHJ+uJFEtb69um75aNF6h+XddjdSh3sVq2xuar67TD1ohx4AZ5rRuHpqK48/ltfJX78Ktk9M2oYl86ZVXjRWNn+PW+L4haogkhBD6rHDjxg3Y2triyJEj6NWrV7MD+OGHHxAaGorr16/D0dERGRkZGDNmDLKzsxEWFoazZ8/C19dXXn727NkoLCzErl276u1PpVIhPj5eqz0jIwP29vbNjpeISBcVFRWIjo5GSUkJnJ2dTR0OtTN677lbW1sjICAA1dWG7TnV1b17dxw5cgRXrlzBxx9/jJiYGGRlZcnzb72fPXBzkF3dtlstXboUcXFx8nRpaSn8/PwQFRXVKv/BXt9zskX6nT8iyLgd7kvRmPyu4FK9i9VIHXDJbRDcLh2EVT17ywO7uGk2DIvTWqYlGJpno+exVp18NkUtrJB5NQSRjiegkJoYhNpKOW0TmsizXnm9VQvkuPaoIZE
pGPxUuKVLl+K9996Dm5tb0ys0wsbGBkFBN79wBwwYgJycHLzyyitYsmQJgJtPm6t98hxw8wY6Xl5eDfanVCqhVCq12hUKRb0j/I1NNHD4urmMHnudL776Cnfd+fUto/UF2go5BgzPc4t9BvQpJLdQSDVNF6FWymmboGOedcqrxgrGz3FrfN8QNcSg4v7qq6/i5MmT8PX1RUBAABwcHDTmHz582OCAhBCorKxEYGAgvL29kZmZKd/WtqqqCllZWXjhhRcM7p+IiMjSGVTcJ0yYAEmSoOfpei3Lli3D6NGj4efnh7KyMmzatAl79+7Fzp07IUkSFi1ahMTERAQHByM4OBiJiYmwt7dHdHR0s7ZLRERkyfQq7hUVFXjmmWfwySefQK1WY+TIkXjttdfg4eFh0MZ///13TJs2DefPn4eLiwt69+6NnTt3IjIyEgCwePFiXLt2DfPmzcPly5cxaNAg7N69G05OTgZtj4iIqD3Qq7ivWLECaWlpmDJlCuzs7JCRkYEnnngCmzdvNmjjb7/9dqPzJUmCSqWCSqUyqH8iIqL2SK/ivmXLFrz99tt4+OGHAQBTpkxBWFgYqqur0aFDywwkIyIiIv3odaeHM2fOYNiwYfL0wIEDYW1trXEbWiIiIjItvYp7dXU1bGxsNNqsra1x48YNowZFREREhtPrsLwQAtOnT9e4jvz69euYO3euxuVwW7ZsMV6EREREpBe9intMTIxW29SpU40WDFFjVmeeMHUI1Jr2JJk6AqI2S6/inpqa2lJxEBERkZEY9MhXIiIiMl8G3aGOLMf+UxeNtn5oV/fmhkNEREbAPXciIiILw+JORERkYVjciYiILAyLOxERkYVhcSciIrIwLO5kNPtPXeSNZoiIzACLOxERkYVhcSciIrIwLO5EREQWxqTFPSkpCXfffTecnJzg6emJCRMmIC8vT2MZIQRUKhV8fX1hZ2eHiIgIHDt2zEQRExERmT+T3n42KysL8+fPx913340bN25g+fLliIqKwk8//SQ/QjY5ORkpKSlIS0tDSEgIEhISEBkZiby8PDg5OZkyfKrH4NPrgT233IZ2xFLTBWMp6j4djTkloiaYtLjv3LlTYzo1NRWenp44dOgQhg8fDiEE1qxZg+XLl2PixIkAgPT0dHh5eSEjIwNz5swxRdhERERmzaweHFNSUgIAcHNzAwDk5+ejqKgIUVFR8jJKpRLh4eHIzs6ut7hXVlaisrJSni4tLQUAqNVqqNXqlgwfACCJ6hbp1+ixi5tnZGqkDo0uVju/qeVupRa3nO0xYtzGyG2LfQaEfme4anOk1nO9myu1/OfYLBiQG4Pz2gI5bY3vG6KGSEIIYeoggJvn1sePH4/Lly9j3759AIDs7GyEhYXh7Nmz8PX1lZedPXs2CgsLsWvXLq1+VCoV4uPjtdozMjJgb2/fcm+AiOgWFRUViI6ORklJCZydnU0dDrUzZrPnvmDBAhw9ehTffvut1jxJkjSmhRBabbWWLl2KuLg4ebq0tBR+fn6Iiopqlf9gr+852SL9zh8RZPjK+1IanPVdwaVGV62ROuCS2yC4XToIKx33nAd2cftrYlhcwwvqyRi5vTWPtf01K7e1GslxfdTCCplXQxDpeAIKqUa/bRkxp2ZNz5wCzchrC+S09qghkSmYRXFfuHAhtm3bhm+++QadO3eW2729vQEARUVF8PHxkduLi4vh5eVVb19KpRJKpVKrXaFQQKFQGDlybUKPw9f6aFbsjXzJ6VqwrUS1zstqfKkaMefGyO2teaztzyifC30LdG08Uo3+xb0VPsdmwcCcAgbktQVy2hrfN0QNMWlxF0Jg4cKF2Lp1K/bu3YvAwECN+YGBgfD29kZmZib69esHAKiqqkJWVhZeeOEFU4RMJmCWt7StO4Kd2jZekUAWxqTFff78+cjIyMCnn34KJycnFBUVAQBcXFxgZ2cHSZKwaNEiJCYmIjg4GMHBwUhMTIS9vT2io6NNGToREZH
ZMmlxX7duHQAgIiJCoz01NRXTp08HACxevBjXrl3DvHnzcPnyZQwaNAi7d+/mNe5EREQNMPlh+aZIkgSVSgWVStXyAREREVkA3lueiIjIwrC4ExERWRizuBSurTPL0dxERNRucc+diIjIwnDPnaitqe8ae16XTUS34J47ERGRhWFxJyIisjA8LN9O7D910dQhmAUOfiSi9oB77kRERBaGxZ2IiMjCsLgTERFZGBZ3IiIiC8PiTkREZGE4Wp7MEke1ExEZjnvuREREFobFnYiIyMKwuBMREVkYFnciIiILY9Li/s0332DcuHHw9fWFJEn45JNPNOYLIaBSqeDr6ws7OztERETg2LFjpgmWiIiojTDpaPny8nL06dMHM2bMwKRJk7TmJycnIyUlBWlpaQgJCUFCQgIiIyORl5cHJycnE0RsOrWjx5+KDDFxJHpqA48nvXVkfpvLLxFRPUxa3EePHo3Ro0fXO08IgTVr1mD58uWYOHEiACA9PR1eXl7IyMjAnDlz6l2vsrISlZWV8nRpaSkAQK1WQ61WG/kd3CSJ6hbptz4GvQdhhRqpg0Hbq11Pn/XVookDQjq8h9bM6a10zm9T77Gp7fy5fpO50rnDlvlsm5QBuTFaXo2Qz5b6viHShSSEEKYOAgAkScLWrVsxYcIEAMCpU6fQrVs3HD58GP369ZOXGz9+PFxdXZGenl5vPyqVCvHx8VrtGRkZsLe3b5HYiYjqqqioQHR0NEpKSuDs7GzqcKidMdub2BQVFQEAvLy8NNq9vLxQWFjY4HpLly5FXFycPF1aWgo/Pz9ERUW12H+w1/ecbJF+6zN/RJD+K+1LwXcFlwzaXo3UAZfcBsHt0kFY6bg3PbCLW+MLDItrfD5aN6e30jm/+1KatR21sELm1RBEOp6AQqppVl8AdMppm2NAjo2WVyPks/aoIZEpmG1xryVJksa0EEKr7VZKpRJKpVKrXaFQQKFQGD0+ABAGHvI2hEHvQarRuTA3xEpU69xHk1+qOryH1szprXTOrzEKMm7myijFvYU+2ybVjLw0O69GyGdLfd8Q6cJsi7u3tzeAm3vwPj4+cntxcbHW3nx7xYFgZFHqG3xpKm1gIChRY8z2OvfAwEB4e3sjMzNTbquqqkJWVhaGDBliwsiIiIjMm0n33K9evYqTJ/86t5qfn48jR47Azc0N/v7+WLRoERITExEcHIzg4GAkJibC3t4e0dHRJoyaiIjIvJm0uOfm5mLEiBHydO1AuJiYGKSlpWHx4sW4du0a5s2bh8uXL2PQoEHYvXu3WVzjzqeWNWz/qYvyz6Fd3U0YCRFR+2TS4h4REYHGrsSTJAkqlQoqlar1giIiImrjzPacOxERERnGbEfLm6s2cTjenEYdtzG8AoGILAH33ImIiCwMizsREZGF4WF5C3brqHWycHVPxfCGK0TtGvfciYiILAyLOxERkYXhYfk2pqHR+hqjvM3ot8ob2hARtT7uuRMREVkYM9rHMy9t4np2IiKienDPnYiIyMKwuBMREVkYHpa3QG3h+nZzPu0x+PT6mz/suTkAkIMCiait4Z47ERGRhWFxJyIisjA8LE+tb08SBp9u/NTBAf/ZrRRMw9rC6Y02g08qJGpV3HMnIiKyMG2iuL/xxhsIDAyEra0t7rrrLuzbt8/UIREREZktsz8s/8EHH2DRokV44403EBYWhrfeegujR4/GTz/9BH9/f1OH1+rkkdxkEm125Hx9h8X55Dgii2X2e+4pKSmYOXMmHn/8cdx+++1Ys2YN/Pz8sG7dOlOHRkREZJbMes+9qqoKhw4dwj//+U+N9qioKGRnZ9e7TmVlJSorK+XpkpISAMClS5egVqt13nbl1RIDIm55ZddvtOr2aiSBiooKlF2/AStR3ay+Ll6tkn9u6n2YMv+65vjW96MvtbBCRUUFLkpVUEg1BvfToIt1BgPWF2vdZVpSM3KljxbNq575KisrAwAIIYwbB5EOzLq4X7hwAdXV1fD
y8tJo9/LyQlFRUb3rJCUlIT4+Xqs9MDCwRWKklvKKqQNo41RGWob+ojJorbKyMri4uBg3FKImmHVxryVJksa0EEKrrdbSpUsRFxcnT9fU1ODSpUtwd3dvcB1qWGlpKfz8/HDmzBk4OzubOhyLwby2DHPKqxACZWVl8PX1NWkc1D6ZdXH38PBAhw4dtPbSi4uLtfbmaymVSiiVSo02V1fXlgqx3XB2djb5l6UlYl5bhrnklXvsZCpmPaDOxsYGd911FzIzMzXaMzMzMWTIEBNFRUREZN7Mes8dAOLi4jBt2jQMGDAAoaGhWL9+PU6fPo25c+eaOjQiIiKzZPbF/e9//zsuXryIlStX4vz58+jVqxd27NiBgIAAU4fWLiiVSqxYsULrVAc1D/PaMphXopskwes0iIiILIpZn3MnIiIi/bG4ExERWRgWdyIiIgvD4k5ERGRhWNwJAPDNN99g3Lhx8PX1hSRJ+OSTTzTmCyGgUqng6+sLOzs7RERE4NixY6YJto1ISkrC3XffDScnJ3h6emLChAnIy8vTWIZ51d+6devQu3dv+UY1oaGh+OKLL+T5zCkRizv9qby8HH369MHatWvrnZ+cnIyUlBSsXbsWOTk58Pb2RmRkpPxwDNKWlZWF+fPn48CBA8jMzMSNGzcQFRWF8vJyeRnmVX+dO3fGqlWrkJubi9zcXNxzzz0YP368XMCZUyIAgqgOAGLr1q3ydE1NjfD29harVq2S265fvy5cXFzEm2++aYII26bi4mIBQGRlZQkhmFdjuu2228TGjRuZU6I/cc+dmpSfn4+ioiJERUXJbUqlEuHh4Q0+epe01T5+2M3NDQDzagzV1dXYtGkTysvLERoaypwS/YnFnZpU++AefR69S5qEEIiLi8PQoUPRq1cvAMxrc/zwww9wdHSEUqnE3LlzsXXrVvTs2ZM5JfqT2d9+lsyHPo/eJU0LFizA0aNH8e2332rNY1711717dxw5cgRXrlzBxx9/jJiYGGRlZcnzmVNq77jnTk3y9vYGAL0evUt/WbhwIbZt24Y9e/agc+fOcjvzajgbGxsEBQVhwIABSEpKQp8+ffDKK68wp0R/YnGnJgUGBsLb21vj0btVVVXIysrio3cbIYTAggULsGXLFnz99dcIDAzUmM+8Go8QApWVlcwp0Z94WJ4AAFevXsXJkyfl6fz8fBw5cgRubm7w9/fHokWLkJiYiODgYAQHByMxMRH29vaIjo42YdTmbf78+cjIyMCnn34KJycneW/SxcUFdnZ2kCSJeTXAsmXLMHr0aPj5+aGsrAybNm3C3r17sXPnTuaUqJYph+qT+dizZ48AoPWKiYkRQty8bGvFihXC29tbKJVKMXz4cPHDDz+YNmgzV18+AYjU1FR5GeZVf4899pgICAgQNjY2omPHjmLkyJFi9+7d8nzmlEgIPvKViIjIwvCcOxERkYVhcSciIrIwLO5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7URMKCgogSRKOHDli6lCIiHTC4k5thhAC9957L0aNGqU174033oCLiwtOnz5tgsiIiMwLizu1GZIkITU1FQcPHsRbb70lt+fn52PJkiV45ZVX4O/vb8IIiYjMA4s7tSl+fn545ZVX8I9//AP5+fkQQmDmzJkYOXIkpk+frrX8I488gocfflijTa1Ww8PDA6mpqQCAnTt3YujQoXB1dYW7uzvGjh2LX3/9tcEY0tLS4OrqqtH2ySefQJIkjbbPPvsMd911F2xtbdG1a1fEx8fjxo0b8nyVSgV/f38olUr4+voiNjZWz2wQEdWPj3ylNicmJgZbt27FjBkzMGnSJPz444/48ccf6112ypQpeOihh3D16lU4OjoCAHbt2oXy8nJMmjQJAFBeXo64uDjceeedKC8vx3PPPYcHH3wQR44cgZWVYX//7tq1C1OnTsWrr76KYcOG4ddff8Xs2bMBACtWrMBHH32E1at
XY9OmTbjjjjtQVFSE//3vfwZti4hIi2kfSkdkmN9//1107NhRWFlZiS1btjS4XFVVlfDw8BD/+c9/5LZHHnlETJ48ucF1iouLBQD5MaH5+fkCgPj++++FEEKkpqYKFxcXjXW2bt0qbv3vNGzYMJGYmKixzLvvvit8fHyEEEK8/PLLIiQkRFRVVen0fomI9MHD8tQmeXp6Yvbs2bj99tvx4IMPNricQqHA5MmT8d///hfAzb30Tz/9FFOmTJGX+fXXXxEdHY2uXbvC2dkZgYGBANCswXmHDh3CypUr4ejoKL9mzZqF8+fPo6KiApMnT8a1a9fQtWtXzJo1C1u3btU4ZE9E1Bw8LE9tlrW1Naytm/4IT5kyBeHh4SguLkZmZiZsbW0xevRoef64cePg5+eHDRs2wNfXFzU1NejVqxeqqqrq7c/KygpCCI02tVqtMV1TU4P4+HhMnDhRa31bW1v4+fkhLy8PmZmZ+PLLLzFv3jy8+OKLyMrKgkKh0OXtExE1iMWdLN6QIUPg5+eHDz74AF988QUmT54MGxsbAMDFixdx/PhxvPXWWxg2bBgA4Ntvv220v44dO6KsrAzl5eVwcHAAAK1r4Pv374+8vDwEBQU12I+dnR0eeOABPPDAA5g/fz569OiBH374Af3792/GuyUiYnGndkCSJERHR+PNN9/EiRMnsGfPHnnebbfdBnd3d6xfvx4+Pj44ffo0/vnPfzba36BBg2Bvb49ly5Zh4cKF+O6775CWlqaxzHPPPYexY8fCz88PkydPhpWVFY4ePYoffvgBCQkJSEtLQ3V1tdzXu+++Czs7OwQEBLRECoioneE5d2oXpkyZgp9++gmdOnVCWFiY3G5lZYVNmzbh0KFD6NWrF5566im8+OKLjfbl5uaG9957Dzt27MCdd96J999/HyqVSmOZUaNGYfv27cjMzMTdd9+NwYMHIyUlRS7erq6u2LBhA8LCwtC7d2989dVX+Oyzz+Du7m70905E7Y8k6p48JCIiojaNe+5EREQWhsWdiIjIwrC4ExERWRgWdyIiIgvD4k5ERGRhWNyJiIgsDIs7ERGRhWFxJyIisjAs7kRERBaGxZ2IiMjCsLgTERFZmP8HzQms27H+BcMAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABcFElEQVR4nO3dd1iTV/sH8G8CCSQs2UO2RK2guCp1FRyoqDi6bG2rtNYurVqrba1txVF81WqHVrsc1Mmvtb6v1klVUOuoUqy4QVBR9pA9EvL8/njMA4EASQgkhPtzXbk0z8o5EHLnnOec+/AYhmFACCGEEKPC13cBCCGEEKJ7FOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOB14MqVK5gxYwa6dOkCkUgEkUgEiUSCt956C5cuXVI6NjIyEjweD3w+H6mpqQ2uVVZWBmtra/B4PERERHDb7969Cx6PBx6Ph8jISJXleP3117ljmqMoh+IhFArh4+ODuXPn4tGjR5pUv0mKcn/55Zc6u2ZcXBx4PB5+++23Zo9V1LOukJAQhISEKG1r6ufamEOHDjV6jre3t9LvryVCQkKUflcikQiBgYH4+uuvIZfLdfIaTVH8vOPi4rhtERER8Pb21vhaGzduxLZt2xpsV7xPVO0jhGiHAnwL/fDDD+jXrx8uXLiAuXPn4o8//sDBgwcxb948XLt2DU8++STu3LnT4DxLS0ts3bq1wfZff/0VUqkUAoFA5etZWVlh27ZtDT7YS0tL8euvv8La2lqj8h85cgTnzp3DwYMHMWnSJKxfvx5hYWEwlgzGb7zxBs6dO9fscefOncMbb7yh0bUPHTqEpUuXqty3b98+fPbZZxpdrym+vr44d+4czp07h5iYGHTu3Bnvv/8+Fi1apLPX0MRnn32Gffv2aXxeYwHe1dUV586dw7hx43RQOkIIAJjquwDt2V9//YV3330X48aNw2+//QahUMjtGz58OGbNmoVff/0VIpGowblTpkxBdHQ0li5dCj6/9nvW5s2bMXnyZOzfv1/la06ZMgU///wzjh8/jtDQUG57TEwMampqMGnSJOzYsUPtOvTr1w8ODg4AgNDQUOTn52P79u04e/YsBg8erPKc8vJyiMVitV9Dn9zd3eHu7t7scU899ZROX7dPnz46vZ5IJFIqY1hYGLp3744NGzZgxYoVKr8QMgyDyspKle+/lurSpYtOr2dmZqbz3wEhHR214FsgKioKJiYm+OGHH5SCe13PP/883NzcGmx//fXXkZ6ejtjYWG7b7du3cebMGbz++uuNvma3bt0waNAgbNmyRWn7li1b8Mwzz8DGxkbL2rAUH7L37t0DwHYPBwQE4NSpUxg0aBDEYjFXvvv37+OVV16Bk5MTzMzM8MQTT2Dt2rUqu43lcjm++OILeHp6wtzcHP3798fx48eVjklJScFrr70GiUQCsViMzp07Izw8HElJSSrLWllZifnz58PFxQUikQjBwcFITExUOkZVF70q9bvoy8vLsWDBAvj4+MDc3Bx2dnbo378/du/eDYDtov7uu++4cxWPu3fvAlDdRf/o0SN88MEH8PX1hZmZGZycnDB27FjcvHmz2fLVJxAI0K9fP5SXlyM3N5crx+zZs/H999/jiSeegJmZGaKjowEAycnJmDp1qtLvSlH+um7evIkxY8ZALBbDwcEBb7/9NkpKShocp6qLXi6XY/369ejduzdEIhE6deqEp556ivuy6u3tjWvXriE+Pp77eSmu0VgX/ZkzZzBixAhYWVlBLBZj0KBBOHjwoNIx27ZtA4/Hw8mTJ/HOO+/AwcEB9vb2eOaZZ5CRkaF07IkTJxASEgJ7e3uIRCJ4en
ri2WefRXl5udo/e0LaC2rBa6mmpgYnT55E//794erqqvH5EokEQ4cOxZYtWzB69GgAbJD29vbGiBEjmjx3xowZmDVrFgoLC2Fra4tbt27h7NmzWLFiBfbu3atVfRRSUlIAAI6Ojty2zMxMvPLKK/jwww8RFRUFPp+P3NxcDBo0CNXV1Vi+fDm8vb3xxx9/YMGCBbhz5w42btyodN0NGzbAy8uLu2+8evVqhIWFIT4+HgMHDgQAZGRkwN7eHv/5z3/g6OiIgoICREdHIygoCImJiejWrZvSNT/55BP07dsXP//8M4qKihAZGYmQkBAkJibC19e3RT+H+fPnY/v27VixYgX69OmDsrIyXL16Ffn5+QDYLuqysjL89ttvSrcAGnsvlJSUYMiQIbh79y4++ugjBAUFobS0FKdOnUJmZia6d++ucRnv3LkDU1NT2Nractv++9//4vTp0/j888/h4uICJycnXL9+HYMGDYKnpyfWrl0LFxcXHD16FHPmzEFeXh6WLFkCAMjOzkZwcDAEAgE2btwIZ2dn7Ny5E7Nnz1arPBEREdixYwdmzJiBZcuWQSgU4p9//uG+9Ozbtw/PPfccbGxsuPeHmZlZo9eLj49HaGgoevXqhc2bN8PMzAwbN25EeHg4du/ejSlTpigd/8Ybb2DcuHHYtWsX0tPTsXDhQrzyyis4ceIEAPZLxLhx47i/u06dOuHhw4c4cuQIqqur202vFCFqY4hWsrKyGADMiy++2GCfTCZjpFIp95DL5dy+JUuWMACY3NxcZuvWrYyZmRmTn5/PyGQyxtXVlYmMjGQYhmEsLCyY6dOnc+elpaUxAJg1a9YwJSUljKWlJbNhwwaGYRhm4cKFjI+PDyOXy5lZs2Yx6vxaFeXIyspipFIpU1hYyOzYsYMRiUSMh4cHU1FRwTAMwwQHBzMAmOPHjyud//HHHzMAmAsXLihtf+eddxgej8fcunVLqdxubm7cNRmGYYqLixk7Oztm5MiRjZZRJpMx1dXVjEQiYd5//31u+8mTJxkATN++fZV+tnfv3mUEAgHzxhtvNKhnXcHBwUxwcLDSNgDMkiVLuOcBAQHMpEmTGi0bwzBN/qy9vLyUfn/Lli1jADCxsbFNXlOV4OBgxt/fn3s/ZWRkcD//559/XqkONjY2TEFBgdL5o0ePZtzd3ZmioiKl7bNnz2bMzc254z/66COGx+Mxly9fVjouNDSUAcCcPHmS2zZ9+nTGy8uLe37q1CkGALN48eIm6+Lv79/gZ88wte+TrVu3ctueeuopxsnJiSkpKeG2yWQyJiAggHF3d+d+91u3bmUAMO+++67SNVevXs0AYDIzMxmGYZjffvuNAdCgfoQYK+qibwX9+vWDQCDgHmvXrlV53PPPPw+hUIidO3fi0KFDyMrKUmvktaWlJZ5//nls2bIFMpkMv/zyC1577TW1uqLrc3FxgUAggK2tLV555RX07dsXR44cgbm5OXeMra0thg8frnTeiRMn0KNHDwwYMEBpe0REBBiG4VpNCs8884zSNa2srBAeHo5Tp06hpqYGACCTyRAVFYUePXpAKBTC1NQUQqEQycnJuHHjRoOyT506VanOXl5eGDRoEE6ePKnxz6G+AQMG4PDhw/j4448RFxeHioqKFl3v8OHD6Nq1K0aOHKnV+deuXePeT25ubli7di1efvll/PTTT0rHDR8+XKlFX1lZiePHj2Py5MkQi8WQyWTcY+zYsaisrMT58+cBACdPnoS/vz8CAwOVrjl16lS16gcAs2bN0qp+9ZWVleHChQt47rnnYGlpyW03MTHBq6++igcPHuDWrVtK50yYMEHpea9evQDU3m7q3bs3hEIh3nzzTURHR6ucxUKIMaEuei05ODhAJBJxHx517dq1C+Xl5cjMzGzwoVOXhYUFpkyZgi1btsDLywsjR46El5eXWq8/Y8YMDBkyBF988QVyc3O1npL1559/wsbGBgKBAO7u7rC3t29wjKpu5/z8fJ
XTpBTjDRRd2QouLi4NjnVxcUF1dTVKS0thY2OD+fPn47vvvsNHH32E4OBg2Nrags/n44033lAZYBu75r///ttofdX17bffwt3dHTExMVi1ahXMzc0xevRorFmzBhKJROPr5ebmwtPTU+vydOnSBXv27AGPx4O5uTl8fHxUdinX/13l5+dDJpNh/fr1WL9+vcpr5+Xlccf6+Pg02K/q51xfbm4uTExM1DpWHYWFhWAYRuV7r7H3WP33rqL7X/He6dKlC/7880+sXr0as2bNQllZGXx9fTFnzhzMnTtXJ+UmxJBQgNeSiYkJhg8fjmPHjiEzM1Ppg6hHjx4AwN17bMrrr7+On3/+GVeuXMHOnTvVfv3BgwejW7duWLZsGUJDQ+Hh4aFxHQAgMDCQG0XfGFU9A/b29sjMzGywXTGoqf41s7KyGhyblZUFoVDItdB27NiBadOmISoqSum4vLw8dOrUSeX5qrap+pKiKQsLCyxduhRLly5FdnY215oPDw/XalCco6MjHjx4oHV5FAMTm1P/d2Vra8u1ehtrXSuCur29faM/0+Y4OjqipqYGWVlZWo1JqU/x5U6T95g6hg4diqFDh6KmpgaXLl3C+vXrMW/ePDg7O+PFF19scbkJMSTURd8CixYtQk1NDd5++21IpVKtrjFw4EC8/vrrmDx5MiZPnqzRuZ9++inCw8PxwQcfaPXaLTFixAhcv34d//zzj9L2X375BTweD8OGDVPa/vvvv6OyspJ7XlJSggMHDmDo0KEwMTEBwAan+oOuDh48iIcPH6osw+7du5Xm69+7dw9nz55tkMSmpZydnREREYGXXnoJt27d4kZc128hNiUsLAy3b99ucOuitYnFYgwbNgyJiYno1asX+vfv3+Ch+EI0bNgwXLt2rUEPyK5du5p9nbCwMADApk2bmjzOzMxMrZ+XhYUFgoKC8PvvvysdL5fLsWPHDri7u6Nr167NXqcxJiYmCAoK4mYS1H8fE2IMqAXfAoMHD8Z3332H9957D3379sWbb74Jf39/ruWhGNHeXPKZzZs3a/X6r7zyCl555RWtzm2p999/H7/88gvGjRuHZcuWwcvLCwcPHsTGjRvxzjvvNPjwNTExQWhoKObPnw+5XI5Vq1ahuLhYKVHM+PHjsW3bNnTv3h29evVCQkIC1qxZ0+g89pycHEyePBkzZ85EUVERlixZAnNzc50kfwkKCsL48ePRq1cv2Nra4saNG9i+fTsGDhzIdY337NkTALBq1SqEhYXBxMQEvXr1Ujllct68eYiJicHEiRPx8ccfY8CAAaioqEB8fDzGjx/f4AuRLn3zzTcYMmQIhg4dinfeeQfe3t4oKSlBSkoKDhw4wH3pmDdvHrZs2YJx48ZhxYoV3Ch6dXoshg4dildffRUrVqxAdnY2xo8fDzMzMyQmJkIsFuO9994DwP7M9uzZg5iYGPj6+sLc3Jz7Oda3cuVKhIaGYtiwYViwYAGEQiE2btyIq1evYvfu3RqPOfn+++9x4sQJjBs3Dp6enqisrOSmm2o7NoIQQ0YBvoXefvttDBw4EN988w2++uorZGRkgMfjwd3dHYMGDcLx48cbDFAzBo6Ojjh79iwWLVqERYsWobi4GL6+vli9ejXmz5/f4PjZs2ejsrISc+bMQU5ODvz9/XHw4EGlZDrffPMNBAIBVq5cidLSUvTt2xe///47Pv30U5VliIqKwsWLF/Haa6+huLgYAwYMwJ49e3SShGX48OHYv38/vvrqK5SXl6Nz586YNm0aFi9ezB0zdepU/PXXX9i4cSOWLVsGhmGQlpamcmyClZUVzpw5g8jISPz4449YunQpbG1t8eSTT+LNN99scXmb0qNHD/zzzz9Yvnw5Pv30U+Tk5KBTp06QSCQYO3Ysd5yLiwvi4+Mxd+5cvPPOOxCLxZg8eTI2bNiAiRMnNvs627ZtQ9++fbF582Zs27YNIpEIPXr0wCeffMIds3TpUmRmZmLmzJkoKSmBl5
dXo7eygoODceLECSxZsgQRERGQy+UIDAzE/v37MX78eI1/Dr1798axY8ewZMkSZGVlwdLSEgEBAdi/fz9GjRql8fUIMXQ8hjGSnKSEEEII4dA9eEIIIcQIUYAnhBBCjBAFeEIIIcQIUYAnhBBCjBAFeEIIIcQIGf00OblcjoyMDFhZWWmVq50QQrTBMAxKSkrg5uYGPp/aUqTtGX2Az8jI0DqNKyGEtFR6enqjyZoIaU1GH+CtrKwAsH9kzWWUU5BKpTh27BhGjRoFgUDQmsVrE1Qfw2Zs9QGMr07a1Ke4uBgeHh7cZxAhbc3oA7yiW97a2lqjAC8Wi2FtbW00H05UH8NlbPUBjK9OLakP3Rok+kI3hgghhBAjRAGeEEIIMUIU4AkhhBAjZPT34AkhxJDV1NRAKpXquxiknRAIBDAxMVHrWArwhBCiBwzDICsrC48ePdJ3UUg706lTJ7i4uDQ7gJMCPCGE6IEiuDs5OUEsFtNoe9IshmFQXl6OnJwcAICrq2uTx1OAJ4SQNlZTU8MFd3t7e30Xh7QjIpEIAJCTkwMnJ6cmu+tpkB0hhDSjokK311PccxeLxbq9MOkQFO+b5sZuUAueEEJUkMuBvDwgJwdQc0yTxqhbnmhD3feNXlvwmzZtQq9evbgscwMHDsThw4e5/QzDIDIyEm5ubhCJRAgJCcG1a9f0WGJCiLGTSoGHD4ErV4D0dKCqSt8lIkQ7eg3w7u7u+M9//oNLly7h0qVLGD58OCZOnMgF8dWrV2PdunXYsGEDLl68CBcXF4SGhqKkpESfxSaEGKHyciAtDUhKArKygJoafZeIkJbRaxd9eHi40vMvvvgCmzZtwvnz59GjRw98/fXXWLx4MZ555hkAQHR0NJydnbFr1y689dZbKq9ZVVWFqjpfuYuLiwGw9yrUnWuqOM5Y5qZSfQybsdUHaF91Ki5mu+FLSxs/pqZG8/q0h7obspCQEPTu3Rtff/01AMDb2xvz5s3DvHnzWu014+LiMGzYMADAxIkT8d///rfVXqsxiu53GxubFk+hNJh78DU1Nfj1119RVlaGgQMHIi0tDVlZWRg1ahR3jJmZGYKDg3H27NlGA/zKlSuxdOnSBtuPHTum8YCW2NhYzSph4Kg+hs3Y6gMYX500qU95eXkrlqTjuXjxIiwsLNrktW7dugUnJyeNzomIiEB0dLTStqCgIJw/f557XlVVhQULFmD37t2oqKjAiBEjsHHjRqXlhDMzMxETE4MlS5a0rBIwgACflJSEgQMHorKyEpaWlti3bx969OiBs2fPAgCcnZ2Vjnd2dsa9e/cavd6iRYswf/587rliycZRo0ZptJpcbGwsQkNDjWYlLKqP4TK2+gCGWyeZjB04l5fH/l9dIpEUd+5oVh9F7yHRDUdHxzZ7LScnJ3Tq1Enj88aMGYOtW7dyz4VCodL+efPm4cCBA9izZw/s7e3xwQcfYPz48UhISOCmu7m4uMDGxqZF5VfQe4Dv1q0bLl++jEePHmHv3r2YPn064uPjuf31RwsyDNPkCEIzMzOYmZk12C4QCDT+oNHmHENG9TFsxlYfwHDqVFkJZGcDBQXs6HgA4GswAkkxil6T+mhSb0UCE33QJMlOSEgIevbsCRMTE0RHR0MoFGL58uV4+eWXMXv2bPz2229wcnLChg0bEBYWxp13/fp1LFiwAKdOnYKFhQVGjRqFr776Cg4ODgCAsrIyvPPOO/j9999hZWWFBQsWNHjt+l3069atw9atW5Gamgo7OzuEh4dj9erVsLS0BABs27YN8+bNQ0xMDObNm4f09HQMGTIEW7dubTZBTH2qynfgwAGlWwgAG39cXFxUXqOoqAibN2/G9u3bMXLkSADAjh074OHhgT///BOjR4/WqEzq0Ps8eKFQCD8/P/Tv3x8rV65EYGAgvvnmG+6HlJWVpXR8Tk5Og1Y9IYSoUlICpKQA166xrXZFcDc05eXlsLS01MtD0y8W0dHRcHBwwN9//4
333nsP77zzDp5//nkMGjQI//zzD0aPHo1XX32Vu25mZiaCg4PRu3dvXLp0CUeOHEF2djZeeOEF7poLFy7EyZMnsW/fPhw7dgxxcXFISEhoshx8Ph/ffvstrl69iujoaJw4cQIffvhhg5/rl19+ie3bt+PUqVO4f/++yi8PzVG3fHFxcXByckLXrl0xc+ZMLuMcACQkJEAqlSrddnZzc0NAQADXY61reg/w9TEMg6qqKvj4+MDFxUXpnld1dTXi4+MxaNAgPZaQEGLIGAbIzwdu3ABu3waKivRdIuMSGBiITz/9FBKJBIsWLYJIJIKDgwNmzpwJiUSCzz//HPn5+bhy5QoAdjp03759ERUVhe7du6NPnz7YsmULTp48idu3b6O0tBSbN2/Gl19+idDQUPTs2RPR0dGoaWYaw7x58zBs2DD4+Phg+PDhWL58Of7v//5P6RipVIrvv/8e/fv3R9++fTF79mwcP35co/qqW76wsDDs3LkTJ06cwNq1a3Hx4kUMHz6cG/SdlZUFoVAIW1tbpfOcnZ0bNGR1Ra9d9J988gnCwsLg4eGBkpIS7NmzB3FxcThy5Ah4PB7mzZuHqKgoSCQSSCQSREVFQSwWY+rUqfosNiHEANXUALm57Ij49jaAXSwWo7SpYfyt/Nqa6NWrF/d/ExMT2Nvbo2fPntw2RQ+rovWakJCAkydPcl3ndd25cwcVFRWorq7GwIEDue12dnbo1q1bk+U4efIkoqKicP36dRQXF0Mmk6GyshJlZWXcYDyxWIwuXbpw57i6uiq1qtVx584dtco3ZcoU7v8BAQHo378/vLy8cPDgQW4mmCrN3XZuCb0G+OzsbLz66qvIzMyEjY0NevXqhSNHjiA0NBQA8OGHH6KiogLvvvsuCgsLERQUhGPHjsHKykqfxSaEGJCqKjaoG3IXfHN4PF6bjRBvqfpjC3g8ntI2RbCSP/5lyOVyhIeHY9WqVQ2u5erqiuTkZI3LcO/ePYwdOxZvv/02li9fDjs7O5w5cwYzZsxQmp6oqqwMw2j0Wpoer+Dq6govLy+ufi4uLqiurkZhYaFSKz4nJ6fVeqX1GuA3b97c5H4ej4fIyEhERka2TYEIIe1GaSk7cI5WWzVsffv2xd69e+Ht7Q1T04Yhx8/PDwKBAOfPn4enpycAoLCwELdv30ZwcLDKa166dAkymQxr164F//Foyfrd87qiTfkAID8/H+np6dyAvn79+kEgECA2NpYbf5CZmYmrV69i9erVrVJ2g7sHTwghjWEYoLAQuHkTuHWLgnt7MGvWLBQUFOCll17C33//jdTUVBw7dgyvv/46ampqYGlpiRkzZmDhwoU4fvw4rl69ioiICC5wq9KlSxfIZDKsX78eqamp2L59O77//vtWKb865SstLcWCBQtw7tw53L17F3FxcQgPD4eDgwMmT54MgE1cM2PGDHzwwQc4fvw4EhMT8corr6Bnz57cqHpd0/s0OUIIaU7dhV8oN3z74ubmhr/++gsfffQRRo8ejaqqKnh5eWHMmDFckFyzZg1KS0sxYcIEWFlZ4YMPPkBRE6Mje/fujXXr1mHVqlVYtGgRnn76aaxcuRLTpk1rlTo0Vz4TExMkJSXhl19+waNHj+Dq6ophw4YhJiZG6ZbyV199BVNTU7zwwgtcoptt27Y1ueRrS1CAJ4QYrOrq2vvrlBte/+Li4hpsu3v3boNt9e9bSyQS/P77741e19LSEtu3b8f27du5bQsXLmzydd5//328//77StteffVV7v8RERGIiIhQ2j9p0iSt7qmrKt/Bgwe5/4tEIhw9erTZ65ibm2P9+vVYv369xmXQBgV4QojBKS9n768XFrLd8oS0JXd3d4SHh2P37t1t/tqWlpaQyWQwNzdv8bUowBNCDEZRERvYacFIog9BQUHcqHdV0/rawuXLlwFAJ932FOAJIXoll7OJaXJy2JSyhOiLSCSCn5+fWsequl2hC+q+vjoowBNC9EIqZRPT5O
ZqtvALIUQ9FOAJIW2qooJtrefn0/11QloTBXhCSJsoLmbvr9MqqoS0DQrwhJBWo1j4JTubbbkTQtoOBXhCiM4p7qlfv07z1wnRFwrwhBCdqapiW+u5uexzqRRoIuMoIaQVUYAnhLRY/YVf2uuqbvr2449t+3pvvqnZ8SEhIYiPjwcAJCYmonfv3rovlIFSrJJnY2ODR+1kEQT6bk0I0Qot/NIxzZw5E5mZmQgICFDr+Li4OEycOBGurq6wsLBA7969sXPnzgbH8Hi8Bo+bN2+2uLyqrsvj8bBmzRrumJCQkAb7X3zxRaXrZGZm4uuvv25xedoSteAJIRqpqald+KW6Wt+lIW1NLBbDxcVF7ePPnj2LXr164aOPPoKzszMOHjyIadOmwdraGuHh4UrH3rp1C9bW1txzR0fHFpc3MzNT6fnhw4cxY8YMPPvss0rbZ86ciWXLlnHPRSKR0n4XFxfY2Ni0uDxtiQI8IUQttPALqS8uLg7Dhg3DH3/8gU8++QS3bt1CYGAgfv75Z/Ts2RMA8MknnyidM2fOHBw9ehT79u1rEOCdnJzQqVMntV8/JCSE60nYsWMHTExM8M4772D58uVcl3r9LyP/+9//MGzYMPj6+ipt1/SLS3tAXfSEkCaVlwNpacDVq+x9dgrupL6FCxfiyy+/xMWLF+Hk5IQJEyZAKpU2enxRURHs7OwabO/Tpw9cXV0xYsQInDx5Uq3Xjo6OhqmpKS5cuIBvv/0WX331FX7++WeVx2ZnZ+PgwYOYMWNGg307d+6Eg4MD/P39sWDBApQYwYII1IInhKj06BEb0EtL9V0SYuiWLFmC0NBQAGzAdXd3x759+/DCCy80OPa3337DxYsX8cMPP3DbXF1d8eOPP6Jfv36oqqrC9u3bMWLECMTFxeHpp59u8rU9PDzw1VdfgcfjoVu3bkhKSsJXX32FmTNnNjg2OjoaVlZWeOaZZ5S2v/zyy/Dx8YGLiwuuXr2KRYsW4d9//0VsbKw2Pw6DQQGeEMJRLPySnc1OeSNEHQMHDuT+b2dnh27duuHGjRsNjouLi0NERAR++ukn+Pv7c9u7deuGbt26KV0vPT0dX375JZ5++mmcPn0aYWFh3P4ffvgBL7/8MgDgqaee4rrjFeeuXbsWNTU1DVZk27JlC15++eUGS7HW/TIQEBAAiUSC/v37459//kHfvn01/XEYDArwhBBIpbX312nhF6ILdYMuAMTHxyM8PBzr1q3DtGnTmj3/qaeewo4dOwAA/fv355ZRBQBnZ2eNy3P69GncunULMTExzR7bt29fCAQCJCcnU4AnhLRPFRVsa72ggBZ+Ido7f/48PD09AQCFhYW4ffs2unfvzu2Pi4vD+PHjsWrVKryp5uT7xMREuLq6Amh6Gdfz5883eC6RSBq03jdv3ox+/fohMDCw2de+du0apFIp9/rtFQV4QjogWviF6NKyZctgb28PZ2dnLF68GA4ODpg0aRIANriPGzcOc+fOxbPPPousrCwAgFAo5Abaff311/D29oa/vz+qq6uxY8cO7N27F3v37m32tdPT0zF//ny89dZb+Oeff7B+/XqsXbtW6Zji4mL8+uuvDbYDwJ07d7Bz506MHTsWDg4OuH79Oj744AP06dMHgwcPbuFPRr8owBPSQSgWfsnJoYVfDJWmmeUMxX/+8x/MnTsXycnJCAwMxP79+yEUCgEA27ZtQ3l5OVauXImVK1dy5wQHByMuLg4AUF1djQULFuDhw4cQiUTw9/fHwYMHMXbs2GZfe9q0aaioqMCAAQNgYmKC9957r0EvwZ49e8AwDF566aUG5wuFQhw/fhzffPMNSktL4eHhgXHjxmHJkiUNegHaGwrwhBg5mYzNDZ+by95rJ0TXhgwZgqtXr6rct23bNmzbtq3J8z/88EN8+OGHWr22QCDA119/jU2bNjV6zJtvvtnorQEPDw8u/a6xoQBPiJGqrGRb6/n5lBue6M7GjRvx888/49y5c/ouSp
uytLSETCZrMALfkFGAJ8TIlJSw99eLivRdEmJsdu7ciYrH93c8PT1x9uxZPZeo7ShG8benbnsK8IQYAcXCL9nZbOY5QlpD586dlZ6HhISA0eP0C8U9/LbQ2Ch+Q0YBnpB2jBZ+IYQ0Rq+56FeuXIknn3wSVlZWcHJywqRJk3Dr1i2lYyIiIhos4/fUU0/pqcSEGIbqaiA9HUhKAh48oOBOCGlIrwE+Pj4es2bNwvnz5xEbGwuZTIZRo0ahrKxM6bgxY8YgMzOTexw6dEhPJSZEv8rKgNRUduGXnBxa+IUQ0ji9dtEfOXJE6fnWrVvh5OSEhIQEpQUGzMzM1F7Gr6qqClV1kmgXP87kIZVKm1zdqC7Fceoeb+ioPoZNnfoUFbEBvd53X4Mll0uV/m3vamo0f88Zy/uTtF8GdQ++6PGw3/rLCMbFxXHrBAcHB+OLL76Ak5OTymusXLkSS5cubbD92LFjEIvFGpWnva8kVB/Vx7AZW30AICvLuOqkye+onEY7Ej3jMfocAlkHwzCYOHEiCgsLcfr0aW57TEwMLC0t4eXlhbS0NHz22WeQyWRISEiAmZlZg+uoasF7eHggLy8P1tbWapVFKpUiNjYWoaGhEAgELa+cnlF9DFv9+kil7MC5vLz22wUvl0uRlRULF5dQ8Pnt/3ckEklx545m77ni4mI4ODigqKiowWdPZWUl0tLS4OPj067mVRPDoO77x2Ba8LNnz8aVK1dw5swZpe1Tpkzh/h8QEID+/fvDy8sLBw8ebLCmL8B256sK/AKBQONgoM05hozqY9hqagTIyREoLfzC1+somZbj8wVGEeAVU581ec9p895MSND4lBbp10+z40NCQrisb4mJiejdu7fuC6UH3t7euHfvHgB2sZxOnTrpt0A6YhAfH++99x7279+PkydPwt3dvcljXV1d4eXlheTk5DYqHSGtS7Hgy82bbNY5w+hTI0S1mTNnIjMzEwEBAWodX1lZiYiICPTs2ROmpqbcIjS6UFJSgnnz5sHLywsikQiDBg3CxYsXlY7Jzs5GREQE3NzcIBaLMWbMmAbx4+LFi2otbNPe6DXAMwyD2bNn4/fff8eJEyfg4+PT7Dn5+flIT09v98v4kY6NYdgu+GvX2FHxhLQXYrEYLi4uMDVVrwO4pqYGIpEIc+bMwciRI3ValjfeeAOxsbHYvn07kpKSMGrUKIwcORIPHz4EwMaYSZMmITU1Ff/73/+QmJgILy8vjBw5Umm2lqOjY4OxX8ZArwF+1qxZ2LFjB3bt2gUrKytkZWUhKyuLS4VYWlqKBQsW4Ny5c7h79y7i4uIQHh4OBwcHTJ48WZ9FJ0QrMhmQmQlcuQLcu8fmiyekvYqLiwOPx8PBgwcRGBgIc3NzBAUFISkpiTvGwsICmzZtwsyZM9WeDQWwOVAmTZqEpUuXwsnJCdbW1njrrbdQ/TjpQ0VFBfbu3YvVq1fj6aefhp+fHyIjI+Hj48MtPJOcnIzz589j06ZNePLJJ9GtWzds3LgRpaWl2L17t25/GAZIrwF+06ZNKCoqQkhICFxdXblHTEwMADbnb1JSEiZOnIiuXbti+vTp6Nq1K86dOwcrKyt9Fp0QjVRWsgE9KQnIyGADPSHGYuHChfjyyy9x8eJFODk5YcKECTqZJnj8+HHcuHEDJ0+exO7du7Fv3z5ulpRMJkNNTU2DQWYikYgby6UYcF33GBMTEwiFwgbjvYyRXgfZNTeAXyQS4ejRo21UGkJ0jxZ+IR3BkiVLEBoaCgCIjo6Gu7s79u3bhxdeeKFF1xUKhdiyZQvEYjH8/f2xbNkyLFy4EMuXL4eVlRUGDhyI5cuX44knnoCzszN2796NCxcuQCKRAAC6d+8OLy8vLFq0CD/88AMsLCywbt06ZGVlITMzs8X1NnQGMciOEGPCMEBBAXDjBnD7NgV3YvwGDhzI/d/Ozg7dunXDjRs31Dr3/v37sLS05B5RUVHcvsDAQKX8JQMHDkRpaSnS09
MBANu3bwfDMOjcuTPMzMzw7bffYurUqdyKbwKBAHv37sXt27dhZ2cHsViMuLg4hIWFtatV4bRlMNPkCGnvamqA3Fw24xwlMSMdHY/HU+s4Nzc3bilWoGGis6au3aVLF8THx6OsrAzFxcVwdXXFlClTlAZs9+vXD5cvX0ZRURGqq6vh6OiIoKAg9O/fX7MKtUMU4AlpoaoqNqjn5QFyub5LQ0jbO3/+PDw9PQGw88hv376N7t27q3Wuqalpo0ux/vvvv6ioqIBIJOJex9LSssF0agsLC1hYWKCwsBBHjx7F6tWrG1zLxsYGADvw7tKlS1i+fLna9WuvKMAToqWyMvb++qNHNHeddGzLli2Dvb09nJ2dsXjxYjg4OCjNd79+/Tqqq6tRUFCAkpISrsXeXKKc6upqzJgxA59++inu3buHJUuWYPbs2eA/zgB19OhRMAyDbt26ISUlBQsXLkS3bt3w2muvcdf49ddf4ejoCE9PTyQlJWHu3LmYNGkSRo0apesfg8GhAE+IBhiGDejZ2e1n4RfSfmiaWc5Q/Oc//8HcuXORnJyMwMBA7N+/H0KhkNs/duxYLlMcAPTp0wdA8wOtR4wYAYlEgqeffhpVVVV48cUXERkZye0vKirCokWL8ODBA9jZ2eHZZ5/FF198oZRFMDMzE/Pnz0d2djZcXV0xbdo0fPbZZzqquWGjAE+IGuRytgs+J4ftkieE1BoyZAiuXr3a6P67d+9qfe2lS5eqXEAMAF544YVmR+rPmTMHc+bM0fr12zMaRU9IE6RS4OFDNjFNejoFd0I2btwIS0tLpWQ27Z2/vz/CwsL0XQydoxY8ISqUl7Pd8IWFdH+dEIWdO3dymUY9PT1x9uxZPZdINw4dOsQl5lF31dH2gAI8IXUUFbGBvaRE3yUhxPB07txZ6XlISEiz99G1tW3btla5ripeXl5t9lptiQI86fDkcnYVt5wcyg1PCDEeFOBJhyWTsUE9N5dywxP9kFPiBKIFdd83FOBJh1NZyXbDFxRQYhqiH0KhEHw+HxkZGXB0dIRQKFQ78xvpuBiGQXV1NXJzc8Hn85WmIqpCAZ50GMXFbIudcsMTfePz+fDx8UFmZiYyMjL0XRzSzojFYnh6enIJfxpDAZ4YNcXCL9nZwOPBv4QYBKFQCE9PT27ZU0LUYWJiAlNTU7V6fCjAE6NEC7+Q9oDH40EgEChlXiNEVyjAE6NSVcW21vPz6f46IaRjowBPjEJpae3CL4QQQijAk3aMYdhMc7TwCyGENEQBnrQ7ivFIN27Q/HVCCGkMBXjSblRXs4PmcnJqnzczS4QQQjosCvDE4NVf+IUGzxFCSPMowBOD9egRG9hLS/VdEkIIaX8owBODQgu/EEKIbrQ4wNfU1CApKQleXl6wtbXVRZlIBySVsolpaOEXQgjRDY2HKM2bNw+bN28GwAb34OBg9O3bFx4eHoiLi9N1+YiRq6gA7t4FkpKAzEwK7oQQoisaB/jffvsNgYGBAIADBw4gLS0NN2/exLx587B48WKdF5AYp+JiIDkZuH6d7ZJnGH2XiJCG5HK2VyktTd8lIURzGnfR5+XlwcXFBQBw6NAhPP/88+jatStmzJiBb7/9VucFJMaDFn4h7UF1Nfsezcxk/5VKAS8vwNlZ3yUjRDMat+CdnZ1x/fp11NTU4MiRIxg5ciQAoLy8HCYmJhpda+XKlXjyySdhZWUFJycnTJo0Cbdu3VI6hmEYREZGws3NDSKRCCEhIbh27ZqmxSZ6JJOxH5ZJSWx3PAV3YmhKS9kepVOngEOHgIsXgQcPaKEi0r5pHOBfe+01vPDCCwgICACPx0NoaCgA4MKFC+jevbtG14qPj8esWbNw/vx5xMbGQiaTYdSoUSirk3d09erVWLduHTZs2ICLFy/CxcUFoaGhKCkp0bTopI1VVQH377OBPSODPiyJ4WAYIC+PfW8eO8Y+kpLYbZRngRgLjbvoIyMjERAQgPT0dDz//PMwMzMDwK
5R+/HHH2t0rSNHjig937p1K5ycnJCQkICnn34aDMPg66+/xuLFi/HMM88AAKKjo+Hs7Ixdu3bhrbfe0rT4pA3Qwi/EEEmlyl3v1dX6LhEhrUuraXLPPfccAKCyzkTl6dOnt7gwRUVFAAA7OzsAQFpaGrKysjBq1CjuGDMzMwQHB+Ps2bMqA3xVVRWqqqq458XFxQAAqVQKqZpNSMVx6h5v6NqiPgzDBvScnNbvgpfLpUr/tnfGVh/AcOpUVsYG86wsdvxH3da5ummO5fIapKaeRX7+da7HUh3G8vlB2i+NA3xNTQ2ioqLw/fffIzs7G7dv34avry8+++wzeHt7Y8aMGVoVhGEYzJ8/H0OGDEFAQAAAICsrCwB7378uZ2dn3Lt3T+V1Vq5ciaVLlzbYfuzYMYjFYo3KFBsbq9Hxhs7Y6pOVRfUxdIZQJ3NzwNubfairvLwcly9fxsWLF5GQkIDi4mL07NkTPXr00OgahOiTxgH+iy++QHR0NFavXo2ZM2dy23v27ImvvvpK6wA/e/ZsXLlyBWfOnGmwj8fjKT1nGKbBNoVFixZh/vz53PPi4mJ4eHhg1KhRsLa2VqssUqkUsbGxCA0NhUAg0KAWhqk16lNdzd6vzM+vXd2trcjlUmRlxcLFJRR8fvv//RhbfYC2rZNMxvYcZWe3rOu9sDANyckHkZx8CPfuxSv1PohENrC3t8fIkSMhFArVup6i95AQfdE4wP/yyy/48ccfMWLECLz99tvc9l69euHmzZtaFeK9997D/v37cerUKbi7u3PbFdPxsrKy4Orqym3Pyclp0KpXMDMz48YF1CUQCDQObtqcY8h0UR9Fl+ejR7Vz1/W1ohufLzCagAgYX32A1qtTeTl7Lz0zU/uBcXJ5DR4+PIeUlD+QnHwAeXnXlfbb2Ung5xcOiSQcgwcPgJtbLIRCodp/Q8b02UHaJ40D/MOHD+Hn59dgu1wu1/ieE8MweO+997Bv3z7ExcXBx8dHab+Pjw9cXFwQGxuLPn36AACqq6sRHx+PVatWaVp00gK08AvRt4KC2qCubeO4srIIqalHkZLyB+7cOYSKinxuH49nAg+PoZBIxsPPLxz29l25fSYmdD+dtD8aB3h/f3+cPn0aXl5eStt//fVXLgira9asWdi1axf+97//wcrKirvnbmNjA5FIBB6Ph3nz5iEqKgoSiQQSiQRRUVEQi8WYOnWqpkUnGlIs/JKdzU55I6QtKbreMzPZQXLavgcLC+8gOfkAUlL+wP378ZDLa/Mhm5vbokuXMPj5jYev7xiIRLSeBjEeGgf4JUuW4NVXX8XDhw8hl8vx+++/49atW/jll1/wxx9/aHStTZs2AQBCQkKUtm/duhUREREAgA8//BAVFRV49913UVhYiKCgIBw7dgxWVlaaFp2oSSplP1jz8ig3PGlbFRW1AT0nR9uudxkePDiHlJQDSE7+A/n5N5T229l1g0QSDj+/8fDwGAw+nxbVJMZJ43d2eHg4YmJiEBUVBR6Ph88//xx9+/bFgQMHNJpCArBd9M3h8XiIjIxEZGSkpkUlGqqoYFvrBQWUG560ncLC2qCube4Etuv9CJKTDyA19TAqKgq4fXy+6eOudzao29lJNLo2jwdYWmpXLkL0SauvrqNHj8bo0aN1XRaiJ0VFbGuJBv2StiCTsb1DivvpddJpaKSgIOVxK/0A0tNPq+h6HwuJJBy+vqNhbt5Jo2vz+YCTE+DmBri4AHZ2wO3b2pWTEH2hvqkOimFq769r+wFLiLoqK9kWemYm+2VSm6mVcrkM6el/caPeCwqU162wt+/+uJUeDnf3gRp3vQsE7IIyiqBuSp+OpJ3T+C3M5/MbnYMOsIlwiOGSydjlL3NzKTc8aX23b7NBvbBQu/MrKgqRmnrk8aj3w6isrL0Qn28KT89g+PmNf9z13nB2T3PMzQFXV/bh5KS/KZ+EtAaNA/y+ffuUnkulUiQmJiI6OlplBjliOB48YO9x0mIapD
XU1LBfHBUD5Hr0AG7e1Pz9lp9/mxsgl55+GgxT22gQiezRpcvYx6PeR8Pc3Ebjclpasq10V1fA3l7j0wlpNzQO8BMnTmyw7bnnnoO/vz9iYmK0zmRHWkdJCbuSG8De96QWCtElRde7IqgrZl1o8j6rqZHiwYO/uKlsBQXKN7sdHHpwXe+dOz8FPl+zZakBwNaWDehuboCaCS0Jafd0dpcpKChIKXUt0R+GYbtEs7PZjF/UYie6VFRUez+9sFC7GRcVFQV1Rr0fQWXlI24fny+Ap2cwN+rd1tZX4+vz+WzrXNFS13AZCkKMgk4CfEVFBdavX6+UZpa0PUUXaW4uLYVJdEcur+16z8xkvzRqimEY5Off4gbIPXjwV72udwf4+Y2Fn184fH1HwcxM82a2iQk7SE5xT13NlPGEGC2NA7ytra3SIDuGYVBSUgKxWIwdO3botHBEPdXVbGtd25zchNRXVVW7dnpOjnYDMmUyGdLSTiI5mW2pFxamKO13dAyAn994SCThcHML0qrrXShkR7y7ubGD5GjkOyG1NP5z+Oqrr5QCPJ/Ph6OjI4KCgmBrS2ke25KqhV8I0VZJSe3cdG2THZWX5yM19TBSUvZj3bqDSkum8vkCeHkNexzUx6NTJ58mrtQ4sbj2frqDA5uIhhDSkMYBXpFCluiP4v56WZm+S0LaM8VaA4qgrs37ie16v4nkZDbhzMOHZ8Ewtd1IYrEj/PzGwc8vHD4+oTAz0y7FtLU1G9Dd3IBOnbS6BCEdjloB/sqVK2pfsFevXloXhjROLme74HNyaOEXoj2ptPZeena2dl3vNTXVuH//NJdF7tGjVKX9jo490bXrWEyYYIeiojkAzDV+DR6PzR6nGCRHqWIJ0ZxaAb53797g8XjN5o7n8XiU6EbHFAu/5OZql/2LkNLS2lzv+fnajdMoL8/DnTuHkZx8AGlpR1FVVZvX2MRE+LjrPRwSyXjY2HiBz5eiW7dDuHTJRO3X4/MBR0egc2f2vrq55t8LCCF1qBXg09LSWrscpJ7ycraFpe00JNJxKdIQK1rqJSXaXINBXt51btT7w4fn6nW9O8HPbxwkErbrXSjUrolN6WEJaT1q/TnVX/udtJ6iIjawa/OhTDouqZR93yiSzmgzTZLteo9HcvIfSEk5gEePlL/YOzkF1hn1/iR4PO2yJlF6WELahtbfl69fv4779++jut4nyYQJE1pcqI5GLmdHLdPCL0QTZWW1Xe/aTpEsK8vFnTuHkJLyB1JTj6K6uvabpYmJGby9h8PPLxx+fuNgY+OpdVktLNiud1dX9t46jXwnpPVpHOBTU1MxefJkJCUlKd2XV0ydo3vw6pPJau+vy2TNH086NoZhvwgqgro2y/syDIPc3GvcALmHD88DqL0HZGHhzLXSvb1HQii0aFGZu3en9LCE6IvGAX7u3Lnw8fHBn3/+CV9fX/z999/Iz8/HBx98gC+//LI1ymh0KivZ1npBASWmIU2TyWoTzmjb9S6TVeH+/bjHXe9/oKjortJ+Z+c+XFB3de2ndde7Ij2sovu9sBDo2pW64AnRF40D/Llz53DixAk4OjqCz+eDz+djyJAhWLlyJebMmYPExMTWKKdRKClhP6yLivRdEmLIystrB8jl5mrb9Z6DlJSDSEn5A2lpx1BdXcrtMzU1h7f3CG6ZVWtr7VNMN5YeVi7XfolYQohuaBzga2pqYPl4UqqDgwMyMjLQrVs3eHl54datWzovYHun6FbNydEuhzfpOG7eZIO6Nl8A2a73JG5FtocPL6Bu17ulpSsX0L29R7So653SwxLSPmj8pxkQEIArV67A19cXQUFBWL16NYRCIX788Uf4+mq+6pOxUiz8om0eb2LcZDL2/aHI9R4QANy+rVlrXSarxL17cVxQLy6+r7TfxaUvNzfdxaWv1l3vgHJ6WHt76nYnpD3QOMB/+umnKHuc03LFihUYP348hg4dCnt7e8TExOi8gO1NVRX7gU0Lv5D6Ki
tr08LWTVykSbAsLc3GnTsHHyeciYVUWptflu16H/l4mdVxsLLq3KLyUnpYQto3jQP86NGjuf/7+vri+vXrKCgoaLDKXEdTWlq78AshCo8e1QZ1bd4bDMMgJ+dfbm56RsbfSvstLd3qjHofDoGgZQufK9LDurlRelhC2juNA3x0dDSee+45WFjU3sOzs7PTaaHaC4ZhP7Rp4ReioLg1oxj1XlGh+TVkskrcvXsCKSl/PO56T1fa7+ranwvqzs59WvTFWpEeVpHzndLDEmI8NA7wCxYswLvvvovw8HC88sorGDNmDEw72Cibmho2FWh2tnbTlohxqayszSCXna3dmgEFBQVITNyC5OTDj7vea0dkmpqK4OMT+niQ3DhYWbm1qLymprWD5Jyd2XSxhBDjo3FkzszMxJEjR7B79268+OKLEIlEeP755/HKK69g0KBBrVFGg1FdXXt/nfL5dGxFRbWt9IICzc9nGAbZ2ZeRnHwAd+4cQEbGJaX9VladuQFyXl7DIRCIWlReM7PaQXKUHpaQjkHjAG9qaorx48dj/PjxKC8vx759+7Br1y4MGzYM7u7uuHPnTmuUU69o4Rcilyt3vWsz5VEqrcC9eye4Ue8lJQ+V9ru59Yef3wT4+Y2Hs3PvFo9psbCo7Xq3t6f0sIR0NC3qWxeLxRg9ejQKCwtx79493LhxQ1flMgjFxWxXPC380jFVVdUmnMnJ0S6dcElJxuOEMweQlvYnZLLam/ICgRg+PqGQSMbiueeEuHPnZcjlLesv79SpNqjb2LToUoSQdk6rAK9oue/cuRN//vknPDw88NJLL+HXX3/Vdfn0KjWVujI7muLi2lHv2vTYMAyDrKx/uGVWs7ISlPZbW3twA+S8vIbB1NQcfL4UtraHtCovjwc4ONR2v4tbNoieEGJENA7wL730Eg4cOACxWIznn38ecXFxWt97P3XqFNasWYOEhARkZmZi3759mDRpErc/IiIC0dHRSucEBQXh/PnzWr0eIfXJ5eyYCkXXuzazIaTScty9e/xx1/tBlJZm1NnLg5vbgMdz08fDyalXi7ve66aHdXFh768TQkh9Ggd4Ho+HmJgYjB49usWj58vKyhAYGIjXXnsNzz77rMpjxowZg61bt3LPhYpk14RoqbqaHVORkaF9psGSkofc3PS7d49DJqtd51cgsICPzyhIJOHo0mUsLC2dW1xmSg9LCNGUxh8Tu3bt0tmLh4WFISwsrMljzMzM4OLiorPXJB1TSUnt/fT8fG263uXIyvoHycnsMqvZ2cqLKllbe0IiCYdEEg5Pz2CYmrZ8QrlYzAb1zp0pPSwhRHMG3w6Ii4uDk5MTOnXqhODgYHzxxRdwcnJq9PiqqipUVVVxz4sfL5otlUohVbOppjhOLjeOJPKKenSk+ihWM1PMTS+tXUwNPJ56I8qrq8tw9+4J3L59ECkph1FamllnLw+dOw+ARDIOXbuOg6NjQL2ud/V/1ny+lPvX2rq2+71+etj2lPrY2N5zNTVsPdT9DNH0WEJaA49hDGPiF4/Ha3APPiYmBpaWlvDy8kJaWho+++wzyGQyJCQkwKyRG4+RkZFYunRpg+27du2CmEYgkWbk5uYiISEBFy9eRFJSEqrrZDIyNzdH79698eSTT6Jfv37oRAnaSRPKy8sxdepUFBUVwdraWt/FIR2Q2gH+wYMHcHfXft3oZguiIsDXl5mZCS8vL+zZswfPPPOMymNUteA9PDyQl5en9h+ZVCpFbGwsXFxCwee3/zRfcrkUWVnGWZ+KCgGys2sTzmjaymUYOTIyEpCcfBDJyQeRnf2v0n4bGy9IJOMgkYyDl9fTMDVt+Yg2Pp/tcldkkhMKjev3Axjfe04kkuLOnViEhoZCoGbqv+LiYjg4OFCAJ3qjdhd9QEAA1q9fj1dffbU1y9MkV1dXeHl5ITk5udFjzMzMVLbuBQKB2n+YCny+wCg+nBSMoT4Mw3a9A0B8vABFRZrXp7q6DGlpsUhJYU
e9l5Vl19nLg7v7QPj5saPeHR39lbrete0mNzVlg7mbG3tfve5bUXFNY/j91GcsdTIxYf/V5HNE088bQnRN7QAfFRWFWbNm4b///S9+/PFH2Nvbt2a5VMrPz0d6ejpcXV3b/LWJ/shktbnes7LY5/37a5aAqKjoPjc3/d69k6ipqe3lEQqt4Os7GhJJOHx9w2Bh4aiTctdND+voWBskCCGkLagd4N99912EhYVhxowZ8Pf3x48//ogJEya06MVLS0uRkpLCPU9LS8Ply5dhZ2cHOzs7REZG4tlnn4Wrqyvu3r2LTz75BA4ODpg8eXKLXpcYvvJydhpbVhY7T71uy1md0eRs1/vFx3PTDyAn54rS/k6dfB7neg+Hp+fTMDHRzfRLSg9LCDEUGo2i9/HxwYkTJ7BhwwY8++yzeOKJJxrMhf/nn3/Uvt6lS5cwbNgw7vn8+fMBANOnT8emTZuQlJSEX375BY8ePYKrqyuGDRuGmJgYWFlZaVJs0g4out4VWeQeT37QSHV1KVJTjz1eZvUgystzuH08Hh+dOw+CRDIefn7hcHB4osUJZxQoPSwhxBBpPE3u3r172Lt3L+zs7DBx4sQWJbsJCQlBU2P8jh49qvW1ieGTydhEM4oscnXGRqqtqOget3gL2/VeO+rdzMwavr5j4Oc3Hl26hEEsdtBJuSk9LCGkPdAoOv/000/44IMPMHLkSFy9ehWOjrq5V0k6joqK2lZ6bq7mg9bk8hpkZJzHjRvbcfr0p8jJuaq039a2CzdAztNzqM663k1M2AxyikFylB6WEGLo1A7wY8aMwd9//40NGzZg2rRprVkmYmTqdr0XFWl+flVVCdLSjj1eO/0QystzuX08Hh/u7oO5XO/29t111vWuSA/r6sqOgKf0sISQ9kTtj6yamhpcuXKlVefCE+MgkymvnV5Z2fw59T16lMbler93L04pI5qZmQ2efLIn7O3fgK9vOEQiO52VXSSq7Xp3cKD0sISQ9kvtAB8bG9ua5SDtXGWl8trpNTWanS+X1+Dhw/PcVLa8vGtK+21t/R630sPh5RWEoKBYXLo0tsXrpwOAlRUb0N3cAFvbFl+OEEIMAnU6Eq09elQb1BXJZzRRVVWM1NSjj0e9H0JFRR63j8czgYfHEG7tdHv7btw+Re72lrCzY1vqnTsDlpYtvhwhhBgcCvBEbTU1bNe7IuFMebnm1ygsTOVGvd+/H6/U9W5u3gm+vmGQSMbD13eMTrve+Xy2y13RUjdv+WJvhBBi0CjAkyYput4Vq7Jp1/V+jgvqeXnXlfbb2XXlBsi5uw+GiYnu0ns2lR6WEEKMHQV40kBRUe0AucJCzddOr6wsetz1zo56r6go4PaxXe9D64x676rTslN6WEIIYVGAJ5DLa7veMzO163ovKEjhBsilp5+CXC7j9pmb26JLlzD4+YWjS5cxMDfvpLvCg9LDEkKIKhTgO6iqKuWud5ms+XPqkstlePDgLBfU8/NvKu23t+9ep+t9EPh83b/Vunen9LCEENIYCvAdSGlpbSu9oECbrvdHuHPnyOOu98OorKwdOs/nm8LD42kuqNvZ+em07Dxe7RrqLi7sCP6uXWmeOiGENIYCvBGTy4H8fDag29kBJ05onhq2oCAZyckHHne9nwbD1I6yE4ns0KXLWPj5hcPXd5TOu975fHaQnKsr+1Ckh5XL2QBPCCGkcRTgjUx1NdvlnpnJ/iuVsoHSTs0ZZ3K5DOnpfyElhQ3qBQW3lfbb2z8BiYRdZrVz56d03vUuENQGdEoPSwgh2qOPTyNQWlqb672gQPNWekVFIVJTjyA5+QBSUw+jsvIRt4/PN4WnZ/DjtdPHw9a2i24LD0oPSwghrYECfDvEMLVd71lZQEmJ5tfIz7/FzU1PTz9Tr+vdHl26jIVEEg4fn1EwN9f9KDYrq9qgrm7vAiGEEPVRgG8npFLlrvfq6ubPqUsmk+Hu3Tjcvn0YKSl/oKAgWWm/g4M/N0CO7XrX/Q
RyRXpYNzc2wBNCCGk9FOANmGLUe1YWkJenTdd7Ae7cOYyUlP9h3bqDKK8zwZ3PF8DLK+Tx2unjYGvrq+PSU3pYQgjRJwrwBoRh2Hvoiq734mJNz2eQn3+LGyD34MFfYJjabwVisQO6dBn3uOs9FGZm1jquAaWHJYQQQ0EBXs9kstqu96wszbvea2qkSE8//fh++gEUFt5R2u/o2BMSSRgmTLBDcfFcALpvRguFta10Sg9LCCGGgQK8HpSX145616brvbw8H6mphx+Pej+Cqqrapr6JiRBeXsPg5zcefn7j0amTN/h8Kbp3P4RLl0w0fq3GiMW1QZ3SwxJCiOGhAN9GFF3vmZnadb3n5d143PX+Bx4+PFuv690Jfn7j4Oc3/nHXe+uMYLOxqc353qlTq7wEIYQQHaEA30pkMiAnp3aQXGWlZufX1FTj/v1T3FS2R49SlfY7OfXi5qa7uQ0Aj6f7yeN108O6urKLuhBCCGkfKMDrUEVFba73nBzNu97LynLrdL0fRXV17QR3tut9ODeVzcbGU8elZ/H5gJNTbVBXpIclhBDSvlCAb6FHj2q73jXNj84wDHJzryEl5Q+kpBzAgwfnANSuAGNh4fy46z0cPj4jIRRa6rLoHIGAHfHu6sr+S+lhCSGk/aOPcg3V1LBrpytGvVdUaHa+TFaF+/fjuWVWi4ruKu13du4NP7/xkEjC4erav1W63gF2Trqile7oSOlhCSHE2FCAV0NlpXLXe01N8+fUVVaWgzt3DiE5+Q+kpR1FdXUpt8/ExAze3iMeB/XxsLb20HHpa1lZ1c5Rp/SwhBBi3CjAN+H2bTawFxRodh7b9X6VGyD38OF5KHe9u0AiYaexeXuPhFDYeqPXFOlhAWDYMGqpE0JIR0EBvgk3b6o/UI7teo/jgnpR0T2l/c7OfbgBcq6u/Vqt671uelhXV3alNrkcyMholZcjhBBioPQa4E+dOoU1a9YgISEBmZmZ2LdvHyZNmsTtZxgGS5cuxY8//ojCwkIEBQXhu+++g7+/v/4KXUdpafbjrvcDSEs7Bqm0jNtnamr+uOudzfVube3eauUwMWG73jt3pvSwhBBCWHoN8GVlZQgMDMRrr72GZ599tsH+1atXY926ddi2bRu6du2KFStWIDQ0FLdu3YKVHpYjYxgGOTlXuAFyGRl/o27Xu6WlKzdAztt7BAQCcauVRSisXZnNyYnSwxJCCFGm1wAfFhaGsLAwlfsYhsHXX3+NxYsX45lnngEAREdHw9nZGbt27cJbb72l8ryqqipUVVVxz4sfp42TSqWQSqVqlUtxHJ8vhUxWibt345CcfAjJyYdQXHxf6VhX176QSMZBIhkHF5c+4CnlbFXv9dQlFtdOZ7OzU04P29StBLlcqvRve0f1MXzGVqeaGrYe6n6GaHosIa3BYO/Bp6WlISsrC6NGjeK2mZmZITg4GGfPnm00wK9cuRJLly5tsP3YsWMQi9VrURcWFuLSpUu4eDEK//77r9IXBqFQiMDAQDz55JPo378/7Ljh6FkADqtdv5aoqmJH9GsqKytW94XRI6qP4TO2OsXGql+fusszE6IPBhvgs7KyAADOzs5K252dnXHv3j1VpwAAFi1ahPnz53PPi4uL4eHhgVGjRsHauvnlUTMyMuDt7a20zcqqMySSsZBIxsHbexgEAhEAIDWVfegSjwfY2tYut6rmd5ImyeVSZGXFwsUlFHx++79BT/UxfMZWJ5FIijt3YhEaGgqBmoNcijVddIIQHTPYAK/Aq7dMGcMwDbbVZWZmBjMV+VUFAoFaf5heXl7w9/dHVVUV3Nymws9vEpydeyu9pq5WZFNoq/SwfL7AKD5sFag+hs9Y6qQY46Lu54jiWEL0yWADvIuLCwC2Je+qmMgNICcnp0GrXtfOnj2LkydP4tKlsZDLW+ePlNLDEmL4TE3ZrI+00BJpjww2rPj4+MDFxQWxsbHo06cPAKC6uhrx8fFYtWpVq762SCRqletSel
hCDA+Px/aamZs3fCha7jRejrRHeg3wpaWlSElJ4Z6npaXh8uXLsLOzg6enJ+bNm4eoqChIJBJIJBJERUVBLBZj6tSpeiy1ZqysaqezUXpYQvRHIFAdyIVC5RkphBgLvQb4S5cuYdiwYdxzxeC46dOnY9u2bfjwww9RUVGBd999l0t0c+zYMb3MgdeEYpCcmxsb4AkhbYPHqw3c9YM55YogHY1eA3xISAgYhml0P4/HQ2RkJCIjI9uuUFpQpIdVtNRbqYefEPKYQKA6kLfWAFVC2iODvQdv6BTpYRX31GnALCG6xec3bIUrnlNrnJDmUYDXAKWHJUT36rbG6wZyao0T0jIU4JshFtfeT7e3p8E4hGhDVWtcEcjpizIhrYMCfBOCg9kBc4QQ9Sha4wIBu0Rxly6ApSXb+0UIaVsU4JtgY6PvEhBiePh81aPUzc1rcztIpcCVK+wsEhqfQoh+UIAnhKgkFKoO5NQaJ6R9oABPSAemaI2rCuSUaZGQ9o0CPCEdgKI1Xj+YU2ucEONFAZ4QI1G3NV4/kFNrnJCOhwI8Ie1M/da44kGD2QghdVGAJ8QAmZg0Pm+cWuOEEHVQgCdEj8zM2DXHAcDdnZ0zTq1xQoguUIAnpJWZmKgepa5ojUulwK1b7IJFFNgJIbpCAZ4QHVEE8PqBnII2IUQfKMATogFFa1zV3HFap4AQYkgowBNSD4/X+Lxxao0TQtoLCvCkw6rfGq8bzKk1Tghp7yjAE6OmqjWueJjSu58QYsToI44YBVPTxueNU2ucENIRUYAn7QaPVztvPCMD8PConTdOrXFCCFFGH4vE4JiaNj5vnMdj543fvAnY29OgN0IIaQwFeKIXita4qulm1BonhJCWo49S0qoUrfH6gZzujRNCSOuiAE9arH5rvG4wp9Y4IYToB338ErXVb40rHkIhtcYJIcTQUIAnShprjZubs4lhCCGEtA8U4DsogUB1IKfWOCGEGAcK8EaMx6vNn56RAXh61s4bp9Y4IYQYN76+C9CUyMhI8Hg8pYeLi4u+i2VwBALAygpwdATc3QE/PyAgAOjbF+jRA/D2Zo+zswMsLCi4E0JIR2DwLXh/f3/8+eef3HOTDhqd+PzG54130B8JIYSQJhh8gDc1NdWo1V5VVYWqqirueXFxMQBAKpVCKpWqdQ3FcXK5esfrUt1743X/FQpVHy+Xs4+mKOqjbv0NHdXH8BlbnbSpj7HUnbRfPIZhGH0XojGRkZFYs2YNbGxsYGZmhqCgIERFRcHX17fJc5YuXdpg+65duyAWi1uzuIQQwikvL8fUqVNRVFQEa2trfReHdEAGHeAPHz6M8vJydO3aFdnZ2VixYgVu3ryJa9euwd7eXuU5qlrwHh4eyMvLU/uPTCqVIjY2Fi4uoeDztU92rmlrvLUo6hMaGgqBESRvp/oYPmOrkzb1KS4uhoODAwV4ojcG3UUfFhbG/b9nz54YOHAgunTpgujoaMyfP1/lOWZmZjAzM2uwXSAQaPxBw+cLmg3w9e+N133wDWwIozY/A0NG9TF8xlYnTepjTPUm7ZNBB/j6LCws0LNnTyQnJ7f5awuFqlc4a+vWOCGEEKKOdhXgq6qqcOPGDQwdOrRNXs/Lq3beuKG1xgkhhJCmGHTYWrBgAeLj45GWloYLFy7gueeeQ3FxMaZPn94mr29rC4jFFNwJIYS0Pwbdgn/w4AFeeukl5OXlwdHREU899RTOnz8PLy8vfReNEEIIMWgGHeD37Nmj7yIQQggh7RJ1PhNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQowBNCCCFGiAI8IYQQYoQMepqcLijW0lEsG6sOqVSK8vJyFBcXG0U+aaqPYTO2+gDGVydt6qP4zDHg9byIkTP6AF9SUgIA8PDw0HNJCCEdUUlJCWxsbPRdDNIBGfRysbogl8uRkZEBKysr8Hg8tc5RLDGbnp5uFMs8Un0Mm7HVBzC+OmlTH4ZhUFJSAjc3N/Ap3z
XRA6NvwfP5fLi7u2t1rrW1tVF8OClQfQybsdUHML46aVofarkTfaKvlYQQQogRogBPCCGEGCEK8CqYmZlhyZIlMDMz03dRdILqY9iMrT6A8dXJ2OpDOgajH2RHCCGEdETUgieEEEKMEAV4QgghxAhRgCeEEEKMEAV4QgghxAh12AC/ceNG+Pj4wNzcHP369cPp06ebPD4+Ph79+vWDubk5fH198f3337dRSdWjSX1+//13hIaGwtHREdbW1hg4cCCOHj3ahqVtnqa/H4W//voLpqam6N27d+sWUEOa1qeqqgqLFy+Gl5cXzMzM0KVLF2zZsqWNSts8Teuzc+dOBAYGQiwWw9XVFa+99hry8/PbqLRNO3XqFMLDw+Hm5gYej4f//ve/zZ5j6J8HhAAAmA5oz549jEAgYH766Sfm+vXrzNy5cxkLCwvm3r17Ko9PTU1lxGIxM3fuXOb69evMTz/9xAgEAua3335r45Krpml95s6dy6xatYr5+++/mdu3bzOLFi1iBAIB888//7RxyVXTtD4Kjx49Ynx9fZlRo0YxgYGBbVNYNWhTnwkTJjBBQUFMbGwsk5aWxly4cIH566+/2rDUjdO0PqdPn2b4fD7zzTffMKmpqczp06cZf39/ZtKkSW1cctUOHTrELF68mNm7dy8DgNm3b1+Txxv65wEhCh0ywA8YMIB5++23lbZ1796d+fjjj1Ue/+GHHzLdu3dX2vbWW28xTz31VKuVUROa1keVHj16MEuXLtV10bSibX2mTJnCfPrpp8ySJUsMKsBrWp/Dhw8zNjY2TH5+flsUT2Oa1mfNmjWMr6+v0rZvv/2WcXd3b7UyakudAG/onweEKHS4Lvrq6mokJCRg1KhRSttHjRqFs2fPqjzn3LlzDY4fPXo0Ll26BKlU2mplVYc29alPLpejpKQEdnZ2rVFEjWhbn61bt+LOnTtYsmRJaxdRI9rUZ//+/ejfvz9Wr16Nzp07o2vXrliwYAEqKiraoshN0qY+gwYNwoMHD3Do0CEwDIPs7Gz89ttvGDduXFsUWecM+fOAkLqMfrGZ+vLy8lBTUwNnZ2el7c7OzsjKylJ5TlZWlsrjZTIZ8vLy4Orq2mrlbY429alv7dq1KCsrwwsvvNAaRdSINvVJTk7Gxx9/jNOnT8PU1LDe0trUJzU1FWfOnIG5uTn27duHvLw8vPvuuygoKND7fXht6jNo0CDs3LkTU6ZMQWVlJWQyGSZMmID169e3RZF1zpA/Dwipq8O14BXqLx3LMEyTy8mqOl7Vdn3RtD4Ku3fvRmRkJGJiYuDk5NRaxdOYuvWpqanB1KlTsXTpUnTt2rWtiqcxTX4/crkcPB4PO3fuxIABAzB27FisW7cO27ZtM4hWPKBZfa5fv445c+bg888/R0JCAo4cOYK0tDS8/fbbbVHUVmHonweEAB2wBe/g4AATE5MGrY2cnJwG38oVXFxcVB5vamoKe3v7ViurOrSpj0JMTAxmzJiBX3/9FSNHjmzNYqpN0/qUlJTg0qVLSExMxOzZswGwAZJhGJiamuLYsWMYPnx4m5RdFW1+P66urujcubPSUqNPPPEEGIbBgwcPIJFIWrXMTdGmPitXrsTgwYOxcOFCAECvXr1gYWGBoUOHYsWKFe2uxWvInweE1NXhWvBCoRD9+vVDbGys0vbY2FgMGjRI5TkDBw5scPyxY8fQv39/CASCViurOrSpD8C23CMiIrBr1y6DuheqaX2sra2RlJSEy5cvc4+3334b3bp1w+XLlxEUFNRWRVdJm9/P4MGDkZGRgdLSUm7b7du3wefz4e7u3qrlbY429SkvLwefr/xRY2JiAqC25dueGPLnASFK9DS4T68U03w2b97MXL9+nZk3bx5jYWHB3L17l2EYhvn444+ZV199lTteMS3m/fffZ65fv85s3rzZoKbFaFqfXbt2Maampsx3333HZGZmco9Hjx7pqwpKNK1PfYY2il7T+pSUlDDu7u7Mc889x1
y7do2Jj49nJBIJ88Ybb+irCko0rc/WrVsZU1NTZuPGjcydO3eYM2fOMP3792cGDBigryooKSkpYRITE5nExEQGALNu3TomMTGRm/bX3j4PCFHokAGeYRjmu+++Y7y8vBihUMj07duXiY+P5/ZNnz6dCQ4OVjo+Li6O6dOnDyMUChlvb29m06ZNbVzipmlSn+DgYAZAg8f06dPbvuCN0PT3U5ehBXiG0bw+N27cYEaOHMmIRCLG3d2dmT9/PlNeXt7GpW6cpvX59ttvmR49ejAikYhxdXVlXn75ZebBgwdtXGrVTp482eTfQ3v8PCCEYRiGloslhBBCjFCHuwdPCCGEdAQU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEEIIMUIU4AkhhBAjRAGeEBXu3r0LHo+Hy5cv67sohBCiFQrwpN2KiIjApEmTGmyPi4sDj8fDo0ePtL62h4cHMjMzERAQoH0BCSFEjzrccrGENKe6uhpCoRAuLi76LgohhGiNWvDE6O3duxf+/v4wMzODt7c31q5dq7Tf29sbK1asQEREBGxsbDBz5swGXfQRERHg8XgNHnFxcQCAwsJCTJs2Dba2thCLxQgLC0NycjL3Gtu2bUOnTp1w9OhRPPHEE7C0tMSYMWOQmZnZVj8GQkgHQwGeGLWEhAS88MILePHFF5GUlITIyEh89tln2LZtm9Jxa9asQUBAABISEvDZZ581uM4333yDzMxM7jF37lw4OTmhe/fuANgvAJcuXcL+/ftx7tw5MAyDsWPHQiqVctcoLy/Hl19+ie3bt+PUqVO4f/8+FixY0Kr1J4R0YHpezY4QrU2fPp0xMTFhLCwslB7m5uYMAKawsJCZOnUqExoaqnTewoULmR49enDPvby8mEmTJikdk5aWxgBgEhMTG7zu3r17GTMzM+b06dMMwzDM7du3GQDMX3/9xR2Tl5fHiEQi5v/+7/8YhmHXRAfApKSkcMd89913jLOzc4t/DoQQogq14Em7NmzYMFy+fFnp8fPPP3P7b9y4gcGDByudM3jwYCQnJ6Ompobb1r9/f7VeLzExEdOmTcN3332HIUOGcK9hamqKoKAg7jh7e3t069YNN27c4LaJxWJ06dKFe+7q6oqcnBzNKkwIIWqiQXakXbOwsICfn5/StgcPHnD/ZxgGPB5PaT/DMCqv05ysrCxMmDABM2bMwIwZM5q8nqrXFggESvt5PF6j5xJCSEtRC54YtR49euDMmTNK286ePYuuXbvCxMRE7etUVlZi4sSJ6N69O9atW9fgNWQyGS5cuMBty8/Px+3bt/HEE0+0rAKEEKIlasETo/bBBx/gySefxPLlyzFlyhScO3cOGzZswMaNGzW6zltvvYX09HQcP34cubm53HY7OztIJBJMnDgRM2fOxA8//AArKyt8/PHH6Ny5MyZOnKjrKhFCiFqoBU+MWt++ffF///d/2LNnDwICAvD5559j2bJliIiI0Og68fHxyMzMRI8ePeDq6so9zp49CwDYunUr+vXrh/Hjx2PgwIFgGAaHDh1q0C1PCCFthcfQTUBCCCHE6FALnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFCFOAJIYQQI0QBnhBCCDFC/w9tmPyLlQOVXwAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -3766,7 +2319,7 @@ "print('stds.shape (N,H,K) \\t', stds.shape)\n", "\n", "distr = GMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (means, stds, weights)\n", + "distr_args = (means, stds)\n", "samples, sample_mean, quants = distr.sample(distr_args)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", @@ -3866,55 +2419,47 @@ " # If True, predict_step will return Distribution's parameters\n", " self.return_params = return_params\n", " if self.return_params:\n", - " total_count_names = [\n", - " f\"-total_count-{i}\" for i in range(1, n_components + 1)\n", - " ]\n", + " total_count_names = [f\"-total_count-{i}\" for i in range(1, n_components + 1)]\n", " probs_names = [f\"-probs-{i}\" for i in range(1, n_components + 1)]\n", - " weight_names = [f\"-weight-{i}\" for i in range(1, n_components + 1)]\n", - " self.param_names = [i for j in zip(total_count_names, probs_names, weight_names) for i in j]\n", - " self.output_names = self.output_names + self.param_names\n", + " param_names = [i for j in zip(total_count_names, probs_names) for i in j]\n", + " self.output_names = self.output_names + param_names\n", "\n", " # Add first output entry for the sample_mean\n", - " self.output_names.insert(0, \"\")\n", + " self.output_names.insert(0, \"\") \n", "\n", - " self.outputsize_multiplier = 3 * n_components\n", + " self.outputsize_multiplier = 2 * n_components\n", " self.is_distribution_output = True\n", "\n", " def domain_map(self, output: torch.Tensor):\n", - " mu, alpha, weights = output.chunk(3, dim=-1)\n", + " mu, alpha = torch.tensor_split(output, 2, dim=-1)\n", + " return (mu, alpha)\n", "\n", - " return mu, alpha, weights\n", - "\n", - " def scale_decouple(\n", - " self,\n", - " output,\n", - " loc: Optional[torch.Tensor] = None,\n", - " scale: Optional[torch.Tensor] = 
None,\n", - " eps: float = 1e-6,\n", - " ):\n", - " \"\"\"Scale Decouple\n", + " def scale_decouple(self, \n", + " output,\n", + " loc: Optional[torch.Tensor] = None,\n", + " scale: Optional[torch.Tensor] = None,\n", + " eps: float=0.2):\n", + " \"\"\" Scale Decouple\n", "\n", " Stabilizes model's output optimization, by learning residual\n", " variance and residual location based on anchoring `loc`, `scale`.\n", " Also adds domain protection to the distribution parameters.\n", " \"\"\"\n", " # Efficient NBinomial parametrization\n", - " mu, alpha, weights = output\n", - " mu = F.softplus(mu) + eps\n", - " alpha = F.softplus(alpha) + eps # alpha = 1/total_counts\n", - " weights = F.softmax(weights, dim=-1)\n", + " mu, alpha = output\n", + " mu = F.softplus(mu) + 1e-8\n", + " alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts\n", " if (loc is not None) and (scale is not None):\n", " loc = loc.view(mu.size(dim=0), 1, -1)\n", " mu *= loc\n", - " alpha /= loc + 1.0\n", + " alpha /= (loc + 1.)\n", "\n", " # mu = total_count * (probs/(1-probs))\n", " # => probs = mu / (total_count + mu)\n", " # => probs = mu / [total_count * (1 + mu * (1/total_count))]\n", " total_count = 1.0 / alpha\n", - " probs = (mu * alpha / (1.0 + mu * alpha))\n", - " probs = torch.clamp(probs, eps, 1 - eps)\n", - " return (total_count, probs, weights)\n", + " probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 \n", + " return (total_count, probs)\n", "\n", " def sample(self, distr_args, num_samples=None):\n", " \"\"\"\n", @@ -3936,10 +2481,16 @@ " if num_samples is None:\n", " num_samples = self.num_samples\n", " \n", - " total_count, probs, weights = distr_args\n", + " total_count, probs = distr_args\n", " B, H, K = total_count.size()\n", " Q = len(self.quantiles)\n", " assert total_count.shape == probs.shape\n", + "\n", + " # Sample K ~ Mult(weights)\n", + " # shared across B, H\n", + " # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2)\n", + " \n", + " weights = (1/K) * 
torch.ones_like(probs, device=probs.device)\n", " \n", " # Avoid loop, vectorize\n", " weights = weights.reshape(-1, K)\n", @@ -3982,15 +2533,17 @@ "\n", " def neglog_likelihood(self,\n", " y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None):\n", "\n", " if mask is None: \n", " mask = torch.ones_like(y)\n", " \n", - " total_count, probs, weights = distr_args\n", + " total_count, probs = distr_args\n", " B, H, K = total_count.size()\n", " \n", + " weights = (1/K) * torch.ones_like(probs, device=probs.device)\n", + " \n", " y = y[:,:, None]\n", " mask = mask[:,:,None]\n", "\n", @@ -4014,7 +2567,7 @@ " return loss\n", " \n", " def __call__(self, y: torch.Tensor,\n", - " distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],\n", + " distr_args: Tuple[torch.Tensor, torch.Tensor],\n", " mask: Union[torch.Tensor, None] = None,):\n", "\n", " return self.neglog_likelihood(y=y, distr_args=distr_args, mask=mask)" @@ -4055,40 +2608,7 @@ "execution_count": null, "id": "b67e2931", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "weights.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "counts.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "probs.shape (N,H,K) \t torch.Size([2, 2, 3])\n", - "samples.shape (N,H,num_samples) torch.Size([2, 2, 2000])\n", - "sample_mean.shape (N,H) torch.Size([2, 2, 1])\n", - "quants.shape (N,H,Q) \t\t torch.Size([2, 2, 5])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAEyCAYAAACMImjBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABHt0lEQVR4nO3deVxU1f8/8NcFhmEVBGRTQEzUFHfTxAX8KJi7aZo7+DHTXJAwNbMSzKSw1FzSNAM/mdmn0jIzlUpNv5gLSi750SxATYkUBRSFEc7vD3/cHIdlGAZmez0fj3nonHvuOe8zA8x7zj33XkkIIUBEREQWxcrQARAREVHdYwJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJARERkgZgAEBERWSAmAERERBaICQAREZEFYgJgYEeOHMHTTz8Nf39/KJVKeHl5oWvXrpg9e7ZavbCwMISFhdV6PJIkIS4uTm/tNW7cGAMHDtRbe5XZv38/JEnC/v3766S/6goLC4MkSZAkCVZWVnB2dkbTpk0xYsQIfPHFFygtLdXYp3HjxoiKiqpWP6mpqYiLi8OtW7eqtd+jfZW9nl988UW12qlMYWEh4uLiyn2PkpOTIUkSMjMz9dYfEVXMxtABWLJvv/0WgwcPRlhYGBITE+Hj44Nr167h+PHj2Lp1K95991257vvvv2/ASE1Dhw4dcPjwYbRs2dLQoVSoSZMm+OSTTwAAd+7cQUZGBr766iuMGDECPXr0wDfffAMXFxe5/vbt21GvXr1q9ZGamor4+HhERUXB1dVV6/106au6CgsLER8fDwAaCe2AAQNw+PBh+Pj41GoMRPQAEwADSkxMRGBgIPbs2QMbm3/eilGjRiExMVGtrjF/qBmaSqWCJEmoV68ennzySUOHUyl7e3uNGJ977jkkJSXh3//+N55//nl89tln8rb27dvXekx3796Fvb19nfRVmQYNGqBBgwYGjYHIkvAQgAHduHEDHh4eah/+Zays1N+aRw8BZGZmQpIkvPPOO1i2bBkCAwPh5OSErl274ueff9Zob8OGDWjWrBmUSiVatmyJLVu2ICoqCo0bN64yzuzsbEyZMgWNGjWCra0tAgMDER8fj/v372s91t27d6NDhw6wt7dHixYt8NFHH2nUOXPmDIYMGYL69evDzs4O7dq1w6ZNm9TqlE1Lf/zxx5g9ezYaNmwIpVKJixcvahwCKHuNKno87KOPPkLbtm1hZ2cHNzc3PP300zh37pxanaioKDg5OeHixYvo378/nJyc4Ofnh9mzZ6OoqEjr16I8EydORP/+/fH5558jKytLLn90Wr60tBSLFy9G8+bNYW9vD1dXV7Rp0wbvvfceACAuLg5z5swBAAQGBspjLXtNyg7JbNu2De3bt4ednZ38jbyiww337t1DbGwsvL29YW9vj9DQUJw8eVKtTkWHqB7+GcvMzJQ/4OPj4+XYyvqs6BCAvt+btWvXom3btnBycoKzszNatGiBV155RSN2InPHGQAD6tq1Kz788ENER0dj7Nix6NChAxQKRbXaWLNmDVq0aIEVK1YAAF577TX0798fGRkZ8lTy+vXrMWXKFAwfPhzLly9HXl4e4uPjtfrQys7ORufOnWFlZYXXX38djz32GA4fPozFixcjMzMTSUlJVbbxyy+/YPbs2Xj55Zfh5eWFDz/8EJMmTULTpk3Rs2dPAMD58+cREhICT09PrFy5Eu7u7ti8eTOioqLw119/Ye7cuWptzp8/H127dsW6detgZWUFT09PZGdnq9Xx8fHB4cOH1cr+/vtvjBs3Dg0bNpTLEhIS8Morr2D06NFISEjAjRs3EBcXh65du+LYsWMICgqS66pUKgwePBiTJk3C7Nmz8dNPP+GNN96Ai4sLXn/99Spfi8oMHjwYu3btwsGDBxEQEFBuncTERMTFxeHVV19Fz549oVKp8L///U8+3v/cc88hNzcXq1atwrZt2+Tp9IdnkE6cOIFz587h1Vd
fRWBgIBwdHSuN65VXXkGHDh3w4YcfIi8vD3FxcQgLC8PJkyfRpEkTrcfn4+OD3bt346mnnsKkSZPw3HPPAUCl3/r1/d5s3boV06ZNw8yZM/HOO+/AysoKFy9exK+//qr1OIjMhiCDuX79uujevbsAIAAIhUIhQkJCREJCgigoKFCrGxoaKkJDQ+XnGRkZAoBo3bq1uH//vlx+9OhRAUB8+umnQgghSkpKhLe3t+jSpYtae1lZWUKhUIiAgAC1cgBi4cKF8vMpU6YIJycnkZWVpVbvnXfeEQDE2bNnKx1jQECAsLOzU9v/7t27ws3NTUyZMkUuGzVqlFAqleLSpUtq+/fr1084ODiIW7duCSGE2LdvnwAgevbsqdFX2bZ9+/aVG8udO3dE586dhY+Pj8jMzBRCCHHz5k1hb28v+vfvr1b30qVLQqlUijFjxshlkZGRAoD473//q1a3f//+onnz5pW+DkI8eA9btWpV4fbvvvtOABBvv/22XBYQECAiIyPl5wMHDhTt2rWrtJ+lS5cKACIjI0NjW0BAgLC2thbnz58vd9vDfZW9nh06dBClpaVyeWZmplAoFOK5555TG9vDP59lIiMj1X7G/v77b42fsTJJSUlqcdfGezNjxgzh6uqq0TeRJeIhAANyd3fHwYMHcezYMbz11lsYMmQILly4gPnz56N169a4fv16lW0MGDAA1tbW8vM2bdoAgDyNfP78eWRnZ2PkyJFq+/n7+6Nbt25Vtr9z50706tULvr6+uH//vvzo168fAODAgQNVttGuXTv4+/vLz+3s7NCsWTO1qe4ff/wRvXv3hp+fn9q+UVFRKCws1PgmP3z48Cr7fVhJSQmeffZZnDt3Drt27ZK/YR8+fBh3797VmPr28/PDv/71L/zwww9q5ZIkYdCgQWplbdq0URuLroQQVdbp3LkzfvnlF0ybNg179uxBfn5+tftp06YNmjVrpnX9MWPGqB0yCQgIQEhICPbt21ftvqujNt6bzp0749atWxg9ejS+/vprrX7HiMwVEwAj0KlTJ8ybNw+ff/45rl69ihdffBGZmZkaCwHL4+7urvZcqVQCeLCwC3iwzgAAvLy8NPYtr+xRf/31F7755hsoFAq1R6tWrQBAqz+gj8ZYFmdZjGVxlrf629fXV20cZaq7Unzq1KnYvXs3vvjiC7Rr106t34ra8/X11ejXwcEBdnZ2GmO5d+9eteIpT9kHVdmYyzN//ny88847+Pnnn9GvXz+4u7ujd+/eOH78uNb9VPe18/b2Lrfs0ddG32rjvRk/fjw++ugjZGVlYfjw4fD09ESXLl2QkpJSCyMgMm5MAIyMQqHAwoULATxYFFdTZR++f/31l8a2R4+Zl8fDwwMRERE4duxYuY9JkybVOMayOK9du6ZRfvXqVTmOhz26iK8ycXFx+PDDD7FhwwZERERo9Augwr4f7bc27dixA5IkyesiymNjY4PY2FicOHECubm5+PTTT3H58mX07dsXhYWFWvVTndcOKP/nJDs7Wy2xs7OzK3dNSU2+YdfWezNx4kSkpqYiLy8P3377LYQQGDhwoF5mcYhMCRMAAyrvDxsAeYVzZd8EtdW8eXN4e3vjv//9r1r5pUuXkJqaWuX+AwcOxJkzZ/DYY4+hU6dOGg99xAgAvXv3xo8//ih/4Jf5z3/+AwcHB51P79u4cSPi4+OxaNGicle4d+3aFfb29ti8ebNa+ZUrV+TDEnUhKSkJ3333HUaPHq12uKQyrq6ueOaZZzB9+nTk5ubKq+cfnQWqqU8//VTt8ERWVhZSU1PVVv03btwYFy5cUEsCbty4ofEzVp3Yavu9cXR0RL9+/bBgwQIUFxfj7NmzNWqPyNTwLAAD6tu3Lxo1aoRBgwahRYsWKC0tRXp6Ot599104OTlh1qxZNe7DysoK8fHxmDJlCp555hn8+9//xq1btxAfHw8fHx+N0w0ftWjRIqSkpCAkJATR0dFo3rw57t27h8zMTOzatQv
r1q1Do0aNahznwoUL5fUGr7/+Otzc3PDJJ5/g22+/RWJiotrFcbR1+PBhTJ06Fd26dUN4eLjG6ZFPPvkkXF1d8dprr+GVV17BhAkTMHr0aNy4cQPx8fGws7OTZ2P05e7du3Icd+/exR9//IGvvvoKO3fuRGhoKNatW1fp/oMGDUJwcDA6deqEBg0aICsrCytWrEBAQIC8Ir5169YAgPfeew+RkZFQKBRo3rw5nJ2ddYo5JycHTz/9NCZPnoy8vDwsXLgQdnZ2mD9/vlxn/Pjx+OCDDzBu3DhMnjwZN27cQGJiosaFhZydnREQEICvv/4avXv3hpubGzw8PMo9HbU23pvJkyfD3t4e3bp1g4+PD7Kzs5GQkAAXFxc88cQT1W6PyKQZeBGiRfvss8/EmDFjRFBQkHBychIKhUL4+/uL8ePHi19//VWtbkVnASxdulSjXZSzynr9+vWiadOmwtbWVjRr1kx89NFHYsiQIaJ9+/ZV7vv333+L6OhoERgYKBQKhXBzcxMdO3YUCxYsELdv3650jAEBAWLAgAEa5eWtGj99+rQYNGiQcHFxEba2tqJt27YiKSlJrU7ZyvTPP/9co81HzwIoW1Ve0eNhH374oWjTpo2wtbUVLi4uYsiQIRpnOERGRgpHR0eNfhcuXKjRXnlCQ0PV+nd0dBRNmjQRzzzzjPj8889FSUmJxj6Prsx/9913RUhIiPDw8BC2trbC399fTJo0ST6rocz8+fOFr6+vsLKyUntNKno/yuur7PX8+OOPRXR0tGjQoIFQKpWiR48e4vjx4xr7b9q0STz++OPCzs5OtGzZUnz22WcaZwEIIcT3338v2rdvL5RKpQAg9/noWQBl9PnebNq0SfTq1Ut4eXkJW1tb4evrK0aOHClOnTpV7mtCZM4kIbRYekxm59atW2jWrBmGDh2K9evXGzocIiKqYzwEYAGys7Px5ptvolevXnB3d0dWVhaWL1+OgoICvRxmICIi08MEwAIolUpkZmZi2rRpyM3NlRfVrVu3Tj6dj4iILAsPARAREVkgngZIRERkgZgAEBERWSCuAcCDW6xevXoVzs7O1b5KGhGRroQQKCgogK+vb5XX5CDSNyYAeHBZ0UdvQkNEVFcuX76slwtqEVUHEwBAvkLa5cuXNa5cVhGVSoW9e/ciIiICCoWiNsOrcxybaTLXsZnruAAgPz8ffn5+Ol+lkagmmADgn5uj1KtXr1oJgIODA+rVq2d2f5Q4NtNkrmMz13E9jIceyRB40ImIiMgCMQEgIiKyQEwAiIiILBDXABARGbHS0lIUFxcbOgwyAQqFAtbW1lrXZwJARGSkiouLkZGRgdLSUkOHQibC1dUV3t7eWi0sZQJARGSEhBC4du0arK2t4efnxwsFUaWEECgsLEROTg4AwMfHp8p9mAAQERmh+/fvo7CwEL6+vnBwcDB0OGQC7O3tAQA5OTnw9PSs8nAAU0oiIiNUUlICALC1tTVwJGRKypJFlUpVZV3OAJiA5SkXyi1/MbxZHUdCRHWNFwmi6qjOzwtnAIiIiCwQEwAiIiILxASAiIjIAnENgAkrb20A1wUQmbeK1gTVFmP8mxIWFoZ27dphxYoVhg7FpHEGgIiI9CoqKgpDhw7VKN+/fz8kScKtW7dq1P62bdvwxhtv1KgNU/DTTz9h0KBB8PX1hSRJ+Oqrr/TaPmcAjExdZ/dERKaiuLgYtra2cHNzM3QoNRIWFoaoqChERUVVWu/OnTto27YtJk6ciOHDh+s9Ds4AEBGRQRQVFSE6Ohqenp6ws7ND9+7dcezYMXl7WFgYZsyYgdjYWHh4eCA8PFwuj4mJAQBkZmZCkiSNR1hYmFZ9lLUXHR2NuXPnws3NDd7e3oiLi6s09sGDB5fbryRJ2LFjh15en379+mHx4sUYNmyYXtp7FBMAIiIyiLlz5+LLL7/Epk2bcOLECTRt2hR9+/ZFbm6uXGfTpk2wsbHB//3f/+GDDz7QaMPPzw/Xrl2
THydPnoS7uzt69uypdR9l/Tg6OuLIkSNITEzEokWLkJKSUmHsSUlJuHbtGn777TcAwK5du+QY+vfvr4+Xp9YZNAGozvGNKVOmQJIkjUUfRUVFmDlzJjw8PODo6IjBgwfjypUrtRs4ERFVaufOnXByclJ79OvXT95+584drF27FkuXLkW/fv3QsmVLbNiwAfb29ti4caNcr2nTpkhMTETz5s3RokULjX6sra3h7e0Nb29vuLq6YurUqejatSvi4uK07gMA2rRpg4ULFyIoKAgTJkxAp06d8MMPP1Q4Pnd3d3h7e+Pvv/+GJEno3r27HIeNjWkcXTdoAlB2fGP16tWV1vvqq69w5MgR+Pr6amyLiYnB9u3bsXXrVhw6dAi3b9/GwIED5ctoEhFR3evVqxfS09PVHh9++KG8/ffff4dKpUK3bt3kMoVCgc6dO+PcuXNyWadOnbTuc9KkSSgoKMCWLVtgZWWldR/AgwTgYT4+PvKNdSpz6tQpNG7cGM7OzhXWWbJkiVoidPDgQUydOlWjrK4ZNE3p16+fWkZYnj///BMzZszAnj17MGDAALVteXl52LhxIz7++GP06dMHALB582b4+fnh+++/R9++fWstdnO1POUCJFGCQABr9l2EkKyN8jQgIjJujo6OaNq0qVrZw7OzQggAmpeuFUKolTk6OmrV3+LFi7F7924cPXpU/jDWtg/gQWLwMEmStLoN86lTpzSSh0dNnToVI0eOlJ+PHTsWw4cPVzu237Bhwyr70jejnqcoLS3F+PHjMWfOHLRq1Upje1paGlQqFSIiIuQyX19fBAcHIzU1tcIEoKioCEVFRfLz/Px8AA9unqDNDRTK6j78r75IomYzFzWNRxIlcgxl/+p7jIZUW++bMTDXsZnruADzHJO2mjZtCltbWxw6dAhjxowB8OD1OH78uLzAT1tffvklFi1ahO+++w6PPfZYrfRRkczMTAQHB1dax83NTe3MBXt7e3h6emokSHXNqBOAt99+GzY2NoiOji53e3Z2NmxtbVG/fn21ci8vL2RnZ1fYbkJCAuLj4zXK9+7dW+3bbla2SEQXgTXcf9eump1G+HD/je/9rpc2jZG+3zdjYq5jM8dxFRYWGjoEg3F0dMQLL7yAOXPmwM3NDf7+/khMTERhYSEmTZqkdTtnzpzBhAkTMG/ePLRq1Ur+2192uqA++qhMaWkpsrKycOXKFTRs2FCvN2+6ffs2Ll68KD/PyMhAenq6PJaaMtoEIC0tDe+99x5OnDhR7Re0vOmdh82fPx+xsbHy8/z8fPj5+SEiIgL16tXTqg+VSoWUlBSEh4drTB3VxJp9F6uuVInpvWqWUa7ZdxGSKEHje78j0+4xCMm6xm0ak9p634yBuY7NXMcF/DP7WB3mdEjurbfekmd6CwoK0KlTJ+zZs0fjS11ljh8/jsLCQixevBiLFy+Wy0NDQ7F//3699FGZ6OhoPP/882jRogXy8/P1mgAcP34cvXr1kp+XfW5FRkYiOTm5xu0bbQJw8OBB5OTkqGU5JSUlmD17NlasWIHMzEx4e3ujuLgYN2/eVHszc3JyEBISUmHbSqUSSqVSo1yhUFT7D4wu+1RGSNY12r+msTzcv5CsISRrs/ujC+j/fTMm5jo2cxyXuY2nTEUfTmFhYfJxeQCws7PDypUrsXLlynLr79+/v8ryqi6oU1UfFfWj7VX3+vXrh8uXL2tVt7L+yvPo66VvRnsdgPHjx+PUqVNqK0h9fX0xZ84c7NmzBwDQsWNHKBQKtanBa9eu4cyZM5UmAERERJbOoDMAVR3fcHd3V6uvUCjg7e2N5s2bAwBcXFwwadIkzJ49G+7u7nBzc8NLL72E1q1by2cFEBERkSaDJgD6OL6xfPly2NjYYOTIkbh79y569+6N5ORkWFvXbCqdiIjInBk0Aaju8Y3MzEyNMjs7O6xatQqrVq3SY2RERETmzWjXABAREVHtYQJARERkgZgAEBERWSCjvQ4A6WZ
5iuZV+8zpwiFERKQfnAEgIiKyQEwAiIiILBAPARARmZJ9CXXbX6/5ddufFsLCwtCuXTusWLHC0KGYNM4AEBGRXkVFRWHo0KEa5fv374ckSbh161aN2t+2bRveeOONGrVhChISEvDEE0/A2dkZnp6eGDp0KM6fP6+39pkAEBGRSSguLgYAuLm5wdnZ2cDR6C4sLEyrq90eOHAA06dPx88//4yUlBTcv38fERERuHPnjl7iYAJAREQGUVRUhOjoaHh6esLOzg7du3fHsWPH5O1hYWGYMWMGYmNj4eHhgfDwcLk8JiYGwIMrxEqSpPEICwvTqo+y9qKjozF37ly4ubnB29sbcXFxlcY+ePDgcvuVJAk7duzQy+uze/duREVFoVWrVmjbti2SkpJw6dIlpKWl6aV9JgBERGQQc+fOxZdffolNmzbhxIkTaNq0Kfr27Yvc3Fy5zqZNm2BjY4P/+7//wwcffKDRhp+fH65duyY/Tp48CXd3d/Ts2VPrPsr6cXR0xJEjR5CYmIhFixap3Wn2UUlJSbh27Rp+++03AMCuXbvkGPr376+Pl0dDXl4egAczIPrARYAWrOyaAU9eWi+XPQmgVLLGdfcQPHElGVaiBNjnXu5CIF5zgIgqsnPnTjg5OamVlZSUyP+/c+cO1q5di+TkZPTr1w8AsGHDBqSkpGDjxo2YM2cOAKBp06ZITEyssB9ra2t4e3sDAO7du4ehQ4eia9euiIuL07oPAGjTpg0WLlwIAAgKCsLq1avxww8/yLMOjyq7W+3hw4chSRK6d+9eq4clhBCIjY1F9+7dERwcrJc2OQNARER616tXL6Snp6s9PvzwQ3n777//DpVKhW7dusllCoUCnTt3xrlz5+SyTp06ad3npEmTUFBQgC1btsDKykrrPoAHCcDDfHx8kJOTU2Wfp06dQuPGjSv98F+yZAmcnJzkx8GDBzF16lSNssrMmDEDp06dwqefflplTNriDAAREemdo6MjmjZtqlZ25coV+f9ld4KVJEmtjhBCrczR0VGr/hYvXozdu3fj6NGj8oextn0ADxKDh0mShNLS0ir7PXXqlEby8KipU6di5MiR8vOxY8di+PDhGDZsmFzWsGHDCvefOXMmduzYgZ9++gmNGjWqMiZtcQaAiIjqXNOmTWFra4tDhw7JZSqVCsePH8fjjz9erba+/PJLLFq0CP/973/x2GOP1UofFcnMzETz5s0rrePm5oamTZvKD3t7e3h6emqUPUoIgRkzZmDbtm348ccfERgYqJeYy3AGgIiI6pyjoyNeeOEFzJkzB25ubvD390diYiIKCwsxadIkrds5c+YMJkyYgHnz5qFVq1bIzs4GANja2sLNzU0vfVSmtLQUWVlZuHLlCho2bKgxs1AT06dPx5YtW/D111/D2dlZHpuLi0u5CUN1MQEgIjIlRnhlPl299dZbKC0txfjx41FQUIBOnTphz549qF+/vtZtHD9+HIWFhVi8eDEWL14sl4eGhmL//v166aMy0dHReP7559GiRQvk5+frNQFYu3YtAMinNJZJSkpCVFRUjdtnAkBERHpV0UVuwsLC5OPyAGBnZ4eVK1di5cqV5dbfv39/leVRUVGVfhhW1UdF/Xz11VcV1n9Yv379cPnyZa3qVtZfeR5+rWqDQdcA/PTTTxg0aBB8fX0hSZLaC65SqTBv3jy0bt0ajo6O8PX1xYQJE3D16lW1NoqKijBz5kx4eHjA0dERgwcPVltoQkRERJoMmgDcuXMHbdu2xerVqzW2FRYW4sSJE3jttddw4sQJbNu2DRcuXMDgwYPV6sXExGD79u3YunUrDh06hNu3b2PgwIFq55sSERGROoMeAujXr598cYZHubi4aFyFadWqVejcuTMuXboEf39/5OXlYePGjfj444/Rp08fAMDmzZvh5+eH77//Hn379i237aKiIhQVFcnP8/PzATyYdVCpVFrFXlZP2/rakoT+E5eKYizrq1SyVisve172r0p
YAeW0UV6sK/ae0yib3qupRpmh1Nb7ZgzMdWzmOi7APMdEpsOk1gDk5eVBkiS4uroCANLS0qBSqRARESHX8fX1RXBwMFJTUytMABISEhAfH69RvnfvXjg4OFQrpsouFakL/Z7k8cCuXZpX7Hu4r+vuIeVuz3Xr8mD/AgC7dlW4v679G5K+3zdjYq5jM8dxFRYWGjoEsmAmkwDcu3cPL7/8MsaMGYN69eoBALKzs2Fra6uxmtPLy0s+XaI88+fPR2xsrPw8Pz8ffn5+iIiIkNuuikqlQkpKCsLDwzUuIFETa/Zd1FtbZSr6Bl7W1xNXktXKSyVr5Lp1gVvuEViJEnRu7Ab0iK1wf137N4Taet+MgbmOzVzHBfwz+1iZ2l4IRuZFm4sXlTGJBEClUmHUqFEoLS3F+++/X2X98q7y9DClUgmlUqlRrlAoqv0HRpd9KiMemY7Xh4riK+vLqoLDDlaiBFaiBMcz/sbPJRmaFbSM1Rj/aOv7fTMm5jo2cxxXZeNRKBSQJAl///03GjRooNfTy8j8CCFQXFyMv//+G1ZWVrC1ta1yH6NPAFQqFUaOHImMjAz8+OOPat/Qvb29UVxcjJs3b6rNAuTk5CAkpPxpbSIiU2BtbY1GjRrhypUryMzMNHQ4ZCIcHBzg7+8PK6uq1/gbdQJQ9uH/22+/Yd++ffLdl8p07NgRCoUCKSkp8nWWr127hjNnzlR69ygiIlPg5OSEoKAgLhYkrVhbW8PGxkbr2SKDJgC3b9/GxYv/HEfOyMhAeno63Nzc4Ovri2eeeQYnTpzAzp07UVJSIh/Xd3Nzg62tLVxcXDBp0iTMnj0b7u7ucHNzw0svvYTWrVvLZwUQ1bl9CVXXMaOruVHtsra2hrW1/g8NEhk0ATh+/Dh69eolPy9bmBcZGYm4uDjs2LEDANCuXTu1/fbt2ydfGnH58uWwsbHByJEjcffuXfTu3RvJyckW8wvz5KX1WtR6p9bjICIi02LQBODRy0I+SpvVr3Z2dli1ahVWrVqlz9CIiIjMGm8HTEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGM+kJApCcVnJf+5KUbWjdR1emGP/s/X62QylXV+fM8d56ISG84A0BERGSBOANgAQ7/of03fSIisgycASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJABr0S4E8//YSlS5ciLS0N165dw/bt2zF06FB5uxAC8fHxWL9+PW7evIkuXbpgzZo1aNWqlVynqKgIL730Ej799FPcvXsXvXv3xvvvv49GjRoZYETVszzlgqFDMD9V3U+g+0t1EwcRkZEzaAJw584dtG3bFhMnTsTw4cM1ticmJmLZsmVITk5Gs2bNsHjxYoSHh+P8+fNwdnYGAMTExOCbb77B1q1b4e7ujtmzZ2PgwIFIS0uDtbV1XQ+JalNVH+7mRJux8uZIRFQDOh0CSE5ORmFhYY0779evHxYvXoxhw4ZpbBNCYMWKFViwYAGGDRuG4OBgbNq0CYWFhdiyZQsAIC8vDxs3bsS7776LPn36oH379ti8eTNOnz6N77//vsbxERERmSudZgDmz5+P6OhojBgxApMmTUJISIi+40JGRgays7MREREhlymVSoSGhiI1NRVTpkxBWloaVCqVWh1fX18EBwcjNTUVffv2LbftoqIiFBUVyc/z8/MBACqVCiqVSqv4yuppW788kijRed8ypZL+ZznK2qxO25WNRevXSNT+khR9vG9V0mYcVfWvQxt1MjYDMNdxAeY5JjIdOiUAV65cwbfffovk5GT06tULgYGBmDhxIiIjI+Ht7a2XwLKzswEAXl5eauVeXl7IysqS69ja2qJ+/foadcr2L09CQgLi4+M1yvfu3QsHB4dqxZmSklKt+g8L1HnPf1x313/yVSbXrYvWdQPvVryeYdcubdc6tNC6P539//erJu9b1bQ
Yx65dtdZG7Y7NcMxxXPqYSSXSlU4JgLW1NQYPHozBgwcjJycHmzdvRnJyMl577TU89dRTmDRpEgYNGgQrq5p/o5MkSe25EEKj7FFV1Zk/fz5iY2Pl5/n5+fDz80NERATq1aunVVwqlQopKSkIDw+HQqHQap9Hrdl3Uaf9HvbEleQat/GoUskauW5d4JZ7BFZazlIcaxRV4bbpvZpq1/HBZdrVqwHVkzNr/L5VSZtx9IitfLsObejjZ9IYmeu4gH9mH4kMocaLAD09PdGtWzecP38eFy5cwOnTpxEVFQVXV1ckJSUhLCxMp3bLZhKys7Ph4+Mjl+fk5MizAt7e3iguLsbNmzfVZgFycnIqPSyhVCqhVCo1yhUKRbX/wOiyTxmhh+l7bT+gdW1b2/YrG4vWr49Uql29mvj/sdTkfauSNuOoqu8atFGrYzMgcxyXuY2HTIvOX9H/+usvvPPOO2jVqhXCwsKQn5+PnTt3IiMjA1evXsWwYcMQGRmpc2CBgYHw9vZWm/YrLi7GgQMH5A/3jh07QqFQqNW5du0azpw5UyvrEoiIiMyFTjMAgwYNwp49e9CsWTNMnjwZEyZMgJubm7zd3t4es2fPxvLlyytt5/bt27h48Z9p8IyMDKSnp8PNzQ3+/v6IiYnBkiVLEBQUhKCgICxZsgQODg4YM2YMAMDFxQWTJk3C7Nmz4e7uDjc3N7z00kto3bo1+vTpo8vQiIiILIJOCYCnpycOHDiArl27VljHx8cHGRkZlbZz/Phx9OrVS35edlw+MjISycnJmDt3Lu7evYtp06bJFwLau3evfA0AAFi+fDlsbGwwcuRI+UJAycnJvAYAERFRJXRKAEJDQ9GhQweN8uLiYmzduhUTJkyAJEkICAiotJ2wsDAIISrcLkkS4uLiEBcXV2EdOzs7rFq1CqtWrdI6fiIiIkun0xqAiRMnIi8vT6O8oKAAEydOrHFQREREVLt0SgAqOs3uypUrcHFxqXFQREREVLuqdQigffv2kCQJkiShd+/esLH5Z/eSkhJkZGTgqaee0nuQREREpF/VSgDK7tSXnp6Ovn37wsnJSd5ma2uLxo0bl3tTHyKjcXAZgBYP/q3oXHtTucnOozcMElZQG5upjIOIDKJaCcDChQsBAI0bN8azzz4LOzu7WgmKiIiIapdOZwHU5AI/REREZHhaJwBubm64cOECPDw8UL9+/UqvtZ+bm6uX4IiIiKh2aJ0ALF++XL4Az/Lly6u8IQ+RmkePVwM4/McNjbKuTdzrIhoiIoundQLw8LR/VFRUbcRCREREdUTrBKA6t63U9pa6REREZBhaJwCurq5VTvuXXSCopKT2blFLxunJS+sr3riP0/pERMZG6wRg3759tRkHERER1SGtE4DQ0NDajIOIiIjqkNYJwKlTpxAcHAwrKyucOnWq0rpt2rSpcWBEBlPOGQtqeIU9IjIDWicA7dq1Q3Z2Njw9PdGuXTtIklTurXy5BoCIiMj4aZ0AZGRkoEGDBvL/iYiIyHRpnQAEBASU+38ii1PVIQJjwUMZRFQJne4FAADnz5/HqlWrcO7cOUiShBYtWmDmzJlo3ry5PuMjIiKiWmCly05ffPEFgoODkZaWhrZt26JNmzY4ceIEgoOD8fnnn+stuPv37+PVV19FYGAg7O3t0aRJEyxatAilpf/cxlUIgbi4OPj6+sLe3h5hYWE4e/as3mIgIiIyRzrNAMydOxfz58/HokWL1MoXLlyIefPmYcSIEXoJ7u2338a6deuwadMmtGrVCsePH8fEiRPh4uKCWbNmAQASExOxbNkyJCcno1mzZli8eDHCw8Nx/vx5+d4FREREpE6nBCA7OxsTJkzQKB83bhyWLl1a46DKHD58GEOGDMGAAQMAAI0bN8ann36K48ePA3jw7X/FihVYsGABhg0bBgDYtGkTvLy8sGXLFkyZMkVvsVDd4A2CiIjqhk4JQFhYGA4ePIimTZuqlR86dAg9evTQS2AA0L1
7d6xbtw4XLlxAs2bN8Msvv+DQoUNYsWIFgAdnI2RnZyMiIkLeR6lUIjQ0FKmpqRUmAEVFRSgqKpKfl93nQKVSQaVSaRVbWT1t65dHEjU/XbJUsq5xGxW1qa+2VaL8I03atl/R/jWJRZ9t6hZIFT83OsRX7bHV4Ge3Lunjd81YmeOYyHRonQDs2LFD/v/gwYMxb948pKWl4cknnwQA/Pzzz/j8888RHx+vt+DmzZuHvLw8tGjRAtbW1igpKcGbb76J0aNHA3gwEwEAXl5eavt5eXkhKyurwnYTEhLKjXPv3r1wcHCoVowpKSnVqv+wQJ33/Md19xA9tFK+XLcuemlnV0EFG7T8Yl/h/jWQcruZ/hutjl27qqjQQuemtR5blTEYl5r8rhmrwsJCQ4dAFkwS5V3NpxxWVtp9q9DnhYC2bt2KOXPmYOnSpWjVqhXS09MRExODZcuWITIyEqmpqejWrRuuXr0KHx8feb/Jkyfj8uXL2L17d7ntljcD4Ofnh+vXr2t9J0OVSoWUlBSEh4dDoVDoNL41+y7qtN/DnriSXOM2HlUqWSPXrQvcco/ASg+zFJ0bu5VbfjQzt0b760IlrJByuxnCnS5AIZVWvUNt6RFb+faDy6rdZLXHVlUMRkIfv2vGKj8/Hx4eHsjLy+NdVKnOaT0D8PDK+7oyZ84cvPzyyxg1ahQAoHXr1sjKykJCQgIiIyPh7e0N4MFMwMMJQE5OjsaswMOUSiWUSqVGuUKhqPYfGF32KSP0MMWujw/oytrWR/sVfRhp23ZtfFArpFLDJgBV/czUIDatx2ZiH6Y1+V0zVuY2HjItBj4QWrnCwkKNmQdra2s5GQkMDIS3t7fa1GBxcTEOHDiAkJDamxonIiIydTpfCOjOnTs4cOAALl26hOLiYrVt0dHRNQ4MAAYNGoQ333wT/v7+aNWqFU6ePIlly5bh3//+N4AHhxtiYmKwZMkSBAUFISgoCEuWLIGDgwPGjBmjlxiIiIjMkU4JwMmTJ9G/f38UFhbizp07cHNzw/Xr1+Hg4ABPT0+9JQCrVq3Ca6+9hmnTpiEnJwe+vr6YMmUKXn/9dbnO3LlzcffuXUybNg03b95Ely5dsHfvXl4DgIiIqBI6JQAvvvgiBg0ahLVr18LV1RU///wzFAoFxo0bJ1+gRx+cnZ2xYsUK+bS/8kiShLi4OMTFxemtXyIiInOn0xqA9PR0zJ49G9bW1rC2tkZRURH8/PyQmJiIV155Rd8xEhERkZ7plAAoFApIkgTgwTn3ly5dAgC4uLjI/yciIiLjpdMhgPbt2+P48eNo1qwZevXqhddffx3Xr1/Hxx9/jNatW+s7RiIiItIznRKAJUuWoKDgweXZ3njjDURGRuKFF15A06ZNkZSUpNcAiXh/ACIi/dMpAejUqZP8/wYNGmCXiV1SlIgA7Euouk6v+bUfBxEZhM7XAQAeXHHv/PnzkCQJzZs3R4MGDfQVF5F50+bDl4ioFum0CDA/Px/jx49Hw4YNERoaip49e8LX1xfjxo1DXl6evmMkIiIiPdMpAXjuuedw5MgR7Ny5E7du3UJeXh527tyJ48ePY/LkyfqOkYiIiPRMp0MA3377Lfbs2YPu3bvLZX379sWGDRvw1FNP6S04IiIiqh06zQC4u7vDxcVFo9zFxQX169evcVBERERUu3RKAF599VXExsbi2rVrcll2djbmzJmD1157TW/BERERUe3Q+hBA+/bt5av/AcBvv/2GgIAA+Pv7AwAuXboEpVKJv//+G1OmTNF/pERERKQ3WicAQ4cOrcUwiIiIqC5pnQAsXLiwNuMgIiKiOlSjCwGlpaXh3LlzkCQJLVu2RPv27fUVFxEREdUinRKAnJwcjBo1Cvv374erqyuEEMjLy0OvXr2wdetWXhGQiIjIyOl0FsDMmTORn5+Ps2fPIjc3Fzdv3sSZM2eQn5+P6OhofcdIREREeqZTArB7926sXbsWjz/+uFzWsmVLrFmzBt99953
eggOAP//8E+PGjYO7uzscHBzQrl07pKWlyduFEIiLi4Ovry/s7e0RFhaGs2fP6jUGIiIic6NTAlBaWgqFQqFRrlAoUFpaWuOgyty8eRPdunWDQqHAd999h19//RXvvvsuXF1d5TqJiYlYtmwZVq9ejWPHjsHb2xvh4eHy7YqJiIhIk04JwL/+9S/MmjULV69elcv+/PNPvPjii+jdu7fegnv77bfh5+eHpKQkdO7cGY0bN0bv3r3x2GOPAXjw7X/FihVYsGABhg0bhuDgYGzatAmFhYXYsmWL3uIgIiIyNzotAly9ejWGDBmCxo0bw8/PD5Ik4dKlS2jdujU2b96st+B27NiBvn37YsSIEThw4AAaNmyIadOmyTccysjIQHZ2NiIiIuR9lEolQkNDkZqaWuEFiYqKilBUVCQ/z8/PBwCoVCqoVCqtYiurp239NfsuapRJ5dSrrlLJWg+tlN+mvtpWifLzzJq0X1Gb2u6n6/7GrFbGpuXPd22q7u+aKTHHMZHpkIQQQtedU1JS8L///Q9CCLRs2RJ9+vTRZ2yws7MDAMTGxmLEiBE4evQoYmJi8MEHH2DChAlITU1Ft27d8Oeff8LX11fe7/nnn0dWVhb27NlTbrtxcXGIj4/XKN+yZQscHBz0OgYioooUFhZizJgxyMvLQ7169QwdDlmYaicA9+/fh52dHdLT0xEcHFxbcQEAbG1t0alTJ6Smpspl0dHROHbsGA4fPiwnAFevXoWPj49cZ/Lkybh8+TJ2795dbrvlzQD4+fnh+vXrWv8SqlQqpKSkIDw8vNz1EI8qbwZAH564kqz3Nksla+S6dYFb7hFYiZIat9e5sVu55Uczc/XeZlVUwgopt5sh3OkCFJL+1qsYg1oZW49Y/bRTA9X9XTMl+fn58PDwYAJABlHtQwA2NjYICAhASUnNPxiq4uPjg5YtW6qVPf744/jyyy8BAN7e3gAe3Ijo4QQgJycHXl5eFbarVCqhVCo1yhUKRbX/wGi7j6iFqXoAevmArqxtfbRf0YdRTdqu6QecQio1uwSgjF7HZkQfuLr8fho7cxsPmRad7wY4f/585Obq/g1OG926dcP58+fVyi5cuICAgAAAQGBgILy9vZGSkiJvLy4uxoEDBxASElKrsREREZkynRYBrly5EhcvXoSvry8CAgLg6Oiotv3EiRN6Ce7FF19ESEgIlixZgpEjR+Lo0aNYv3491q9fDwCQJAkxMTFYsmQJgoKCEBQUhCVLlsDBwQFjxozRSwxERETmSKcEYOjQoZAkCTVYP6iVJ554Atu3b8f8+fOxaNEiBAYGYsWKFRg7dqxcZ+7cubh79y6mTZuGmzdvokuXLti7dy+cnZ1rNTYiIiJTVq0EoLCwEHPmzMFXX30FlUqF3r17Y9WqVfDw8Kit+DBw4EAMHDiwwu2SJCEuLg5xcXG1FgMREZG5qdYagIULFyI5ORkDBgzA6NGj8f333+OFF16ordiIiIiollRrBmDbtm3YuHEjRo0aBQAYO3YsunXrhpKSElhb184qdyIiItK/as0AXL58GT169JCfd+7cGTY2NmqXBCYiIiLjV60EoKSkBLa2tmplNjY2uH//vl6DIiIiotpVrUMAQghERUWpXUTn3r17mDp1qtqpgNu2bdNfhERERKR31UoAIiMjNcrGjRunt2CIiIioblQrAUhKSqqtOIiq5fAfN8ot79rEvY4jISIyTeZ3T1QiIiKqEhMAIiIiC8QEgIiIyALpdC8Aouqo6Hg9mYB9CZVv7zW/buIgIr3jDAAREZEF4gyAkXvy0npDh0BERGaIMwBEREQWiAkAERGRBWICQEREZIG4BoDM3sNnIZRK1gAvFkhExBkAIiIiS2RSCUBCQgIkSUJMTIxcJoRAXFwcfH19YW9vj7CwMJw9e9ZwQRIREZkAk0kAjh07hvXr16NNmzZq5YmJiVi2bBlWr16NY8eOwdvbG+Hh4SgoKDBQpERERMbPJBKA27dvY+z
YsdiwYQPq168vlwshsGLFCixYsADDhg1DcHAwNm3ahMLCQmzZssWAERMRERk3k1gEOH36dAwYMAB9+vTB4sWL5fKMjAxkZ2cjIiJCLlMqlQgNDUVqaiqmTJlSbntFRUUoKiqSn+fn5wMAVCoVVCqVVjGV1dO2viRKtKr3qFLJWqf9aqKsT0P0XVMqoZnTPjyOsv+XV8/UlY2pTsem5c9/zbqo3u+aKTHHMZHpMPoEYOvWrThx4gSOHTumsS07OxsA4OXlpVbu5eWFrKysCttMSEhAfHy8RvnevXvh4OBQrfhSUlK0qhdYrVb/cd09RMc9ay7XrYvB+tbVrvKO/JSz6j/ldrNaj8VQ6nRsu3bVWVfa/q6ZksLCQkOHQBbMqBOAy5cvY9asWdi7dy/s7OwqrCdJktpzIYRG2cPmz5+P2NhY+Xl+fj78/PwQERGBevXqaRWbSqVCSkoKwsPDoVAoqqy/Zt9Frdp91BNXknXaryZKJWvkunWBW+4RWOk4c2EonRu7aZQdzcyV/182tnCnC1BIpXUZWq1TCSuk3G5mfGPrEVt1nUpU93fNlJTNPhIZglEnAGlpacjJyUHHjh3lspKSEvz0009YvXo1zp8/D+DBTICPj49cJycnR2NW4GFKpRJKpVKjXKFQVPsPjLb7CB2n0w35AWwlSkwuATie8bdGWXkT4gqp1Lg+JPXI6Mampw9tXX4/jZ25jYdMi1EfCO3duzdOnz6N9PR0+dGpUyeMHTsW6enpaNKkCby9vdWmBouLi3HgwAGEhBhu6pyIiMjYGfUMgLOzM4KDg9XKHB0d4e7uLpfHxMRgyZIlCAoKQlBQEJYsWQIHBweMGTPGECETERGZBKNOALQxd+5c3L17F9OmTcPNmzfRpUsX7N27F87OzoYOjYiIyGiZXAKwf/9+teeSJCEuLg5xcXEGiYeIiMgUGfUaACIiIqodTACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgsEBMAIiIiC8QEgIiIyAIxASAiIrJATACIiIgskMldCtjYLU+5YOgQiIiIqsQZACIiIgvEBICIiMgC8RAAERnWvoTKtwsrAC3qJBQiS8IZACIiIgvEGQAiql1VfcMnIoPgDAAREZEFYgJARERkgYz6EEBCQgK2bduG//3vf7C3t0dISAjefvttNG/eXK4jhEB8fDzWr1+PmzdvokuXLlizZg1atWplwMi19+Sl9YYOgYiILJBRzwAcOHAA06dPx88//4yUlBTcv38fERERuHPnjlwnMTERy5Ytw+rVq3Hs2DF4e3sjPDwcBQUFBoyciIjIuBn1DMDu3bvVniclJcHT0xNpaWno2bMnhBBYsWIFFixYgGHDhgEANm3aBC8vL2zZsgVTpkwpt92ioiIUFRXJz/Pz8wEAKpUKKpVKq9jK6j1aXxIl2g3u/yuVrKtVvy6UxWSMsdVU2ZhUwqhzX52UjcncxiaPS8vfTVNijmMi0yEJIYShg9DWxYsXERQUhNOnTyM4OBh//PEHHnvsMZw4cQLt27eX6w0ZMgSurq7YtGlTue3ExcUhPj5eo3zLli1wcHCotfiJiB5WWFiIMWPGIC8vD/Xq1TN0OGRhTCYBEEJgyJAhuHnzJg4ePAgASE1NRbdu3fDnn3/C19dXrvv8888jKysLe/bsKbet8mYA/Pz8cP36da1/CVUqFVJSUhAeHg6FQiGXr9l3sVrjeuJKcrXq14VSyRq5bl3glnsEVtWc0TB2ZWMLd7oAhVRq6HD0SiWskHK7mdmNTR7XI79r5iA/Px8eHh5MAMggjPoQwMNmzJiBU6dO4dChQxrbJElSey6E0Ch7mFKphFKp1ChXKBTV/gPz6D6imtPmxvwBayVKjDq+mlBIpWb1Ifkwcx2bLr+fxs7cxkOmxSQSgJkzZ2LHjh346aef0KhRI7nc29sbAJCdnQ0fHx+5PCcnB15eXnUeJ5mOo5m55SY3XZu4GyAaIqK6Z9SrhYQQmDF
jBrZt24Yff/wRgYGBatsDAwPh7e2NlJQUuay4uBgHDhxASEhIXYdLRERkMox6BmD69OnYsmULvv76azg7OyM7OxsA4OLiAnt7e0iShJiYGCxZsgRBQUEICgrCkiVL4ODggDFjxhg4eiIiIuNl1AnA2rVrAQBhYWFq5UlJSYiKigIAzJ07F3fv3sW0adPkCwHt3bsXzs7OdRwtERGR6TDqBECbExQkSUJcXBzi4uJqPyAiIiIzYdRrAIiIiKh2MAEgIiKyQEZ9CICISHZwGVDR9Q16za/bWIjMAGcAiIiILBBnAIgecviPGxplvDgQEZkjJgBEVWBSQETmiIcAiIiILBATACIiIgvEBICIiMgCcQ0AEZm+fQmVb+dpgkQaOANARERkgZgAEBERWSAeAiDSQXmnBlaEpwwSkTHiDAAREZEF4gwAUS3jhYTMRFULDQEuNiSTwgSghtbsuwghWZe77clL6+s4GiIiIu0wASAi88dv70QazGYNwPvvv4/AwEDY2dmhY8eOOHjwoKFDIiIiMlpmkQB89tlniImJwYIFC3Dy5En06NED/fr1w6VLlwwdGhERkVEyi0MAy5Ytw6RJk/Dcc88BAFasWIE9e/Zg7dq1SEjQYuqPqI5V5zTC8nARIRHVlMknAMXFxUhLS8PLL7+sVh4REYHU1NRy9ykqKkJRUZH8PC8vDwCQm5sLlUqlVb8qlQqFhYUoKsmrcBFgwb37WrVlbEolgcLCQhTcuw8rUWLocPTKXMZ243axRplKWKGwsBA3pGIopFIDRFU76mxcN6pIysp5zavdxiMKCgoAAEKIau1HpA8mnwBcv34dJSUl8PLyUiv38vJCdnZ2ufskJCQgPj5eozwwMLBWYiQiUxBnsDYKCgrg4uKih/6JtGfyCUAZSZLUngshNMrKzJ8/H7GxsfLz0tJS5Obmwt3dvcJ9HpWfnw8/Pz9cvnwZ9erV0z1wI8SxmSZzHZu5jgt48HeqoKAAvr6+hg6FLJDJJwAeHh6wtrbW+Lafk5OjMStQRqlUQqlUqpW5urrq1H+9evXM7o9SGY7NNJnr2Mx1XPzmT4Zi8mcB2NraomPHjkhJSVErT0lJQUhIiIGiIiIiMm4mPwMAALGxsRg/fjw6deqErl27Yv369bh06RKmTp1q6NCIiIiMklkkAM8++yxu3LiBRYsW4dq1awgODsauXbsQEBBQa30qlUosXLhQ41CCOeDYTJO5js1cx0VkaJLg+SdEREQWx+TXABAREVH1MQEgIiKyQEwAiIiILBATACIiIgvEBEAH5nDr4YSEBDzxxBNwdnaGp6cnhg4divPnz6vVEUIgLi4Ovr6+sLe3R1hYGM6ePWugiHWXkJAASZIQExMjl5ny2P7880+MGzcO7u7ucHBwQLt27ZCWliZvN9Wx3b9/H6+++ioCAwNhb2+PJk2aYNGiRSgt/ef6/6Y6NiKjJKhatm7dKhQKhdiwYYP49ddfxaxZs4Sjo6PIysoydGjV0rdvX5GUlCTOnDkj0tPTxYABA4S/v7+4ffu2XOett94Szs7O4ssvvxSnT58Wzz77rPDx8RH5+fkGjLx6jh49Kho3bizatGkjZs2aJZeb6thyc3NFQECAiIqKEkeOHBEZGRni+++/FxcvXpTrmOrYFi9eLNzd3cXOnTtFRkaG+Pzzz4WTk5NYsWKFXMdUx0ZkjJgAVFPnzp3F1KlT1cpatGghXn75ZQNFpB85OTkCgDhw4IAQQojS0lLh7e0t3nrrLbnOvXv3hIuLi1i3bp2hwqyWgoICERQUJFJSUkRoaKicAJjy2ObNmye6d+9e4XZTHtuAAQPEv//9b7WyYcOGiXHjxgkhTHtsRMaIhwCqoezWwxEREWrlld162FSU3RLZzc0NAJCRkYHs7Gy1sSqVSoSGhprMWKdPn44BAwagT58+auWmPLYdO3agU6dOGDFiBDw9PdG+fXts2LBB3m7KY+vevTt++OEHXLhwAQDwyy+/4NChQ+j
fvz8A0x4bkTEyiysB1hVdbj1sCoQQiI2NRffu3REcHAwA8njKG2tWVladx1hdW7duxYkTJ3Ds2DGNbaY8tj/++ANr165FbGwsXnnlFRw9ehTR0dFQKpWYMGGCSY9t3rx5yMvLQ4sWLWBtbY2SkhK8+eabGD16NADTft+IjBETAB1U59bDpmDGjBk4deoUDh06pLHNFMd6+fJlzJo1C3v37oWdnV2F9UxxbKWlpejUqROWLFkCAGjfvj3Onj2LtWvXYsKECXI9UxzbZ599hs2bN2PLli1o1aoV0tPTERMTA19fX0RGRsr1THFsRMaIhwCqQZdbDxu7mTNnYseOHdi3bx8aNWokl3t7ewOASY41LS0NOTk56NixI2xsbGBjY4MDBw5g5cqVsLGxkeM3xbH5+PigZcuWamWPP/44Ll26BMC037c5c+bg5ZdfxqhRo9C6dWuMHz8eL774IhISEgCY9tiIjBETgGowp1sPCyEwY8YMbNu2DT/++CMCAwPVtgcGBsLb21ttrMXFxThw4IDRj7V37944ffo00tPT5UenTp0wduxYpKeno0mTJiY7tm7dummcrnnhwgX5xlem/L4VFhbCykr9T5K1tbV8GqApj43IKBlwAaJJKjsNcOPGjeLXX38VMTExwtHRUWRmZho6tGp54YUXhIuLi9i/f7+4du2a/CgsLJTrvPXWW8LFxUVs27ZNnD59WowePdpkT7l6+CwAIUx3bEePHhU2NjbizTffFL/99pv45JNPhIODg9i8ebNcx1THFhkZKRo2bCifBrht2zbh4eEh5s6dK9cx1bERGSMmADpYs2aNCAgIELa2tqJDhw7yqXOmBEC5j6SkJLlOaWmpWLhwofD29hZKpVL07NlTnD592nBB18CjCYApj+2bb74RwcHBQqlUihYtWoj169erbTfVseXn54tZs2YJf39/YWdnJ5o0aSIWLFggioqK5DqmOjYiY8TbARMREVkgrgEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEgIiKyQEwAiIiILBATACIiIgvEBICIiMgCMQEg0oPMzExIkoT09HRDh0JEpBUmAGRWhBDo06cP+vbtq7Ht/fffh4uLi3znPCIiS8YEgMyKJElISkrCkSNH8MEHH8jlGRkZmDdvHt577z34+/sbMEIiIuPABIDMjp+fH9577z289NJLyMjIgBACkyZNQu/evREVFaVRf/To0Rg1apRamUqlgoeHB5KSkgAAu3fvRvfu3eHq6gp3d3cMHDgQv//+e4UxJCcnw9XVVa3sq6++giRJamXffPMNOnbsCDs7OzRp0gTx8fG4f/++vD0uLg7+/v5QKpXw9fVFdHR0NV8NIqLy2Rg6AKLaEBkZie3bt2PixIkYPnw4zpw5gzNnzpRbd+zYsRg5ciRu374NJycnAMCePXtw584dDB8+HABw584dxMbGonXr1rhz5w5ef/11PP3000hPT9e4h7229uzZg3HjxmHlypXo0aMHfv/9dzz//PMAgIULF+KLL77A8uXLsXXrVrRq1QrZ2dn45ZdfdOqLiEiDYW9GSFR7/vrrL9GgQQNhZWUltm3bVmG94uJi4eHhIf7zn//IZaNHjxYjRoyocJ+cnBwBQL4VbUZGhgAgTp48KYQQIikpSbi4uKjts337dvHwr1yPHj3EkiVL1Op8/PHHwsfHRwghxLvvviuaNWsmiouLtRovEVF18BAAmS1PT088//zzePzxx/H0009XWE+hUGDEiBH45JNPADz4tv/1119j7Nixcp3ff/8dY8aMQZMmTVCvXj0EBgYCQI0WFKalpWHRokVwcnKSH5MnT8a1a9dQWFiIESNG4O7du2jSpAkmT56M7du3qx0eICKqCR4CILNmY2MDG5uqf8zHjh2L0NBQ5OTkICUlBXZ2dujXr5+8fdCgQfDz88OGDRvg6+uL0tJSBAcHo7i4uNz2rKysIIRQK1OpVGrPS0tLER8fj2HDhmnsb2dnBz8
/P5w/fx4pKSn4/vvvMW3aNCxduhQHDhyAQqHQZvhERBViAkAEICQkBH5+fvjss8/w3XffYcSIEbC1tQUA3LhxA+fOncMHH3yAHj16AAAOHTpUaXsNGjRAQUEB7ty5A0dHRwDQuEZAhw4dcP78eTRt2rTCduzt7TF48GAMHjwY06dPR4sWLXD69Gl06NChBqMlImICQATgwemDY8aMwbp163DhwgXs27dP3la/fn24u7tj/fr18PHxwaVLl/Dyyy9X2l6XLl3g4OCAV155BTNnzsTRo0eRnJysVuf111/HwIED4efnhxEjRsDKygqnTp3C6dOnsXjxYiQnJ6OkpERu6+OPP4a9vT0CAgJq4yUgIgvDNQBE/9/YsWPx66+/omHDhujWrZtcbmVlha1btyItLQ3BwcF48cUXsXTp0krbcnNzw+bNm7Fr1y60bt0an376KeLi4tTq9O3bFzt37kRKSgqeeOIJPPnkk1i2bJn8Ae/q6ooNGzagW7duaNOmDX744Qd88803cHd31/vYicjySOLRA5VERERk9jgDQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQWiAkAERGRBWICQEREZIGYABAREVkgJgBEREQW6P8BNNz4zl/QBbAAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfgAAAEyCAYAAAAWW8KtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABb6ElEQVR4nO3deVxU1fsH8M8MDMMMm7IPsiui4r7hmpiCS25tVlZKmlmWaVaWaYlLmJZmm5ZlYi5lZfb1Vy6gCWm4oGIumJosgjAgCLLDLOf3xzQXLusM2yw879drXjD33rnzHGaYZ85yzxEwxhgIIYQQYlaEhg6AEEIIIS2PEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEnwzRUVFQSAQwNraGmlpabX2h4SEoGfPnrxtvr6+EAgE3M3a2hpdunTB4sWLkZubyzs2IiICAoEAQqEQycnJtc5fUlICe3t7CAQChIeHNxpvzee2tbVFcHAwvvvuO/0K3ght3DXL0xzh4eGwtbXV6VhfX1/e3yM1NRUCgQBRUVHcNu1rl5qaqlcckZGR+PXXX2ttj42NhUAgQGxsrF7nq4s2Xu1NKBTCyckJEydOxKlTp5p9fl2Eh4fD19eXt00gECAiIkKv82RmZiIiIgIXL16stU/7PiGEtDxK8C2koqICy5cv1/n44cOH49SpUzh16hQOHTqEefPm4auvvsL48ePrPN7W1hbbt2+vtf2nn36CQqGASCRq0nNrk9ysWbOwZcsWnc9h7Pbv34933323wWMeeughnDp1CjKZTK9z15fg+/fvj1OnTqF///56na8hCxYswKlTp3DixAmsXbsWf//9N0aPHo3ExMQWew59nDp1Cs8//7xej8nMzMTKlSvrTPDPP/98m31hIaS9sTR0AOZi/Pjx2LNnD9544w306dOn0eM7dOiAIUOGcPdHjx6NoqIirF69Gjdu3EDXrl15xz/xxBPYsWMHVq5cCaGw6nvZtm3b8PDDD+PAgQM6x1rzuceOHQsfHx9s3LgRL730Up2PUalUUCqVEIvFOj+PIfXr16/RY1xcXODi4tJiz2lvb8/7u7YEb29v7pzDhw9Hly5dMGbMGGzevBlff/11nY8pKyuDtbV1q9SMW7p8np6e8PT0bNFzEkI0qAbfQpYsWQInJye89dZbTT6Hg4MDANRZG589ezbS09MRExPDbbtx4wZOnjyJ2bNnN/k5AU3CDwwM5LoYtM3D69evx5o1a+Dn5wexWIzjx48DAA4cOIChQ4dCKpXCzs4OoaGh9dbC0tPT8cgjj8De3h4ODg545plncPfuXd4xe/fuRVhYGGQyGSQSCbp37463334bJSUldZ7z6tWrGDNmDGxsbODi4oJXXnkFpaWlvGNqNtHXpa4m+sTEREyaNAmurq4Qi8Xw8PDAQw89hIyMDACaJuqSkhLs2LGDaz4PCQkBUH8T/ZkzZzB58mQ4OTnB2toanTt3xqJFixqMrT7aBKt9rbRliI6OxuzZs+Hi4gKpVIqKigoAmr/t0KFDYWNjA1tbW4wbN67O2n9UVBQCAwMhFovRvXv3erts6mqiv3PnDl544QV4eXnBysoKHh4eeOyxx5CdnY3Y2FgMGjQIAPDcc89xfzPtOepqoler1Vi/fj26desGsVgMV1dXzJw5k3sNtLTdXwkJCRg5ciSkUin8/f3xwQcfQK1W8863Zs0aBAYGQiKRoEOHDujduzc++eQTHf/qhJgmSvAtxM7ODsuXL8eRI0fwxx9/NHo8YwxKpRJKpRLFxcU4fvw4Nm3ahOHDh8PPz6/W8QEBARg5ciS+/fZbbtu3334LX19fjBkzplmxKxQKpKWl1arNfvrpp/
jjjz/w0Ucf4dChQ+jWrRv27NmDqVOnwt7eHt9//z22bduG/Px8hISE4OTJk7XO/fDDD6NLly74+eefERERgV9//RXjxo2DQqHgjrl58yYmTpyIbdu24fDhw1i0aBF+/PFHTJ48uc5YJ06ciDFjxuDXX3/FK6+8gq+++gpPPPFEs/4GgGY8Q2hoKLKzs/HFF18gJiYGmzZtgre3N4qKigBomqglEgnXF37q1Cls3ry53nMeOXIEI0eOxO3bt7Fx40YcOnQIy5cvR3Z2dpNi/PfffwGg1ms1e/ZsiEQi7Ny5Ez///DNEIhEiIyPx1FNPoUePHvjxxx+xc+dOFBUVYeTIkUhKSuIeGxUVheeeew7du3fHvn37sHz5cqxevVqn9/GdO3cwaNAg7N+/H4sXL8ahQ4ewadMmODg4ID8/H/379+e6lpYvX879zRpq5n/ppZfw1ltvITQ0FAcOHMDq1atx+PBhDBs2rNaYDrlcjqeffhrPPPMMDhw4gAkTJmDp0qXYtWsXd8z69esRERGBp556Cr///jv27t2LOXPmoKCgoNHyEWLSGGmW7du3MwAsISGBVVRUMH9/fzZw4ECmVqsZY4yNGjWKBQUF8R7j4+PDANS6DR48mGVlZfGOXbFiBQPA7t69y7Zv387EYjHLy8tjSqWSyWQyFhERwRhjzMbGhs2aNavReH18fNjEiROZQqFgCoWCpaSksFmzZjEA7M0332SMMZaSksIAsM6dO7PKykrusSqVinl4eLBevXoxlUrFbS8qKmKurq5s2LBhteJ+7bXXeM+/e/duBoDt2rWrzvjUajVTKBQsLi6OAWB///03t08b5yeffMJ7zPvvv88AsJMnT/LKWf3voS3T9u3buW3a1y4lJYUxxti5c+cYAPbrr782+Des7299/PhxBoAdP36c29a5c2fWuXNnVlZW1uA5a9LGu27dOqZQKFh5eTk7f/48GzRoEAPAfv/9d14ZZs6cyXv87du3maWlJVuwYAFve1FREXN3d2fTp09njFW9pv379+fes4wxlpqaykQiEfPx8eE9HgBbsWIFd3/27NlMJBKxpKSkesuSkJBQ62+vpX2faF27do0BYPPnz+cdd+bMGQaAvfPOO9y2UaNGMQDszJkzvGN79OjBxo0bx92fNGkS69u3b73xEWKuqAbfgqysrLBmzRqcO3cOP/74Y4PHjhgxAgkJCUhISMBff/2Fbdu24e7du3jwwQfrHXn++OOPw8rKCrt378bBgwchl8t1Gjlf08GDByESiSASieDn54cff/wRCxYswJo1a3jHTZkyhdddcP36dWRmZuLZZ5/ljQOwtbXFo48+itOnT9dqKn/66ad596dPnw5LS0uuuR8AkpOTMWPGDLi7u8PCwgIikQijRo0CAFy7dq1W/DXPOWPGDADgnbMpunTpgo4dO+Ktt97Cl19+yavlNsWNGzdw69YtzJkzB9bW1k06x1tvvQWRSARra2sMGDAAt2/fxldffYWJEyfyjnv00Ud5948cOQKlUomZM2dyLUVKpRLW1tYYNWoU142gfU1nzJjBayr38fHBsGHDGo3v0KFDGD16NLp3796k8tWkfQ1rvq8HDx6M7t2749ixY7zt7u7uGDx4MG9b7969eVe0DB48GH///Tfmz5+PI0eOoLCwsEViJcTY0SC7Fvbkk0/io48+wrJly/DII4/Ue5yDgwMGDhzI3R82bBh69OiBoUOHYsOGDVi7dm2tx9jY2OCJJ57At99+Cx8fH25wnL5GjBiBjz/+GAKBAFKpFJ07d4aVlVWt42qOLs/Ly6tzOwB4eHhArVYjPz8fUqmU2+7u7s47ztLSEk5OTty5iouLMXLkSFhbW2PNmjXo2rUrpFIp13dfVlZW5+Or0z6H9pxN5eDggLi4OLz//vt45513kJ+fD5lMhrlz52L58uV6XakAgBtr0JxBZAsXLsQzzzwDoVCIDh06wM/Pr87BczVfE20XgLb/uybtFzTt36zm66
Td1tglhHfv3m3RQXKNvcdqXopa870AAGKxmPe+Wbp0KWxsbLBr1y58+eWXsLCwwAMPPIB169bx/gcJMTeU4FuYQCDAunXrEBoaiq1bt+r12N69ewMA/v7773qPmT17Nr755htcunQJu3fvblKMNb9c1KdmItF+mGZlZdU6NjMzE0KhEB07duRtl8vl6NSpE3dfqVQiLy+PO9cff/yBzMxMxMbGcrV2APX2j9Z8vPY5qsfXHL169cIPP/wAxhguXbqEqKgorFq1ChKJBG+//bZe59L2k9ccHKYPT0/PJr1Wzs7OAICff/65wS+B2r+Z9m9YXV3banJxcWlW+eqLJysrq9YXh8zMTK5c+rC0tMTixYuxePFiFBQU4OjRo3jnnXcwbtw4pKen876QEmJOqIm+FYwdOxahoaFYtWoViouLdX6c9jphV1fXeo8ZOnQoZs+ejYcffhgPP/xwc0PVS2BgIDp16oQ9e/aAMcZtLykpwb59+7iR9dXV/BLy448/QqlUciPPtYmp5uV3X331Vb1x1Dznnj17AIA7Z0sQCATo06cPPv74Y3To0AEXLlzg9tWsIdana9eu6Ny5M7799ltuVHtbGTduHCwtLXHr1i0MHDiwzhugeU1lMhm+//573mualpaG+Pj4Rp9nwoQJOH78OK5fv17vMdrXVpe/2YMPPggAvEFyAJCQkIBr1641e0Bphw4d8Nhjj+Hll1/GvXv39J7kiBBTQjX4VrJu3ToMGDAAOTk5CAoKqrW/oKAAp0+fBqAZGX7t2jVERkZCLBbj5ZdfbvDc27Zta5WYGyMUCrF+/Xo8/fTTmDRpEubNm4eKigp8+OGHKCgowAcffFDrMb/88gssLS0RGhqKq1ev4t1330WfPn0wffp0AJquiY4dO+LFF1/EihUrIBKJsHv37npbMaysrLBhwwYUFxdj0KBBiI+Px5o1azBhwgSMGDGiWeX77bffsHnzZkybNg3+/v5gjOGXX35BQUEBQkNDueN69eqF2NhY/N///R9kMhns7OwQGBhY5zm/+OILTJ48GUOGDMFrr70Gb29v3L59G0eOHGlyC4wufH19sWrVKixbtgzJyckYP348OnbsiOzsbJw9exY2NjbcnAqrV6/G888/j4cffhhz585FQUEBIiIi6my2r2nVqlU4dOgQHnjgAbzzzjvo1asXCgoKcPjwYSxevBjdunVD586dIZFIsHv3bnTv3h22trbw8PCAh4dHrfMFBgbihRdewGeffQahUIgJEyYgNTUV7777Lry8vPDaa6/p/beYPHkyevbsiYEDB8LFxQVpaWnYtGkTfHx8EBAQoPf5CDEZhh3jZ/qqj6KvacaMGQxAo6PoLSwsmLe3N3vsscdYYmIi79jqo+gbos8o+oceeqjBY7QjuD/88MM69//6668sODiYWVtbMxsbGzZmzBj2119/1Rn3+fPn2eTJk5mtrS2zs7NjTz31FMvOzuYdGx8fz4YOHcqkUilzcXFhzz//PLtw4UKtkdezZs1iNjY27NKlSywkJIRJJBLm6OjIXnrpJVZcXFyrnPqOov/nn3/YU089xTp37swkEglzcHBggwcPZlFRUbxzX7x4kQ0fPpxJpVIGgI0aNYoxVvcoesYYO3XqFJswYQJzcHBgYrGYde7cudbVBTU19hrULENd7z/GNK/V6NGjmb29PROLxczHx4c99thj7OjRo7zjvvnmGxYQEMCsrKxY165d2bfffstmzZrV6Ch6xhhLT09ns2fPZu7u7kwkEjEPDw82ffp03uv8/fffs27dujGRSMQ7R81R9IxpRvavW7eOde3alYlEIubs7MyeeeYZlp6ezjuuritUGGO14t6wYQMbNmwYc3Z2ZlZWVszb25vNmTOHpaam1vk3I8RcCBir1i5HCCGEELNAffCEEEKIGaIETwghhJghSvCEEEKIGaIETwghhJghSvCEEEKIGTL76+DVajUyMzNhZ2fXKutjE0JIXRhjKCoqgoeHB2
/tBkLaitkn+MzMTHh5eRk6DEJIO5Went6i8/UToiuzT/B2dnYANP9k9vb2Oj1GoVAgOjoaYWFhei8wYoyoPMbN3MoDmF+ZmlKewsJCeHl5cZ9BhLQ1s0/w2mZ5e3t7vRK8VCqFvb292Xw4UXmMl7mVBzC/MjWnPNQ1SAyFOoYIIYQQM0QJnhBCCDFDlOAJIYQQM2T2ffCEEGLMVCoVFAqFocMgJkIkEsHCwkKnYynBE0KIATDGIJfLUVBQYOhQiInp0KED3N3dGx3ASQmeEEIMQJvcXV1dIZVKabQ9aRRjDKWlpcjJyQEAyGSyBo+nBE8IIW1MpVJxyd3JycnQ4RATIpFIAAA5OTlwdXVtsLmeBtkRQkgDGAPKylr2nNo+d6lU2rInJu2C9n3T2NgNqsETQkgdlEogNxe4exewbKVPSmqWJ02h6/uGEjwhhFRTVgbk5AD37gFqtWZbayV4QloTvW0JIe0eY8D9+5rEXlRk6GgIaRnUB08IabdUKkAuB65cAW7douRuTEJCQrBo0SLuvq+vLzZt2tSqzxkbGwuBQACBQIBp06a16nPVR/v8HTp0aPa5KMETQtqdsjIgLQ24dAm4cweorDR0RKQxCQkJeOGFF9rkua5fv46oqCi9HhMeHs4lZ+1tyJAhvGMqKiqwYMECODs7w8bGBlOmTEFGRgbvmKysrBb7IkMJnhDSbhQUADduAElJmgF02j52YvxcXFza7KoDV1fXJtWgx48fj6ysLO528OBB3v5FixZh//79+OGHH3Dy5EkUFxdj0qRJUKlU3DHu7u5wcHBobhEAUIInhJg5lQrIzgYuXzbeZnjGGEpKSgxyY4zpHGdISAgWLFiARYsWoWPHjnBzc8PWrVtRUlKC5557DnZ2dujcuTMOHTrEe1xSUhImTpwIW1tbuLm54dlnn0Vubi63v6SkBDNnzoStrS1kMhk2bNhQ67lrNtFv3LgRvXr1go2NDby8vDB//nwUFxdz+6OiotChQwccOXIE3bt3h62tLZeA9VVXfDW7EABALBbD3d2duzk6OnL77t+/j23btmHDhg0YO3Ys+vXrh127duHy5cs4evSo3jHpghI8IcQslZcDt29rmuEzMoy7Gb60tBS2trYGuZWWluoV644dO+Ds7IyzZ89iwYIFeOmll/D4449j2LBhuHDhAsaNG4dnn32WO29WVhZGjRqFvn374ty5czh8+DCys7Mxffp07pxvvvkmjh8/jv379yM6OhqxsbE4f/58g3EIhUJ8+umnuHLlCnbs2IE//vgDS5YsqfV3/eijj7Bz5078+eefuH37Nt544w29yqtPfLGxsXB1dUXXrl0xd+5cbsY5ADh//jwUCgXCwsK4bR4eHujZsyfi4+P1jkkXNIqeEGJWCgpoNHxr6tOnD5YvXw4AWLp0KT744AM4Oztj7ty5AID33nsPW7ZswaVLlzBkyBBs2bIF/fv3R2RkJHeOb7/9Fl5eXrhx4wY8PDywbds2fPfddwgNDQWg+RLh6enZYBzVa89+fn5YvXo1XnrpJWzevJnbrlAo8OWXX6Jz584AgFdeeQWrVq3Sq7zFxcU6xTdhwgQ8/vjj8PHxQUpKCt599108+OCDOH/+PMRiMeRyOaysrNCxY0fe49zc3CCXy/WKSVeU4AkhJk+lqpqUpqLC0NHoTyqV8pqX2/q59dG7d2/udwsLCzg5OaFXr17cNjc3NwDgaq/nz5/H8ePHYWtrW+tct27dQllZGSorKzF06FBuu6OjIwIDAxuM4/jx44iMjERSUhIKCwuhVCpRXl6OkpIS2NjYcGXTJndAM3d79Vq1Lm7duqVTfE888QT3e8+ePTFw4ED4+Pjg999/xyOPPFLv+RljrTbhESV4QojJKi/X1Nbz8kx7wJxAIOCSkrETiUS8+wKBgLdNm6zU/70garUakydPxrp162qdSyaT4ebNm3rHkJaWhokTJ+LFF1/E6tWr4ejoiJMnT2LOnDm86VvrilWfMQ
cA9D5eSyaTwcfHhyufu7s7KisrkZ+fz6vF5+TkYNiwYU16jsZQHzwhxOTcvw/cvAlcvaqptZtycjd3/fv3x9WrV+Hr64suXbrwbjY2NujSpQtEIhFOnz7NPSY/Px83btyo95znzp2DUqnEhg0bMGTIEHTt2hWZmZmtEn9T4gOAvLw8pKencyu+DRgwACKRCDExMdwxWVlZuHLlCiV4Qkj7plJpautXrgD//gsUFho6IqKLl19+Gffu3cNTTz2Fs2fPIjk5GdHR0Zg9ezZUKhVsbW0xZ84cvPnmmzh27BiuXLmC8PBwCIX1p6fOnTtDqVTis88+Q3JyMnbu3Ikvv/yyVeLXJb7i4mK88cYbOHXqFFJTUxEbG4vJkyfD2dkZDz/8MADAwcEBc+bMweuvv45jx44hMTERzzzzDHr16oWxY8e2SuzURE8IMWrm0gzfXnl4eOCvv/7CW2+9hXHjxqGiogI+Pj4YP348lyQ//PBDFBcXY8qUKbCzs8Prr7+O+/fv13vOvn37YuPGjVi3bh2WLl2KBx54AGvXrsXMmTNbpQyNxWdhYYHLly/ju+++Q0FBAWQyGUaPHo29e/fCzs6OO+7jjz+GpaUlpk+fjrKyMowZMwZRUVENLvnaHJTgCSFGSTs3PNXUjUdsbGytbampqbW21ey3DggIwC+//FLveW1tbbFz507s3LmT2/bmm282+DyvvfYaXnvtNd62Z599lvs9PDwc4eHhvP3Tpk1rUp96XfH9/vvv3O8SiQRHjhxp9DzW1tb47LPP8Nlnn+kdQ1NQgieEGA2VSlNTz8kxzdHwxDx4enpi8uTJ+P7779v8uW1tbaFUKmFtbd3sc1GCJ4QYXHm5ZrBcXp4myRNiCMHBwdyo97ou62sLFy9eBIAWabY3+CC7O3fu4JlnnoGTkxOkUin69u3LmyGIMYaIiAh4eHhAIpEgJCQEV69eNWDEhJCWUn00fE4OJXdiWBKJhBvh7+7u3uCxsbGxrbK6nfb5/fz8mn0ugyb4/Px8DB8+HCKRCIcOHUJSUhI2bNjAm+R//fr12LhxIz7//HMkJCTA3d0doaGhKKJpqggxSWq1JplfvUqj4QlpTQZtol+3bh28vLywfft2bpuvry/3O2MMmzZtwrJly7iZgHbs2AE3Nzfs2bMH8+bNa+uQCSFNVFFRNRqeauqEtD6DJvgDBw5g3LhxePzxxxEXF4dOnTph/vz53JzGKSkpkMvlvMn5xWIxRo0ahfj4+DoTfEVFBSqqjc4p/K96oFAoeDMcNUR7nK7HGzsqj3Ezt/IA/DIVFWn61025pq5S6f8amdPrSUyTQRN8cnIytmzZgsWLF+Odd97B2bNn8eqrr0IsFmPmzJncBPzauY213NzckJaWVuc5165di5UrV9baHh0drfecy9VnHDIHVB7jZm7lAcyvTPqUR99V2ghpaQZN8Gq1GgMHDuRWGerXrx+uXr2KLVu28CYsqDkRf0OT8y9duhSLFy/m7hcWFsLLywthYWGwt7fXKS6FQoGYmBiEhobWmsvYFFF5jJs5laeyUlNbz81V4M6dGLi7h0IoNO0yAYBEosCtW/q9RoWm3GRBzIJBE7xMJkOPHj1427p37459+/YBADeKUS6Xc/P5AprJ+WvW6rXEYjHEYnGt7SKRSO8Pz6Y8xphReYybKZensFDTv66d3Es7l4hQKDKLBK+9Ykmf18hUX0tiPgya4IcPH47r16/ztt24cQM+Pj4ANGv8uru7IyYmBv369QMAVFZWIi4urs6ViQghbUetrpqUprzc0NGYh61b2/b5XnhBv+NDQkIQFxcHAEhMTETfvn1bPigjpW01dnBwQEFBgWGD0ZFBL5N77bXXcPr0aURGRuLff//Fnj17sHXrVrz88ssANH/QRYsWITIyEvv37+cm+ZdKpZgxY4YhQyek3aqoADIygEuXgNu3Kbm3N3PnzkVWVhZ69uyp0/GxsbGYOnUqZDIZbGxs0LdvX+zevbvWMQ
KBoNbtn3/+aXa8dZ1XIBDgww8/5I4JCQmptf/JJ5/knScrK6tVrntvTQatwQ8aNAj79+/H0qVLsWrVKvj5+WHTpk14+umnuWOWLFmCsrIyzJ8/H/n5+QgODkZ0dDRvAn9CSOsrKtLU1k2k8kJaiVQqbXQSmOri4+PRu3dvvPXWW3Bzc8Pvv/+OmTNnwt7eHpMnT+Yde/36dd5YKRcXl2bHm5WVxbt/6NAhzJkzB48++ihv+9y5c7Fq1SruvkQi4e13d3eHg4NDs+NpSwafqnbSpEmYNGlSvfsFAgEiIiIQERHRdkERQgBUNcPfvQuUlRk6GmJsYmNjMXr0aPz222945513cP36dfTp0wfffPMNevXqBQB45513eI959dVXceTIEezfv79Wgnd1deVNdNaYkJAQriVh165dsLCwwEsvvYTVq1dzTeo1v4z873//w+jRo+Hv78/bru8XF1Ng8KlqCSHGp7KS3wxPyZ005M0338RHH32EhIQEuLq6YsqUKQ3OA3D//n04OjrW2t6vXz/IZDKMGTMGx48f1+m5d+zYAUtLS5w5cwaffvopPv74Y3zzzTd1HpudnY3ff/8dc+bMqbVv9+7dcHZ2RlBQEN544w2zmC3V4DV4Qojx0DbD379fNRKekMasWLECoaGhADQJ19PTE/v378f06dNrHfvzzz8jISEBX331FbdNJpNh69atGDBgACoqKrBz506MGTMGsbGxeOCBBxp8bi8vL3z88ccQCAQIDAzE5cuX8fHHH3MTplW3Y8cO2NnZcTOjaj399NPcoO4rV65g6dKl+Pvvv01+HgdK8IS0c2o1cO+eJrFTTZ00xdChQ7nfHR0dERgYiGvXrtU6LjY2FuHh4fj6668RFBTEbQ8MDERgYCDvfOnp6fjoo4/wwAMP4MSJE5gwYQK3/6uvvuLGag0ZMoQ3L8rQoUOxYcMGqFSqWiuyffvtt3j66adrLcVa/ctAz549ERAQgIEDB+LChQvo37+/vn8Oo0EJnpB2qmpSGkCpNHQ0xNzUnIwsLi4OkydPxsaNG3kTmdVnyJAh2LVrFwBg4MCB3DKqQO3ZTXVx4sQJXL9+HXv37m302P79+0MkEuHmzZuU4AkhpoOa4UlLO336NLy9vQFoVgm9ceMGunXrxu2PjY3FpEmTsG7dOryg48X3iYmJ3ARn2mVc63vumvcDAgJq1d63bduGAQMGoE+fPo0+99WrV6FQKHgTrJkiSvCEtAPUDE9a06pVq+Dk5AQ3NzcsW7YMzs7OmDZtGgBNcn/ooYewcOFCPProo9waI1ZWVtxAu02bNsHX1xdBQUGorKzErl27sG/fPm5W04akp6dj8eLFmDdvHi5cuIDPPvsMGzZs4B1TWFiIn376qdZ2ALh16xZ2796NiRMnwtnZGUlJSXj99dfRr18/DB8+vJl/GcOiBE+IGaNmeNOi78xyxuKDDz7AwoULcfPmTfTp0wcHDhyAlZUVACAqKgqlpaVYu3Yt1q5dyz1m1KhRiI2NBaCZofSNN97AnTt3IJFIEBQUhN9//x0TJ05s9LlnzpyJsrIyDB48GBYWFliwYEGtVoIffvgBjDE89dRTtR5vZWWFY8eO4ZNPPkFxcTG8vLzw0EMPYcWKFbVaAUwNJXhCzFBxcdWkNNQMT1rbiBEjcOXKlTr3RUVFISoqqsHHL1myBEuWLGnSc4tEImzatAlbtmyp95gXXnih3q4BLy8vbvpdc0MJnhAzwVhVMzytVEpay+bNm/HNN9/g1KlThg6lTdna2kKpVNYagW/MKMETYuIUCk1Sp2Z40tp2796Nsv8GcXh7eyM+Pt7AEbUd7Sh+U2q2pwRPiImiZnjS1jp16sS7HxISAmbAN5+2D78t1DeK35hRgifEhFAzPCFEV5TgCTEBCoVmNPzdu9QMTwjRDSV4QoxYSYmmtp6fT83whBD9UIInxMhom+Hv3tUkeEIIaQpK8IQYCW0zfG6u5ndCCGkOSvCEGJh2sFxSEl
BjfQ5CCGkySvCEGABjmn71nBzN4i/abZTg27fz59v2+QYM0O/4kJAQbta3xMRE9O3bt+WDMgBfX1+kpaUB0CyW06FDB8MG1EKEhg6AkPZEoQCysoDLl4GUFOpjJ6Zn7ty5yMrKQs+ePXU6vry8HOHh4ejVqxcsLS25RWhaQlFRERYtWgQfHx9IJBIMGzYMCQkJvGOys7MRHh4ODw8PSKVSjB8/Hjdv3uQdk5CQoNPCNqaGEjwhbaC0VJPQL18GMjOpj52YLqlUCnd3d1ha6tYArFKpIJFI8Oqrr2Ls2LEtGsvzzz+PmJgY7Ny5E5cvX0ZYWBjGjh2LO3fuAAAYY5g2bRqSk5Pxv//9D4mJifDx8cHYsWNRUu3btYuLC7eynTmhBE9IK9GOhv/nH+DaNc3vdKkbMSexsbEQCAT4/fff0adPH1hbWyM4OBiXL1/mjrGxscGWLVswd+5cuLu763zu8PBwTJs2DStXroSrqyvs7e0xb948VFZWAgDKysqwb98+rF+/Hg888AC6dOmCiIgI+Pn5cQvP3Lx5E6dPn8aWLVswaNAgBAYGYvPmzSguLsb333/fsn8MI0QJnpAWplRSMzxpX95880189NFHSEhIgKurK6ZMmQJFCzRTHTt2DNeuXcPx48fx/fffY//+/Vi5ciUAQKlUQqVS1Vr8RSKR4OTJkwCAiooKAOAdY2FhASsrK+4Yc0YJnpAWUloKpKYCly5RMzxpX1asWIHQ0FD06tULO3bsQHZ2Nvbv39/s81pZWeHbb79FUFAQHnroIaxatQqffvop1Go17OzsMHToUKxevRqZmZlQqVTYtWsXzpw5g6ysLABAt27d4OPjg6VLlyI/Px+VlZX44IMPIJfLuWPMGSV4QppBOxr++nVNM3xeHjXDk/Zn6NCh3O+Ojo4IDAzEtWvXdHrs7du3YWtry90iIyO5fX369IFUKuU9T3FxMdLT0wEAO3fuBGMMnTp1glgsxqeffooZM2ZwK76JRCLs27cPN27cgKOjI6RSKWJjYzFhwgSTWhWuqQya4CMiIiAQCHi36n00jDFERETAw8MDEokEISEhuHr1qgEjJkSjejN8crJmZTdCSBWBjtd8enh44OLFi9ztxRdf1PncnTt3RlxcHJf0z549C4VCAT8/P+7YAQMG4OLFiygoKEBWVhYOHz6MvLw83jHmyuA1+KCgIGRlZXG36oMz1q9fj40bN+Lzzz9HQkIC3N3dERoaiiLthcOEtDFtMzyNhiekyunTp7nf8/PzcePGDXTr1k2nx1paWqJLly7crfpo9r///ptbf177PLa2tvD09OSdw8bGBjKZDPn5+Thy5AimTp1a63kcHBzg4uKCmzdv4ty5c3UeY24MPtGNpaVlnSMrGWPYtGkTli1bhkceeQQAsGPHDri5uWHPnj2YN29eW4dK2inGNGuu5+RQTZ2QuqxatQpOTk5wc3PDsmXL4OzszLvePSkpCZWVlbh37x6Kiopw8eJFAGh0opzKykrMmTMHy5cvR1paGlasWIFXXnkFQqGmbnrkyBEwxhAYGIh///0Xb775JgIDA/Hcc89x5/jpp5/g4uICb29vXL58GQsXLsS0adMQFhbW0n8Go2PwBH/z5k14eHhALBYjODgYkZGR8Pf3R0pKCuRyOe9FEIvFGDVqFOLj4+tN8BUVFdzISQAoLCwEACgUCp1HdWqPa4lRoMaAytM0SqWmTz0vD/jvypxWoVYreD/NgbmVSaXS/z3XlPenvjPLGYsPPvgACxcuxM2bN9GnTx8cOHAAVlZW3P6JEydyM8UBQL9+/QBoKnINGTNmDAICAvDAAw+goqICTz75JCIiIrj99+/fx9KlS5GRkQFHR0c8+uijeP/99yESibhjsrKysHjxYmRnZ0Mmk2HmzJl49913W6jkxs2gCT44OBjfffcdunbtiuzsbKxZswbDhg3D1atXIZfLAQBubm68x7i5ufHeKDWtXbuWu4yiuujoaN5gDV3ExM
Todbyxo/IYN7ncvMoDmF+Z9HnPlWoXGWgHRowYgStXrtS7PzU1tcnnXrlyZZ2f6QAwffp0TJ8+vcHHv/rqq3j11Veb/PymzKAJfsKECdzvvXr1wtChQ9G5c2fs2LEDQ4YMAVB7oAZjrMHBG0uXLsXixYu5+4WFhfDy8kJYWBjs7e11ikuhUCAmJgahoaG8b4KmisrTOMaA+/cNs0SrWq2AXB4Dd/dQCIWm//oA5lcmiUSBW7f0e89pWw/NzebNm/HNN9/g1KlThg6lxQQFBSE5OdnQYbQ4gzfRV2djY4NevXrh5s2bXP+NXC6HTCbjjsnJyalVq69OLBZDLBbX2i4SifROBk15jDGj8tSmVGqWZ717t6oZXmigoadCocgskmF15lIm7RVV+rznzOl/TWv37t3coDdvb2/Ex8cbOKKWcfDgQa5LRdeKoCkwqgRfUVGBa9euYeTIkfDz84O7uztiYmK4/prKykrExcVh3bp1Bo6UmLqyMs2guXv3ALXa0NEQYho6derEux8SEtJoP3pTRUVFtcp56+Lj49Nmz9WWDJrg33jjDUyePBne3t7IycnBmjVrUFhYiFmzZkEgEGDRokWIjIxEQEAAAgICEBkZCalUihkzZhgybGKitM3w1ZdoJYQQc2XQBJ+RkYGnnnoKubm5cHFxwZAhQ3D69Gnu29SSJUtQVlaG+fPnIz8/H8HBwYiOjoadnZ0hwyYmRqXSNMFXb4YnxBioqfmINIGu7xuDJvgffvihwf0CgQARERG8yyII0RU1wxNjZWVlBaFQiMzMTLi4uMDKykrnmd9I+8UYQ2VlJe7evQuhUMi7FLEuRtUHT0hL0E5KQ83wxFgJhUL4+fkhKysLmZmZhg6HmBipVApvb29uwp/6UIInZkGlqhoNX22eI0KMlpWVFby9vbllTwnRhYWFBSwtLXVq8aEET0xaebmmtp6XR83wxPQIBAKzu3yVGA9K8MRk/fuvZvEXQgghtVGCJyZD2wyfna25X1xsuElpCCHE2FGCJ0avZjM8NcUTQkjjKMETo6WdlMZMp/QmhJBWRQmeGBWVSlNTz8mh0fCEENIclOCJUaDR8IQQ0rIowRODomZ4QghpHZTgSZujZnhiChjTvE+zsgCRCLC1NXREhOin2QlepVLh8uXL8PHxQceOHVsiJmKmKiqqmuFp4i5ijJRKzXs0MxOQy6sWJ/LxoQRPTI/eCX7RokXo1asX5syZA5VKhVGjRiE+Ph5SqRS//fYbQkJCWiFMYsoKCzUfmvfvGzoSQmorL9ck86wszfuUvnwSc6F3gv/555/xzDPPAAD+7//+DykpKfjnn3/w3XffYdmyZfjrr79aPEhietTqqrnhy8sNHQ0hfMXFmlp6VpZmtUHGDB0RIS1P7wSfm5sLd3d3AMDBgwfx+OOPo2vXrpgzZw4+/fTTFg+QmBZqhifGiDFNIs/K0txopUHSHuid4N3c3JCUlASZTIbDhw9j8+bNAIDS0lJYWFi0eIDENFAzPDE2KpWmBUlbU6cBnaS90TvBP/fcc5g+fTpkMhkEAgFCQ0MBAGfOnEG3bt1aPEBivNTqqtHw1AxPjEFFRVV/enY2tSKR9k3vBB8REYGePXsiPT0djz/+OMRiMQDNGrVvv/12iwdIjE9FhaZmlJtLH6DE8IqLq5re8/KoP50QrSZdJvfYY48BAMqrVdtmzZrVMhERo0XN8MRY5OdXNb3TJEmE1E3vBK9SqRAZGYkvv/wS2dnZuHHjBvz9/fHuu+/C19cXc+bMaY04iYFom+Hv3gXKygwdDWmv1GrNl0u5XJPYqUuIkMbpvZr2+++/j6ioKKxfvx5WVlbc9l69euGbb75p0eCI4VRWAhkZwKVLwO3blNxJ21MogPR04MwZ4LffgPh4IDmZkjshutK7Bv/dd99h69atGDNmDF588UVue+/evfHPP/+0aHCk7RUVaWpKBQWGjoS0R6WlVU3vtPAQIc2jd4K/c+cOunTpUmu7Wq
2GQqFokaBI21KrNdcI5+RQTZ20vYKCqqRO4zsIaTl6J/igoCCcOHECPj4+vO0//fQT+vXr12KBkdZXWVk1KY1SaehoSHuhVmvGdGRlafrUS0sNHREh5knvBL9ixQo8++yzuHPnDtRqNX755Rdcv34d3333HX777bcmB7J27Vq88847WLhwITZt2gQAYIxh5cqV2Lp1K/Lz8xEcHIwvvvgCQUFBTX4eUtUMf/8+XVJE2oZCobkuXZvUqbGPkNan9yC7yZMnY+/evTh48CAEAgHee+89XLt2Df/3f//HTXqjr4SEBGzduhW9e/fmbV+/fj02btyIzz//HAkJCXB3d0doaCiKaJ7JJsnLA5KSgBs3NM2ilNxJa0tNBU6eBH7/HTh7VjNojpI7IW1D7wQPAOPGjUNcXByKi4tRWlqKkydPIiwsrEkBFBcX4+mnn8bXX3/NW26WMYZNmzZh2bJleOSRR9CzZ0/s2LEDpaWl2LNnT5Oeqz2qrNTUmgDNhyv1sZPWdP8+8M8/wJ9/au5fuqRpLaLBcoS0vWavB99cL7/8Mh566CGMHTsWa9as4banpKRALpfzvjiIxWJuedp58+bVeb6KigpUVJt0uvC/WTAUCoXOgwC1x5nyoMGSEk0/5/37gEqlKYdabbrlqU5bDiqP4WkHaMrl/P50oVDB+2nqBAL9PxNM+fODmAe9E7xQKIRAIKh3v0qPuUt/+OEHXLhwAQkJCbX2yeVyAJrFbapzc3NDWlpavedcu3YtVq5cWWt7dHQ0pFKpzrEBQExMjF7HGzu5nMpjzEy5PI6OmltN/fubbpnqos9nQimNHiQGpneC379/P+++QqFAYmIiduzYUWdirU96ejoWLlyI6OhoWFtb13tczS8TjLEGv2AsXboUixcv5u4XFhbCy8sLYWFhsLe31yk2hUKBmJgYhIaGQiQS6fQYQ6qs1PSv1zcaXq1WQC6Pgbt7KIRC4y9PY6g8ba+8vGqQXG5u403uQqEC/fvH4MKFUKjVxlkmfXh6lkMq/RWPPvqozp8JhTSHLjEwvRP81KlTa2177LHHEBQUhL179+o8Ve358+eRk5ODAQMGcNtUKhX+/PNPfP7557h+/ToATU1eJpNxx+Tk5NSq1VcnFou5BXCqE4lEeifrpjymLRUXV01Kox0wJ2xgVIVQKDLaBNIUVJ7WVVhYtYjLvXtNO4daLTLJBM8YQ0FBMlJSYpCSEoP09D/Qt28PPPnkkzp/JhjzZwdpH1qsDz44OBhz587V+fgxY8bg8uXLvG3PPfccunXrhrfeegv+/v5wd3dHTEwMd319ZWUl4uLisG7dupYK2+QwVjUpDbUAkpbEmKYVKCtLM/FMSYmhI2pbpaV5SEv7g0vq9++n8vbfunULjC49ISakRRJ8WVkZPvvsM3h6eur8GDs7O/Ts2ZO3zcbGBk5OTtz2RYsWITIyEgEBAQgICEBkZCSkUilmzJjREmGbFIVCM2ju7l2alIa0HKVS82UxM1MzSK6y0tARtR2lsgIZGX9xCV0uvwCgKoELhSJ06jQUfn6hGDZsNAYNym6we5AQY6N3gu/YsSPvTc4YQ1FREaRSKXbt2tWiwS1ZsgRlZWWYP38+N9FNdHQ07OzsWvR5jFldzfCENEd5uSaZZ2Vp3lt6jIs1aYypkZNzuVqz+wkolfzrRp2dg+DnFwo/v1B4ez8AKytbAICPjwIWFgcNETYhTaZ3gv/44495CV4oFMLFxQXBwcG869ibIjY2lndfIBAgIiICERERzTqvqaFmeNLSioqqmt7z89vPl8XCwgwuoaemHkNpaQ5vv62tDL6+Y/9L6mNhayur50yEmB69E3x4eHgrhEEAaoYnLUf7JVE7SK69TP5YUVGItLRYLqnfu3edt18ksoG39yiulu7s3IOa3YnZ0inBX7p0SecT1pxuljSupERTW29PNSvS8lQqzZdD7cps1eZ7MlsqlQKZmWf/q6EfxZ07p8FYVZ+DQCCETDaIS+
idOg2BhYWVASMmpO3olOD79u0LgUDQ6AhSgUCg10Q37RljmoSek9P+RiuTllNRUdWfnp1t/v3pjDHk5V3/L6HHIC0tFpWV/OaJjh27VOtHD4FE0ryuQ0JMlU4JPiUlpbXjaDe0zfC5ubToBmma4uKqpve8PPNv9SkpyUFq6tH/mt2Poqgog7dfInGCr+8Y+PmFwtd3LDp08DVMoIQYGZ0SfM2134n+qBmeNEf1/nRznyBNoShFevoJrh89J4ffRWhhIYaX1wiulu7m1hcCQZPWzSLErDX5OvikpCTcvn0blTUunJ0yZUqzgzIX1AxPmkrbn65N6uXlho6o9ajVKmRnJ3I19IyMk1Cp+J8rbm59uYTu6TkCIpHEQNESYjr0TvDJycl4+OGHcfnyZV6/vHYkKvXBa5rec3M1H9DUDE90VVlZNUguO9u8r6QoKEjhEnpa2jGUlfHnwrW39/qvyT0Uvr4PwsbG1UCREmK69E7wCxcuhJ+fH44ePQp/f3+cPXsWeXl5eP311/HRRx+1Rowmo7RU88FMzfBEV6WlmoRubQ1ER5tvUi8ry+emgU1NPYr8/Fu8/WKxPXx8RsPXV3M9uqNjV7p8jZBm0jvBnzp1Cn/88QdcXFwgFAohFAoxYsQIrF27Fq+++ioSExNbI06jRc3wRF8FBVWXst2/r1kgaODAxldoMyVKZQUuX76M48dPISXlOLKyzoGxqgIKhZbw8BjCTTDj4TEYQmGLLY1BCEETErxKpYKtrWb6RmdnZ2RmZiIwMBA+Pj7cCnDtgVJZNSkNNcOThqjVVf3pcrl5zk7IGMPdu1e4Gvrt23FQKPgFdXLqziV0b+8QiMXtZ8ppQgxB7wTfs2dPXLp0Cf7+/ggODsb69ethZWWFrVu3wt/fvzViNCqlpZra+r171AxP6qdQaLprtP3p5vglsKgok0voKSlHUVIi5+3v0KEDPD0nwNc3DL6+Y2Fvr/tiVISQ5tM7wS9fvhwl/7VFr1mzBpMmTcLIkSPh5OSEvXv3tniAxoAxTbNqTo7mGmRC6lJWVtX0nptrXk3uAFBRUYTbt+O4a9Jzc5N4+y0tJdw0sP7+ozBxYjrOn3/IJNeDJ8Qc6J3gx40bx/3u7++PpKQk3Lt3r9Yqc+ZAqaTR8KRh9+9XLeJSUGDoaFqWWq1EZmYCl9Dv3DkFtbr6KEABZLKB3AQznp7DYGkpBgAIhQoIBBl1n5gQ0ib0TvA7duzAY489BhsbG26bo6NjiwZlLJKSGj+GtC9qtWb2OG1N3Zz60xljuHfvJpfQ09KOo6LiPu+YDh38uYTu6/sgJBLz/N8nxBzoneDfeOMNzJ8/H5MnT8YzzzyD8ePHw9LSPEe/qtWaEc6kfVMqNYPjtLcaczuZtJKSu0hNPcYl9cLC27z91tYd4es7hltStWNH8x9nQ4i50DszZ2Vl4fDhw/j+++/x5JNPQiKR4PHHH8czzzyDYcOGtUaMhLS58vKqpve7d82nP12hKENGxkmkpGgSenY2/7JWCwsreHoO5xK6u3t/CIUWBoqWENIceid4S0tLTJo0CZMmTUJpaSn279+PPXv2YPTo0fD09MStW7caPwkhRqiwUJPQ5XLNVRLmgDE1srMvcgk9I+MklEr+vLeurr25hO7lNRJWVjb1nI0QYkqa1bYulUoxbtw45OfnIy0tDdeuXWupuAhpdYxp+tO1NXVzmajo/v00LqGnph5DWVkub7+trQc3r7uv7xjY2robKFLjJRAA9vaAk5Pm5uUFZNCYQWJimpTgtTX33bt34+jRo/Dy8sJTTz2Fn376qaXjI6RFKZWayx21NXVz6E8vLy9AWlost0b6vXs3efutrGzh7R3CJXUnp25md8VLc4lEQMeOgLMz4OiouVUfWiSVGi42QppK7wT/1FNP4f/+7/8glUrx+OOPIzY2lvreiVErL9ck86wsTXI39fWQVKpK3LlzhkvomZlnedPACgQW6NQpmGt29/AIhoUFXYtenY
2Npmbu6Kj5aW+vqbUTYk70TvACgQB79+7FuHHjzHb0PDF9RUVVTe+mvvgPYwy5ude4hH77dhwqK/kzLjk6BsLPT5PQvb1DYG3tYKBojY9QCHToUNXc7uioWdyHEHOnd4bes2dPa8RBSLMwphkYp10/vajI0BE1T3GxnLt0LSXlKIqLM3n7JRJnLqH7+o6Fg4O3gSI1PlZW/Kb2jh0BC7oQgLRDVAUnJkul0szzrk3qFRWGjqjpysvL8e+/h5GcfBwpKTG4e/cKb7+lpTW8vEZy/eiurr0hENAkDYCmeV3b1O7kBPy3FhYh7R4leGJSKio0/ekWFsDhw6Y7hbBarYJcfv6/Gno0MjPjoVTyp4F1d+/3Xw09FF5ew2FpSe3KFhZVyVz7U0TDCwipk84JPiMjA56eLbsa1JYtW7BlyxakpqYCAIKCgvDee+9hwoQJADR9jytXrsTWrVuRn5+P4OBgfPHFFwgKCmrROIhxKy6uqqXn5WkGQw0caHqD5fLzb/2X0GOQlvYHyssLePsdHHy4hO7r+yCkUmfDBGpEpNKqpnZnZ01tnWaXJEQ3Oif4nj174rPPPsOzzz7bYk/u6emJDz74AF26dAGgmed+6tSpSExMRFBQENavX4+NGzciKioKXbt2xZo1axAaGorr16/Dzo7WkjZn1fvTCwv5+0xltHNpaR7S0v7gllQtKEjh7ReLHeDj8yA6d34QU6ZYIiNjNhizMlC0hicU8q89d3Sky9MIaQ6dE3xkZCRefvll/Prrr9i6dSucnJya/eSTJ0/m3X///fexZcsWnD59Gj169MCmTZuwbNkyPPLIIwA0XwDc3NywZ88ezJs3r9nPT4yHSqWZElab1MvLG3+MsVEqK5CR8RdXS5fLLwCoGr4vFIrQqdNQrh9dJhsAodASQqECMtlB3LkjMOnR/vqystIMgNMm9I4d+deeE0KaR+d/p/nz52PChAmYM2cOgoKCsHXrVkyZMqXFAlGpVPjpp59QUlKCoUOHIiUlBXK5HGFhYdwxYrEYo0aNQnx8fL0JvqKiAhXVRlsV/lf9UygUUOjYYas9Tq020Q7eGrTlMLbyVFZqrkuXyzU/q3dBN9QMKxQqeD8NhTE1cnIuIzn5GFJSjuH27ZNQKst4x7i49ICf31j4+4+Bt/dIWFlVHwHGACiMpjwtqa4y2dpWJfSOHYG6GuGMdc5/lUpTDl0/Q/Q9lpDWIGBM/zrD559/jtdeew3du3evdS38hQsX9DrX5cuXMXToUJSXl8PW1hZ79uzBxIkTER8fj+HDh+POnTvw8PDgjn/hhReQlpaGI0eO1Hm+iIgIrFy5stb2PXv2QErtfaSZcnNz8ffff+PixYu4dOkS7t/nL6fasWNH9OnTh7uZ61LKpHGlpaWYMWMG7t+/D3t7e0OHQ9ohvRvE0tLSsG/fPjg6OmLq1KnNnuwmMDAQFy9eREFBAfbt24dZs2YhLi6O219zSk3GWIPTbC5duhSLFy/m7hcWFsLLywthYWE6/5MpFArExMTA3T0UQqHpD9FVqxWQyw1Xnvv3Nc3u2dma35tLKFSgf/8YXLgQCrW6dctTUVGItLS4/2rpR5GXd4O3XySygY/PA/DzGwN//zFwdu7BvT+TkzW3xrRleVqLtbWmVq4dEGdnp0BOjvn8D0kkCty6FYPQ0FCIdBy2X1hz8AghbUyv7Pz111/j9ddfx9ixY3HlyhW4uLg0OwArKytukN3AgQORkJCATz75BG+99RYAQC6XQyaTccfn5OTAzc2t3vOJxWKIxeJa20Uikc7/mFpCocgsPpy02qo8anVVf7pcDpSWttbziFo8IapUCmRmnuUGxt25cxqMVQ3XFwiEkMkGcf3onToNgYVF1cA4xpo+a15rlKc1CASa5vXqk8nUvPZc29RuLv9D2oly9Pkc0ffzhpCWpnOCHz9+PM6ePYvPP/8cM2fObLWAGGOoqKiAn58f3N3dERMTg379+gEAKisrERcXh3Xr1r
Xa85OmUSg0NfTMTM1PU+l+ZIwhL+86Nw1sWlosKiv50+B17NiFS+je3iGQSDoaKFrDsLSsSuTa0e2UuwgxfjoneJVKhUuXLrXotfDvvPMOJkyYAC8vLxQVFeGHH35AbGwsDh8+DIFAgEWLFiEyMhIBAQEICAhAZGQkpFIpZsyY0WIxkKYrK9Mk9KwsIDfXeAdI1VRSksObBraoiL8OqETiBF/fMdw0sB06+BomUAORSvmXqjk4mM6liYSQKjon+JiYmBZ/8uzsbDz77LPIysqCg4MDevfujcOHDyM0NBQAsGTJEpSVlWH+/PncRDfR0dF0DbwBFRRomt0zMzW/mwKFohTp6Se4hJ6T8zdvv4WFGF5eI7iE7u7er91MA6tdiKX6ZDK0EAsh5sGgV51u27atwf0CgQARERGIiIhom4BILWq1ZvY4bU29tfrTW5JarUJ2diKX0DMyTkKl4i/87ubWl2t29/QcAZFIYqBo25aVFX/e9g4d6NpzQswV/WuTWpRKTS1de6usbPwxhlZQkMIl9LS0Yygru8fbb2/vxZsG1sbG1UCRti07O/687dT4RUj7QQmeANDMHKddP/3uXePvTy8uLsa1a78gOTkWqakxyM+/xdsvFtvDx2c0fH1D4ec3Fo6OXRu8vNIcWFhUXaqmHeFu1X5nviWk3aME344VFmoSulyumfvdmCmVFbhz5xQ32l0uPw91tW8hQqElPDyG/NfsPhYeHoMhFJr329vauqqp3clJMxiOFmIhhGiZ9ycg4WFMM9pdW1MvKTF0RPVjjOHu3Svc9ei3b8dBoeAPAHB27gZf3zD4+Y2Ft3cIxGLzbX8WCDQJvHr/OU3MSAhpCCV4M6dUaq5LB4AjR4x7EZeiokwuoaekHEVJiZy338bGDb6+Y+HvPxpTpwKpqTNNYmKYphCJNM3t1SeTocFwhBB90EeGGSovr7qULSdHs23gQOMbLFdRUYTbt+O4a9Jzc5N4+y0tJfD2HgU/v7Hw8wuFi0svCAQCCIUKODsfRGqqYeJuLV5eVcnc3p6uPSeENA8leDNRVFTV9J6fz58u1Vj6ZdVqJTIzE7iEfufOKajV1ZaQgwAy2UDuenRPz2GwtKw97bCp0157Xv1StXv3gH79jOe1IoSYPkrwJooxTVLQrp9eVNT4Y9oaYwz37t3kEnpa2nFUVPBXm+nQwZ9L6L6+D0IiMb/V18TiqkvVHB01Te/auc0B479igRBimijBmxCVStPkrk3q1Za9NxolJXeRlvbHf9ekx6Cw8DZvv7V1R/j4PMhNMtOxo7+BIm099vb8a89rLsRCCCFtgRK8kauo0PSna5dbVakaf0xbUijKkJHxF5fQs7MTefstLKzg6Tkcvr6afnR39/4QCi3qOZvpsbDgL8Li5EQLsRBCjAMleCNUXFzVn37vXtOXH20NjKmRnX0RKSmaZveMjJNQKvlD811de3MJ3ctrJKysbAwUbcuTSvnzttNCLIQQY0UJ3khU708vLDR0NHz376dxCT019RjKynJ5+21tPbgmd1/fMbC1dTdQpC1LKKxqbtfeJO1jynpCiBmgBG8gKpVmSlhtUjem69PLywuQlhbLzRp3795N3n4rK1t4e4dwSd3JqZtZTAOrXYhF29TesSNde04IMV308dWGKiv5/elKZeOPaQsqVSXu3DnDJfTMzLNgrGpot0BgAQ+PwVxC9/AIhoWF6Xc029ry5223tzd0RIQQ0nIowbeykpKqWnpennFcEsUYQ27uNS6h374dh8rKYt4xjo6B3AQz3t4hsLZ2MFC0LUMo1NTItU3tjo6ay9cIIcRcUYJvBQUFmgFymZnG05+en5+Py5d3Izn5OFJSjqK4OJO3XyJx5hK6r+9YODh4GyjSlmFtXXvdc5pEhhDSnlCCbwFqNb8/vazM0BEBlZUlSE//87/L16Jx9+5V3n5LS2t4eY3kmt1dXXtDIDDNDCgQaNY5rz5vO117Tghp7yjBN5FCoelHz8zU/F
QoDBuPWq2CXH6eux49IyMeanVVUAKBAG5ufeHnFwZf31B4eQ2HpaW1ASNuOu3At8DAqoRO154TQggfJXg9lJZW1dJzcw3fn56ff4tL6Glpf6C8vIC338HBB35+ofD3H41p01S4ceNJk1x9TSrl953b2Wleg8BAanYnhJD6UIJvREFB1cpsBQWGjaW0NI+bBjY19SgKClJ4+8VihxrTwHbmVl+ztz9ooKj1o12IpfpkMtY1GhoM/cWKEEJMASX4Bhw9qplVzlCUygreNLBy+QUAVdPaCYUidOo0lEvoMtkACIWm9ZJaWfGnee3Qga49J4SQlkAfpQ0oLW3b52NMjZycy1xCT08/AaWSP2LP2TmIS+je3g/Aysq0RpPZ2fETup2doSMihBDzRAnewAoLM7iEnpp6DKWlObz9trYybl53X98xsLPzMFCk+rOw0Fx7Xn0yGSsrQ0dFCCHtAyX4NlZRUchNA5uSEoN7967z9otENvD2HsXV0p2de5jMNLASCf/acwcHGgRHCCGGYtAEv3btWvzyyy/4559/IJFIMGzYMKxbtw6BgYHcMYwxrFy5Elu3bkV+fj6Cg4PxxRdfICgoyICR606lUiAz8yw3MO7OndNgrGrNV4FACJlsEJfQO3UaAgsL46/mCgSaBF49oUulho6KEEKIlkETfFxcHF5++WUMGjQISqUSy5YtQ1hYGJKSkmBjo1lidP369di4cSOioqLQtWtXrFmzBqGhobh+/TrsjLADlzGGvLzrXEJPSzuOysoi3jEdO3ap1o8eAomko4Gi1Z1IpGlurz6ZDA2GI4QQ42XQj+jDhw/z7m/fvh2urq44f/48HnjgATDGsGnTJixbtgyPPPIIAGDHjh1wc3PDnj17MG/ePEOEXUtJSQ5SU4/+1+x+FEVFGbz9EokTfH3HcNPAdujga5hA9WBjw7/23N6e1j0nhBBTYlR1sPv37wMAHB0dAQApKSmQy+UICwvjjhGLxRg1ahTi4+PrTPAVFRWoqKjg7hf+Nxm8QqGAQsfp5rTHCYV1H69QlOL27ZNISfkDKSlHkZ19ibffwkIML6/h8PcfAz+/MXB371tjGti2nfZOW476yiMUVjW3Ozpqauo1rz1nTHMzBtoZ+qrP1GfKzK08gPmVSaXSlEPXzxB9jyWkNRhNgmeMYfHixRgxYgR69uwJAJDL5QAANzc33rFubm5IS0ur8zxr167FypUra22Pjo6GVM9O4v79YwAAKpUKKSkpuHjxIv7++29cu3YNyhprvfr5+aFPnz7o27cvunfvDjG3VJkcAL+lwlC05WkIY8C9e20QTAuQyxsvjykxt/IA5lemmBjdy1Pa1tfZElKD0ST4V155BZcuXcLJkydr7as5ipwxVu/I8qVLl2Lx4sXc/cLCQnh5eSEsLAz2Oi74rVAosGvXLhw4UInk5Fikph5HWRk/69nbe8HPbwz8/B6En9+DsLFxBaBZ4/3yZZ2eps106KBAly4xUKlC4egown/DG0yWWq2AXB4Dd/dQCIWmN/VuTeZWHsD8yiSRKHDrVgxCQ0Mh0nHhg0JjWUqStFtGkeAXLFiAAwcO4M8//4Snpye33d3dHYCmJi+TybjtOTk5tWr1WmKxuFrtuYpIJNLpH1Mul2PEiBG4detWjfPaw8dnNHx9Q+HnNxaOjl15XzKMZfpUC4uqke3anxYWmql2vbxEZvFhqyUUUnmMnbmUycJC81PXzxHtsYQYkkETPGMMCxYswP79+xEbGws/Pz/efj8/P7i7uyMmJgb9+vUDAFRWViIuLg7r1q1rlZjc3NxQUlICCwsLeHgMga9vGPz8xsLDY7BRTgMrlfLnbXdwqD0Yzli+fBBCCGk7Bs1YL7/8Mvbs2YP//e9/sLOz4/rcHRwcIJFIIBAIsGjRIkRGRiIgIAABAQGIjIyEVCrFjBkzWiUmgUCA//3vf0hOTsbVq48a1epr1QfDaUe4SySGjooQQogxMmiC37JlCwAgJCSEt3
379u0IDw8HACxZsgRlZWWYP38+N9FNdHR0q14D369fP2RlZbXa+XVlZVVVO3dy0oxup2vPCSGE6MLgTfSNEQgEiIiIQEREROsHZGC2tvx523UcE0gIIYTUQvVBAxEKNTXy6pPJ1DE2kBBCCGkSSvBtxNqa33feoQMtxEIIIaT1UIJvBQKBZp1zbVO7kxNM/tpzQgghpoUSfAuwtORfe+7oqFmchRBCCDEUSvBNIJXy+87ruvacEEIIMSRK8I0QCjX95dX7z2suxEIIIYQYG0rwDRg+XJPYtdNUEkLaD0tLzVwUIhFNKEVMEyX4Bjg50Uh3QsyNQKBJ2iJRVQKv62f1bjda+ZWYIkrwhBCzIRQ2nLS1iZ2Q9oASPCHEJFhaNl7rpu40QqpQgieEGJRAwO/vru8ndZcRoh9K8ISQViMUaqZgbih5W1rSZaaEtAZK8ISQJrGwaLi2nZkJ9O5Nfd6EGAoleEJILY31dTfWZE6jzgkxPErwhLQjAoFuo8ypyZwQ00cJnhAzYWHReM3bkv7jCWk36N+dEBOgy8QsNMqcEFIdJXhCDEjbZA5o1jyQSOpO4NRkTgjRFyV4QlqJrk3mCgWQmgr4+tKIc0JIy6EET0gT6DIxC82qRggxJErwhFSjXYikrpHl9S1EQgghxogSPGk3aCESQkh7QgmemAVaiIQQQvgowROjVtfa3dppUAMCqkad0yVihBDCZ9CPxT///BOTJ0+Gh4cHBAIBfv31V95+xhgiIiLg4eEBiUSCkJAQXL161TDBkhanXYjEzg5wdATc3QEvL6BzZ6BbN8085v37A716ae77+2v2u7pqHm9jo3k8JXdCCKnNoDX4kpIS9OnTB8899xweffTRWvvXr1+PjRs3IioqCl27dsWaNWsQGhqK69evw87OzgARE11RkzkhhBiWQRP8hAkTMGHChDr3McawadMmLFu2DI888ggAYMeOHXBzc8OePXswb968tgyVVNPchUgIIYS0PqPtg09JSYFcLkdYWBi3TSwWY9SoUYiPj683wVdUVKCiooK7X1hYCABQKBRQ6LjElfY4tdo8lsTSlqOx8giFmuRc/Rrv6v3f2lp5Y5eIqVSaW2vRvj66vp7GztzKA5hfmZpSHnMpOzFdRpvg5XI5AMDNzY233c3NDWlpafU+bu3atVi5cmWt7dHR0ZBKpXrGEKPX8cbO3MoTE0PlMXbmViZ9ylNaWtqKkRDSOKNN8FqCGtVFxlitbdUtXboUixcv5u4XFhbCy8sLYWFhsLe31+k5FQoFYmJi4O4eCqHQ+C+MbqjWralxK3D0aAxCQ0MhMoMLvbWvD5XHeJlbmZpSHm3rISGGYrQJ3t3dHYCmJi+TybjtOTk5tWr11YnFYojF4lrbRSKR3h80QqHIoAm+pdbu1rYUNuVvYMyoPMbP3MqkT3nMqdzENBltgvfz84O7uztiYmLQr18/AEBlZSXi4uKwbt06A0fXfLR2NyGEkNZk0BRSXFyMf//9l7ufkpKCixcvwtHREd7e3li0aBEiIyMREBCAgIAAREZGQiqVYsaMGQaMunG0EAkhhBBDM2iCP3fuHEaPHs3d1/adz5o1C1FRUViyZAnKysowf/585OfnIzg4GNHR0Qa7Br6+hUho7W5CCCHGxqAJPiQkBIyxevcLBAJEREQgIiKi7YKqxtNTMxUqLURCCCHE1FAvbwOcnSmpE0IIMU003xghhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWaIEjwhhBBihijBE0IIIWbI7Ce60c6Up8/SjQqFAqWlpSgsLDSLFaGoPMbN3MoDmF+ZmlIe7WdOQ7N1EtKazD7BFxUVAQC8vLwMHAkhpD0qKiqCg4ODocMg7ZCAmfnXS7VajczMTNjZ2UGg4yowhYWF8PLyQnp6Ouzt7Vs5wtZH5TFu5lYewPzK1JTyMMZQVFQEDw8PCIXUG0rantnX4IVCITw9PZv0WHt7e7
P4cNKi8hg3cysPYH5l0rc8VHMnhkRfKwkhhBAzRAmeEEIIMUOU4OsgFouxYsUKiMViQ4fSIqg8xs3cygOYX5nMrTykfTD7QXaEEEJIe0Q1eEIIIcQMUYInhBBCzBAleEIIIcQMUYInhBBCzFC7TfCbN2+Gn58frK2tMWDAAJw4caLB4+Pi4jBgwABYW1vD398fX375ZRtFqht9yvPLL78gNDQULi4usLe3x9ChQ3HkyJE2jLZx+r4+Wn/99RcsLS3Rt2/f1g1QT/qWp6KiAsuWLYOPjw/EYjE6d+6Mb7/9to2ibZy+5dm9ezf69OkDqVQKmUyG5557Dnl5eW0UbcP+/PNPTJ48GR4eHhAIBPj1118bfYyxfx4QAgBg7dAPP/zARCIR+/rrr1lSUhJbuHAhs7GxYWlpaXUen5yczKRSKVu4cCFLSkpiX3/9NROJROznn39u48jrpm95Fi5cyNatW8fOnj3Lbty4wZYuXcpEIhG7cOFCG0deN33Lo1VQUMD8/f1ZWFgY69OnT9sEq4OmlGfKlCksODiYxcTEsJSUFHbmzBn2119/tWHU9dO3PCdOnGBCoZB98sknLDk5mZ04cYIFBQWxadOmtXHkdTt48CBbtmwZ27dvHwPA9u/f3+Dxxv55QIhWu0zwgwcPZi+++CJvW7du3djbb79d5/FLlixh3bp1422bN28eGzJkSKvFqA99y1OXHj16sJUrV7Z0aE3S1PI88cQTbPny5WzFihVGleD1Lc+hQ4eYg4MDy8vLa4vw9KZveT788EPm7+/P2/bpp58yT0/PVouxqXRJ8Mb+eUCIVrtroq+srMT58+cRFhbG2x4WFob4+Pg6H3Pq1Klax48bNw7nzp2DQqFotVh10ZTy1KRWq1FUVARHR8fWCFEvTS3P9u3bcevWLaxYsaK1Q9RLU8pz4MABDBw4EOvXr0enTp3QtWtXvPHGGygrK2uLkBvUlPIMGzYMGRkZOHjwIBhjyM7Oxs8//4yHHnqoLUJuccb8eUBIdWa/2ExNubm5UKlUcHNz4213c3ODXC6v8zFyubzO45VKJXJzcyGTyVot3sY0pTw1bdiwASUlJZg+fXprhKiXppTn5s2bePvtt3HixAlYWhrXW7op5UlOTsbJkydhbW2N/fv3Izc3F/Pnz8e9e/cM3g/flPIMGzYMu3fvxhNPPIHy8nIolUpMmTIFn332WVuE3OKM+fOAkOraXQ1eq+bSsYyxBpeTrev4urYbir7l0fr+++8RERGBvXv3wtXVtbXC05uu5VGpVJgxYwZWrlyJrl27tlV4etPn9VGr1RAIBNi9ezcGDx6MiRMnYuPGjYiKijKKWjygX3mSkpLw6quv4r333sP58+dx+PBhpKSk4MUXX2yLUFuFsX8eEAK0wxq8s7MzLCwsatU2cnJyan0r13J3d6/zeEtLSzg5ObVarLpoSnm09u7dizlz5uCnn37C2LFjWzNMnelbnqKiIpw7dw6JiYl45ZVXAGgSJGMMlpaWiI6OxoMPPtgmsdelKa+PTCZDp06deEuNdu/eHYwxZGRkICAgoFVjbkhTyrN27VoMHz4cb775JgCgd+/esLGxwciRI7FmzRqTq/Ea8+cBIdW1uxq8lZUVBgwYgJiYGN72mJgYDBs2rM7HDB06tNbx0dHRGDhwIEQiUavFqoumlAfQ1NzDw8OxZ88eo+oL1bc89vb2uHz5Mi5evMjdXnzxRQQGBuLixYsIDg5uq9Dr1JTXZ/jw4cjMzERxcTG37caNGxAKhfD09GzVeBvTlPKUlpZCKOR/1FhYWACoqvmaEmP+PCCEx0CD+wxKe5nPtm3bWFJSElu0aBGzsbFhqampjDHG3n77bfbss89yx2svi3nttddYUlIS27Ztm1FdFqNvefbs2cMsLS3ZF198wbKysrhbQUGBoYrAo295ajK2UfT6lqeoqIh5enqyxx57jF29epXFxcWxgIAA9vzzzxuqCDz6lmf79u3M0tKSbd68md
26dYudPHmSDRw4kA0ePNhQReApKipiiYmJLDExkQFgGzduZImJidxlf6b2eUCIVrtM8Iwx9sUXXzAfHx9mZWXF+vfvz+Li4rh9s2bNYqNGjeIdHxsby/r168esrKyYr68v27JlSxtH3DB9yjNq1CgGoNZt1qxZbR94PfR9faoztgTPmP7luXbtGhs7diyTSCTM09OTLV68mJWWlrZx1PXTtzyffvop69GjB5NIJEwmk7Gnn36aZWRktHHUdTt+/HiD/w+m+HlACGOM0XKxhBBCiBlqd33whBBCSHtACZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCSGEEDNECZ4QQggxQ5TgCalDamoqBAIBLl68aOhQCCGkSSjBE5MVHh6OadOm1doeGxsLgUCAgoKCJp/by8sLWVlZ6NmzZ9MDJIQQA2p3y8US0pjKykpYWVnB3d3d0KEQQkiTUQ2emL19+/YhKCgIYrEYvr6+2LBhA2+/r68v1qxZg/DwcDg4OGDu3Lm1mujDw8MhEAhq3WJjYwEA+fn5mDlzJjp27AipVIoJEybg5s2b3HNERUWhQ4cOOHLkCLp37w5bW1uMHz8eWVlZbfVnIIS0M5TgiVk7f/48pk+fjieffBKXL19GREQE3n33XURFRfGO+/DDD9GzZ0+cP38e7777bq3zfPLJJ8jKyuJuCxcuhKurK7p16wZA8wXg3LlzOHDgAE6dOgXGGCZOnAiFQsGdo7S0FB999BF27tyJP//8E7dv38Ybb7zRquUnhLRjBl7NjpAmmzVrFrOwsGA2Nja8m7W1NQPA8vPz2YwZM1hoaCjvcW+++Sbr0aMHd9/Hx4dNmzaNd0xKSgoDwBITE2s97759+5hYLGYnTpxgjDF248YNBoD99ddf3DG5ublMIpGwH3/8kTGmWRMdAPv333+5Y7744gvm5ubW7L8DIYTUhWrwxKSNHj0aFy9e5N2++eYbbv+1a9cwfPhw3mOGDx+OmzdvQqVScdsGDhyo0/MlJiZi5syZ+OKLLzBixAjuOSwtLREcHMwd5+TkhMDAQFy7do3bJpVK0blzZ+6+TCZDTk6OfgUmhBAd0SA7YtJsbGzQpUsX3raMjAzud8YYBAIBbz9jrM7zNEYul2PKlCmYM2cO5syZ0+D56npukUjE2y8QCOp9LCGENBfV4IlZ69GjB06ePMnbFh8fj65du8LCwkLn85SXl2Pq1Kno1q0bNm7cWOs5lEolzpw5w23Ly8vDjRs30L179+YVgBBCmohq8MSsvf766xg0aBBWr16NJ554AqdOncLnn3+OzZs363WeefPmIT09HceOHcPdu3e57Y6OjggICMDUqVMxd+5cfPXVV7Czs8Pbb7+NTp06YerUqS1dJEII0QnV4IlZ69+/P3788Uf88MMP6NmzJ9577z2sWrUK4eHhep0nLi4OWVlZ6NGjB2QyGXeLj48HAGzfvh0DBgzApEmTMHToUDDGcPDgwVrN8oQQ0lYEjDoBCSGEELNDNXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDFGCJ4QQQswQJXhCCCHEDP0/abufTubPN7wAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| hide\n", "# Create single mixture and broadcast to N,H,K\n", @@ -4105,7 +2625,7 @@ "print('probs.shape (N,H,K) \\t', probs.shape)\n", "\n", "model = NBMM(quantiles=[0.1, 0.40, 0.5, 0.60, 0.9])\n", - "distr_args = (counts, probs, weights)\n", + "distr_args = (counts, probs)\n", "samples, sample_mean, quants = model.sample(distr_args, num_samples=2000)\n", "\n", "print('samples.shape (N,H,num_samples) ', samples.shape)\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index 6bafac332..c1852c18a 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -22,7 +22,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting.\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting. \n", "\n", "\n", "**References**
\n", @@ -30,13 +30,10 @@ "\n", "\n", ":::{.callout-warning collapse=\"false\"}\n", - "#### Exogenous Variables, Losses, and Parameters Availability\n", + "#### Losses\n", "\n", - "Given the sampling procedure during inference, DeepNPTS only supports `DistributionLoss` as training loss.\n", + "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast, rather than a sampled distributional output. Consequently, DeepNPTS only supports point losses as training loss.\n", "\n", - "Note that DeepNPTS generates a non-parametric forecast distribution using Monte Carlo. We use this sampling procedure also during validation to make it closer to the inference procedure. Therefore, only the `MQLoss` is available for validation.\n", - "\n", - "Aditionally, Monte Carlo implies that historic exogenous variables are not available for the model.\n", ":::" ] }, @@ -47,17 +44,15 @@ "outputs": [], "source": [ "#| export\n", - "import numpy as np\n", - "\n", "import torch\n", "import torch.nn as nn\n", + "import torch.nn.functional as F\n", "import neuralforecast.losses.pytorch as losses\n", "from typing import Optional\n", - "from functools import partial\n", "\n", "\n", "from neuralforecast.common._base_windows import BaseWindows\n", - "from neuralforecast.losses.pytorch import MQLoss, GMM, PMM, NBMM\n" + "from neuralforecast.losses.pytorch import MAE\n" ] }, { @@ -102,7 +97,7 @@ "class DeepNPTS(BaseWindows):\n", " \"\"\" DeepNPTS\n", "\n", - " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", + " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. 
This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", "\n", " **Parameters:**
\n", " `h`: int, Forecast horizon.
\n", @@ -111,7 +106,6 @@ " `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", " `dropout`: float=0.1, dropout.
\n", " `n_layers`: int=2, number of dense layers.
\n", - " `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", " `stat_exog_list`: str list, static exogenous columns.
\n", " `hist_exog_list`: str list, historic exogenous columns.
\n", " `futr_exog_list`: str list, future exogenous columns.
\n", @@ -152,15 +146,14 @@ " batch_norm: bool = True,\n", " dropout: float = 0.1,\n", " n_layers: int = 2,\n", - " trajectory_samples: int = 100,\n", " futr_exog_list = None,\n", " hist_exog_list = None,\n", " stat_exog_list = None,\n", " exclude_insample_y = False,\n", - " loss = GMM(),\n", - " valid_loss = MQLoss(level=[80, 90]),\n", + " loss = MAE(),\n", + " valid_loss = MAE(),\n", " max_steps: int = 1000,\n", - " learning_rate: float = 1e-5,\n", + " learning_rate: float = 1e-3,\n", " num_lr_decays: int = 3,\n", " early_stop_patience_steps: int =-1,\n", " val_check_steps: int = 100,\n", @@ -178,25 +171,12 @@ " optimizer_kwargs = None,\n", " **trainer_kwargs):\n", "\n", - " if hist_exog_list is not None:\n", - " raise Exception('DeepNPTS does not support historical exogenous variables.')\n", - "\n", " if exclude_insample_y:\n", " raise Exception('DeepNPTS has no possibility for excluding y.')\n", - " \n", - " supported_losses = (losses.GMM,\n", - " losses.PMM,\n", - " losses.NBMM)\n", "\n", - " if not isinstance(loss, supported_losses):\n", - " raise Exception('DeepNPTS only supports GMM, PMM or NBMM as loss function.') \n", - " \n", - " if not isinstance(valid_loss, losses.MQLoss):\n", - " raise Exception('DeepNPTS only supports MQLoss as validation loss.')\n", + " if not isinstance(loss, losses.BasePointLoss):\n", + " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function.') \n", " \n", - " # Overwrite n_components, it has to be the input_size in DeepNPTS\n", - " loss.n_components = input_size\n", - " \n", " # Inherit BaseWindows class\n", " super(DeepNPTS, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -226,16 +206,15 @@ " **trainer_kwargs)\n", "\n", " self.h = h\n", - " self.h_backup = self.h # Used because h=1 during training\n", - " self.use_softmax = True\n", " self.hidden_size = hidden_size\n", " self.dropout = dropout\n", - " self.trajectory_samples = trajectory_samples\n", "\n", " self.futr_exog_size = 
len(self.futr_exog_list)\n", " self.stat_exog_size = len(self.stat_exog_list)\n", + " self.hist_exog_size = len(self.hist_exog_list)\n", "\n", - " input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size\n", + " input_dim = input_size * (1 + self.futr_exog_size + self.hist_exog_size) + self.stat_exog_size + self.h * self.futr_exog_size\n", + " \n", " # Create DeepNPTSNetwork\n", " modules = [] \n", " for i in range(n_layers):\n", @@ -246,503 +225,57 @@ " if dropout > 0.0:\n", " modules.append(nn.Dropout(dropout))\n", "\n", + " modules.append(nn.Linear(hidden_size, input_size * self.h))\n", " self.deepnptsnetwork = nn.Sequential(*modules)\n", - " self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07))\n", - "\n", - " # Add output layers for Mixture distribution \n", - " output_modules = []\n", - " if dropout > 0.0:\n", - " output_modules.append(nn.Dropout(self.dropout))\n", - " \n", - " if isinstance(loss, GMM):\n", - " output_modules.append(nn.Linear(hidden_size, input_size + 1))\n", - " elif isinstance(loss, PMM):\n", - " output_modules.append(nn.Linear(hidden_size, input_size))\n", - " elif isinstance(loss, NBMM):\n", - " output_modules.append(nn.Linear(hidden_size, input_size))\n", - "\n", - " self.output_layer = nn.Sequential(*output_modules)\n", - " self.output_layer.apply(self._init_weights)\n", - "\n", - "\n", - " @staticmethod\n", - " def _init_weights(module, scale=1.0):\n", - " if type(module) == nn.Linear:\n", - " nn.init.uniform_(module.weight, -scale, scale)\n", - " nn.init.zeros_(module.bias)\n", - "\n", - " def _domain_map(self, o_t, insample_y):\n", - " if isinstance(self.loss, GMM):\n", - " weights = o_t[:, :-1] # [B, L + 1] -> [B, L]\n", - " kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1]\n", - " kernel_width = torch.repeat_interleave(input=kernel_width,\n", - " repeats=weights.shape[1],\n", - " dim=-1) # [B, 1] -> [B, L]\n", - " output = torch.cat([insample_y, kernel_width, weights], dim=-1) # [B, L] + [B, L] + [B, 
L] = [B, 3 * L]\n", - " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L]\n", - " elif isinstance(self.loss, PMM):\n", - " weights = o_t # [B, L] -> [B, L]\n", - " output = torch.cat([insample_y, weights], dim=-1) # [B, L] + [B, L] = [B, 2 * L]\n", - " output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] \n", - " elif isinstance(self.loss, NBMM):\n", - " weights = torch.ones_like(o_t) # [B, L] -> [B, L]\n", - " output = torch.cat([insample_y, o_t, weights], dim=-1) # [B, L] + [B, L] + [B, L] = [B, 3 * L]\n", - " output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * \n", - "\n", - " else:\n", - " raise NotImplementedError\n", - " \n", - " return output\n", - "\n", - " # Override BaseWindows method\n", - " def training_step(self, batch, batch_idx):\n", - " \n", - " # Only train one-step ahead\n", - " self.h = 1\n", - " self.quantiles = self.loss.quantiles\n", - "\n", - " # Create and normalize windows [Ws, L+H, C]\n", - " y_idx = batch[\"y_idx\"]\n", - " windows = self._create_windows(batch, step=\"train\")\n", - " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h :, y_idx])\n", - " windows = self._normalization(windows=windows, y_idx=y_idx)\n", - "\n", - " # Parse windows\n", - " (\n", - " insample_y,\n", - " insample_mask,\n", - " outsample_y,\n", - " outsample_mask,\n", - " _,\n", - " futr_exog,\n", - " stat_exog,\n", - " ) = self._parse_windows(batch, windows)\n", - "\n", - " windows_batch = dict(\n", - " insample_y=insample_y, # [Ws, L]\n", - " insample_mask=insample_mask, # [Ws, L]\n", - " futr_exog=futr_exog, # [Ws, L+H]\n", - " hist_exog=None, \n", - " stat_exog=stat_exog, # [Ws, 1]\n", - " y_idx=y_idx # [Ws, 1]\n", - " ) \n", - "\n", - " # Model Predictions\n", - " output = self.train_forward(windows_batch)\n", - "\n", - " _, y_loc, y_scale = self._inv_normalization(\n", - " y_hat=outsample_y, \n", - " temporal_cols=batch[\"temporal_cols\"], \n", - " y_idx=y_idx\n", - " )\n", - " # outsample_y = original_insample_y\n", - " 
outsample_y = original_outsample_y\n", - " distr_args = self.loss.scale_decouple(\n", - " output=output, loc=y_loc, scale=y_scale\n", - " )\n", - " loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask)\n", - "\n", - " if torch.isnan(loss):\n", - " print(\"Model Parameters\", self.hparams)\n", - " print(\"insample_y\", torch.isnan(insample_y).sum())\n", - " print(\"outsample_y\", torch.isnan(outsample_y).sum())\n", - " print(\"output\", torch.isnan(output).sum())\n", - " raise Exception(\"Loss is NaN, training stopped.\")\n", - "\n", - " self.log(\"train_loss\", loss, prog_bar=True, on_epoch=True)\n", - " self.train_trajectories.append((self.global_step, float(loss)))\n", - "\n", - " self.h = self.h_backup \n", - " \n", - " return loss\n", - "\n", - " # Override BaseWindows method\n", - " def validation_step(self, batch, batch_idx):\n", - "\n", - " self.h = self.h_backup\n", - " self.quantiles = self.valid_loss.quantiles\n", - "\n", - " if self.val_size == 0:\n", - " return np.nan\n", - "\n", - " # TODO: Hack to compute number of windows\n", - " windows = self._create_windows(batch, step=\"val\")\n", - " n_windows = len(windows[\"temporal\"])\n", - " y_idx = batch[\"y_idx\"]\n", - "\n", - " # Number of windows in batch\n", - " windows_batch_size = self.inference_windows_batch_size\n", - " if windows_batch_size < 0:\n", - " windows_batch_size = n_windows\n", - " n_batches = int(np.ceil(n_windows / windows_batch_size))\n", - "\n", - " valid_losses = []\n", - " batch_sizes = []\n", - " for i in range(n_batches):\n", - " # Create and normalize windows [Ws, L+H, C]\n", - " w_idxs = np.arange(\n", - " i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows)\n", - " )\n", - " windows = self._create_windows(batch, step=\"val\", w_idxs=w_idxs)\n", - " original_outsample_y = torch.clone(windows[\"temporal\"][:, -self.h:, 0])\n", - " windows = self._normalization(windows=windows, y_idx=y_idx)\n", - "\n", - " # Parse windows\n", - " (\n", - " 
insample_y,\n", - " insample_mask,\n", - " _,\n", - " outsample_mask,\n", - " _,\n", - " futr_exog,\n", - " stat_exog,\n", - " ) = self._parse_windows(batch, windows)\n", - " \n", - " windows_batch = dict(\n", - " insample_y=insample_y, # [Ws, L]\n", - " insample_mask=insample_mask, # [Ws, L]\n", - " futr_exog=futr_exog, # [Ws, L+H]\n", - " hist_exog=None, # [Ws, L]\n", - " stat_exog=stat_exog,\n", - " y_idx=y_idx,\n", - " ) # [Ws, 1]\n", - "\n", - " # Model Predictions\n", - " output_batch = self(windows_batch)\n", - " # Monte Carlo already returns y_hat with mean and quantiles\n", - " output_batch = output_batch[:,:, 1:] # Remove mean\n", - " valid_loss_batch = self.valid_loss(y=original_outsample_y, y_hat=output_batch, mask=outsample_mask)\n", - " valid_losses.append(valid_loss_batch)\n", - " batch_sizes.append(len(output_batch))\n", - "\n", - " valid_loss = torch.stack(valid_losses)\n", - " batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device)\n", - " valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes)\n", - "\n", - " if torch.isnan(valid_loss):\n", - " raise Exception(\"Loss is NaN, training stopped.\")\n", - "\n", - " self.log(\"valid_loss\", valid_loss, prog_bar=True, on_epoch=True)\n", - " self.validation_step_outputs.append(valid_loss)\n", - " return valid_loss\n", - "\n", - " # Override BaseWindows method\n", - " def predict_step(self, batch, batch_idx):\n", - "\n", - " self.h == self.h_backup\n", - " self.quantiles = self.loss.quantiles\n", - "\n", - " # TODO: Hack to compute number of windows\n", - " windows = self._create_windows(batch, step='predict')\n", - " n_windows = len(windows['temporal'])\n", - " y_idx = batch['y_idx']\n", - "\n", - " # Number of windows in batch\n", - " windows_batch_size = self.inference_windows_batch_size\n", - " if windows_batch_size < 0:\n", - " windows_batch_size = n_windows\n", - " n_batches = int(np.ceil(n_windows/windows_batch_size))\n", - "\n", - " y_hats = []\n", - " for i in 
range(n_batches):\n", - " # Create and normalize windows [Ws, L+H, C]\n", - " w_idxs = np.arange(i*windows_batch_size, \n", - " min((i+1)*windows_batch_size, n_windows))\n", - " windows = self._create_windows(batch, step='predict', w_idxs=w_idxs)\n", - " windows = self._normalization(windows=windows, y_idx=y_idx)\n", - "\n", - " # Parse windows\n", - " insample_y, insample_mask, _, _, _, futr_exog, stat_exog = self._parse_windows(batch, windows)\n", - " windows_batch = dict(insample_y=insample_y, # [Ws, L]\n", - " insample_mask=insample_mask, # [Ws, L]\n", - " futr_exog=futr_exog, # [Ws, L+H]\n", - " stat_exog=stat_exog,\n", - " y_idx=y_idx)\n", - " \n", - " # Model Predictions\n", - " y_hat = self(windows_batch)\n", - " # Monte Carlo already returns y_hat with mean and quantiles\n", - " y_hats.append(y_hat)\n", - " y_hat = torch.cat(y_hats, dim=0)\n", - " return y_hat\n", - "\n", - " def train_forward(self, windows_batch):\n", - " # Parse windows_batch\n", - " x_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", - " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", - " stat_exog = windows_batch['stat_exog'] # [B, S]\n", - "\n", - " batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len\n", - "\n", - " # Concatenate x_t with future exogenous\n", - " if self.futr_exog_size > 0: \n", - " futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F]\n", - " x_t = torch.cat((x_t, futr_exog_t), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", - " \n", - " x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)]\n", - "\n", - " # Concatenate x_t with static exogenous\n", - " if self.stat_exog_size > 0:\n", - " x_t = torch.cat((x_t, stat_exog), dim=1) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S]\n", - "\n", - " # Run through DeepNPTSNetwork\n", - " h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size]\n", - " o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1]\n", - "\n", - " output 
= self._domain_map(o_t, windows_batch['insample_y']) # [B, L + 1], [B, L] -> [B, 3 * L]\n", - " output = self.loss.domain_map(output) # [B, 3 * L] -> ([B, L], [B, L], [B, L])\n", - "\n", - " return output\n", "\n", " def forward(self, windows_batch):\n", " # Parse windows_batch\n", - " insample_y_t = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " x = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " hist_exog = windows_batch['hist_exog'] # [B, L, X]\n", " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", " stat_exog = windows_batch['stat_exog'] # [B, S]\n", - " y_idx = windows_batch['y_idx']\n", - "\n", - " batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len\n", - " device = insample_y_t.device\n", - " dtype = insample_y_t.dtype\n", - "\n", - " # Repeat insample_y for trajectory samples\n", - " insample_y_t = torch.repeat_interleave(input=insample_y_t, \n", - " repeats=self.trajectory_samples, \n", - " dim=0) # [B, L, 1] -> [B * n_samples, L, 1]\n", - " \n", - " # Input x_t is insample_y at time t\n", - " x_t = insample_y_t\n", "\n", - " # Repeat futr_exog if available for trajectory samples and add to x_t \n", + " batch_size, seq_len = x.shape[:2] # B = batch_size, L = seq_len\n", + " insample_y = windows_batch['insample_y'].unsqueeze(-1) \n", + " \n", + " # Concatenate x_t with future exogenous of input\n", " if self.futr_exog_size > 0: \n", - " futr_exog = torch.repeat_interleave(input=futr_exog, \n", - " repeats=self.trajectory_samples, \n", - " dim=0) # [B, L + h, F] -> [B * n_samples, L + h, F] \n", - " x_t = torch.cat((x_t, futr_exog[:, :seq_len]), dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", + " x = torch.cat((x, futr_exog[:, :seq_len]), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", " \n", - " x_t = x_t.reshape(batch_size * self.trajectory_samples, -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + " # Concatenate x_t with 
historic exogenous\n", + " if self.hist_exog_size > 0: \n", + " x = torch.cat((x, hist_exog), dim=2) # [B, L, 1 + F] + [B, L, X] -> [B, L, 1 + F + X] \n", "\n", - " # Repeat stat_exog if available for trajectory samples and add to x_t\n", - " if self.stat_exog_size > 0:\n", - " stat_exog = torch.repeat_interleave(\n", - " input=stat_exog, \n", - " repeats=self.trajectory_samples, \n", - " dim=0) # [B, S] -> [B * n_samples, S] \n", - " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", + " x = x.reshape(batch_size, -1) # [B, L, 1 + F + X] -> [B, L * (1 + F + X)]\n", "\n", - " # Scales for inverse normalization\n", - " y_scale = self.scaler.x_scale[:, :, y_idx]\n", - " y_loc = self.scaler.x_shift[:, :, y_idx]\n", - " y_scale = torch.repeat_interleave(input=y_scale, \n", - " repeats=self.trajectory_samples, \n", - " dim=0)\n", - " y_loc = torch.repeat_interleave(input=y_loc, \n", - " repeats=self.trajectory_samples, \n", - " dim=0)\n", - " # Create forecasts tensor\n", - " forecasts = torch.zeros((batch_size, \n", - " self.h,\n", - " len(self.quantiles) + 1), \n", - " device=device, \n", - " dtype=dtype)\n", - " \n", - " # Recursive predictions\n", - " for t in range(self.h):\n", - " # Run input throught DeepNPTSNetwork\n", - " h_t = self.deepnptsnetwork(x_t) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size]\n", - " o_t = self.output_layer(h_t) # [B * n_samples, hidden_size] -> [B * n_samples, L (+ 1)]\n", - " output = self._domain_map(o_t, insample_y_t.squeeze(-1)) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L]\n", - " output = self.loss.domain_map(output) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L])\n", - "\n", - " # Inverse normalization\n", - " distr_args = self.loss.scale_decouple(output=output, \n", - " loc=y_loc, \n", - " scale=y_scale)\n", + " # Concatenate x with static exogenous\n", + " if 
self.stat_exog_size > 0:\n", + " x = torch.cat((x, stat_exog), dim=1) # [B, L * (1 + F + X)] + [B, S] -> [B, L * (1 + F + X) + S]\n", "\n", - " # Sample and create probabilistic outputs\n", - " samples_t_flat, _, _ = self.loss.sample(distr_args=distr_args, \n", - " num_samples=1)\n", + " # Concatenate x_t with future exogenous of horizon\n", + " if self.futr_exog_size > 0:\n", + " futr_exog = futr_exog[:, seq_len:] # [B, L + h, F] -> [B, h, F]\n", + " futr_exog = futr_exog.reshape(batch_size, -1) # [B, L + h, F] -> [B, h * F]\n", + " x = torch.cat((x, futr_exog), dim=1) # [B, L * (1 + F + X) + S] + [B, h * F] -> [B, L * (1 + F + X) + S + h * F] \n", "\n", - " samples_t_flat = samples_t_flat.squeeze()\n", - " samples_t = samples_t_flat.reshape(batch_size, \n", - " self.trajectory_samples) # [B * n_samples] -> [B, n_samples] \n", - " \n", - " samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] \n", - " quantiles_t = torch.quantile(input=samples_t, \n", - " q=self.quantiles, \n", - " dim=-1) # [B, n_samples] -> [Q, B]\n", - " forecasts[:, t, 0] = samples_t_mean\n", - " forecasts[:, t, 1:] = quantiles_t.permute(1, 0)\n", + " # Run through DeepNPTSNetwork\n", + " weights = self.deepnptsnetwork(x) # [B, L * (1 + F + X) + S + h * F] -> [B, L * h]\n", "\n", - " insample_y_t_next = self.scaler.scaler(samples_t_flat, \n", - " y_loc.squeeze(), \n", - " y_scale.squeeze()) # [B * n_samples] -> [B * n_samples]\n", - " insample_y_t_next = insample_y_t_next.unsqueeze(-1)\\\n", - " .unsqueeze(-1) # [B * n_samples] -> [B * n_samples, 1, 1]\n", + " # Apply softmax for weighted input predictions\n", + " weights = weights.reshape(batch_size, seq_len, -1) # [B, L * h] -> [B, L, h]\n", + " x = F.softmax(weights, dim=1) * insample_y # [B, L, h] * [B, L, 1] = [B, L, h]\n", + " output = torch.sum(x, dim=1).unsqueeze(-1) # [B, L, h] -> [B, h, 1]\n", "\n", - " # Update insample_y_t \n", - " insample_y_t = torch.cat([insample_y_t[:, 1:], \n", - " insample_y_t_next], \n", - " 
dim=1) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1]\n", - " \n", - " # Update input\n", - " x_t = insample_y_t\n", - " # Concatenate x_t with future exogenous\n", - " if self.futr_exog_size > 0: \n", - " x_t = torch.cat((x_t, \n", - " futr_exog[:, t:seq_len + t]), \n", - " dim=2) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] \n", - " \n", - " x_t = x_t.reshape(batch_size * self.trajectory_samples\n", - " , -1) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)]\n", + " forecast = self.loss.domain_map(output) # [B, h, 1] -> [B, h, 1]\n", "\n", - " # Concatenate x_t with static exogenous\n", - " if self.stat_exog_size > 0:\n", - " x_t = torch.cat((x_t, stat_exog), dim=1) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S]\n", - " \n", - " return forecasts\n", - "\n" + " return forecast" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DeepNPTS\n", - "\n", - "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", - "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", - "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", - "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", - "> max_steps:int=1000, learning_rate:float=0.001,\n", - "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", - "> val_check_steps:int=100, batch_size:int=32,\n", - "> valid_batch_size:Optional[int]=None,\n", - "> windows_batch_size:int=1024,\n", - "> inference_windows_batch_size:int=-1,\n", - "> start_padding_enabled=False, step_size:int=1,\n", - "> scaler_type:str='standard', random_seed:int=1,\n", - "> num_workers_loader=0, 
drop_last_loader=False, optimizer=None,\n", - "> optimizer_kwargs=None, **trainer_kwargs)\n", - "\n", - "DeepNPTS\n", - "\n", - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", - "\n", - "**Parameters:**
\n", - "`h`: int, Forecast horizon.
\n", - "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - "`hidden_size`: int=32, hidden size of dense layers.
\n", - "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - "`dropout`: float=0.1, dropout.
\n", - "`n_layers`: int=2, number of dense layers.
\n", - "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", - "`stat_exog_list`: str list, static exogenous columns.
\n", - "`hist_exog_list`: str list, historic exogenous columns.
\n", - "`futr_exog_list`: str list, future exogenous columns.
\n", - "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`max_steps`: int=1000, maximum number of training steps.
\n", - "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - "`batch_size`: int=32, number of different series in each batch.
\n", - "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - "`step_size`: int=1, step size between each window of temporal data.
\n", - "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - "`alias`: str, optional, Custom name of the model.
\n", - "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - "**References**
\n", - "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L20){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DeepNPTS\n", - "\n", - "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", - "> dropout:float=0.5, n_layers:int=2, trajectory_samples:int=100,\n", - "> futr_exog_list=None, hist_exog_list=None, stat_exog_list=None,\n", - "> exclude_insample_y=False, loss=GMM(), valid_loss=MQLoss(),\n", - "> max_steps:int=1000, learning_rate:float=0.001,\n", - "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", - "> val_check_steps:int=100, batch_size:int=32,\n", - "> valid_batch_size:Optional[int]=None,\n", - "> windows_batch_size:int=1024,\n", - "> inference_windows_batch_size:int=-1,\n", - "> start_padding_enabled=False, step_size:int=1,\n", - "> scaler_type:str='standard', random_seed:int=1,\n", - "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", - "> optimizer_kwargs=None, **trainer_kwargs)\n", - "\n", - "DeepNPTS\n", - "\n", - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. \n", - "\n", - "**Parameters:**
\n", - "`h`: int, Forecast horizon.
\n", - "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - "`hidden_size`: int=32, hidden size of dense layers.
\n", - "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - "`dropout`: float=0.1, dropout.
\n", - "`n_layers`: int=2, number of dense layers.
\n", - "`trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
\n", - "`stat_exog_list`: str list, static exogenous columns.
\n", - "`hist_exog_list`: str list, historic exogenous columns.
\n", - "`futr_exog_list`: str list, future exogenous columns.
\n", - "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`max_steps`: int=1000, maximum number of training steps.
\n", - "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - "`batch_size`: int=32, number of different series in each batch.
\n", - "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - "`step_size`: int=1, step size between each window of temporal data.
\n", - "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - "`alias`: str, optional, Custom name of the model.
\n", - "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - "**References**
\n", - "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS, title_level=3)" ] @@ -751,73 +284,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### DeepNPTS.fit\n", - "\n", - "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ], - "text/plain": [ - "---\n", - "\n", - "### DeepNPTS.fit\n", - "\n", - "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" ] @@ -826,53 +293,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### DeepNPTS.predict\n", - "\n", - "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ], - "text/plain": [ - "---\n", - "\n", - "### DeepNPTS.predict\n", - "\n", - "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" ] @@ -891,194 +312,26 @@ "metadata": {}, "outputs": [], "source": [ + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "\n", "from neuralforecast import NeuralForecast\n", - "from neuralforecast.losses.pytorch import MQLoss, DistributionLoss, GMM\n", - "from neuralforecast.tsdataset import TimeSeriesDataset\n", - "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic" + "from neuralforecast.utils import AirPassengersPanel, AirPassengersStatic" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Seed set to 1\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "b74158f17d254e4884139ee5c48e5706", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Sanity Checking: | | 0/? 
[00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", - "import pandas as pd\n", - "import pytorch_lightning as pl\n", - "import matplotlib.pyplot as plt\n", - "\n", - "from neuralforecast import NeuralForecast\n", - "#from neuralforecast.models import DeepAR\n", - "from neuralforecast.losses.pytorch import DistributionLoss, HuberMQLoss\n", - "from neuralforecast.utils import AirPassengers, AirPassengersPanel, AirPassengersStatic\n", - "\n", - "#AirPassengersPanel['y'] = AirPassengersPanel['y'] + 10\n", "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds=AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 12 test\n", "\n", "nf = NeuralForecast(\n", " models=[DeepNPTS(h=12,\n", - " input_size=12,\n", - " trajectory_samples=100,\n", - " loss=GMM(),\n", - " # learning_rate=1e-5,\n", - " n_layers = 2,\n", - " dropout=0.0,\n", + " input_size=24,\n", " stat_exog_list=['airline1'],\n", " futr_exog_list=['trend'],\n", " max_steps=1000,\n", @@ -1100,29 +353,9 @@ "plot_df = plot_df[plot_df.unique_id=='Airline1'].drop('unique_id', axis=1)\n", "plt.plot(plot_df['ds'], plot_df['y'], c='black', label='True')\n", "plt.plot(plot_df['ds'], plot_df['DeepNPTS'], c='red', label='mean')\n", - "plt.plot(plot_df['ds'], plot_df['DeepNPTS-median'], c='blue', label='median')\n", - "plt.fill_between(x=plot_df['ds'][-12:], \n", - " y1=plot_df['DeepNPTS-lo-90'][-12:].values, \n", - " y2=plot_df['DeepNPTS-hi-90'][-12:].values,\n", - " alpha=0.4, label='level 90')\n", - "plt.legend()\n", "plt.grid()\n", "plt.plot()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/neuralforecast/_modidx.py b/neuralforecast/_modidx.py index 275d7598b..3406a3604 100644 --- a/neuralforecast/_modidx.py +++ b/neuralforecast/_modidx.py 
@@ -516,20 +516,8 @@ 'neuralforecast/models/deepnpts.py'), 'neuralforecast.models.deepnpts.DeepNPTS.__init__': ( 'models.deepnpts.html#deepnpts.__init__', 'neuralforecast/models/deepnpts.py'), - 'neuralforecast.models.deepnpts.DeepNPTS._domain_map': ( 'models.deepnpts.html#deepnpts._domain_map', - 'neuralforecast/models/deepnpts.py'), - 'neuralforecast.models.deepnpts.DeepNPTS._init_weights': ( 'models.deepnpts.html#deepnpts._init_weights', - 'neuralforecast/models/deepnpts.py'), 'neuralforecast.models.deepnpts.DeepNPTS.forward': ( 'models.deepnpts.html#deepnpts.forward', - 'neuralforecast/models/deepnpts.py'), - 'neuralforecast.models.deepnpts.DeepNPTS.predict_step': ( 'models.deepnpts.html#deepnpts.predict_step', - 'neuralforecast/models/deepnpts.py'), - 'neuralforecast.models.deepnpts.DeepNPTS.train_forward': ( 'models.deepnpts.html#deepnpts.train_forward', - 'neuralforecast/models/deepnpts.py'), - 'neuralforecast.models.deepnpts.DeepNPTS.training_step': ( 'models.deepnpts.html#deepnpts.training_step', - 'neuralforecast/models/deepnpts.py'), - 'neuralforecast.models.deepnpts.DeepNPTS.validation_step': ( 'models.deepnpts.html#deepnpts.validation_step', - 'neuralforecast/models/deepnpts.py')}, + 'neuralforecast/models/deepnpts.py')}, 'neuralforecast.models.dilated_rnn': { 'neuralforecast.models.dilated_rnn.AttentiveLSTMLayer': ( 'models.dilated_rnn.html#attentivelstmlayer', 'neuralforecast/models/dilated_rnn.py'), 'neuralforecast.models.dilated_rnn.AttentiveLSTMLayer.__init__': ( 'models.dilated_rnn.html#attentivelstmlayer.__init__', diff --git a/neuralforecast/losses/pytorch.py b/neuralforecast/losses/pytorch.py index 2e5ede2f5..fdcd5a623 100644 --- a/neuralforecast/losses/pytorch.py +++ b/neuralforecast/losses/pytorch.py @@ -13,7 +13,13 @@ import torch.nn.functional as F from torch.distributions import Distribution -from torch.distributions import Bernoulli, Normal, StudentT, Poisson, NegativeBinomial +from torch.distributions import ( + Bernoulli, + Normal, + 
StudentT, + Poisson, + NegativeBinomial, +) from torch.distributions import constraints @@ -1166,20 +1172,17 @@ def __init__( # If True, predict_step will return Distribution's parameters self.return_params = return_params if self.return_params: - lambda_names = [f"-lambda-{i}" for i in range(1, n_components + 1)] - weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] - self.param_names = [i for j in zip(lambda_names, weight_names) for i in j] + self.param_names = [f"-lambda-{i}" for i in range(1, n_components + 1)] self.output_names = self.output_names + self.param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 2 * n_components + self.outputsize_multiplier = n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - lambdas, weights = output.chunk(2, dim=-1) - return (lambdas, weights) + return (output,) # , weights def scale_decouple( self, @@ -1193,15 +1196,13 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. 
""" - lambdas, weights = output - weights = F.softmax(weights, dim=-1) - + lambdas = output[0] if (loc is not None) and (scale is not None): loc = loc.view(lambdas.size(dim=0), 1, -1) scale = scale.view(lambdas.size(dim=0), 1, -1) lambdas = (lambdas * scale) + loc lambdas = F.softplus(lambdas) - return (lambdas, weights) + return (lambdas,) def sample(self, distr_args, num_samples=None): """ @@ -1223,10 +1224,15 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - lambdas, weights = distr_args + lambdas = distr_args[0] B, H, K = lambdas.size() Q = len(self.quantiles) + # Sample K ~ Mult(weights) + # shared across B, H + # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) + weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) + # Avoid loop, vectorize weights = weights.reshape(-1, K) lambdas = lambdas.flatten() @@ -1267,7 +1273,7 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: @@ -1276,9 +1282,11 @@ def neglog_likelihood( mask = mask * ((y > 0) * 1) eps = 1e-10 - lambdas, weights = distr_args + lambdas = distr_args[0] B, H, K = lambdas.size() + weights = (1 / K) * torch.ones_like(lambdas, device=lambdas.device) + y = y[:, :, None] mask = mask[:, :, None] @@ -1305,7 +1313,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1367,22 +1375,18 @@ def __init__( if self.return_params: mu_names = [f"-mu-{i}" for i in range(1, n_components + 1)] std_names = [f"-std-{i}" for i in range(1, n_components + 1)] - weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] - self.param_names = [ - i for j in zip(mu_names, std_names, weight_names) for i in j 
- ] - self.output_names = self.output_names + self.param_names + mu_std_names = [i for j in zip(mu_names, std_names) for i in j] + self.output_names = self.output_names + mu_std_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 3 * n_components + self.outputsize_multiplier = 2 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - means, stds, weights = output.chunk(3, dim=-1) - - return (means, stds, weights) + means, stds = torch.tensor_split(output, 2, dim=-1) + return (means, stds) def scale_decouple( self, @@ -1397,16 +1401,14 @@ def scale_decouple( variance and residual location based on anchoring `loc`, `scale`. Also adds domain protection to the distribution parameters. """ - means, stds, weights = output + means, stds = output stds = F.softplus(stds) - weights = F.softmax(weights, dim=-1) if (loc is not None) and (scale is not None): loc = loc.view(means.size(dim=0), 1, -1) scale = scale.view(means.size(dim=0), 1, -1) means = (means * scale) + loc stds = (stds + eps) * scale - - return (means, stds, weights) + return (means, stds) def sample(self, distr_args, num_samples=None): """ @@ -1428,11 +1430,17 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - means, stds, weights = distr_args + means, stds = distr_args B, H, K = means.size() Q = len(self.quantiles) assert means.shape == stds.shape + # Sample K ~ Mult(weights) + # shared across B, H + # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) + + weights = (1 / K) * torch.ones_like(means, device=means.device) + # Avoid loop, vectorize weights = weights.reshape(-1, K) means = means.flatten() @@ -1473,16 +1481,18 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: 
Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - means, stds, weights = distr_args + means, stds = distr_args B, H, K = means.size() + weights = (1 / K) * torch.ones_like(means, device=means.device) + y = y[:, :, None] mask = mask[:, :, None] @@ -1510,7 +1520,7 @@ def neglog_likelihood( def __call__( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): @@ -1568,29 +1578,25 @@ def __init__( f"-total_count-{i}" for i in range(1, n_components + 1) ] probs_names = [f"-probs-{i}" for i in range(1, n_components + 1)] - weight_names = [f"-weight-{i}" for i in range(1, n_components + 1)] - self.param_names = [ - i for j in zip(total_count_names, probs_names, weight_names) for i in j - ] - self.output_names = self.output_names + self.param_names + param_names = [i for j in zip(total_count_names, probs_names) for i in j] + self.output_names = self.output_names + param_names # Add first output entry for the sample_mean self.output_names.insert(0, "") - self.outputsize_multiplier = 3 * n_components + self.outputsize_multiplier = 2 * n_components self.is_distribution_output = True def domain_map(self, output: torch.Tensor): - mu, alpha, weights = output.chunk(3, dim=-1) - - return mu, alpha, weights + mu, alpha = torch.tensor_split(output, 2, dim=-1) + return (mu, alpha) def scale_decouple( self, output, loc: Optional[torch.Tensor] = None, scale: Optional[torch.Tensor] = None, - eps: float = 1e-6, + eps: float = 0.2, ): """Scale Decouple @@ -1599,10 +1605,9 @@ def scale_decouple( Also adds domain protection to the distribution parameters. 
""" # Efficient NBinomial parametrization - mu, alpha, weights = output - mu = F.softplus(mu) + eps - alpha = F.softplus(alpha) + eps # alpha = 1/total_counts - weights = F.softmax(weights, dim=-1) + mu, alpha = output + mu = F.softplus(mu) + 1e-8 + alpha = F.softplus(alpha) + 1e-8 # alpha = 1/total_counts if (loc is not None) and (scale is not None): loc = loc.view(mu.size(dim=0), 1, -1) mu *= loc @@ -1612,9 +1617,8 @@ def scale_decouple( # => probs = mu / (total_count + mu) # => probs = mu / [total_count * (1 + mu * (1/total_count))] total_count = 1.0 / alpha - probs = mu * alpha / (1.0 + mu * alpha) - probs = torch.clamp(probs, eps, 1 - eps) - return (total_count, probs, weights) + probs = (mu * alpha / (1.0 + mu * alpha)) + 1e-8 + return (total_count, probs) def sample(self, distr_args, num_samples=None): """ @@ -1636,11 +1640,17 @@ def sample(self, distr_args, num_samples=None): if num_samples is None: num_samples = self.num_samples - total_count, probs, weights = distr_args + total_count, probs = distr_args B, H, K = total_count.size() Q = len(self.quantiles) assert total_count.shape == probs.shape + # Sample K ~ Mult(weights) + # shared across B, H + # weights = torch.repeat_interleave(input=weights, repeats=H, dim=2) + + weights = (1 / K) * torch.ones_like(probs, device=probs.device) + # Avoid loop, vectorize weights = weights.reshape(-1, K) total_count = total_count.flatten() @@ -1682,16 +1692,18 @@ def sample(self, distr_args, num_samples=None): def neglog_likelihood( self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): if mask is None: mask = torch.ones_like(y) - total_count, probs, weights = distr_args + total_count, probs = distr_args B, H, K = total_count.size() + weights = (1 / K) * torch.ones_like(probs, device=probs.device) + y = y[:, :, None] mask = mask[:, :, None] @@ -1722,7 +1734,7 @@ def neglog_likelihood( def __call__( 
self, y: torch.Tensor, - distr_args: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + distr_args: Tuple[torch.Tensor, torch.Tensor], mask: Union[torch.Tensor, None] = None, ): diff --git a/neuralforecast/models/__init__.py b/neuralforecast/models/__init__.py index ee07166ab..e519db838 100644 --- a/neuralforecast/models/__init__.py +++ b/neuralforecast/models/__init__.py @@ -33,4 +33,4 @@ from .itransformer import iTransformer from .bitcn import BiTCN from .tide import TiDE -from .deepnpts import DeepNPTS \ No newline at end of file +from .deepnpts import DeepNPTS diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index d4da85974..678f89c11 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -4,23 +4,21 @@ __all__ = ['DeepNPTS'] # %% ../../nbs/models.deepnpts.ipynb 3 -import numpy as np - import torch import torch.nn as nn +import torch.nn.functional as F import neuralforecast.losses.pytorch as losses from typing import Optional -from functools import partial from ..common._base_windows import BaseWindows -from ..losses.pytorch import MQLoss, GMM, PMM, NBMM +from ..losses.pytorch import MAE # %% ../../nbs/models.deepnpts.ipynb 7 class DeepNPTS(BaseWindows): """DeepNPTS - Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. + Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series. **Parameters:**
`h`: int, Forecast horizon.
@@ -29,7 +27,6 @@ class DeepNPTS(BaseWindows): `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
`dropout`: float=0.1, dropout.
`n_layers`: int=2, number of dense layers.
- `trajectory_samples`: int=100, number of Monte Carlo trajectories during inference.
`stat_exog_list`: str list, static exogenous columns.
`hist_exog_list`: str list, historic exogenous columns.
`futr_exog_list`: str list, future exogenous columns.
@@ -72,15 +69,14 @@ def __init__( batch_norm: bool = True, dropout: float = 0.1, n_layers: int = 2, - trajectory_samples: int = 100, futr_exog_list=None, hist_exog_list=None, stat_exog_list=None, exclude_insample_y=False, - loss=GMM(), - valid_loss=MQLoss(level=[80, 90]), + loss=MAE(), + valid_loss=MAE(), max_steps: int = 1000, - learning_rate: float = 1e-5, + learning_rate: float = 1e-3, num_lr_decays: int = 3, early_stop_patience_steps: int = -1, val_check_steps: int = 100, @@ -99,22 +95,13 @@ def __init__( **trainer_kwargs ): - if hist_exog_list is not None: - raise Exception("DeepNPTS does not support historical exogenous variables.") - if exclude_insample_y: raise Exception("DeepNPTS has no possibility for excluding y.") - supported_losses = (losses.GMM, losses.PMM, losses.NBMM) - - if not isinstance(loss, supported_losses): - raise Exception("DeepNPTS only supports GMM, PMM or NBMM as loss function.") - - if not isinstance(valid_loss, losses.MQLoss): - raise Exception("DeepNPTS only supports MQLoss as validation loss.") - - # Overwrite n_components, it has to be the input_size in DeepNPTS - loss.n_components = input_size + if not isinstance(loss, losses.BasePointLoss): + raise Exception( + "DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function." 
+ ) # Inherit BaseWindows class super(DeepNPTS, self).__init__( @@ -147,16 +134,19 @@ def __init__( ) self.h = h - self.h_backup = self.h # Used because h=1 during training - self.use_softmax = True self.hidden_size = hidden_size self.dropout = dropout - self.trajectory_samples = trajectory_samples self.futr_exog_size = len(self.futr_exog_list) self.stat_exog_size = len(self.stat_exog_list) + self.hist_exog_size = len(self.hist_exog_list) + + input_dim = ( + input_size * (1 + self.futr_exog_size + self.hist_exog_size) + + self.stat_exog_size + + self.h * self.futr_exog_size + ) - input_dim = input_size * (1 + self.futr_exog_size) + self.stat_exog_size # Create DeepNPTSNetwork modules = [] for i in range(n_layers): @@ -167,391 +157,61 @@ def __init__( if dropout > 0.0: modules.append(nn.Dropout(dropout)) + modules.append(nn.Linear(hidden_size, input_size * self.h)) self.deepnptsnetwork = nn.Sequential(*modules) - self.deepnptsnetwork.apply(partial(self._init_weights, scale=0.07)) - - # Add output layers for Mixture distribution - output_modules = [] - if dropout > 0.0: - output_modules.append(nn.Dropout(self.dropout)) - - if isinstance(loss, GMM): - output_modules.append(nn.Linear(hidden_size, input_size + 1)) - elif isinstance(loss, PMM): - output_modules.append(nn.Linear(hidden_size, input_size)) - elif isinstance(loss, NBMM): - output_modules.append(nn.Linear(hidden_size, input_size)) - - self.output_layer = nn.Sequential(*output_modules) - self.output_layer.apply(self._init_weights) - - @staticmethod - def _init_weights(module, scale=1.0): - if type(module) == nn.Linear: - nn.init.uniform_(module.weight, -scale, scale) - nn.init.zeros_(module.bias) - - def _domain_map(self, o_t, insample_y): - if isinstance(self.loss, GMM): - weights = o_t[:, :-1] # [B, L + 1] -> [B, L] - kernel_width = o_t[:, -1:] # [B, L + 1] -> [B, 1] - kernel_width = torch.repeat_interleave( - input=kernel_width, repeats=weights.shape[1], dim=-1 - ) # [B, 1] -> [B, L] - output = torch.cat( - 
[insample_y, kernel_width, weights], dim=-1 - ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] - output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * L] - elif isinstance(self.loss, PMM): - weights = o_t # [B, L] -> [B, L] - output = torch.cat( - [insample_y, weights], dim=-1 - ) # [B, L] + [B, L] = [B, 2 * L] - output = output.unsqueeze(1) # [B, 2 * L] = [B, 1, 2 * L] - elif isinstance(self.loss, NBMM): - weights = torch.ones_like(o_t) # [B, L] -> [B, L] - output = torch.cat( - [insample_y, o_t, weights], dim=-1 - ) # [B, L] + [B, L] + [B, L] = [B, 3 * L] - output = output.unsqueeze(1) # [B, 3 * L] = [B, 1, 3 * - - else: - raise NotImplementedError - - return output - - # Override BaseWindows method - def training_step(self, batch, batch_idx): - - # Only train one-step ahead - self.h = 1 - self.quantiles = self.loss.quantiles - - # Create and normalize windows [Ws, L+H, C] - y_idx = batch["y_idx"] - windows = self._create_windows(batch, step="train") - original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, y_idx]) - windows = self._normalization(windows=windows, y_idx=y_idx) - - # Parse windows - ( - insample_y, - insample_mask, - outsample_y, - outsample_mask, - _, - futr_exog, - stat_exog, - ) = self._parse_windows(batch, windows) - - windows_batch = dict( - insample_y=insample_y, # [Ws, L] - insample_mask=insample_mask, # [Ws, L] - futr_exog=futr_exog, # [Ws, L+H] - hist_exog=None, - stat_exog=stat_exog, # [Ws, 1] - y_idx=y_idx, # [Ws, 1] - ) - - # Model Predictions - output = self.train_forward(windows_batch) - - _, y_loc, y_scale = self._inv_normalization( - y_hat=outsample_y, temporal_cols=batch["temporal_cols"], y_idx=y_idx - ) - # outsample_y = original_insample_y - outsample_y = original_outsample_y - distr_args = self.loss.scale_decouple(output=output, loc=y_loc, scale=y_scale) - loss = self.loss(y=outsample_y, distr_args=distr_args, mask=outsample_mask) - - if torch.isnan(loss): - print("Model Parameters", self.hparams) - print("insample_y", 
torch.isnan(insample_y).sum()) - print("outsample_y", torch.isnan(outsample_y).sum()) - print("output", torch.isnan(output).sum()) - raise Exception("Loss is NaN, training stopped.") - - self.log("train_loss", loss, prog_bar=True, on_epoch=True) - self.train_trajectories.append((self.global_step, float(loss))) - - self.h = self.h_backup - - return loss - - # Override BaseWindows method - def validation_step(self, batch, batch_idx): - - self.h = self.h_backup - self.quantiles = self.valid_loss.quantiles - - if self.val_size == 0: - return np.nan - - # TODO: Hack to compute number of windows - windows = self._create_windows(batch, step="val") - n_windows = len(windows["temporal"]) - y_idx = batch["y_idx"] - - # Number of windows in batch - windows_batch_size = self.inference_windows_batch_size - if windows_batch_size < 0: - windows_batch_size = n_windows - n_batches = int(np.ceil(n_windows / windows_batch_size)) - - valid_losses = [] - batch_sizes = [] - for i in range(n_batches): - # Create and normalize windows [Ws, L+H, C] - w_idxs = np.arange( - i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) - ) - windows = self._create_windows(batch, step="val", w_idxs=w_idxs) - original_outsample_y = torch.clone(windows["temporal"][:, -self.h :, 0]) - windows = self._normalization(windows=windows, y_idx=y_idx) - - # Parse windows - ( - insample_y, - insample_mask, - _, - outsample_mask, - _, - futr_exog, - stat_exog, - ) = self._parse_windows(batch, windows) - - windows_batch = dict( - insample_y=insample_y, # [Ws, L] - insample_mask=insample_mask, # [Ws, L] - futr_exog=futr_exog, # [Ws, L+H] - hist_exog=None, # [Ws, L] - stat_exog=stat_exog, - y_idx=y_idx, - ) # [Ws, 1] - - # Model Predictions - output_batch = self(windows_batch) - # Monte Carlo already returns y_hat with mean and quantiles - output_batch = output_batch[:, :, 1:] # Remove mean - valid_loss_batch = self.valid_loss( - y=original_outsample_y, y_hat=output_batch, mask=outsample_mask - ) - 
valid_losses.append(valid_loss_batch) - batch_sizes.append(len(output_batch)) - - valid_loss = torch.stack(valid_losses) - batch_sizes = torch.tensor(batch_sizes, device=valid_loss.device) - valid_loss = torch.sum(valid_loss * batch_sizes) / torch.sum(batch_sizes) - - if torch.isnan(valid_loss): - raise Exception("Loss is NaN, training stopped.") - - self.log("valid_loss", valid_loss, prog_bar=True, on_epoch=True) - self.validation_step_outputs.append(valid_loss) - return valid_loss - # Override BaseWindows method - def predict_step(self, batch, batch_idx): - - self.h == self.h_backup - self.quantiles = self.loss.quantiles - - # TODO: Hack to compute number of windows - windows = self._create_windows(batch, step="predict") - n_windows = len(windows["temporal"]) - y_idx = batch["y_idx"] - - # Number of windows in batch - windows_batch_size = self.inference_windows_batch_size - if windows_batch_size < 0: - windows_batch_size = n_windows - n_batches = int(np.ceil(n_windows / windows_batch_size)) - - y_hats = [] - for i in range(n_batches): - # Create and normalize windows [Ws, L+H, C] - w_idxs = np.arange( - i * windows_batch_size, min((i + 1) * windows_batch_size, n_windows) - ) - windows = self._create_windows(batch, step="predict", w_idxs=w_idxs) - windows = self._normalization(windows=windows, y_idx=y_idx) - - # Parse windows - insample_y, insample_mask, _, _, _, futr_exog, stat_exog = ( - self._parse_windows(batch, windows) - ) - windows_batch = dict( - insample_y=insample_y, # [Ws, L] - insample_mask=insample_mask, # [Ws, L] - futr_exog=futr_exog, # [Ws, L+H] - stat_exog=stat_exog, - y_idx=y_idx, - ) - - # Model Predictions - y_hat = self(windows_batch) - # Monte Carlo already returns y_hat with mean and quantiles - y_hats.append(y_hat) - y_hat = torch.cat(y_hats, dim=0) - return y_hat - - def train_forward(self, windows_batch): + def forward(self, windows_batch): # Parse windows_batch - x_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + x = 
windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] + hist_exog = windows_batch["hist_exog"] # [B, L, X] futr_exog = windows_batch["futr_exog"] # [B, L + h, F] stat_exog = windows_batch["stat_exog"] # [B, S] - batch_size, seq_len = x_t.shape[:2] # B = batch_size, L = seq_len + batch_size, seq_len = x.shape[:2] # B = batch_size, L = seq_len + insample_y = windows_batch["insample_y"].unsqueeze(-1) - # Concatenate x_t with future exogenous + # Concatenate x_t with future exogenous of input if self.futr_exog_size > 0: - futr_exog_t = futr_exog[:, :seq_len] # [B, L + h, F] -> [B, L, F] - x_t = torch.cat( - (x_t, futr_exog_t), dim=2 + x = torch.cat( + (x, futr_exog[:, :seq_len]), dim=2 ) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] - x_t = x_t.reshape(batch_size, -1) # [B, L, 1 + F] -> [B, L * (1 + F)] + # Concatenate x_t with historic exogenous + if self.hist_exog_size > 0: + x = torch.cat( + (x, hist_exog), dim=2 + ) # [B, L, 1 + F] + [B, L, X] -> [B, L, 1 + F + X] - # Concatenate x_t with static exogenous - if self.stat_exog_size > 0: - x_t = torch.cat( - (x_t, stat_exog), dim=1 - ) # [B, L * (1 + F)] + [B, S] -> [B, L * (1 + F) + S] - - # Run through DeepNPTSNetwork - h_t = self.deepnptsnetwork(x_t) # [B, L * (1 + F) + S] -> [B, hidden_size] - o_t = self.output_layer(h_t) # [B, hidden_size] -> [B, L + 1] - - output = self._domain_map( - o_t, windows_batch["insample_y"] - ) # [B, L + 1], [B, L] -> [B, 3 * L] - output = self.loss.domain_map( - output - ) # [B, 3 * L] -> ([B, L], [B, L], [B, L]) - - return output - - def forward(self, windows_batch): - # Parse windows_batch - insample_y_t = windows_batch["insample_y"].unsqueeze(-1) # [B, L, 1] - futr_exog = windows_batch["futr_exog"] # [B, L + h, F] - stat_exog = windows_batch["stat_exog"] # [B, S] - y_idx = windows_batch["y_idx"] + x = x.reshape(batch_size, -1) # [B, L, 1 + F + X] -> [B, L * (1 + F + X)] - batch_size, seq_len = insample_y_t.shape[:2] # B = batch_size, L = seq_len - device = insample_y_t.device - dtype = 
insample_y_t.dtype - - # Repeat insample_y for trajectory samples - insample_y_t = torch.repeat_interleave( - input=insample_y_t, repeats=self.trajectory_samples, dim=0 - ) # [B, L, 1] -> [B * n_samples, L, 1] - - # Input x_t is insample_y at time t - x_t = insample_y_t - - # Repeat futr_exog if available for trajectory samples and add to x_t - if self.futr_exog_size > 0: - futr_exog = torch.repeat_interleave( - input=futr_exog, repeats=self.trajectory_samples, dim=0 - ) # [B, L + h, F] -> [B * n_samples, L + h, F] - x_t = torch.cat( - (x_t, futr_exog[:, :seq_len]), dim=2 - ) # [B * n_samples, L, 1] + [B * n_samples, L, F] -> [B * n_samples, L, 1 + F] - - x_t = x_t.reshape( - batch_size * self.trajectory_samples, -1 - ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] - - # Repeat stat_exog if available for trajectory samples and add to x_t + # Concatenate x with static exogenous if self.stat_exog_size > 0: - stat_exog = torch.repeat_interleave( - input=stat_exog, repeats=self.trajectory_samples, dim=0 - ) # [B, S] -> [B * n_samples, S] - x_t = torch.cat( - (x_t, stat_exog), dim=1 - ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] - - # Scales for inverse normalization - y_scale = self.scaler.x_scale[:, :, y_idx] - y_loc = self.scaler.x_shift[:, :, y_idx] - y_scale = torch.repeat_interleave( - input=y_scale, repeats=self.trajectory_samples, dim=0 - ) - y_loc = torch.repeat_interleave( - input=y_loc, repeats=self.trajectory_samples, dim=0 - ) - # Create forecasts tensor - forecasts = torch.zeros( - (batch_size, self.h, len(self.quantiles) + 1), device=device, dtype=dtype - ) + x = torch.cat( + (x, stat_exog), dim=1 + ) # [B, L * (1 + F + X)] + [B, S] -> [B, L * (1 + F + X) + S] - # Recursive predictions - for t in range(self.h): - # Run input throught DeepNPTSNetwork - h_t = self.deepnptsnetwork( - x_t - ) # [B * n_samples, L * (1 + F) + S] -> [B, hidden_size] - o_t = self.output_layer( - h_t - ) # [B * n_samples, 
hidden_size] -> [B * n_samples, L (+ 1)] - output = self._domain_map( - o_t, insample_y_t.squeeze(-1) - ) # [B * n_samples, L + 1], [B * n_samples, L] -> [B * n_samples, 3 * L] - output = self.loss.domain_map( - output - ) # [B * n_samples, 3 * L] -> ([B * n_samples, L], [B * n_samples, L], [B * n_samples, L]) - - # Inverse normalization - distr_args = self.loss.scale_decouple( - output=output, loc=y_loc, scale=y_scale - ) - - # Sample and create probabilistic outputs - samples_t_flat, _, _ = self.loss.sample( - distr_args=distr_args, num_samples=1 - ) - - samples_t_flat = samples_t_flat.squeeze() - samples_t = samples_t_flat.reshape( - batch_size, self.trajectory_samples - ) # [B * n_samples] -> [B, n_samples] - - samples_t_mean = torch.mean(samples_t, dim=-1) # [B, n_samples] -> [B] - quantiles_t = torch.quantile( - input=samples_t, q=self.quantiles, dim=-1 - ) # [B, n_samples] -> [Q, B] - forecasts[:, t, 0] = samples_t_mean - forecasts[:, t, 1:] = quantiles_t.permute(1, 0) - - insample_y_t_next = self.scaler.scaler( - samples_t_flat, y_loc.squeeze(), y_scale.squeeze() - ) # [B * n_samples] -> [B * n_samples] - insample_y_t_next = insample_y_t_next.unsqueeze(-1).unsqueeze( - -1 - ) # [B * n_samples] -> [B * n_samples, 1, 1] - - # Update insample_y_t - insample_y_t = torch.cat( - [insample_y_t[:, 1:], insample_y_t_next], dim=1 - ) # [B * n_samples, L - 1, 1] + [B * n_samples, 1, 1] -> [B * n_samples, L, 1] + # Concatenate x_t with future exogenous of horizon + if self.futr_exog_size > 0: + futr_exog = futr_exog[:, seq_len:] # [B, L + h, F] -> [B, h, F] + futr_exog = futr_exog.reshape( + batch_size, -1 + ) # [B, L + h, F] -> [B, h * F] + x = torch.cat( + (x, futr_exog), dim=1 + ) # [B, L * (1 + F + X) + S] + [B, h * F] -> [B, L * (1 + F + X) + S + h * F] - # Update input - x_t = insample_y_t - # Concatenate x_t with future exogenous - if self.futr_exog_size > 0: - x_t = torch.cat( - (x_t, futr_exog[:, t : seq_len + t]), dim=2 - ) # [B * n_samples, L, 1] + [B * 
n_samples, L, F] -> [B * n_samples, L, 1 + F] + # Run through DeepNPTSNetwork + weights = self.deepnptsnetwork( + x + ) # [B, L * (1 + F + X) + S + h * F] -> [B, L * h] - x_t = x_t.reshape( - batch_size * self.trajectory_samples, -1 - ) # [B * n_samples, L, 1 + F] -> [B * n_samples, L * (1 + F)] + # Apply softmax for weighted input predictions + weights = weights.reshape(batch_size, seq_len, -1) # [B, L * h] -> [B, L, h] + x = ( + F.softmax(weights, dim=1) * insample_y + ) # [B, L, h] * [B, L, 1] = [B, L, h] + output = torch.sum(x, dim=1).unsqueeze(-1) # [B, L, h] -> [B, h, 1] - # Concatenate x_t with static exogenous - if self.stat_exog_size > 0: - x_t = torch.cat( - (x_t, stat_exog), dim=1 - ) # [B * n_samples, L * (1 + F)] + [B * n_samples, S] -> [B * n_samples, L * (1 + F) + S] + forecast = self.loss.domain_map(output) # [B, h, 1] -> [B, h, 1] - return forecasts + return forecast From d702d637f73df146886b148e6476276ffa220d45 Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Mon, 6 May 2024 09:40:12 +0200 Subject: [PATCH 06/11] update_model_files --- nbs/models.deepnpts.ipynb | 567 +++++++++++++++++++++++++++++- neuralforecast/_modidx.py | 5 + neuralforecast/models/deepnpts.py | 7 +- 3 files changed, 572 insertions(+), 7 deletions(-) diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index c1852c18a..7b6cac9e0 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -32,7 +32,7 @@ ":::{.callout-warning collapse=\"false\"}\n", "#### Losses\n", "\n", - "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast, rather than a sampled distributional output. Consequently, DeepNPTS only supports point losses as training loss.\n", + "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast. 
Therefore, it only supports point losses.\n", "\n", ":::" ] @@ -160,7 +160,7 @@ " batch_size: int = 32,\n", " valid_batch_size: Optional[int] = None,\n", " windows_batch_size: int = 1024,\n", - " inference_windows_batch_size: int = -1,\n", + " inference_windows_batch_size: int = 1024,\n", " start_padding_enabled = False,\n", " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", @@ -177,6 +177,9 @@ " if not isinstance(loss, losses.BasePointLoss):\n", " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function.') \n", " \n", + " if not isinstance(valid_loss, losses.BasePointLoss):\n", + " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as valid loss function.') \n", + " \n", " # Inherit BaseWindows class\n", " super(DeepNPTS, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -275,7 +278,139 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L18){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.1, n_layers:int=2, futr_exog_list=None,\n", + "> hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=MAE(), valid_loss=MAE(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=1024,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> 
optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n", + "`input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + "`dropout`: float=0.1, dropout.
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=1024, number of windows to sample in each inference batch, -1 uses all.<br>
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n", + "`scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br>
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ], + "text/plain": [ + "---\n", + "\n", + "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L18){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "\n", + "### DeepNPTS\n", + "\n", + "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", + "> dropout:float=0.1, n_layers:int=2, futr_exog_list=None,\n", + "> hist_exog_list=None, stat_exog_list=None,\n", + "> exclude_insample_y=False, loss=MAE(), valid_loss=MAE(),\n", + "> max_steps:int=1000, learning_rate:float=0.001,\n", + "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", + "> val_check_steps:int=100, batch_size:int=32,\n", + "> valid_batch_size:Optional[int]=None,\n", + "> windows_batch_size:int=1024,\n", + "> inference_windows_batch_size:int=1024,\n", + "> start_padding_enabled=False, step_size:int=1,\n", + "> scaler_type:str='standard', random_seed:int=1,\n", + "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", + "> optimizer_kwargs=None, **trainer_kwargs)\n", + "\n", + "DeepNPTS\n", + "\n", + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", + "\n", + "**Parameters:**
\n", + "`h`: int, Forecast horizon.
\n", + "`input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + "`hidden_size`: int=32, hidden size of dense layers.
\n", + "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + "`dropout`: float=0.1, dropout.
\n", + "`n_layers`: int=2, number of dense layers.
\n", + "`stat_exog_list`: str list, static exogenous columns.
\n", + "`hist_exog_list`: str list, historic exogenous columns.
\n", + "`futr_exog_list`: str list, future exogenous columns.
\n", + "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + "`max_steps`: int=1000, maximum number of training steps.
\n", + "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", + "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", + "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + "`batch_size`: int=32, number of different series in each batch.
\n", + "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", + "`inference_windows_batch_size`: int=1024, number of windows to sample in each inference batch, -1 uses all.<br>
\n", + "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + "`step_size`: int=1, step size between each window of temporal data.
\n", + "`scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br>
\n", + "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + "`alias`: str, optional, Custom name of the model.
\n", + "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
\n", + "\n", + "**References**
\n", + "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DeepNPTS, title_level=3)" ] @@ -284,7 +419,73 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.fit\n", + "\n", + "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", + "> distributed_config=None)\n", + "\n", + "Fit.\n", + "\n", + "The `fit` method, optimizes the neural network's weights using the\n", + "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", + "and the `loss` function as defined during the initialization.\n", + "Within `fit` we use a PyTorch Lightning `Trainer` that\n", + "inherits the initialization's `self.trainer_kwargs`, to customize\n", + "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", + "\n", + "The method is designed to be compatible with SKLearn-like classes\n", + "and in particular to be compatible with the StatsForecast library.\n", + "\n", + "By default the `model` is not saving training checkpoints to protect\n", + "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`val_size`: int, validation size for temporal cross-validation.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`test_size`: int, test size for temporal cross-validation.
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" ] @@ -293,7 +494,53 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ], + "text/plain": [ + "---\n", + "\n", + "### DeepNPTS.predict\n", + "\n", + "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", + "> **data_module_kwargs)\n", + "\n", + "Predict.\n", + "\n", + "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", + "\n", + "**Parameters:**
\n", + "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", + "`test_size`: int=None, test size for temporal cross-validation.
\n", + "`step_size`: int=1, Step size between each window.
\n", + "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", + "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" ] @@ -323,7 +570,315 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Seed set to 1\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a683239fc3e5435aad7174b0d136376d", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Sanity Checking: | | 0/? [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "#| eval: false\n", "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds Date: Mon, 6 May 2024 20:22:54 +0200 Subject: [PATCH 07/11] merge_conflicts --- nbs/models.ipynb | 397 +++++++++++++++-------------------------- neuralforecast/auto.py | 103 +++++++++-- neuralforecast/core.py | 3 - 3 files changed, 229 insertions(+), 274 deletions(-) diff --git a/nbs/models.ipynb b/nbs/models.ipynb index 428eeabc1..43dfe80e7 100644 --- a/nbs/models.ipynb +++ b/nbs/models.ipynb @@ -54,6 +54,7 @@ "from neuralforecast.models.dlinear import DLinear\n", "from neuralforecast.models.nlinear import NLinear\n", "from neuralforecast.models.tide import TiDE\n", + "from neuralforecast.models.deepnpts import DeepNPTS\n", "\n", "from neuralforecast.models.tft import TFT\n", "from neuralforecast.models.vanillatransformer import VanillaTransformer\n", @@ -2099,89 +2100,7 @@ "execution_count": null, "id": "d31d3bfa", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### AutoTiDE\n", - "\n", - "> AutoTiDE (h, loss=MAE(), valid_loss=None, config=None,\n", - "> search_alg= object at 0x0000022D7EF8FC10>, 
num_samples=10,\n", - "> refit_with_val=False, cpus=20, gpus=1, verbose=False,\n", - "> alias=None, backend='ray', callbacks=None)\n", - "\n", - "Class for Automatic Hyperparameter Optimization, it builds on top of `ray` to\n", - "give access to a wide variety of hyperparameter optimization tools ranging\n", - "from classic grid search, to Bayesian optimization and HyperBand algorithm.\n", - "\n", - "The validation loss to be optimized is defined by the `config['loss']` dictionary\n", - "value, the config also contains the rest of the hyperparameter search space.\n", - "\n", - "It is important to note that the success of this hyperparameter optimization\n", - "heavily relies on a strong correlation between the validation and test periods.\n", - "\n", - "| | **Type** | **Default** | **Details** |\n", - "| -- | -------- | ----------- | ----------- |\n", - "| h | int | | Forecast horizon |\n", - "| loss | MAE | MAE() | Instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html). |\n", - "| valid_loss | NoneType | None | Instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html). |\n", - "| config | NoneType | None | Dictionary with ray.tune defined search space or function that takes an optuna trial and returns a configuration dict. |\n", - "| search_alg | BasicVariantGenerator | | For ray see https://docs.ray.io/en/latest/tune/api_docs/suggestion.html
For optuna see https://optuna.readthedocs.io/en/stable/reference/samplers/index.html. |\n", - "| num_samples | int | 10 | Number of hyperparameter optimization steps/samples. |\n", - "| refit_with_val | bool | False | Refit of best model should preserve val_size. |\n", - "| cpus | int | 20 | Number of cpus to use during optimization. Only used with ray tune. |\n", - "| gpus | int | 1 | Number of gpus to use during optimization, default all available. Only used with ray tune. |\n", - "| verbose | bool | False | Track progress. |\n", - "| alias | NoneType | None | Custom name of the model. |\n", - "| backend | str | ray | Backend to use for searching the hyperparameter space, can be either 'ray' or 'optuna'. |\n", - "| callbacks | NoneType | None | List of functions to call during the optimization process.
ray reference: https://docs.ray.io/en/latest/tune/tutorials/tune-metrics.html
optuna reference: https://optuna.readthedocs.io/en/stable/tutorial/20_recipes/007_optuna_callback.html |" - ], - "text/plain": [ - "---\n", - "\n", - "### AutoTiDE\n", - "\n", - "> AutoTiDE (h, loss=MAE(), valid_loss=None, config=None,\n", - "> search_alg= object at 0x0000022D7EF8FC10>, num_samples=10,\n", - "> refit_with_val=False, cpus=20, gpus=1, verbose=False,\n", - "> alias=None, backend='ray', callbacks=None)\n", - "\n", - "Class for Automatic Hyperparameter Optimization, it builds on top of `ray` to\n", - "give access to a wide variety of hyperparameter optimization tools ranging\n", - "from classic grid search, to Bayesian optimization and HyperBand algorithm.\n", - "\n", - "The validation loss to be optimized is defined by the `config['loss']` dictionary\n", - "value, the config also contains the rest of the hyperparameter search space.\n", - "\n", - "It is important to note that the success of this hyperparameter optimization\n", - "heavily relies on a strong correlation between the validation and test periods.\n", - "\n", - "| | **Type** | **Default** | **Details** |\n", - "| -- | -------- | ----------- | ----------- |\n", - "| h | int | | Forecast horizon |\n", - "| loss | MAE | MAE() | Instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html). |\n", - "| valid_loss | NoneType | None | Instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html). |\n", - "| config | NoneType | None | Dictionary with ray.tune defined search space or function that takes an optuna trial and returns a configuration dict. |\n", - "| search_alg | BasicVariantGenerator | | For ray see https://docs.ray.io/en/latest/tune/api_docs/suggestion.html
For optuna see https://optuna.readthedocs.io/en/stable/reference/samplers/index.html. |\n", - "| num_samples | int | 10 | Number of hyperparameter optimization steps/samples. |\n", - "| refit_with_val | bool | False | Refit of best model should preserve val_size. |\n", - "| cpus | int | 20 | Number of cpus to use during optimization. Only used with ray tune. |\n", - "| gpus | int | 1 | Number of gpus to use during optimization, default all available. Only used with ray tune. |\n", - "| verbose | bool | False | Track progress. |\n", - "| alias | NoneType | None | Custom name of the model. |\n", - "| backend | str | ray | Backend to use for searching the hyperparameter space, can be either 'ray' or 'optuna'. |\n", - "| callbacks | NoneType | None | List of functions to call during the optimization process.
ray reference: https://docs.ray.io/en/latest/tune/tutorials/tune-metrics.html
optuna reference: https://optuna.readthedocs.io/en/stable/tutorial/20_recipes/007_optuna_callback.html |" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(AutoTiDE, title_level=3)" ] @@ -2191,19 +2110,7 @@ "execution_count": null, "id": "7ae8f192", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2024-04-15 19:19:42,074\tINFO worker.py:1752 -- Started a local Ray instance.\n", - "2024-04-15 19:19:43,810\tINFO tune.py:263 -- Initializing Ray automatically. For cluster usage or custom Ray initialization, call `ray.init(...)` before `Tuner(...)`.\n", - "2024-04-15 19:19:43,813\tINFO tune.py:613 -- [output] This uses the legacy output and progress reporter, as Jupyter notebooks are not supported by the new engine, yet. For more information, please see https://github.com/ray-project/ray/issues/36949\n", - "2024-04-15 19:19:50,851\tINFO tune.py:1016 -- Wrote the latest version of all result files and experiment state to 'C:/Users/ospra/ray_results/_train_tune_2024-04-15_19-19-40' in 0.0053s.\n", - "Seed set to 1\n" - ] - } - ], + "outputs": [], "source": [ "%%capture\n", "# Use your own config or AutoTiDE.default_config\n", @@ -2223,165 +2130,7 @@ "execution_count": null, "id": "d66600b9", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[36m(_train_tune pid=30124)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\ray\\tune\\integration\\pytorch_lightning.py:194: `ray.tune.integration.pytorch_lightning.TuneReportCallback` is deprecated. 
Use `ray.tune.integration.pytorch_lightning.TuneReportCheckpointCallback` instead.\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\pytorch_lightning\\utilities\\parsing.py:199: Attribute 'loss' is an instance of `nn.Module` and is already saved during checkpointing. It is recommended to ignore them using `self.save_hyperparameters(ignore=['loss'])`.\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\pytorch_lightning\\utilities\\parsing.py:199: Attribute 'valid_loss' is an instance of `nn.Module` and is already saved during checkpointing. It is recommended to ignore them using `self.save_hyperparameters(ignore=['valid_loss'])`.\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m Seed set to 11\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m GPU available: True (cuda), used: True\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m TPU available: False, using: 0 TPU cores\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m IPU available: False, using: 0 IPUs\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m HPU available: False, using: 0 HPUs\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m `Trainer(val_check_interval=1)` was configured so validation will run after every batch.\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m You are using a CUDA device ('NVIDIA GeForce RTX 3090') that has Tensor Cores. To properly utilize them, you should set `torch.set_float32_matmul_precision('medium' | 'high')` which will trade-off precision for performance. 
For more details, read https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html#torch.set_float32_matmul_precision\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m Missing logger folder: C:\\Users\\ospra\\AppData\\Local\\Temp\\ray\\session_2024-04-15_19-19-40_426885_27112\\artifacts\\2024-04-15_19-19-55\\_train_tune_2024-04-15_19-19-55\\working_dirs\\_train_tune_55d90_00000\\lightning_logs\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0]\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m | Name | Type | Params\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m ---------------------------------------------------\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 0 | loss | MAE | 0 \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 1 | padder_train | ConstantPad1d | 0 \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 2 | scaler | TemporalNorm | 0 \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 3 | dense_encoder | Sequential | 1.1 M \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 4 | dense_decoder | Sequential | 361 K \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 5 | temporal_decoder | MLPResidual | 1.3 K \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 6 | global_skip | Linear | 156 \n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m ---------------------------------------------------\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 1.4 M Trainable params\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 0 Non-trainable params\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 1.4 M Total params\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m 5.706 Total estimated model params size (MB)\n", - "\u001b[36m(_train_tune pid=30124)\u001b[0m c:\\Users\\ospra\\miniconda3\\envs\\neuralforecast\\lib\\site-packages\\pytorch_lightning\\trainer\\connectors\\data_connector.py:441: The 'val_dataloader' does not have many workers which may be a 
bottleneck. Consider increasing the value of the `num_workers` argument` to `num_workers=19` in the `DataLoader` to improve performance.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Sanity Checking: | | 0/? [00:00 Date: Mon, 6 May 2024 20:24:29 +0200 Subject: [PATCH 08/11] add_deepnpts_to_eval --- action_files/test_models/src/evaluation.py | 2 +- action_files/test_models/src/models.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/action_files/test_models/src/evaluation.py b/action_files/test_models/src/evaluation.py index cbe4e35c6..50a37a8d8 100644 --- a/action_files/test_models/src/evaluation.py +++ b/action_files/test_models/src/evaluation.py @@ -43,7 +43,7 @@ def evaluate(model: str, dataset: str, group: str): groups = ['Monthly'] models = ['AutoDilatedRNN', 'RNN', 'TCN', 'DeepAR', 'NHITS', 'TFT', 'AutoMLP', 'DLinear', 'VanillaTransformer', - 'BiTCN', 'TiDE'] + 'BiTCN', 'TiDE', 'DeepNPTS'] datasets = ['M3'] evaluation = [evaluate(model, dataset, group) for model, group in product(models, groups) for dataset in datasets] evaluation = [eval_ for eval_ in evaluation if eval_ is not None] diff --git a/action_files/test_models/src/models.py b/action_files/test_models/src/models.py index 7fb66f2d2..5dc70a308 100644 --- a/action_files/test_models/src/models.py +++ b/action_files/test_models/src/models.py @@ -28,6 +28,7 @@ from neuralforecast.models.dlinear import DLinear from neuralforecast.models.bitcn import BiTCN from neuralforecast.models.tide import TiDE +from neuralforecast.models.deepnpts import DeepNPTS from neuralforecast.auto import ( AutoMLP, @@ -76,6 +77,7 @@ def main(dataset: str = 'M3', group: str = 'Monthly') -> None: DeepAR(h=horizon, input_size=2 * horizon, scaler_type='minmax1', max_steps=1000), BiTCN(h=horizon, input_size=2 * horizon, loss=MAE(), dropout=0.0, max_steps=1000, val_check_steps=500), TiDE(h=horizon, input_size=2 * horizon, loss=MAE(), max_steps=1000, val_check_steps=500), + 
DeepNPTS(h=horizon, input_size=2 * horizon, loss=MAE(), max_steps=1000, val_check_steps=500), ] # Models From 237be0d686c4904a5db152fd667921773c99b29c Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Mon, 6 May 2024 20:26:38 +0200 Subject: [PATCH 09/11] add_deepnpts_to_eval --- nbs/models.deepnpts.ipynb | 560 +------------------------------------- 1 file changed, 4 insertions(+), 556 deletions(-) diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index 7b6cac9e0..f70dff2ec 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -278,139 +278,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L18){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DeepNPTS\n", - "\n", - "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", - "> dropout:float=0.1, n_layers:int=2, futr_exog_list=None,\n", - "> hist_exog_list=None, stat_exog_list=None,\n", - "> exclude_insample_y=False, loss=MAE(), valid_loss=MAE(),\n", - "> max_steps:int=1000, learning_rate:float=0.001,\n", - "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", - "> val_check_steps:int=100, batch_size:int=32,\n", - "> valid_batch_size:Optional[int]=None,\n", - "> windows_batch_size:int=1024,\n", - "> inference_windows_batch_size:int=1024,\n", - "> start_padding_enabled=False, step_size:int=1,\n", - "> scaler_type:str='standard', random_seed:int=1,\n", - "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", - "> optimizer_kwargs=None, **trainer_kwargs)\n", - "\n", - "DeepNPTS\n", - "\n", - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. 
The strategy is learned by exploiting the information across multiple related time series.\n", - "\n", - "**Parameters:**
\n", - "`h`: int, Forecast horizon.
\n", - "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - "`hidden_size`: int=32, hidden size of dense layers.
\n", - "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - "`dropout`: float=0.1, dropout.
\n", - "`n_layers`: int=2, number of dense layers.
\n", - "`stat_exog_list`: str list, static exogenous columns.
\n", - "`hist_exog_list`: str list, historic exogenous columns.
\n", - "`futr_exog_list`: str list, future exogenous columns.
\n", - "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`max_steps`: int=1000, maximum number of training steps.
\n", - "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - "`batch_size`: int=32, number of different series in each batch.
\n", - "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - "`step_size`: int=1, step size between each window of temporal data.
\n", - "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - "`alias`: str, optional, Custom name of the model.
\n", - "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - "**References**
\n", - "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/deepnpts.py#L18){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### DeepNPTS\n", - "\n", - "> DeepNPTS (h, input_size:int=-1, hidden_size:int=32, batch_norm:bool=True,\n", - "> dropout:float=0.1, n_layers:int=2, futr_exog_list=None,\n", - "> hist_exog_list=None, stat_exog_list=None,\n", - "> exclude_insample_y=False, loss=MAE(), valid_loss=MAE(),\n", - "> max_steps:int=1000, learning_rate:float=0.001,\n", - "> num_lr_decays:int=3, early_stop_patience_steps:int=-1,\n", - "> val_check_steps:int=100, batch_size:int=32,\n", - "> valid_batch_size:Optional[int]=None,\n", - "> windows_batch_size:int=1024,\n", - "> inference_windows_batch_size:int=1024,\n", - "> start_padding_enabled=False, step_size:int=1,\n", - "> scaler_type:str='standard', random_seed:int=1,\n", - "> num_workers_loader=0, drop_last_loader=False, optimizer=None,\n", - "> optimizer_kwargs=None, **trainer_kwargs)\n", - "\n", - "DeepNPTS\n", - "\n", - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", - "\n", - "**Parameters:**
\n", - "`h`: int, Forecast horizon.
\n", - "`input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - "`hidden_size`: int=32, hidden size of dense layers.
\n", - "`batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - "`dropout`: float=0.1, dropout.
\n", - "`n_layers`: int=2, number of dense layers.
\n", - "`stat_exog_list`: str list, static exogenous columns.
\n", - "`hist_exog_list`: str list, historic exogenous columns.
\n", - "`futr_exog_list`: str list, future exogenous columns.
\n", - "`exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - "`loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - "`max_steps`: int=1000, maximum number of training steps.
\n", - "`learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - "`num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - "`early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - "`val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - "`batch_size`: int=32, number of different series in each batch.
\n", - "`valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - "`windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - "`inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - "`start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - "`step_size`: int=1, step size between each window of temporal data.
\n", - "`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - "`random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - "`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - "`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - "`alias`: str, optional, Custom name of the model.
\n", - "`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - "`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - "`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - "**References**
\n", - "- [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS, title_level=3)" ] @@ -419,73 +287,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### DeepNPTS.fit\n", - "\n", - "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ], - "text/plain": [ - "---\n", - "\n", - "### DeepNPTS.fit\n", - "\n", - "> DeepNPTS.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" ] @@ -494,53 +296,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### DeepNPTS.predict\n", - "\n", - "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ], - "text/plain": [ - "---\n", - "\n", - "### DeepNPTS.predict\n", - "\n", - "> DeepNPTS.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" ] @@ -570,315 +326,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Seed set to 1\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "a683239fc3e5435aad7174b0d136376d", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Sanity Checking: | | 0/? [00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds Date: Mon, 6 May 2024 14:40:39 -0600 Subject: [PATCH 10/11] test From d3bf75b0925d617fef95c3b322fcc9690e61e441 Mon Sep 17 00:00:00 2001 From: Olivier Sprangers Date: Mon, 6 May 2024 23:16:24 +0200 Subject: [PATCH 11/11] remove_eval_false_from_usage_example --- nbs/models.deepnpts.ipynb | 745 +++++++++++++++++++------------------- 1 file changed, 372 insertions(+), 373 deletions(-) diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index e26906ee5..4f0d41445 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -1,373 +1,372 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp models.deepnpts" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# DeepNPTS" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series 
forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting. \n", - "\n", - "\n", - "**References**
\n", - "[Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", - "\n", - "\n", - ":::{.callout-warning collapse=\"false\"}\n", - "#### Losses\n", - "\n", - "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast. Therefore, it only supports point losses.\n", - "\n", - ":::" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "import torch\n", - "import torch.nn as nn\n", - "import torch.nn.functional as F\n", - "import neuralforecast.losses.pytorch as losses\n", - "from typing import Optional\n", - "\n", - "\n", - "from neuralforecast.common._base_windows import BaseWindows\n", - "from neuralforecast.losses.pytorch import MAE\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import logging\n", - "import warnings\n", - "\n", - "from fastcore.test import test_eq\n", - "from nbdev.showdoc import show_doc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "logging.getLogger(\"pytorch_lightning\").setLevel(logging.ERROR)\n", - "warnings.filterwarnings(\"ignore\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 2. DeepNPTS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "class DeepNPTS(BaseWindows):\n", - " \"\"\" DeepNPTS\n", - "\n", - " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", - "\n", - " **Parameters:**
\n", - " `h`: int, Forecast horizon.
\n", - " `input_size`: int, autorregresive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].
\n", - " `hidden_size`: int=32, hidden size of dense layers.
\n", - " `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", - " `dropout`: float=0.1, dropout.
\n", - " `n_layers`: int=2, number of dense layers.
\n", - " `stat_exog_list`: str list, static exogenous columns.
\n", - " `hist_exog_list`: str list, historic exogenous columns.
\n", - " `futr_exog_list`: str list, future exogenous columns.
\n", - " `exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", - " `loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - " `valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", - " `max_steps`: int=1000, maximum number of training steps.
\n", - " `learning_rate`: float=1e-3, Learning rate between (0, 1).
\n", - " `num_lr_decays`: int=-1, Number of learning rate decays, evenly distributed across max_steps.
\n", - " `early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", - " `val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", - " `batch_size`: int=32, number of different series in each batch.
\n", - " `valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", - " `windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n", - " `inference_windows_batch_size`: int=-1, number of windows to sample in each inference batch, -1 uses all.
\n", - " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", - " `step_size`: int=1, step size between each window of temporal data.
\n", - " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", - " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `alias`: str, optional, Custom name of the model.
\n", - " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", - " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", - " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", - "\n", - " **References**
\n", - " - [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", - "\n", - " \"\"\"\n", - " # Class attributes\n", - " SAMPLING_TYPE = 'windows'\n", - " \n", - " def __init__(self,\n", - " h,\n", - " input_size: int = -1,\n", - " hidden_size: int = 32,\n", - " batch_norm: bool = True,\n", - " dropout: float = 0.1,\n", - " n_layers: int = 2,\n", - " futr_exog_list = None,\n", - " hist_exog_list = None,\n", - " stat_exog_list = None,\n", - " exclude_insample_y = False,\n", - " loss = MAE(),\n", - " valid_loss = MAE(),\n", - " max_steps: int = 1000,\n", - " learning_rate: float = 1e-3,\n", - " num_lr_decays: int = 3,\n", - " early_stop_patience_steps: int =-1,\n", - " val_check_steps: int = 100,\n", - " batch_size: int = 32,\n", - " valid_batch_size: Optional[int] = None,\n", - " windows_batch_size: int = 1024,\n", - " inference_windows_batch_size: int = 1024,\n", - " start_padding_enabled = False,\n", - " step_size: int = 1,\n", - " scaler_type: str = 'standard',\n", - " random_seed: int = 1,\n", - " num_workers_loader = 0,\n", - " drop_last_loader = False,\n", - " optimizer = None,\n", - " optimizer_kwargs = None,\n", - " **trainer_kwargs):\n", - "\n", - " if exclude_insample_y:\n", - " raise Exception('DeepNPTS has no possibility for excluding y.')\n", - "\n", - " if not isinstance(loss, losses.BasePointLoss):\n", - " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function.') \n", - " \n", - " if not isinstance(valid_loss, losses.BasePointLoss):\n", - " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as valid loss function.') \n", - " \n", - " # Inherit BaseWindows class\n", - " super(DeepNPTS, self).__init__(h=h,\n", - " input_size=input_size,\n", - " futr_exog_list=futr_exog_list,\n", - " hist_exog_list=hist_exog_list,\n", - " stat_exog_list=stat_exog_list,\n", - " exclude_insample_y = exclude_insample_y,\n", - " loss=loss,\n", - " valid_loss=valid_loss,\n", - " max_steps=max_steps,\n", - " learning_rate=learning_rate,\n", - " 
num_lr_decays=num_lr_decays,\n", - " early_stop_patience_steps=early_stop_patience_steps,\n", - " val_check_steps=val_check_steps,\n", - " batch_size=batch_size,\n", - " windows_batch_size=windows_batch_size,\n", - " valid_batch_size=valid_batch_size,\n", - " inference_windows_batch_size=inference_windows_batch_size,\n", - " start_padding_enabled=start_padding_enabled,\n", - " step_size=step_size,\n", - " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " drop_last_loader=drop_last_loader,\n", - " random_seed=random_seed,\n", - " optimizer=optimizer,\n", - " optimizer_kwargs=optimizer_kwargs,\n", - " **trainer_kwargs)\n", - "\n", - " self.h = h\n", - " self.hidden_size = hidden_size\n", - " self.dropout = dropout\n", - "\n", - " self.futr_exog_size = len(self.futr_exog_list)\n", - " self.stat_exog_size = len(self.stat_exog_list)\n", - " self.hist_exog_size = len(self.hist_exog_list)\n", - "\n", - " input_dim = input_size * (1 + self.futr_exog_size + self.hist_exog_size) + self.stat_exog_size + self.h * self.futr_exog_size\n", - " \n", - " # Create DeepNPTSNetwork\n", - " modules = [] \n", - " for i in range(n_layers):\n", - " modules.append(nn.Linear(input_dim if i == 0 else hidden_size, hidden_size))\n", - " modules.append(nn.ReLU())\n", - " if batch_norm:\n", - " modules.append(nn.BatchNorm1d(hidden_size))\n", - " if dropout > 0.0:\n", - " modules.append(nn.Dropout(dropout))\n", - "\n", - " modules.append(nn.Linear(hidden_size, input_size * self.h))\n", - " self.deepnptsnetwork = nn.Sequential(*modules)\n", - "\n", - " def forward(self, windows_batch):\n", - " # Parse windows_batch\n", - " x = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", - " hist_exog = windows_batch['hist_exog'] # [B, L, X]\n", - " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", - " stat_exog = windows_batch['stat_exog'] # [B, S]\n", - "\n", - " batch_size, seq_len = x.shape[:2] # B = batch_size, L = seq_len\n", - " insample_y = 
windows_batch['insample_y'].unsqueeze(-1) \n", - " \n", - " # Concatenate x_t with future exogenous of input\n", - " if self.futr_exog_size > 0: \n", - " x = torch.cat((x, futr_exog[:, :seq_len]), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", - " \n", - " # Concatenate x_t with historic exogenous\n", - " if self.hist_exog_size > 0: \n", - " x = torch.cat((x, hist_exog), dim=2) # [B, L, 1 + F] + [B, L, X] -> [B, L, 1 + F + X] \n", - "\n", - " x = x.reshape(batch_size, -1) # [B, L, 1 + F + X] -> [B, L * (1 + F + X)]\n", - "\n", - " # Concatenate x with static exogenous\n", - " if self.stat_exog_size > 0:\n", - " x = torch.cat((x, stat_exog), dim=1) # [B, L * (1 + F + X)] + [B, S] -> [B, L * (1 + F + X) + S]\n", - "\n", - " # Concatenate x_t with future exogenous of horizon\n", - " if self.futr_exog_size > 0:\n", - " futr_exog = futr_exog[:, seq_len:] # [B, L + h, F] -> [B, h, F]\n", - " futr_exog = futr_exog.reshape(batch_size, -1) # [B, L + h, F] -> [B, h * F]\n", - " x = torch.cat((x, futr_exog), dim=1) # [B, L * (1 + F + X) + S] + [B, h * F] -> [B, L * (1 + F + X) + S + h * F] \n", - "\n", - " # Run through DeepNPTSNetwork\n", - " weights = self.deepnptsnetwork(x) # [B, L * (1 + F + X) + S + h * F] -> [B, L * h]\n", - "\n", - " # Apply softmax for weighted input predictions\n", - " weights = weights.reshape(batch_size, seq_len, -1) # [B, L * h] -> [B, L, h]\n", - " x = F.softmax(weights, dim=1) * insample_y # [B, L, h] * [B, L, 1] = [B, L, h]\n", - " output = torch.sum(x, dim=1).unsqueeze(-1) # [B, L, h] -> [B, h, 1]\n", - "\n", - " forecast = self.loss.domain_map(output) # [B, h, 1] -> [B, h, 1]\n", - "\n", - " return forecast" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "show_doc(DeepNPTS, title_level=3)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" - ] - }, 
- { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Usage Example" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import pandas as pd\n", - "import matplotlib.pyplot as plt\n", - "\n", - "from neuralforecast import NeuralForecast\n", - "from neuralforecast.utils import AirPassengersPanel, AirPassengersStatic" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| eval: false\n", - "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds=AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 12 test\n", - "\n", - "nf = NeuralForecast(\n", - " models=[DeepNPTS(h=12,\n", - " input_size=24,\n", - " stat_exog_list=['airline1'],\n", - " futr_exog_list=['trend'],\n", - " max_steps=1000,\n", - " val_check_steps=10,\n", - " early_stop_patience_steps=3,\n", - " scaler_type='robust',\n", - " enable_progress_bar=True),\n", - " ],\n", - " freq='M'\n", - ")\n", - "nf.fit(df=Y_train_df, static_df=AirPassengersStatic, val_size=12)\n", - "Y_hat_df = nf.predict(futr_df=Y_test_df)\n", - "\n", - "# Plot quantile predictions\n", - "Y_hat_df = Y_hat_df.reset_index(drop=False).drop(columns=['unique_id','ds'])\n", - "plot_df = pd.concat([Y_test_df, Y_hat_df], axis=1)\n", - "plot_df = pd.concat([Y_train_df, plot_df])\n", - "\n", - "plot_df = plot_df[plot_df.unique_id=='Airline1'].drop('unique_id', axis=1)\n", - "plt.plot(plot_df['ds'], plot_df['y'], c='black', label='True')\n", - "plt.plot(plot_df['ds'], plot_df['DeepNPTS'], c='red', label='mean')\n", - "plt.grid()\n", - "plt.plot()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - } - }, - "nbformat": 4, - 
"nbformat_minor": 4 -} +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.deepnpts" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# DeepNPTS" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a non-parametric baseline model for time-series forecasting. This model generates predictions by sampling from the empirical distribution according to a tunable strategy. This strategy is learned by exploiting the information across multiple related time series. This model provides a strong, simple baseline for time series forecasting. \n", + "\n", + "\n", + "**References**
\n", + "[Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", + "\n", + "\n", + ":::{.callout-warning collapse=\"false\"}\n", + "#### Losses\n", + "\n", + "This implementation differs from the original work in that a weighted sum of the empirical distribution is returned as forecast. Therefore, it only supports point losses.\n", + "\n", + ":::" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import neuralforecast.losses.pytorch as losses\n", + "from typing import Optional\n", + "\n", + "\n", + "from neuralforecast.common._base_windows import BaseWindows\n", + "from neuralforecast.losses.pytorch import MAE\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import logging\n", + "import warnings\n", + "\n", + "from fastcore.test import test_eq\n", + "from nbdev.showdoc import show_doc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "logging.getLogger(\"pytorch_lightning\").setLevel(logging.ERROR)\n", + "warnings.filterwarnings(\"ignore\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. DeepNPTS" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class DeepNPTS(BaseWindows):\n", + " \"\"\" DeepNPTS\n", + "\n", + " Deep Non-Parametric Time Series Forecaster (`DeepNPTS`) is a baseline model for time-series forecasting. This model generates predictions by (weighted) sampling from the empirical distribution according to a learnable strategy. The strategy is learned by exploiting the information across multiple related time series.\n", + "\n", + " **Parameters:**
\n", + " `h`: int, Forecast horizon.
\n",
+    "    `input_size`: int, autoregressive inputs size, y=[1,2,3,4] input_size=2 -> y_[t-2:t]=[1,2].<br>
\n", + " `hidden_size`: int=32, hidden size of dense layers.
\n", + " `batch_norm`: bool=True, if True, applies Batch Normalization after each dense layer in the network.
\n", + " `dropout`: float=0.1, dropout.
\n", + " `n_layers`: int=2, number of dense layers.
\n", + " `stat_exog_list`: str list, static exogenous columns.
\n", + " `hist_exog_list`: str list, historic exogenous columns.
\n", + " `futr_exog_list`: str list, future exogenous columns.
\n", + " `exclude_insample_y`: bool=False, the model skips the autoregressive features y[t-input_size:t] if True.
\n", + " `loss`: PyTorch module, instantiated train loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + " `valid_loss`: PyTorch module=`loss`, instantiated valid loss class from [losses collection](https://nixtla.github.io/neuralforecast/losses.pytorch.html).
\n", + " `max_steps`: int=1000, maximum number of training steps.
\n", + " `learning_rate`: float=1e-3, Learning rate between (0, 1).
\n",
+    "    `num_lr_decays`: int=3, Number of learning rate decays, evenly distributed across max_steps.<br>
\n", + " `early_stop_patience_steps`: int=-1, Number of validation iterations before early stopping.
\n", + " `val_check_steps`: int=100, Number of training steps between every validation loss check.
\n", + " `batch_size`: int=32, number of different series in each batch.
\n", + " `valid_batch_size`: int=None, number of different series in each validation and test batch, if None uses batch_size.
\n", + " `windows_batch_size`: int=1024, number of windows to sample in each training batch, default uses all.
\n",
+    "    `inference_windows_batch_size`: int=1024, number of windows to sample in each inference batch, -1 uses all.<br>
\n", + " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", + " `step_size`: int=1, step size between each window of temporal data.
\n",
+    "    `scaler_type`: str='standard', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).<br>
\n", + " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `alias`: str, optional, Custom name of the model.
\n", + " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", + " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
+    "    `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
\n", + "\n", + " **References**
\n", + " - [Rangapuram, Syama Sundar, Jan Gasthaus, Lorenzo Stella, Valentin Flunkert, David Salinas, Yuyang Wang, and Tim Januschowski (2023). \"Deep Non-Parametric Time Series Forecaster\". arXiv.](https://arxiv.org/abs/2312.14657)
\n", + "\n", + " \"\"\"\n", + " # Class attributes\n", + " SAMPLING_TYPE = 'windows'\n", + " \n", + " def __init__(self,\n", + " h,\n", + " input_size: int = -1,\n", + " hidden_size: int = 32,\n", + " batch_norm: bool = True,\n", + " dropout: float = 0.1,\n", + " n_layers: int = 2,\n", + " futr_exog_list = None,\n", + " hist_exog_list = None,\n", + " stat_exog_list = None,\n", + " exclude_insample_y = False,\n", + " loss = MAE(),\n", + " valid_loss = MAE(),\n", + " max_steps: int = 1000,\n", + " learning_rate: float = 1e-3,\n", + " num_lr_decays: int = 3,\n", + " early_stop_patience_steps: int =-1,\n", + " val_check_steps: int = 100,\n", + " batch_size: int = 32,\n", + " valid_batch_size: Optional[int] = None,\n", + " windows_batch_size: int = 1024,\n", + " inference_windows_batch_size: int = 1024,\n", + " start_padding_enabled = False,\n", + " step_size: int = 1,\n", + " scaler_type: str = 'standard',\n", + " random_seed: int = 1,\n", + " num_workers_loader = 0,\n", + " drop_last_loader = False,\n", + " optimizer = None,\n", + " optimizer_kwargs = None,\n", + " **trainer_kwargs):\n", + "\n", + " if exclude_insample_y:\n", + " raise Exception('DeepNPTS has no possibility for excluding y.')\n", + "\n", + " if not isinstance(loss, losses.BasePointLoss):\n", + " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as loss function.') \n", + " \n", + " if not isinstance(valid_loss, losses.BasePointLoss):\n", + " raise Exception('DeepNPTS only supports point loss functions (MAE, MSE, etc) as valid loss function.') \n", + " \n", + " # Inherit BaseWindows class\n", + " super(DeepNPTS, self).__init__(h=h,\n", + " input_size=input_size,\n", + " futr_exog_list=futr_exog_list,\n", + " hist_exog_list=hist_exog_list,\n", + " stat_exog_list=stat_exog_list,\n", + " exclude_insample_y = exclude_insample_y,\n", + " loss=loss,\n", + " valid_loss=valid_loss,\n", + " max_steps=max_steps,\n", + " learning_rate=learning_rate,\n", + " 
num_lr_decays=num_lr_decays,\n", + " early_stop_patience_steps=early_stop_patience_steps,\n", + " val_check_steps=val_check_steps,\n", + " batch_size=batch_size,\n", + " windows_batch_size=windows_batch_size,\n", + " valid_batch_size=valid_batch_size,\n", + " inference_windows_batch_size=inference_windows_batch_size,\n", + " start_padding_enabled=start_padding_enabled,\n", + " step_size=step_size,\n", + " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", + " drop_last_loader=drop_last_loader,\n", + " random_seed=random_seed,\n", + " optimizer=optimizer,\n", + " optimizer_kwargs=optimizer_kwargs,\n", + " **trainer_kwargs)\n", + "\n", + " self.h = h\n", + " self.hidden_size = hidden_size\n", + " self.dropout = dropout\n", + "\n", + " self.futr_exog_size = len(self.futr_exog_list)\n", + " self.stat_exog_size = len(self.stat_exog_list)\n", + " self.hist_exog_size = len(self.hist_exog_list)\n", + "\n", + " input_dim = input_size * (1 + self.futr_exog_size + self.hist_exog_size) + self.stat_exog_size + self.h * self.futr_exog_size\n", + " \n", + " # Create DeepNPTSNetwork\n", + " modules = [] \n", + " for i in range(n_layers):\n", + " modules.append(nn.Linear(input_dim if i == 0 else hidden_size, hidden_size))\n", + " modules.append(nn.ReLU())\n", + " if batch_norm:\n", + " modules.append(nn.BatchNorm1d(hidden_size))\n", + " if dropout > 0.0:\n", + " modules.append(nn.Dropout(dropout))\n", + "\n", + " modules.append(nn.Linear(hidden_size, input_size * self.h))\n", + " self.deepnptsnetwork = nn.Sequential(*modules)\n", + "\n", + " def forward(self, windows_batch):\n", + " # Parse windows_batch\n", + " x = windows_batch['insample_y'].unsqueeze(-1) # [B, L, 1]\n", + " hist_exog = windows_batch['hist_exog'] # [B, L, X]\n", + " futr_exog = windows_batch['futr_exog'] # [B, L + h, F]\n", + " stat_exog = windows_batch['stat_exog'] # [B, S]\n", + "\n", + " batch_size, seq_len = x.shape[:2] # B = batch_size, L = seq_len\n", + " insample_y = 
windows_batch['insample_y'].unsqueeze(-1) \n", + " \n", + " # Concatenate x_t with future exogenous of input\n", + " if self.futr_exog_size > 0: \n", + " x = torch.cat((x, futr_exog[:, :seq_len]), dim=2) # [B, L, 1] + [B, L, F] -> [B, L, 1 + F] \n", + " \n", + " # Concatenate x_t with historic exogenous\n", + " if self.hist_exog_size > 0: \n", + " x = torch.cat((x, hist_exog), dim=2) # [B, L, 1 + F] + [B, L, X] -> [B, L, 1 + F + X] \n", + "\n", + " x = x.reshape(batch_size, -1) # [B, L, 1 + F + X] -> [B, L * (1 + F + X)]\n", + "\n", + " # Concatenate x with static exogenous\n", + " if self.stat_exog_size > 0:\n", + " x = torch.cat((x, stat_exog), dim=1) # [B, L * (1 + F + X)] + [B, S] -> [B, L * (1 + F + X) + S]\n", + "\n", + " # Concatenate x_t with future exogenous of horizon\n", + " if self.futr_exog_size > 0:\n", + " futr_exog = futr_exog[:, seq_len:] # [B, L + h, F] -> [B, h, F]\n", + " futr_exog = futr_exog.reshape(batch_size, -1) # [B, L + h, F] -> [B, h * F]\n", + " x = torch.cat((x, futr_exog), dim=1) # [B, L * (1 + F + X) + S] + [B, h * F] -> [B, L * (1 + F + X) + S + h * F] \n", + "\n", + " # Run through DeepNPTSNetwork\n", + " weights = self.deepnptsnetwork(x) # [B, L * (1 + F + X) + S + h * F] -> [B, L * h]\n", + "\n", + " # Apply softmax for weighted input predictions\n", + " weights = weights.reshape(batch_size, seq_len, -1) # [B, L * h] -> [B, L, h]\n", + " x = F.softmax(weights, dim=1) * insample_y # [B, L, h] * [B, L, 1] = [B, L, h]\n", + " output = torch.sum(x, dim=1).unsqueeze(-1) # [B, L, h] -> [B, h, 1]\n", + "\n", + " forecast = self.loss.domain_map(output) # [B, h, 1] -> [B, h, 1]\n", + "\n", + " return forecast" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "show_doc(DeepNPTS, title_level=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "show_doc(DeepNPTS.fit, name='DeepNPTS.fit', title_level=3)" + ] + }, 
+ { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "show_doc(DeepNPTS.predict, name='DeepNPTS.predict', title_level=3)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Usage Example" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "\n", + "from neuralforecast import NeuralForecast\n", + "from neuralforecast.utils import AirPassengersPanel, AirPassengersStatic" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Y_train_df = AirPassengersPanel[AirPassengersPanel.ds=AirPassengersPanel['ds'].values[-12]].reset_index(drop=True) # 12 test\n", + "\n", + "nf = NeuralForecast(\n", + " models=[DeepNPTS(h=12,\n", + " input_size=24,\n", + " stat_exog_list=['airline1'],\n", + " futr_exog_list=['trend'],\n", + " max_steps=1000,\n", + " val_check_steps=10,\n", + " early_stop_patience_steps=3,\n", + " scaler_type='robust',\n", + " enable_progress_bar=True),\n", + " ],\n", + " freq='M'\n", + ")\n", + "nf.fit(df=Y_train_df, static_df=AirPassengersStatic, val_size=12)\n", + "Y_hat_df = nf.predict(futr_df=Y_test_df)\n", + "\n", + "# Plot quantile predictions\n", + "Y_hat_df = Y_hat_df.reset_index(drop=False).drop(columns=['unique_id','ds'])\n", + "plot_df = pd.concat([Y_test_df, Y_hat_df], axis=1)\n", + "plot_df = pd.concat([Y_train_df, plot_df])\n", + "\n", + "plot_df = plot_df[plot_df.unique_id=='Airline1'].drop('unique_id', axis=1)\n", + "plt.plot(plot_df['ds'], plot_df['y'], c='black', label='True')\n", + "plt.plot(plot_df['ds'], plot_df['DeepNPTS'], c='red', label='mean')\n", + "plt.grid()\n", + "plt.plot()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +}