diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 000000000..1c2fda565 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 000000000..03d9549ea --- /dev/null +++ b/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 000000000..105ce2da2 --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 000000000..98a95883c --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/nerf-pytorch.iml b/.idea/nerf-pytorch.iml new file mode 100644 index 000000000..2946dc0d1 --- /dev/null +++ b/.idea/nerf-pytorch.iml @@ -0,0 +1,12 @@ + + + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 000000000..c8397c94c --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/NeRF-Simple.ipynb b/NeRF-Simple.ipynb new file mode 100644 index 000000000..a774975df --- /dev/null +++ b/NeRF-Simple.ipynb @@ -0,0 +1,1129 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 55, + "metadata": {}, + "outputs": [], + "source": [ + "import glob\n", + "import os\n", + "\n", + "import numpy as np\n", + "import pylab as plt\n", + "from PIL import Image\n", + "\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": {}, + "outputs": [], + "source": [ + "# import wandb\n" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": {}, + "outputs": [], + "source": [ + "# # demo = np.array([1,2,3,4,5])\n", + "# # print(demo)\n", + "# # demo = np.linspace(1, 10, 3, dtype=np.float32)\n", + "# # print(demo)\n", + "# # demo = np.repeat(demo[None], repeats=5, axis=0)\n", + "# # print(demo)\n", + "# # demo=np.random.uniform(\n", + "# # demo[:,:-1],demo[:,1:].astype(np.float32)\n", + "# # )\n", + "# # print(demo)\n", + "\n", + "\n", + "# # _sample = np.random.uniform(0, 1, size=(5, 5)).astype(np.float32)\n", + "# # # print(_sample)\n", + "# # _sample = np.sort(_sample, axis=1)\n", + "# # # print(_sample)\n", + "# # _sample = np.cumsum(_sample, axis=1)\n", + "# # # print(_sample)\n", + "# # _sample = np.pad(_sample, ((0, 0), (1, 0)), mode='constant')\n", + "# # print(_sample)\n", + "# # print(_sample[:,:-1]) \n", + "\n", + "\n", + "\n", + "# import torch.nn.functional as F\n", + "# import torch\n", + "\n", + "# # 3x3のテンソルを作成\n", + "# input_tensor = torch.tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]])\n", + "\n", + "# # テンソルを各辺に1要素ずつパディング\n", + "# padded_tensor = F.pad(input_tensor, (0, 1), mode='constant', value=1e8)\n", + "\n", + "# # パディングされたテンソルを出力\n", + "# print(padded_tensor)\n", + "# print(padded_tensor[...,:])" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "metadata": {}, + "outputs": [], + "source": [ + "def split_ray(t_n, t_f, N, batch_size):\n", + " 
\"\"\"光線をN個数に分割する(Rayの格納される箱を作っているだけ)\n", + " t_n (float): 分割地点の近い(小さい)値. 分割の開始点.\n", + " t_f (float): 分割地点の遠い(大きい)値. 分割の開始点.\n", + " N (int): 分割数.\n", + " batch_size (int): Batch size.\n", + " どの程度この光線が他のRayの影響を受けているか.\n", + " \"\"\"\n", + " #t_f(大)-t_n(小)がN+1程度広がる\n", + " partitions = np.linspace(t_n, t_f, N+1, dtype=np.float32)\n", + " # ゼロ次元目に拡張させる\n", + " return np.repeat(partitions[None], repeats=batch_size, axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": {}, + "outputs": [], + "source": [ + "# ``coarse(粗い)`` network.\n", + "def sample_coarse(partitions):\n", + " \"\"\"\n", + " t_i を粗いネットワークでレンダリングする.\n", + " 各層の最大値と最小値から離散化させる\n", + "\n", + " \"\"\"\n", + " t=np.random.uniform(\n", + " partitions[:,:-1],partitions[:,1:]).astype(np.float32)\n", + " return t" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": {}, + "outputs": [], + "source": [ + "# ``fine(細かい)`` network.の中身\n", + "def _pcpdf(partitions, weights, N_s):\n", + " \"\"\"Sample from piecewise-constant probability density function.\n", + "\n", + " Args:\n", + " partitions (ndarray, [batch_size, N_p+1]): N_p+1 サンプル化する地点\n", + " weights (ndarray, [batch_size, N_p]): N_p一個前のサンプル化地点\n", + " N_s (int): サンプル化個数\n", + "\n", + " Returns:\n", + " numpy.ndarray, [batch_size, N_s]: Samples.\n", + "\n", + " \"\"\"\n", + " batch_size, N_p = weights.shape\n", + "\n", + " # normalize weights.(無視でOK)\n", + " weights[weights < 1e-16] = 1e-16\n", + " weights /= weights.sum(axis=1, keepdims=True)\n", + " # ランダムに値をちらして各層で値を整理\n", + " _sample = np.random.uniform(\n", + " 0, 1, size=(batch_size, N_s)).astype(np.float32)\n", + " _sample = np.sort(_sample, axis=1)\n", + "\n", + " #乱数同士を引くことで差分を小さくしている.→一個前の光線の影響を取り除く(Ti)\n", + " a = (partitions[:, 1:] - partitions[:, :-1]) / weights\n", + "\n", + " # Intercepts of a piecewise linear function.\n", + " # 重みを1次元方向に加算していく\n", + " cum_weights = np.cumsum(weights, axis=1)\n", + " cum_weights = np.pad(cum_weights, ((0, 0), (1, 0)),\n", + " mode='constant')\n", + " # 1-exp(-\\delta-\\phi)\n", + " b = partitions[:, :-1] - a * cum_weights[:, :-1]\n", + " sample = np.zeros_like(_sample)\n", + " # バッチ回数分\n", + " for j in range(N_p):\n", + " # 手前だと値が小さいので手前の値を取る\n", + " min_j = cum_weights[:, j:j+1]\n", + " # そのとなりの値を取得\n", + " max_j = cum_weights[:, j+1:j+2]\n", + " a_j = a[:, j:j+1]\n", + " b_j = b[:, j:j+1]\n", + " # デカくなりすぎたとき小さくなりすぎたときの調整用\n", + " mask = ((min_j <= _sample) & (_sample < max_j)).astype(np.float32)\n", + " sample += (a_j * _sample + b_j) * mask\n", + "\n", + " return sample\n" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "metadata": {}, + "outputs": [], + "source": [ + "# ``fine(細かい)`` network.\n", + "def sample_fine(partitions, weights, t_c, N_f):\n", + " \"\"\"\n", + " t_i を細かいネットワークでレンダリングする.\n", + " 各パーセプションに重みを加えてサンプリングする.\n", + " Args:\n", + " partitions (ndarray, [batch_size, N_c+1]): Outputs of ``split_ray``.\n", + " weights (ndarray, [batch_size, N_c]):\n", + " T_i * (1 - exp(- sigma_i * delta_i)).\n", + " t_c (ndarray, [batch_size, N_c]): ``t`` of coarse rendering.\n", + " N_f (int): num of sampling.\n", + "\n", + " Return:\n", + " ndarray, [batch_size, N_c+N_f]: Sampled t.\n", + "\n", + " \"\"\"\n", + " # C(r)^(^)を取得\n", + " t_f = _pcpdf(partitions, weights, N_f)\n", + " # couseネットワークで取得した重みと1次元方向に結合\n", + " t = np.concatenate([t_c, t_f], axis=1)\n", + " # 1次元方向にソートする\n", + " t = np.sort(t, axis=1)\n", + " return t" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + 
"outputs": [], + "source": [ + "def ray(o, d, t):\n", + " \"\"\"Returns camera Ray(r(t)=0+td).\n", + "\n", + " Args:\n", + " o (ndarray, [batch_size, 3]): 初期位置のRay\n", + " d (ndarray, [batch_size, 3]): 方向を持つRay.\n", + " t (ndarray, [batch_size, N]): Tをサンプル化したもの.\n", + "\n", + " Returns:\n", + " ndarray, [batch_size, N, 3]: Points on the ray.\n", + "\n", + " \"\"\"\n", + " return o[:, None] + t[..., None] * d[:, None]" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": {}, + "outputs": [], + "source": [ + "def _rgb_and_weight(func, o, d, t, N):\n", + " # 各点のRAYを生成\n", + " # C^(^)_c(R)\n", + " batch_size = o.shape[0]\n", + " #RAYの取得\n", + " x = ray(o, d, t)\n", + " # 配列Numpyをビューに変更(変数化)+bach*Nに整列\n", + " x = x.view(batch_size, N, -1)\n", + " # Directionを2次元向に拡張(コピー)→無いデータは整列不能なので拡張\n", + " d = d[:, None].repeat(1, N, 1)\n", + " # (dを変更したので)もとに戻す\n", + " x = x.view(batch_size * N, -1)\n", + " d = d.view(batch_size * N, -1)\n", + "\n", + " # forward.(出力)\n", + " rgb, sigma = func(x, d)\n", + " # RGB/SIGMAも同様に変換\n", + " rgb = rgb.view(batch_size, N, -1)\n", + " sigma = sigma.view(batch_size, N, -1)\n", + "\n", + " # 横方向にPadding\n", + " delta = F.pad(t[:, 1:] - t[:, :-1], (0, 1), mode='constant', value=1e8)\n", + " mass = sigma[..., 0] * delta\n", + " # 縦方向にPadding\n", + " mass = F.pad(mass, (1, 0), mode='constant', value=0.)\n", + "\n", + " alpha = 1. - torch.exp(- mass[:, 1:])\n", + " T = torch.exp(- torch.cumsum(mass[:, :-1], dim=1))\n", + " w = T * alpha\n", + " return rgb, w" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "metadata": {}, + "outputs": [], + "source": [ + "def volume_rendering_with_radiance_field(func_c, func_f, o, d, t_n, t_f,\n", + " N_c, N_f, c_bg):\n", + " \"\"\"ニューラル場を作成\n", + "\n", + " Args:\n", + " func_c: NN for coarse rendering.\n", + " func_f: NN for fine rendering.\n", + " o (ndarray, [batch_size, 3]): Start points of the ray.\n", + " d (ndarray, [batch_size, 3]): Directions of the ray.\n", + " t_n (float): Start point of split.\n", + " t_f (float): End point of split.\n", + " N_c (int): num of coarse sampling.\n", + " N_f (int): num of fine sampling.\n", + " c_bg (tuple, [3,]): Background color.\n", + "\n", + " Returns:\n", + " C_c (tensor, [batch_size, 3]): Result of coarse rendering.\n", + " C_f (tensor, [batch_size, 3]): Result of fine rendering.\n", + "\n", + " \"\"\"\n", + " batch_size = o.shape[0]\n", + " device = o.device\n", + "\n", + " partitions = split_ray(t_n, t_f, N_c, batch_size)\n", + "\n", + " # background.\n", + " bg = torch.tensor(c_bg, device=device, dtype=torch.float32)\n", + " bg = bg.view(1, 3)\n", + "\n", + " # coarse rendering:\n", + " _t_c = sample_coarse(partitions)\n", + " t_c = torch.tensor(_t_c)\n", + " t_c = t_c.to(device)\n", + " # 大域的な色空間の作成\n", + " rgb_c, w_c = _rgb_and_weight(func_c, o, d, t_c, N_c)\n", + " C_c = torch.sum(w_c[..., None] * rgb_c, axis=1)\n", + " C_c += (1. - torch.sum(w_c, axis=1, keepdims=True)) * bg\n", + "\n", + " # fine rendering.\n", + " _w_c = w_c.detach().cpu().numpy()\n", + " t_f = sample_fine(partitions, _w_c, _t_c, N_f)\n", + " t_f = torch.tensor(t_f)\n", + " t_f = t_f.to(device)\n", + " # ファインチューン(細かい特徴量)\n", + " rgb_f, w_f = _rgb_and_weight(func_f, o, d, t_f, N_f+N_c)\n", + " C_f = torch.sum(w_f[..., None] * rgb_f, axis=1)\n", + " C_f += (1. 
- torch.sum(w_f, axis=1, keepdims=True)) * bg\n", + "\n", + " return C_c, C_f" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": {}, + "outputs": [], + "source": [ + "def gamma(p,L):\n", + " \"\"\"Encode positions.(PE)\n", + " p (ndarray, [batch_size, dim]): Position.\n", + " L (int): encoding param.\n", + " Returns:\n", + " ndarray [batch_size, dim * L]: Encoded position.\n", + " \"\"\"\n", + " # Normalization\n", + " p = torch.tanh(p)\n", + " batch_size = p.shape[0]\n", + " i = torch.arange(L, dtype=torch.float32, device=p.device)\n", + " a = (2. ** i[None, None]) * np.pi * p[:, :, None]\n", + " s = torch.sin(a)\n", + " c = torch.cos(a)\n", + " e = torch.cat([s, c], axis=2).view(batch_size, -1)\n", + " return e\n", + "\n", + "def _init_weights(m):\n", + " \"\"\"DefualtMLPSetting\n", + " p (ndarray, [batch_size, dim]): Position.\n", + " L (int): encoding param.\n", + " Returns:\n", + " ndarray [batch_size, dim * L]: Encoded position.\n", + " \"\"\"\n", + " if type(m) == nn.Linear:\n", + " nn.init.kaiming_normal_(m.weight)\n", + " nn.init.zeros_(m.bias)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": {}, + "outputs": [], + "source": [ + "class RadianceField(nn.Module):\n", + " \"\"\"Radiance Field Functions.\n", + "\n", + " This is ``$F_\\Theta$`` in the paper.\n", + "\n", + " \"\"\"\n", + "\n", + " def __init__(self, L_x=10, L_d=4):\n", + " # positional encoding parameter.\n", + " self.L_x = L_x\n", + " self.L_d = L_d\n", + "\n", + " super(RadianceField, self).__init__()\n", + " # MLPNETWORK\n", + " self.layer0 = nn.Linear(6*L_x, 256)\n", + " self.layer1 = nn.Linear(256, 256)\n", + " self.layer2 = nn.Linear(256, 256)\n", + " self.layer3 = nn.Linear(256, 256)\n", + " self.layer4 = nn.Linear(256, 256)\n", + " self.layer5 = nn.Linear(256+6*L_x, 256)\n", + " self.layer6 = nn.Linear(256, 256)\n", + " self.layer7 = nn.Linear(256, 256)\n", + " self.sigma = nn.Linear(256, 1)\n", + " self.layer8 = nn.Linear(256, 256)\n", + " self.layer9 = nn.Linear(256+6*L_d, 128)\n", + " self.layer10 = nn.Linear(128, 128)\n", + " self.layer11 = nn.Linear(128, 128)\n", + " self.layer12 = nn.Linear(128, 128)\n", + " self.rgb = nn.Linear(128, 3)\n", + "\n", + " self.apply(_init_weights)\n", + "\n", + " def forward(self, x, d):\n", + " \"\"\"Apply function.\n", + "\n", + " Args:\n", + " x (tensor, [batch_size, 3]): Points on rays.\n", + " d (tensor, [batch_size, 3]): Direction of rays.\n", + "\n", + " Returns:\n", + " rgb (tensor, [batch_size, 3]): Emitted color.\n", + " sigma (tensor, [batch_size, 1]): Volume density.\n", + "\n", + " \"\"\"\n", + " # positional encoding.\n", + " e_x = gamma(x, self.L_x)\n", + " e_d = gamma(d, self.L_d)\n", + "\n", + " # forward\n", + " h = F.relu(self.layer0(e_x))\n", + " h = F.relu(self.layer1(h))\n", + " h = F.relu(self.layer2(h))\n", + " h = F.relu(self.layer3(h))\n", + " h = F.relu(self.layer4(h))\n", + " h = torch.cat([h, e_x], axis=1)\n", + " h = F.relu(self.layer5(h))\n", + " h = F.relu(self.layer6(h))\n", + " h = F.relu(self.layer7(h))\n", + " sigma = F.relu(self.sigma(h))\n", + " h = self.layer8(h)\n", + " h = torch.cat([h, e_d], axis=1)\n", + " h = F.relu(self.layer9(h))\n", + " h = F.relu(self.layer10(h))\n", + " h = F.relu(self.layer11(h))\n", + " h = F.relu(self.layer12(h))\n", + " rgb = torch.sigmoid(self.rgb(h))\n", + "\n", + " return rgb, sigma\n" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": {}, + "outputs": [], + "source": [ + "def camera_params_to_rays(f, cx, cy, pose, width, 
height):\n", + " \"\"\"Make rays (o, d) from camera parameters.\n", + " INPUTFUNCTION!!!\n", + "\n", + " Args:\n", + " f (float): A focal length.\n", + " cx, xy (float): A center of the image.\n", + " pose (ndarray, [4, 4]): camera extrinsic matrix.\n", + " width(int): The height of the rendered image.\n", + " height(int): The width of the rendered image.\n", + "\n", + " Returns:\n", + " o (ndarray, [height, width, 3]): The origin of the camera coordinate.\n", + " d (ndarray, [height, width, 3]): The direction of each ray.\n", + "\n", + " \"\"\"\n", + " # DEFAULT VALUE!\n", + " _o = np.zeros((height, width, 4), dtype=np.float32)\n", + " _o[:, :, 3] = 1\n", + " ######\n", + " # v,uの高さと横幅に各軸方向に値がシフトアップしていく\n", + " v, u = np.mgrid[:height, :width].astype(np.float32)\n", + " # 描画用の箱\n", + " _x = (u - cx) / f\n", + " _y = (v - cy) / f\n", + " _z = np.ones_like(_x)\n", + " _w = np.ones_like(_x)\n", + " _d = np.stack([_x, _y, _z, _w], axis=2)\n", + " # CamPoint\n", + " o = (pose @ _o[..., None])[..., :3, 0]\n", + " # Cameraとの距離(光線の長さ)\n", + " _d = (pose @ _d[..., None])[..., :3, 0]\n", + " d = _d - o\n", + " # 正規化\n", + " d /= np.linalg.norm(d, axis=2, keepdims=True)\n", + " return o, d\n" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "class NeRF(nn.Module):\n", + "\n", + " # sampling parameter\n", + " N_c = 64\n", + " N_f = 128\n", + "\n", + " # batchsize\n", + " N_SAMPLES = 2048\n", + "\n", + " def __init__(self, t_n=0., t_f=2.5, L_x=10, L_d=4, c_bg=(1, 1, 1)):\n", + " self.t_n = t_n\n", + " self.t_f = t_f\n", + " self.c_bg = c_bg\n", + "\n", + " super(NeRF, self).__init__()\n", + " # 色\n", + " self.rf_c = RadianceField(L_x=L_x, L_d=L_d)\n", + " # 密度\n", + " self.rf_f = RadianceField(L_x=L_x, L_d=L_d)\n", + "\n", + " def device(self):\n", + " return next(self.parameters()).device\n", + "\n", + " def forward(self, view):\n", + " \"\"\"Render Image with view paramters.\n", + "\n", + " Args:\n", + " view (dict): View (camera) parameters.\n", + " view = {\n", + " # intrinsic paramters.\n", + " f: ,\n", + " cx : ,\n", + " cy : ,\n", + " width: ,\n", + " height: ,\n", + " # extrinsic parameter.\n", + " pose: \n", + " }\n", + "\n", + " Returns:\n", + " C_c (ndarray, [height, width, 3]): The rendered image (coarse).\n", + " C_f (ndarray, [height, width, 3]): The rendered image (fine).\n", + "\n", + " \"\"\"\n", + " f = view['f']\n", + " cx = view['cx']\n", + " cy = view['cy']\n", + " pose = view['pose']\n", + " width = view['width']\n", + " height = view['height']\n", + "\n", + " o, d = camera_params_to_rays(\n", + " f, cx, cy, pose, width, height)\n", + " o = o.reshape(-1, 3)\n", + " d = d.reshape(-1, 3)\n", + "\n", + " device = self.device()\n", + " o = torch.tensor(o, device=device)\n", + " d = torch.tensor(d, device=device)\n", + "\n", + " _C_c = []\n", + " _C_f = []\n", + " with torch.no_grad():\n", + " for i in range(0, o.shape[0], self.N_SAMPLES):\n", + " o_i = o[i:i+self.N_SAMPLES]\n", + " d_i = d[i:i+self.N_SAMPLES]\n", + " C_c_i, C_f_i = volume_rendering_with_radiance_field(\n", + " self.rf_c, self.rf_f, o_i, d_i, self.t_n, self.t_f,\n", + " N_c=self.N_c, N_f=self.N_f, c_bg=self.c_bg)\n", + " _C_c.append(C_c_i.cpu().numpy())\n", + " _C_f.append(C_f_i.cpu().numpy())\n", + "\n", + " C_c = np.concatenate(_C_c, axis=0)\n", + " C_f = np.concatenate(_C_f, axis=0)\n", + " C_c = np.clip(0., 1., C_c.reshape(height, width, 3))\n", + " C_f = np.clip(0., 1., C_f.reshape(height, width, 3))\n", + "\n", + " return C_c, C_f" + ] + }, + 
{ + "cell_type": "code", + "execution_count": 69, + "metadata": {}, + "outputs": [], + "source": [ + "class NeRFLoss(nn.Module):\n", + "\n", + " def __init__(self, nerf):\n", + " super(NeRFLoss, self).__init__()\n", + " self.nerf = nerf\n", + "\n", + " def forward(self, o, d, C):\n", + " device = self.nerf.device()\n", + " o = torch.tensor(o, device=device)\n", + " d = torch.tensor(d, device=device)\n", + " C = torch.tensor(C, device=device)\n", + "\n", + " rf_c = self.nerf.rf_c\n", + " rf_f = self.nerf.rf_f\n", + " t_n = self.nerf.t_n\n", + " t_f = self.nerf.t_f\n", + " N_c = self.nerf.N_c\n", + " N_f = self.nerf.N_f\n", + " c_bg = self.nerf.c_bg\n", + " C_c, C_f = volume_rendering_with_radiance_field(\n", + " rf_c, rf_f, o, d, t_n, t_f, N_c=N_c, N_f=N_f, c_bg=c_bg)\n", + "\n", + " loss = F.mse_loss(C_c, C) + F.mse_loss(C_f, C)\n", + " return loss\n" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "metadata": {}, + "outputs": [], + "source": [ + "dataset_path = \"/workspace/datasets/bus/\"" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "focal length: 525.0\n", + "image center: (319.5, 239.5)\n", + "image size: (640.0, 480.0)\n" + ] + } + ], + "source": [ + "# Cameraの内部パラメータ化\n", + "def _line2floats(line):\n", + " return map(float, line.strip().split())\n", + "\n", + "with open(os.path.join(dataset_path, 'intrinsics.txt'), 'r') as file:\n", + " # focal length, image centers.\n", + " f, cx, cy, _ = _line2floats(file.readline())\n", + "\n", + " # origin\n", + " origin_x, origin_y, origin_z = _line2floats(file.readline())\n", + "\n", + " # near plane\n", + " near_plane, = _line2floats(file.readline())\n", + "\n", + " # scale\n", + " scale, = _line2floats(file.readline())\n", + "\n", + " # image size\n", + " img_height, img_width = _line2floats(file.readline())\n", + "\n", + "print('focal length: {}'.format(f))\n", + "print('image center: ({}, {})'.format(cx, cy))\n", + "print('image size: ({}, {})'.format(img_width, img_height))" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "focal length: 560.0\n", + "image center: (255.6, 255.46666666666667)\n", + "image size: (512, 512)\n" + ] + } + ], + "source": [ + "# データセットの画像サイズ.\n", + "# 画像サイズを反映した位置合わせ\n", + "width = 512\n", + "height = 512\n", + "\n", + "f = f * height / img_height\n", + "cx = cx * width / img_width\n", + "cy = cy * height / img_height\n", + "\n", + "print('focal length: {}'.format(f))\n", + "print('image center: ({}, {})'.format(cx, cy))\n", + "print('image size: ({}, {})'.format(width, height))" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "pose_paths = sorted(glob.glob(dataset_path + 'pose/*.txt'))\n", + "rgb_paths = sorted(glob.glob(dataset_path + 'rgb/*.png'))" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": {}, + "outputs": [], + "source": [ + "dataset_raw = []\n", + "# Datasetからカメラの方位を得る\n", + "for pose_path, rgb_path in zip(pose_paths, rgb_paths):\n", + " pose = np.genfromtxt(\n", + " pose_path, dtype=np.float32).reshape(4, 4)\n", + "\n", + " rgb = Image.open(rgb_path)\n", + "\n", + " data = {\n", + " 'pose': pose,\n", + " 'rgb': rgb,\n", + " }\n", + " dataset_raw.append(data)\n", + " \n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": {}, + "outputs": [ + { + 
"data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgAAAAIACAYAAAD0eNT6AAEAAElEQVR4nOz9Z5QtyX3YCf4iIjOvKW+e9+1eG6ABNBreEQRBiSREUSI1MtTu2SPt7Epc7dlDWR5JHIqShjqr0ZnZ1XzQrKQdLg0gekIiaOE90I32vl/3s/X8K39tmoj9EBmZkbfqgQBIEKbiB9xX96aJjMxbXX//D2GMMQQCgUAgENhTyG/1BAKBQCAQCPzZExSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SFIBAIBAIBPYgQQEIBAKBQGAPEhSAQCAQCAT2IEEBCAQCgUBgDxIUgEAgEAgE9iBBAQgEAoFAYA8SfasnEAjsBYwx5HlGUWTkRY4AlIpRShFFCUKIb/UUA4HAHiMoAIHAn5AsTynyjLzI2Ny4wTjtc+PmRaQUDIcbDIeb9Pq36A+uMx6vMU7XAY1giTieYX7uJAf238vhQ3dw7Og9dDoz3+pbCgQCewBhjDHf6kkEAt/uZFlKnqf0B1v0emsMBhvcWj3HcLTBzVvPkWXrZNkm4/SMZ83X/2n5/5XZ9wZjDFpr8lwzTg39nubg/j/Hmx7+UV732nczNTX7Z3mLgUBgjxEUgEDAI8+toL+1eoVx2md19RK9/g1WLj9Knq/R6z2DkFkl5I0xYHxRbxGiKfS/GgaDMZrxOGd9rUBrxckTP8D3v+//zskT9xNFwVEXCAT+9AkKQGBP4gT96toVxuM+q2uX6PVucOXaV0jTDXr9R+2BpSCfFPQCEEJQRe6F+8dgDPgR/Z3/gflbam+BwVAUBYOBIh1rQPGWh3+a97/vr5Mk7T+tWw8EAgEgKACB73KyLCUvUtbXrzIa9XYI+v7gKxhjEEJgjJlw1ZtSLO+05isFQEClBlj5X/4j2BkIYGLLzsQ/g8YYQ5pqsrTN1taY03f/H3nfe/8PnDh+z5/kUQQCgUCDoAAEvmsYjfrkRcbq6iUGg02u3XiJlcuPkuXr9PuPYTAIQJumz96U/9r/EoS3ZRIr2BEgSqXBifkdNr1o7pk8Tuzy3r+yNhqjJYNhwXAQMTP9Gt75tv+eN73p/SRx6xt9RIFAIFARFIDAdyRO2N9avcTFi08wGm9x5ernSbN10vQVnCjV2lryGgPGeILas8+NE7t2mw3vO5HctNJ94T5pvxvAVAKfKk9AIMoxhTcH0xjBiyBUczRGYzBkWcTW1phsDO9517/gzQ9/gKWlA1/vIwsEAoEGQQEIfNuT5ylZnnL9+llu3DxPf3CT8xc/RlYKe5twZzDGS8zDufBLoewEb/nbbpryl8ptX7r1mz5/T1A74V8lATqhbxoWfUPBcON+Q3fvqgUk29uSjfVtDh58O3/jr/4bjh45/Q2NGAgEAhAUgMC3GU7Y37hxnsFgg2s3XuLK1UereL1zkxsnwUvB7rvUKYWyxgn/Kj2v3Onc8b7F796b5lgevr3uu/Rd3L52+zc9BMYYjO818PIGBAKzy9V2VhHYD72eZG3NsLjwWt77nv8zDz/0fuIQEggEAt8AQQEIfMvwLftbt86z3UjOe7Q6zlr2zQS9nS51H4ExZawf7yX88WohjhfLB92w6Hez2psWfo1X9V+5/a2AL/dWLoKvzRuwuyJiyDLBxYsjpITvf9+/5C1v/gCLCyEkEAgEvj6CAhD4M2M46tWW/fWXuXrtEcalsBdCYLRhIj8PoFlzX3E78Yzd7rnmfZrqgkRUwzQL96zVvtOtL4Dd8/tFQwGYnP/X5/6/fZWA218Uko112O4NOHTwrfzVH/s3HD8WQgKBQOBrJygAgW8aw1GP69fPcWnlSYajTS5f+Xxl2U/+2u10d99eZFbnNgS3wRezZrcYvmi65ZuX8evxvUS8xkzsJwmlFe96A4iqXNAd7a4vKRUY4e/9auqA8zzsfszk1u3tnGvXUo4ceSc/9AM/yem73xhCAoFA4GsiKACBPzXyPCXLUm7eusCrZ7/EhUsfqxrqG
L2bq95nIivea6f71X9Dd/gLbrNPNDYbzxdQWehe7sAfN0+JV75X9hDQ3hF1hoEfbHDVALuN6FoI+LkEu+gnLpvBRTQEDIeCjfWMLM/4vvf+a977nr8RGgcFAoE/lqAABP7EnL/wIs+/8AU2tp5hNLrGcPQ0xowaWflWrlnh5YSgM8JtIx4vMW7SJW8m4vFidxf5zt/kCU9CmTjoRL8ut/ku+smggTuj8dk4paEU2+V8/cZB7nr2ngUY4SUANoW8G7fOZ/AUANH8WeoH1b267UVhWF3V9HtjXv/gf8973/PjHAshgUAg8FUICkDgG+b6jQs89sRHefzJ/41up0+cKIwZAk2B5XDCq/pcxtpFWSbnttrza2e57zzfNS1uYlPTyp7IH2jkGPgx/sqeLy/arCKor+3K+mTt3vf3GlMpAvWcBTYYYBCivrorJ2xUC0zcpZg4bld/RzWAYHOzxdZmxPK+O/ixv/QvOX7svrCWQCAQ2JWgAAS+Ic5feIbf/p1/yvXrjzM3l9DpSFtC5wnEhrB3ifjCFeQZz4ptWvJOSNbWs3Oo1+LRd4E3qwO+htwB96MU9HUe4IQnoOodYLzTatNbuFCArwR4jX5MeWpdAOhyASaUAH+O5fWMf7uld0R60n5SGRDeP+lYsrbeAqP5nnf/A977nh8nikJeQCAQaBJMg8DXzXDU41Of+19ZXXuSTkeRtJyl3BT+xvgxb1NJusrNPeHe3mn3lu93MX0rN7hntO+kIbJ37jG773MzqBQCQXlvTSVk8jrVPRn3frfAgpk4w7sHd1Ou6sEpKOVkNTbxUAjRzBvw7gcMSStn//4Wmxsxf/BH/4wsM7zlzR9gaTGUCgYCgRr5rZ5A4DuT0fgmSSyQEnRh0IXrWGfqcj5dC76qlI/yGBf3n3DXu3BArROY0nLfXVTXFnodKLDjmtroxlcQTClsrTteioaq0ThOAFKAFOWcpASpyknbyU86HIQQSFm+hJtL3WHQvZfly332z2/8rzzO3qa3MJGpMxwmlR9jBFL0WVjYZnEx4dOf/R/4xV/+CS6tvLTrMwwEAnuToAAEvm467WmOH3kPna7EAGmmKQqNNhrtEuGMAeFn8NcegErgNeS6ZxN7Zu1Xi0+JSpBXZ9bnGZt4V1vuvjB2H+wuIX0FZeI4/LFtwZ9/urelDllM5h3smHNz3Pr6ptoosNECN5osL2iwrYo0AmPq9MRacaBxf7NzEcv7WmxuPcKv/+Y/5/kXvkCWj7/KUw0EAnuFoAAEvm5Goz7Xbr6IEIJWYi3SLNcUhUHnVuhrbZUBrXXpGcAPnyOEQBu3WA/UIrapF7iItyfeGi/XaW/y1RildKP7r/oQpyRM4rf7tWJXWLFbCf9qEG/tgckRJj0T1R6nCHhhkLpYEOdgaFj30p+RKSqFS1MnHla5Fd49ttuChYWE7f5X+PXf+mt86tMfJMvTXe45EAjsJUIOQODr5uVXHuPajd9BFxECjVKSdAxKQlR6yLUx6EIjpWwsnWtKAWoFsbPehZfUV2UI4Cf4VaK1IavNhPLgl9b5FrVAGCfIq1MnB6t31QH1Or9A3Mair+ZZ31fl4ShzB1w+AeUcXBlgo1CgvHdRvy31knrufqWCEfX9GCOQ7rlWNycqp4sx1sswPy8Zj+Gzn/8XDId93vKmD3DgwKldn0EgEPjuJ1QBBL4uBsNtfulX/jab248wHFgLf35OUeSCra2CbleiFBgKAKQUKKmQStbCrZRju/3iCe8fd/ykNwBRZ9pXBvgu7XbrhoGe6C/NbgOI0lRuFAMaML4g9icMVW+DSWw5Yyn8vfur78c/2P/hKTyNh+B5Km5zX2Zy7t71fG+CP4wQMBrBeJQzN/t6fuSH/y1Hj5zetUFRIBD47iaEAAJfF9evn2M4/gogSLOM7e2c9XVNFEGnLUjTgiwvSDNNXmgbGtAFRV5gTFlZPyEZta6TB7W3GEDl0jfG6yRYu9QdojRzzcTLjVCdJybOn5R6xvhHlzF2UTYsEoCc6OtfX8NPa/DzGlzSXuPV9EV4o4lGkp+/x9/QDA14OQfe3flRjvo8e1y7DTOzEf3hM/zyh36CT3zqgyEvIBDYg4QQQODr4pVzX2Y0yrl1K2fQM4yGmtEwJYpiZmYEciwYp25FPYmUWAWAAoNCCmmb6EhRuf8b5YBlXgDCVQnUwq1ug1fPZ3cruekzcO+dO5yJZr9i8gynoPinG5dr4IS9U1Rqix9PCahCHrs2IPKUCM+7YG/Nfm7e107z3FB7KUxje+nZmOyt4B1pDEgJnY4kHZ/li4/8U4zJeNtb/jJTU7M7rhUIBL47CR6AwNfMcNTjpVc+yrmzgke+lPPs00P6/YxWy9DvZ/QHOSrSJLHAGInAvoy2nfMLoyvBDvghelvbbmzuQCUzze5x9xoxYQ3723cTgJMphPVGW+bnMu9ckp6hbjtcWvrVvL3jvHuYLOuz9+Fb/LVXwnffO7+Dndft7su7H3ctygWHJjwBfv+AxgPwFC4podWWdDoJX370Z/n5X/z7XLr04le5aiAQ+G4iKACBr5krV87y/AuP8fST8PILAy6cH3Dp4pCN9ZwsM2ysZ/R6OXEsaCWKdAxFUbbNFRKjrYAvqsoA1w/ACiXrFRAI2RT+zqCtlwX24uBGTMT/vXeeEK2y473yPz+kUJ9jENJZ0GUlQKkINAR9eaqbZ+V+r+L3ppq/3+iojts3EgG89/723VUAX3nxbqdaoKjxr2k+M7cug/9840QzNS1Z3/wov/nhf86rZ58hz/Ndrx0IBL57CApA4GvmiSe/wnPPFLx65jrHjkvuurvLaJzz4gs9Ll3MEEIwHGm2+xlxbGi1BOlYYLStWRcIisLmBGiTo7GlbL593AgHlPF3sGEBrWurtnmcH3avlYYdxi9NAVjtnXS3G+EN4NvupiFEm+fU89gR4S/7+mrwXq64sBzTnVvlCbhxdnP/N8MJLgohRNm0aPLYiRwE78zqIKlgdjaiN/gK/5///AH+8KO/SJaFvIBA4LuZkAMQ+Jp45dUX+eX/8r9x6fwWSaJBaBaWEg4cnCPLNEUBrbZCKUO/lyEMdLsxxkCaGuJEEkUSYwqrAGiBVKYUWGVW/qRQN/XaAq7EzuGHBszEZ+FaEE9Kwttis/+ZUDqoEgfr49xQu9rmjQRGP6ug+ckN4CccCl8wl5OpkhurK97uymV5Jf6zcu2CDabMM5iskzATH6SAuTlFHMEjj/0sQhje8qYPsLQUWggHAt+NBA9A4GvikUc/z7lzlynyIXNzMVmWcf16j35/leV9M9xzWtFu2aQ/pWAwtDkBcQxRJBiPNEUuECgEsrLmhfsVFJ6QrBSBZhKflH66nkcpsfUOq715/uQ214zHKgveub40rkxo+8GV+0FD3u/A9Qfwb6jptq+FvxPxrldSXclQpQd6c5/0LtSbjDeh+r5cdsBXyyjwwhgGulMwP6949PF/zS9+8Ce4eCm0EA4EvhsJCkDgj6Xf7/Hpz36MWze3SFqGAwdj7rl7hn3LMa3WaU4e/0u0O9NoY+P4rbYiihTpOGc0
yul0Ba22IMsMWlslQEoFRmKMoCio4ulgfxZF082+S8Wet28iJl8dM+mqL2P8TNjRBuqOgE6CKhAxiBhDhEFWPQCq+L6oHfV1aKCWyJVvw9TJg+VyPnUOf3lyle/gZQhoY+qkSHdcdYxTEEx1NZeY6DICjJAYlxlQagQGWb6aT6GhbBlBFAkWFhR58Ti/9eGf5ulnvkCahpBAIPDdRFAAAn8s5y+c5Utf/gzDQcraRs52v0+ajZmeTlhaWqM/fALDGgZBmimuXjW88HzG2bMply8P2dxMiWOQ0jAa6TIfwP7q6XLBIO1V5glAuTJBg11UyLOKXczd5QOYsrGAkm7hnFqguhFvt0xw3UK3jvFXEr18K4RsWNC7JQJqKBUEL4TQCNa7rP0y3OAaCjUcE74S4CkC2LZKBWX/fyPAWMFeCXQhMUKV7xVGSOx/3hKDwhj7E5xSoOx2VDWGUyKMdy+ttqIwT/Dbv/PX+NgnPkiajm77exIIBL6zCDkAgT+Wxx5/lI21baanFDNzNk6c5ZqbN/sYM8XBQ2cRaobxWHB5RXDhfMKt6wXXrg04fBhe/5Dh5CnB7JwiHUGWGSIFMvIsXg3KdekTBqSN5RtjENJb9IYyN6BUXZsl76Le6FHlEpR5BLV73lTlhzb8X1vlBg1GVyV5Lj+gSR2nx+x0+VfvjXe6qbfZc0yVxGd2jO9f0wnnOmeimqwQE8cyESnwMyX9q9Qej2ZGRXN3nBhmZhOefu7fsL5+nne948c5fvz0LnMNBALfSQQPQOCr0u/3+NSnP8p4NCbLC4RQxFFCu9Xh4IEF4iQBIdBasLoquXxZYEybpNVmarrL889pvvj5Hq+e6bG9ldJq2xyB4QjyrHada4rSrQ4IiUChojZSxVSrC4In40vHtxBMtucVoi6Ic1Tx8QlPvxPOtdFvXeV+6V49brM5UcPV75cJTjzDRkJiGf+vWg6UG2uXvLzNq4zkl42UGq/JKL/zmuzKTiXG3UN9feE9KNvTQSlFpyu5euND/NZ//SkuhbyAQOA7nuABCHxVLlw8x+NPfok8Lzh0aIrFxRZZPmB9Y4t9y6c5eESjohHDgeTaVc3K1SFXL43YvDVmakrS7SrOvZoTqYhW6yhHj11lZiYizyHPKYWtjadnRYEREiVrIRXFXfLMoHVdl15luRtb9maoqwUma+/dAkQNyli6cFa7X1Ew4T3whX3lKcD166vd+H9ckl09vsD3NAhvxJ1jiMbbnasjuGNMJcarrbKcM7UHpNJ7nCegckv8MQmCXpZBnCiK4jl+9bd+gtc98Ld4z7t/jCRpfQ13HggEvt0ICkDgq/LkU19hNEiZn49QkSGOBbMzbUajMUV+EswWeX6Jm7fgwsWM61dGbN4cAIZBTxLJiLiteOXMNrMzG6RZn1Mnp5mZjRiPYdA3TE2J0helESLHGIXE2Mh3nlILK0qPdxnn1zY+7gt/d5zWteXtWvLWbYWrhrs2zOAGxg8p7BzTF79fm7inDCdQai2iIeiNqzbwwhKTgpxqpo0h7R5DtXCRcZNvHF3/tA4Q0VRwjMtlqB7Ujqu7LcZojNEIIVBKABf5wiP/DG0y3vn20EI4EPhOJCgAgdvS7/f4xCf/gJs3Voliw3Z/RH80Zt8o4tixWaZmvwJSIEWXm7cK1jYKttZGpQCRKBFhtKAYRYgo48tfOkt/GJPEEcdUl+6UJM8N/T5MzyiE0ORFjtA5UgiUkUBBFMk6ac6JSOetBiZFskAgJ4JbfhKg7/Se9A64fgPOpV8PWwXqqyu6dy500FymQEwcJz1hWw7nJzbUCQKN27GC177XfvxelGqMV/Xg3jfmbShDKvbmbOjBrsmgdYGuOjHa67vujGXeZXVtISAvCkATRzYkML8Q8/Tz/4qXX/48f+GH/gEnT95LIBD4ziEoAIHbcvHSWV488whJAu2OZGq6zYGDCeN0m63tEceiKVrtFr1tyZVrY3pbGTrTgEAKiZIJWucYLSCLUAqefSojirZJEsnhIx1mZiWrN3PGI0m7Y1cPMEbbDnnaIKVEG1BlXF80pSaVa17UFn7tLq879/k0hLCXIOfs58rDUG5vyP6JGL8xteA2Xma/weUG1O+bSXre+O4+qrx/b7upPRaN224cVE9M+koG0pYRFqCphXyWGoaDjDzPUZFVlgyaItdlUyc74ygSKGUzFLQ29PoZ43FBpyOYnYuZno6JY8ko/TS/8/tbvOed/w/uPf3GEBIIBL5DCApA4LY8+9yT9PsF272c1TXD1GzB9Kzh0KEOnQ5IZYVNvy/YWJeM+lmV5S6EgrLczybORSSJImrnPPnYmE6nh1KKI8cSFpYUvW1NlkKSWNO90EWVdKe1AWmQwnoCnCDzpaET9FKKCTd+KdCN7xj3lQVn8pcudSe0jah2uUxBMWFt11l8thyvampUhRNq87lSLvBkezn2RH7jBLXwF96YNmRgFS1hJTgunm8MjMeaPNOkmSYd52RZQRRrtNaMRoYiF0gFU92EThciVZAk0CpbLgth2wNHkVtFUTA9GzMeK8YjzXBQMB4VTE/HtDuGvHiM//qRv87lKz/D+9/3N4mi5Gv4DQsEAt9KggIQuC0bG1tsrfeZmYlodxTHjs2QmxGDYcbhw12mphTpWLC2qojENMV4vTzTxtZ9IS2EpEgNQkV0Zwq++LmRTfYTMxw+kjA9KxkNDXkGKpIoadBGW+FfluUZAZFU5Xiicms3WwTXOXb1NpsvIHareSlj/KYsyauErS+pPbFd+x1K97uQtYu9GrP2IBhNmeRo57xrbv6EwmIvNBlrsPOTlMspCwlCoI0kSwvStGA4yOkPMoS0fRVaLUWrZTv7GWPzN4SwTZqq7oTChhlscybRmFMdhjBlfwXodqj6MWSZJss1g35K0orpTCmeef5fMR73edPDH+DI4VO73W0gEPg2ISgAgV3p97d55NHPsr01xmBQyraHlapLocdIpVGRYnvLcOXqmK3NgnSc4ixQ69DWpaUtyqQ8ST4UyFZE0jZ87rNDWh1JtytZWIxptwT9nkZpQ7tTdgnMC7Q0xFJhjKEwGikFUtaL4N4Ov1QQuZvodUJWeALYnVuJcFw5XANXfujX/0+M6/cb8GPp/qVB2NUHyw5ElSFfzltKK/Apqx6MNqSpJs8z8sIKYGMKpDBIZZibk8SxREU2WU9WF1RgTJkb4WX7lTkCrmVwfX/N9/X86xJJpRQto0gzzXico7Wg1Yp46ZX/mbPnPs6P/qX/JydCv4BA4NuWoAAEdmW7t8XK1RfpTkk6HUWrI7h8bZN2J+bAvoS5uZgkEYzHmhs3ckbDHIHtiEcp/DWgTG1tukz8bCSRiSJO4HOfHjA9rbj/gWmmpxWdKUhHC+SpQaptIgWF0RSFtqvVCDDaKhQ2G12WY9fe/MptvwtOKPvZ/aYU8nUCYJ002EwfEFVGf9X1b+dBuyQK2ryGHfjx/XIBICkEUqpS+TGkhRX6WZaXSXgFCE0cQ7sj6AiBksrLa/CvWndA1BqEFN4zKq36Sc8DWGVjl+RI/znU2w2tRBJHguGwYDT
KiWMF8hl+9Tf+Lm948G/zPe/5MeI45AUEAt9uBAUgsCvDYZ88yxkMUqLYsDyTsLwvAQp6gxFCJkRRxGiUsb6RMhpkMKEClI57quI3IWxin1Bko4JWV2GE4ZEv9ZmeijhxssPUtKDVhn7PxqSTlkIIgaag0DYjXikJwmaqi9LL4Cz4Rkt7LxywWyvgSbld7wDX59+JxmqdApeHgLfanrdf7Bi32T/ACVv3TpYZ+FJKMJIsMwzHBYNhSprmSGlotwXtjqClKOP+3v0ibS+ERhgBb3YWVxVh4/t+cqLDez5exqMQbt5UCou9Vv2sbSNEU673oBkMUtrtiDg6z2NP/TOEzHj7W3+UbnfmNg88EAh8KwgKQGBXsixn0N8mzwo2Nw3jos9Wf8x9p2c4fLhDpxsjRMLGRsH2dkZ/a0CzxZ7zAxS2t5ywQtt25NMoFPkQ2lOGW6s5n/vcNsMRHD/RYXFxi05XMBxqQJK0JBJDoTWF0BgMUhhQEolzwzcFkrW6DcbYLHdxG83AdR8UlZteNAxi0xDd9bXKIyfS+nzh76R+s8FvFCmEkOhCVyGENLUejjTNGI9SVKTpdgVz88JL9nPej1ppqJUJv0Nhed+NcIankYgq0FA3CHKeCL9UslQqqtUPvfwFI2x+hvJCCdIIjISkJQHDYJCRRYapbsSTz/wsr579PD/45/4hx46FkEAg8O1CUAACuzI9PUO7DTMzEfsPxswvCEZpztnz2yzvW2BqKqbIFRubCp37WegWl/Xu3OvSq2IXKNv3X0I+0kSR4MbNnOee7RNFgjhuMT8f0WpJ+r0ChLJxbQm5LmwDIGEAbdcJKEvfZFlr71N/rK1v30x3iXkGJ9hNFR5w7Xmrn547wLh7rCz/UlHwk/eaJQMoGaFkwmiUk6ZgdMFwmJMXOa22IYoMC4sCqVyiY+nGL68vhADl7kt41y+t/aqqwMt98H5C0zNgtPXOIES17oKpjquplJ8yV0BiMLqowxfGpUJYt0KrHRFHhsGgYDDIaXcUm72P89v/bZWHH/pbvOnh94eQQCDwbUBQAAK7MjMzzZ13nebTn7pObzjg3k6LI0djtG7R6UgiZdjeMvS2FWgqITVR6IYV0wWFAUWMoIxZG7t0sBGSopAIJbh6tSCJe2V+gS6z2A39XsHsXIRU0prApRWrjaEwTvDXc5fWCKUop6F16W6XtVXejGPbMj7XbMgI66UwztKuDvdi4/6PqjSPanxnUSsZY4xEa8V4rFi5PGRra51uxyY+dqeNLbWTVO58Z/XvdM+XPoemq2OXyoevlhxZey/8lst26Do84TwjlTJReUrc8sISgfa/6oYXIYoFM7OC8cgwHGqSRJLrJ/nU5/4e/cG/4B1vCyGBQOBbTVAAAruysXGDxX3P8fZ3TrG5XrC00CbLB0SRYGY2BmHY2jSMxgnGaKRQCCNx2f/guZqxy9lKImIVI4UkzzOEVjZVUAiUjpmb63JzdcSrr9jadCE07XZEp5sxHLRod0ZESoG05YEGyAuNkiCEqp0QpVByZW2ul4A2lFnxfo0/pQQ1lRCUOIUArwCgbqNbu9S9wH8p/GS5QI9dzlCRp7B6a8hgaJP3ZudgeV+rDEtQCvpykR9qN34l8oXvaZhw3Qsn/G/n9Zig8nx8dfWgSnSsvkFtEwONDcn4LRT8tkMuF6Keu6DdFiCsNyCLBO224vGnfpZXXv08P/QD/4Djx0L3wEDgW0VQAAK7MhoPmJvXvOGhFjeuabodxdzSLK+e2yYvDFkm6PcTpqenSUdnUajaVYzrw1f5zK1SIAxJlICBlDGUvfwRhnQEW7cKlg/PsXK5T1GMefB1M7BgPQatVos0HZMkEAkFFLgEu0KX4QBhUEahpCxr/mspb2PaxhOYXra899kIU/YEsJucQDPlB6fSCGPKdABhlR8hAInREikVa2tjrlzdIIo109MRC0uCVtt21yuHsPOjTOpjpyCHnQmFdlv9dBuKTPO2Gh/9Lon1uKbuXogX1qBWMirPgJcr4J8rRK0I+B6SSqcS0G4LokjS62mGw4J2S7Ld/yy/+wd93vX2v8fpe94YQgKBwLeAoAAEdmU0su16FxY1SinWVwX793dYXI6JYygKTX+QoEQChaGMDCNR6KZNiBVWEqOBAmv1m/JnuVcA/V6KuZxz5NQMN25IXn1lzD2nRSlAtoljQ54ZlJJW6EpNXhRl73pdufARBmmUVwNfqiZ+YltlxdYu8dpzIWvBaEyZY1ArNC7hTkqJkhHGKIbDgu2tlH4/oygMrTYcOhzTahviiFqQomoXvxP8aG/0XTBNQe63PW4oCF4exmTLocl2yL4Xo5Gu4K6zyzlUgr5+YsZNzlMC3MS0+2YNqEgwMysZjQzDsaaVjBmOv8xv/7fP8uaH/yXvfc9fJ0nau919IBD4JhEUgMCuxEkbKSNUlBMnBTJWpFnGwoKk0DAaFaxcusELz68AdcKcRAJ5mSBWiyHXJjfLMwwaWSoMDgMoJP1+wdWLfY6emuHGjZR2O+PkqYgpbE7AmIJ0bIhdp9myZl1rK8aktDH/8qJIWZbsuTi58VzVfoy78rWL+gAPt0sKSSSVFf5KMRjC+lrG1vYWUVQwNa1IElu2J4RdF8EfwHoLXP68Ll+mIeSNb9JPxPabP5uYiX9dZcOkcK9Hn8wV8N0HvmuhbqEomv9UUQWbQOiWHa5zGHTpLXCrM3a7gjSF0cigTUFnKuYrj/87bq2e413v+HGOhyqBQODPjKAABHZlcf4QUio6HUmeD5iby8hzzXgcE8WK4RBWbwk2NrYQLjV9MlmtsghFpQDkJkNUffNtDb8tK7PCJhaK8Zbg2qUhx+7osrqWMTWVoSJJlsXMzsJwmFMUgkgqlBSlJ6G03o1z41sBI4TCLxG0gslMyvda6JbC31XqV0KytPilkBgt6PVyNjb69PopU1OGw4cjlFJ28Z1yPNuzoByl8sAXtVrkrlNqI37s3LjOgF6agR8i8NMQdPV28qag3ismmhY6f4MfsqlOmhwEd6ZpvPPm6+6n8gbYUVWpT2mjq5BBHAPCMOxr8rxASs3K1V/kt/7r83zve/5+WFAoEPgzYrfu6IEACwsH2L/8IyQtQbsd0WpDnhUMBgV5rhFIoqRFHMuq+ZxrGlOrARKJQhJVotRAuU1WioCSURlvFkRRRLubUPQjrp4fs7UpuXHDsL6Wk45T8hySWJFlmjyzCwTJMoO/yKlWstPG5hfY5W3xJF+dQAdUFrIxAoMCoRBl+93aVS9t8iGS7V7B9esjrl/ro/WI5X2GpeUIqaCux3chglK0Gtewp1wzwNShEesqdwrDZF5CnUHRNNX9c11uwm7BA+eXYaczofyGnHJmlwi291jPwR3TPGM3JGUiY5k8qN2KjlCuDKmqEIzWhkhBt2vnmOcFSkny4gn+2+/+dT7xqQ+SpqPbXCkQCPxpETwAgV2JVExaTBEninYnIi80vS3N9rb9414UBboYAwUuQ7y2UF3nPytY0KKK90sUkYgwFHbNeew6esIlkAnbYz5px4z7OZdeHVPomOEwJ8s0Sc
sQRQlxnDDojzFG0WpZD4SM6yQ5gQJTYFQp/U2toPiC1ja6EeDm6uYk6th/FClGQ8Pqamrb73YN3SlJFEVIVSa/aVOujgiVtS2oEvxctr+z7P0wgy9U3Xx8gS6wdfq1UlDa64IqGRFcu1/PQm8sX+h/M5SmfKkeiCpLohTSngenCpu4oAINp0Ct0NSeAFHevysl1KZcHlq4GhH7HUgl6XQM6VjT72d0OjHd6YhHH/+X3Lp1nve8+8c5djSEBAKBbxbBAxDYlSzLOH/9KoNcMD0zQ6cT0elGdDui7EZnmJsr2Le0zNETR0gZYkzZq57S/SsVkYrquHDpE3CL+bhEs6JM5HOWY6ELkJKkm2DGERs3I9bX26xckty4UZDndtGhTjdhOCgYjQrQovQsKIQWyDJebzsF2mVwbZ5AOTvhWvBORsGddV523zOS3pbm6pWCpaUOc3OaVjslaenqvx4h7LoESkiUlChh30tqhaBpwJuq6Y5pXNPJ5EpMu6r7Mt3OxdlLT0M9nJX3sqlMCKeF7GgEZC9UC2OqKzhngx8gcAmLVJ+rh1V7UBr34TwPti+EIcKgrKonpH1OpedGSUmrLWm1YTzOSVNbNXH1xi/xmx/+Zzz9zBfIsvFX+1UNBALfIEEBCNwGw3CccunSNlLN0Eq6zMzYP9RC2JK+5SVI0zEzs1NEMqLAdYcrY/7GiQEXDLAZ8Npo3JK2UsgyDa7uKlcUBXmRl5JV0L+VsbGquXZd8eLzmpVLOcPhiFZbML8Qk6WGNDMYXdbhO3WjXKoXmnKwyvfDWq1CliLWlAl5pfUspQE0t26OkUqQtDO0yapwRdVTAKxCI0U1nntPafXrMlHOylgboijK6gVttBO/VNJVmkpJobL8Pe8BtUD2DHnv6xOVk6FOLqQS9pXINhpDjjEFxuSYUoFr5gVAtZZR1begvqCf09CoSazyGooqxKLdqoaGqm+ClIokkbQ7UGjNYKiRkWKcPc6Hf+ev8slPh5BAIPDNICgAgdtgBdzFF27y8vkBKpkjjhVKGaS0S9BOTQnSrM/K1Uvc/5r7EJEo3ec2nmzA9rynXMceAZQCz5T+ACnLPvelkDOGotBkWUqWpaRpyjhPWb8yZONWzo0bgpUVyfoaZGlG0hLMzEZkmV0mVxuDrkr4yti+8RUAz5KthHJpkZsymVAYpNAICtJ+zsatnMUlQdIq6uZCiFry4iUL1nur+9HlPdUJgvVT9uvmayHrGgn5o3kJgOUJnh6w29dXj29M1eeodgZ4Hgfvc/Odl8ZZKTQToYm6EUDjeTTRGDKMya2Hx9hn7K4vy14KkVK02wYhCoYDmxcwPZvwxUf+BR/6lf+Rixdfus3NBgKBb4SgAAR2JS8y+mywOoAvPH2RG6MxkKCxf8CNKZibh5MnYza3tpmemeLAwX2lAmCTvnwBK2rxjzYFWhcIKVBKEkcRUSNJTFPkOXmWkucFWZ6Tm4Jbl4es30q5fi3n4oWC1dWc3nZGksD0lGI0KhiPdGXg5oXBmFKYQh1/d+yWWEcB5CAKNAVpWnDreo/1tR5ZmiM8KWr1iTp04TL/XUJf7WUAIayC4f5nysqFhsu+EvieGBX+GJ6wrc7cTeD6A5ZeiMkBy3vevQtAHW5oDAfUik5T3alUhdu2IbTeFDd25fkw9flSCJSUtDuGONGMxgV5DrOzCddu/jK/8ushJBAI/GkSkgADu5LlGX1zC7Vk3f634gIzErS0xJCjjWFmBl7zYIu1tTFf/sqXMIVBiMgqARKk7xHGS/SjdvmLUlmQUjLODVrr2jrXuhKsYCMKq1cziiInSTrsPzBPuzVGRRntVsTMTMz2thXSQpYeB6M9q7u2toVs9iy05WpWgOe5RiorkKKWYXZJ8tST6yg1y9JyZBPbtMvyt1SJbc4qnhCURrhIfrnJrTuAacjMqrGOe2rCTwic8AJUeQKlOmF2yt9md0Hn9vc/e8sDTSgIO9UC/5v0khG95zh5zo4zhbELD1HnIMjG74j1CCWJQSnNeGzIckmno8jGT/Hhj/wNrl77Gd71jh8LawkEAn9CggcgsCvGGIg0CycFyycEcafNjUSwmmoiKZHKgNQcPxnxPd+7wPHjLeIoJkoEWhSARilRhg1U1S62siI9S1kIQaRiIhnZWHpp5bqYuf3gvMyCjRual57r8cJzYy5dnGJrs83mpg0HTM9E9PoFuijbBBcFWpeWp1d+J3BWulNInJJgExSL3Lrkk7bgxB1tVCR5/vkeG+s5WlsFp0ZUCXG6CieYapcQdTKf8Axua/XaUki3oFFTfvsVC3VynR/DbyCaW8xEvEFMHrAjrOB/mryWN6eGd0CUtr2rE6gH2t0XIMpnIrxSS/fZywtAEinJVFcQKc1gUIDUzMxGPPLYz/C//8JPcuHCi7teIRAIfG0EBSCwK4NBn83xUyTTgigpLds44opI2cw0kRIIqYGcw0ckD71xmsUl2wgHWdhMfgFRHBElkc1Ql87Z67r32XJCnAgUqhIKxtYIeqVvtfUshWC4Da+8tM4Lz41ZubDI9lab7e2UpAVTXdtRMM1cQpshz203Ohfrt/0Bdu+U57LTwXYSnJuPeP3rZ2i3Ym7d0oyGdu51Pp0XxzcuHKCbwrdSgCZFeN03AU+B2Ent6hfGF61m4ggvlu81JXLzc8c1cTNpTKwW8hN9C+qDXNLk5Gx8NaA5d/upTJxkUgWp8wIqBUEI2i1JKzEMBjmDoWZqOmFz+2P84gf/eQgJBAJ/AoICENgdI8hGEBnDVAptqei0pmhPz3M+67MxzFES0jRjnI44eUrx0MNTtLsSERtQObnO0WhUVC7OU7qYXVdAg6HQzkK3rYAjlzxY+rP9pj3CZdEbg0Iy2jK88Nxlnn5qk2tXF9je6tDvZXS6knZHMRho8kxTFHWLWl1UN1j1BShHB2nN9Tp2L9DaoJRgYSHm5KkORS5YvVUwHmuM1lUNfqUEIGymO6bsfeA7zuukN7fWnimb+dRWdXmm66o3IUZtr4TJuXs/Ta0E2IRFLy5fPv+d6oUnjt0kRa1OGD+MULdUpBbfZuJV43/ycxtk9d2W6oCoxzON4+0xSSKZnbFZJMOhodONaHWe5ld+/a/x0Y9/iMFge8ddBQKBr07IAQjsijEw2pxhHxvMXU84dtcJxksJOhf0e5tc2RrSiaZQ0pCNMuIW3PtAwtZml6efGjDMC4Qw5LlCRGXpX9kKVgpBXtbka6wXIC7d/zGxrdnHlQoKkNJas1o7GYcQkOeQp/DqK9eQKqbQ0xwVKUoVtFqKLBMMRwWtlq02gFIoNmLs9mWnI0vh6sfJbRlfFAmWlyOMgWtXR6hIsbw/RqGrckCHtusOl7kMGukK9IWouiZK6uZInu/cegFK61qIsgEQ9eI/9eFWCWjW4dMocbT3N6FAlDK+6o7ojPzSE7HD+SC86L5x8n/Sh+ANtMue5nB1hYQoJ+TnPVQLNjkFzWtKFEWSrjSM04L+ADodxdK+mMee/BnOX/g8H/iBf8Tx46FxUCDwtRI8AIFdGQwGpFsxpgeXX1BsnZfMxYtMJ
/tpyVmGUcbKcARIkkSgi5xON+fBN7S574EurQ4IZch1xigdk5scI6zFrIWmELntG2AMWhfWZS5ASlU38AEoM8OVsNvrVfBsgqCUkuH2mPPnrnPuVc366izjccT2tmFqSpIkktFIk+el98DJXOeuL0MBAm13Gmthy3I54dodLYhjyb7lhCNHOmxuFKzdzCgyvcNl76/UZ3P9TS1ZhfMEeJnz/stmMDYtcOp8idoTL8qX5+Y39XHGq0pozM3/YG6z3W2wiRpeLsbkkQ2bvtrndzRwNRjVksfexf0cjGpr2T1w0jnjtkkhaLcEScswGhekmWF6JqbX/xi/8hs/xRe/9BHSNIQEAoGvhaAABHYlz3P61yO2ViPOXL7OE4++ymCjTzGQkCZEss2NfMDNYWHL+CJBnmcs7dO84Y0tTt83xfRsgow1WTFCCLuMr4okSBpu78IU5M66N9ZH75QA19ZHijphrko80wVWb5Csr25y4fxNnns+5eqVNlmmMAampiRxJBgNdVkW6FYLdAmI9r29eIEoS/Mm1wuw5wiSRLC8L2ZhvsX2hmG4rdF5Q1T73uwyMbD0MLiSRKkQ3t3Zubg+AbUJX4cE7Ds/wVBr6r4ClQuhfE343auIfdmToFkFMGm5e5/NDrVgh2iv7kOUORz4HRCpE/yAZkVC7fLwlaz6WJpagLHnOMUpiQWtliHLC3p9TZQoVPQcv/nhv8dv/uaH6Pe3dsw9EAg0CQpA4Lao4QKD3ixFy3Dx0mVWV2+R5ilTM11iMY0UEZfSHqtDQxQppIRxapWA170h5sixmCQRqMh6A1QsSVoxSlnXvv2bLiiMJisya7VqU7nNpVAooerses/VbJ3kGtc7PyLi5pUNXnrhFs88M2T1lmAwKNDaMD1t1woYj3S5WBBVQiAYLyGwKRDrdsGla90YCm2IY8GBAwmLy20GQ8XmpibLqVY0dJazw8X5Cwya0nKv4uy1kIcyedC5xE3t4nfCvlpAWDQr+P2FlxutARuh+aYNXh2PJ3ArrWe33whPaGOQuLbE9rNA2/JP58gQ9Rk7hHwl7GU1ZuUVqJQUlwWwyyyMIFKCbsdgyNnq5WgyDh2RfOXJn+U///w/4NJKaBwUCHw1ggIQuC375o+TxCdozcCUSSkGYzpJl+mZWVrtmER20VJzfrzNal8TKWkX0dEFi0uak6cUy/tikkSC0miZIZRARW51OHsdA+Q6L0v26tJA2xjGpqkUOi+FY5kY1khAE5Ul2l/PeObpNR758iartzJ6/RwhYXYuYjTSjFMbDigK60bX2goZW3QgSkvfycGmdQqUpYWGVlux/0Cb2YUOg4FgY8OQpaZKaKzc8m6WlWArqgY4VkjXKw/Wqk390y/l2605TzkKnpSnqivYVYg31QZ/a/3GNF9+dKKxT+Mq+ut2wO4baqoa7j5c4qPx78npS/WlSiHvtKCJ7UKgpPUIRUox1ZV0OprBMGM8zjlyLGI4/ji/8Vs/xac+/TuhSiAQuA1BAQjsihCCe+97LQcP3c++pTmOLvQYbK2jx5BEXVRLgCwQWpGR82q/x8ZQk8S2nj2ODYePau68O+LwsYQoBq1SRnmPLB8jlU3Ic4JVG824SEnzcdX8R2vtgvVVXFt44l/6v75ClMsCQ4TghZfHnL8wJstyev0MqWB5X0KeGZvBX7r1i6JcRa/0ldvtzs1uqmfhewPseQYVwcyMZHGpRTY2bG4YMBFVS2HMhBKg61fVEXCXNj9e9YNNB2hmC/jCuWlhO2Fcjua53OvzJr9n77BKCvsSvxG1B+8baLr0d4yMa4hUn197XarLeUz4X2i89VwKje9DCCIpacWKqSlJrg293pgoNgzHT/AbH/4JPv6p/8JgGKoEAoFJggIQ2BUhBAf3H0QUXbbHknQh4/r6Ja6fvUA+TFGyBXqMTZwDnWRcS0eMM4GKBEIK5ucF971G8fBbutx9b5soBhNljIpB6SqWnnwxFDon06kVitqgc7syoHOLmzJW7sSErNvr1F5zbehv5YwHhn4PlII0y9jaHBNFMDOjKApNnpeWuifo3TzqbbW1Wq3AV760MehCE0WGubmI2dmEjbWcLI2JVMsmEVZ5AMYb2ykBBVXSofFUAFMnB9r7obaAK7d4Y7al4CzzI5ziIOrj6za9dnEk4T+3Rt5AGTNw85kQ0I1GTrvIfv/5VN0Qxc4xnL9gR/ZBqZRVQRFTb29OyNTtCKq8AEkkJTMzkqQtGI5ykpbg5MmEL375f+D/94t/n5UQEggEGoQywMDuGMONmzfYWN3i0g0BcxGL11e5dn6Lre0jLBwcs7il2TKGXgwmgi015mJfclJ1iBXkGKamNe2OpNOZA0a8cmaLvMgZmRHCxI2sfOdyN8J6BJyF71bLA1c6Z983BYjdBsauNZBppqYjVAT5SLO2mYIQzMwkTBEx6FvFIklkVU4HIIQfk/ZC4qZ+CQHoslxN29DH3HyEEB02N0ak44SFpYTCpPVcjW/hG6ryttqvTdUVD1MLSonXebCy97HFc34YxFnnnmAVdRRdeM/LVK58L0lA4N0cntT1rf7dhL/vv2hc/TbsDHY45c3NlzLfoVH6uGtzpHqq9kgJxtBqaZSCfj8njiUzs4r1zU/yGx9e5e1v/Vu84XXvJ45bf8w8A4HvfoICENgVbQyPPvplTKHpr8f0LkvmpnPOvzyks3GF73+t5GAfbmnJxYWC1TmQMayKIWwa7pjrVu7yOM7Zf8DwwIMzFFrwykvrjHop0ggEiuovvzE2Lm8MOUW5grwsk+tK4e4EM/giCReTdm51sO55KQXGyKrr4GiU0W7FtNqK0SjHGIgilwgnUNIgpPCszzoMYH/aq0vlsvIFWtvOiLOzCikMV64MGaYt5uYVgtzOAYOSohK8DQFn6rwB2/t/wuVfrUCI91NW5/iifLeEOdczwCkVoqoeaB5nhH8V9wBE9aMOI0wc495Xu0Tj2bmkPlm7JerhfWVg0vFgmv0Pbkf13biTjSRSmqkZGPRs3kV3Ggb9x/j9P3yGtbWf4i1v/gAL8wf+2LEDge9mQgggcBsMr545Q6vVotuZoXUrp7VmuLamufLqAP3KgOFV2L6oOHhNsLAJJjcoCatmzPoor7q9YQxRPOLQoYK7T3c4cmyKqC1RsW4sqFOJK2NsZYApyHTuzcj+u1sinBvFRddVS7KwGNk1ATRMz0jiGLKsoD/IiGNBp60oCpsHkGU27GCFsPGW/S2vWZUM1oqASxQsCpuvoJRdi+DgwS63bo64dTNlNLZLAWNc2+HJmDh4KXT44YzqWt7xYmJ7rQj5z8hudzX4fpjFKQOy8gJUM6nCDKLxeJshlt2eea1mOC9JM/ehSqIUE6cKp7Y5RcBMvNtp+X81T0A9tL17JSVT07Yz49YWxC3B9GyLL3z5X/ELv/wToUogsOcJCkBgV4wx5FnGQw89xLvf+26yNKZVGBZbgsHYsN6DF/sdXlHHKYb7OHgLkm2bHEekuZT1WR8UznilyHOS1pAjRzLuuktx8FBEMqVoTSuk8gSJJ6wMmoJix9xEtbfOB7Au
5Nob8IY3dNi3r0VRACg6XVW2I9ZkWU6hc5KWpJVI8swK8bww5LmmyA1FbksGtXEd9zybuRTaoixldFEDrQ2REszPRxw6OEV/2zDsibJTYBn/pw53uKx464H3xHzD6G1m1FuBaSpFZ6ctP/mwXPzfKS11omDVUcGFP1wEoJkZWD91z0J3wrm6lz8Wv8RvR/ufxrjunUulrO7S8yg0tQT/dp1yJBCmVAK6ESpK6PcVWdZjesawtf0Cv/TB/xuf/PQHyfP0a5h/IPDdRwgBBHZlemqGubl53v72t9Gakvy/rlxg5aWvkLSGiEHGMDVsLR5l4fT9pOk23UufYmYqY9wFYsFYFJztD7jTdJmfkjazXmd0pwqOn9Rok/DyS4belqBINbowIGTDpQ3ujWuhayr3uEZXll4dxLelYRjB3fd0SFqC/rag3U6IogxdGLJUgxGMhjmmLUhaCpD0Bzmtll2ApqwMnFjxj9oLUCbaVbLPYOvyDRQaIGN+HnSRsLIyoNARS8sSIQ2ioKwmsGEGqdw4Lg5vhbSXy99ANyxrN4HmUXY8L1Y/YXz7OQLSCLSwCo0W3toFxv8mat+LLr+LKnfBgDGTeQHNazUyFZyysSMAseMOPO/L5FE7PR6V3uS+H+/qShqmuzlZBsOhREWamdmMzc1X+IM/+idsba3x5jd9gEMHT+1+E4HAdynBAxDYlShSHDywj1YSc/jQAWbm5rhiZulH04xExMqWYD0THDp+mPm772FTHEOtQZJbaSMFjKOMc1sjBiMbZ5fSIGXB7Jzm5B0Rd97VZnpOIJICIzRKyaaw8OZjO83JiT5z9dFu9Th7kuTQYYXWBXkBQtr+AuOR5uKFnHPnCra2DOm4IM8LkpYNB4yGtk+A9qx9V/JnFZhms6CGkuKEIvZn0tLMLwhm5yTnz404f35Ebzsjz12vg9pdXoUHvKIAVyrot9V1lnuVR99I1PPn1eySUB0x4Zp3A7tyuvrZ+tK0SdW/0bgrldu9mL5dEdFUnQvrjoXlMaKsotC7B3N2Bj1c1UDtC/FDHVbm1z0G6sfh1hWw2Q9JrJmeAqMV47RgYVExMxfx6GP/jg/96t8PIYHAniMoAIFdEUJw3z13kmU5zz39ApevXGNoBJmG5W7M+WHEUES87e0P8rq3Pkh0/E6K7RZxJuxfeASRgr4cc3F7RF6Iqh+8UjA9bTh8VHDgoCDpGIQqEF4+YBUyNvV8lFQoFLV9V9SqgLC/zBrB0eOSbjcnHRVIKZDKYLTm5s2CRx9J+f3fz3jsKyO2NguyLCfLC6amFUkCw74iz2yDoDwv3f+6tnidoHO9AqxP3lTCvyohNIJWS3L0aJsTJ9tsrhfcuJ6Rju3qh7q0+G3SY7PnvxWMoo6n148B90icQKzleTNQIKpgvtld8JtamRGmfFE3VHLfYXNc+92WKydUgt2pBVbo1yEK979Gt4PaoQDCn/+O30BPkfGVgMYh3vS8/gQTI9lOjHYWSmm6UxptYDg0TE1LFpcibt56lF/64N/lUyEkENhDhBBAYFemprrMdBOuXL3KF774WS5dXGF7q8+RIwfJb92gv93n/tfew333HsNIwzMnjzM4u0wyvEZ/urbdotiwmo+INyQnFlooCShrse3blzMcGm7eFIz6mmw0BhSy9os33dVSVMLVL2uz2eIS2xTWcPSYpNOG0cgQJyCEJs8FN24ZrlwpmJ6a5/nnM+JozAOvSZhbsPH82bmIVusA/X6PdnsbFVlpJQRW7RCiinvXpYOe6U/tyXfNfDpdw4H9MUYbrt8cs7GlWYoKm/woPY3Hd/oLL+lOuNBDKQTLEsI6KdGbB1RNgypB2xCy7hg8NzmVfJ9M/qvXGKiFd23xW4GqywQI46oFjEEK412+9CkYp9jV+QXuCn5BY3Omk2pNrQz5Y/rHTuoT9njrWtHlQxUCWolgPBZsbcL0tOLAwRbb22f5g4/9Y7a213nTG3+IQ4dCSCDw3U1QAAK35cqlV7m2lfLs089x49pNoigmSiJGUiKEZmqqhc5GdKditIStW+t0jwqifTaur3Jrk9MyXEuHROuSY4sxUtiYeaulOXxIcPOYYG0ViqIAoZhqtcnGOeNsjEs+s70AbIMhqQXayGYc2RgKNJBz/EQHpezeOLbd+/JcsL4G4yEMt2+iheKFF7pEccHpe22IQklFq32dLM8ZDmFqWnnC3pQJhTY3oGpLLJrxb7fasNG2nBAjabXh0KEWUSS5cm0MRrBvvySWGkq3uzESI2x//UryO8vWZRlWioCdVGV/67KrIl7CYBlSEI0qi4aushNB3WxIWDFrvLn4VQO2fbET3fZidQMgqnWE7CjeMzJUmoYvuKlHwuV51MK+vq7TdfwRqvYFwvcYWA+FqM4yZYilrvSwOR8wGgm6XcXiYkSkIh574v/NmVc+zl/+i/+S48fvQ6nwZzLw3UkIAQR2JY5jutMzvPDcC9y8cZPZuVnAcPH8CldubbOZC27cWOfVVy4TS8mxk4fYXBvANsS5JMlgbvsAi2sHaG8ZokRzXQ9Y7+syFGBj2DMzcPc9EXfcETE1p5heTIjbqgoXULbVdcvbSmkXCXJrBVpcTN0msR0/0aIoPQU2kc+wsWFd8Nm4YDxOyUZD1tZGPPOs4KUXM/q9jCzXFNrQ6UQYkzMYJOS5sO2CC9BFXRHgVgc0ZawZ7HZnwAtZCxspJe2O4tChhAP7Eq6spKyv5hR5ufiPc8XjLGljLX0ETLrjnQeCMgJvqKoQfAsdI6pVhXeN5Fe5DL4S5YYxVUhFivLlOdglogzEuGWba6veeELaemq8a2IwVaKhKRd+qpIHqnwIUX7nLtehUT1gbMml1uVyx9ShjiqXouy2KNxsjA3Z1C9NnsF4bIgTTatt6G3bz7NzhrkFzcbmk/zvv/hj/NHHfpEsCyGBwHcnQbUN7Eq73eU1D7yBx5+7SBxHnLrrbtbXNnjy0WeJ4phkborZxQXOnL3Fgf1LLC3vI1/Yj+mt0h6CGLe45+TbWFw4wuOf/gXW2xuMWgVnez2iaJr5jqQwBl0YlvdJ3vDGFlqnXLpYoEnJTUrd9ofqD7+SCinsYkK5Kawr3B6BRnDv/TbxLi90qd4adCHo9WBjLSXPs+oeB9t9pIQnn1TEseH+B2BuPqbTiVjelzDonyBN11HyGlEcEceetenh7GAhRLmuACBMKczLbQKiWHLkSAutDWu3MpSEuQVFFNcCtlqB12CFpTEYIRFGWm+DkSD8ZYzrEEJlPVchAi+UAKWnwQurOOt+B56i0bhZ0VAy3E9pQAtRhQEolzZ2fROs4kZDGQCq/ZXCY+qZ1loEVWJkPRV7jn2u9hhdXrfSJyplzB1bt3w2BtJRzPqaptWBNNVsboxZXJAcOSaJIljen9DvFXz2Cz/NYNDnrW/+AEeOhJBA4LuLoAAEdkUIwfFjR5jvKB555Dz33HeSpaWTPPnos+RZxtbGNoNenwsr14gf1Zw4cYjlu09hVlZZulUgxCL33PNaXvPWtyBGKY8++x/I9guyJOOVzR6nxTTTbVEua6s5cFDwmtd
F9PtjrlzKyY0u6/ylsx3Jte0MKF3s3LWnKwVZgWBxSSEVpGNDHNnyw+FAc+niiMsr2Y777G8NMHR5/IkERM59DwiKwjA9FdHuvsR4/TDj0RztzratQkioBU8pf52gFMImOFZu88qkdtY7JIni6LE2SgmuXE3JcsPSMigl7TLJTUlZeswNCF250q3xXrcRNr4gF1QKgvCm4OPc5y47XnpS3U8q3NnHvzxGe14Ht8cYG/4wBiPKrn8l0h/HOFXFm9wugXszscGUkzOlNe9uThgXqhCli19TFDZcVBRW8dAG+j2NLgRZVjAcGbSO2N4akCSGXq9geqZNq90mzzVSgSBlakZiiHn8qX/LSy9/nL/yo/+KUyfvI4rCn83AdwfhNzlwWw4cWOb03cf4w098ieFgwL3338WDb3wtTz/+HK1um35/wIvPvMirL77CX/yL76M7N0//jGF6TZIePMaB43fS6nS5/+1v59a5x+iPH0G3BVkr40JvyF2qSysW6NygdcGBg5KH3tjGFCkrFwqKQlQZ3GBt/FwXKEonssukr4SJ5sixmEjBSENSxnjHY7hxM8MUuro3l+1ujGawPURKxZNPtZFSc89pK1RmZiRz80NuXE8p8g656qGixJ7vmgCV45lSEDV66ID1BGgopSLGGFqJ5MixFklLcOH8iCiSzM6VFqtNkMBUgrkW19qUCyh5aXG+INXagKQs6fOPwXtCtQBu3IFx+92+Ojbv3Yz9d0fg0FS5D3bKLkDhVTA00zU8r4kXMnDpBKYMD3jP0YZcrCvAVVBoDcMB5JnN1RinOXmeUeQFaSYoCkm7ZWzfAqlQCpSK6Hah0yk4eDAhie0XliQR7bag0Lpc30qDMEzPSDodRW/7ef7jf/5LvONt/5Qf+sG/SRwnkw8hEPiOIygAgduyf/9RTh0/yPLhw1w8f5k3PPwgr3nwNGBodxLm52d48cVXmZ2fYW19k+E4Z7CtkZmhs3SE/UeO8sLzZxECDp14kKvPP8IgAZUIthhxYVtwaraLkoIsE0QSjh1XZGlM0hK8+mJBNjLUqwFGVhloWNguzcuKr/kFaf+Il0aiFDHDgeHypYypqWlOnDjO888/j0QRizYGTWbGDLZ7TE0pLlyYpdsdcuedOUJEzExvs29fm/H4JOPxyxjGRJEiicvOgtWyv8IKMyMqt3OVpS9sO2DrvbDrE8Qi58CBBGMkt27laAwLiwapDUqJUoj6KkY5Fn5/fN/Er8MCdSKdqQSyUyCM21ae3xyhvob/2V3Jjep2amMaIYLyrjFlt0FdCvO6YoIdngkvBQC0VWKEKBUR7eL+ddllUXZrHI0Mg77k5q0xWWrQWtJqCaII4gimpgVT07bTYxxLVFSmLApQ0jZiss4Wq5i5sklZaTJ2coW2/SnmFyCOCz7z+X9Nlvd5+1tDSCDwnU9QAAK3ZWZuHwcWZ3n4ofv5g498jM984kss7VticXmRY8cPc+TYPi6uXGZ5eYG77z1JJ1Z89swTbPbXOH7kFEmS8Ngjz7C5sc6Dx5Y41HqQq9nT5LFESbiRDhGbcHKuQxxLCm2IY83ho4KiiNnaiLhycQSFn7BWNqERtRXu/mJH0jA9rcgyjYokQoLWio3NlEFfMx4O2d6268JLJIlMEEISETHIe9y8tkWr1eallyRJknHilGScFrRaPeAs47RLlnaBGwg6REYglagS6qScVAIsQoCKvN4BQoKxseaDByKkMFy9NkIbWFoq+xvKWtBWlnE1ovbi556CIHwRrqsmOLh+BS5fr2Hh2+Odgd1IJKz2evPwzhOi6UWomxaJqkoCT/gDdZlko4eAHd/F6YUoY/ZF6dLXhtGoYDzS1tWPAi2JI8PhQwkISRIrul1JFEtaSdn7wfVUcLkMolwDQe7WLQDrdajUGDt5Y6DAIMmZmla0WjHPvfDvOH/h4/zlH/lXnDgeQgKB71zCb27gtnRn5phqT/GGhx7g/MUVtIZTp07xzFPPowvD0WMH2bc8x/lzKzz37EtMtTvsO3YMsZmysLyEEpI8L3jiqZcww5Ms5y2mR7DREiANcSS4UYxpbSuOzbWIImvddTuCYycko1FClituXulhyoXibaqfTYiLlCLPrdO6wHDyTmi1BUUOUWyz1PMi4tbNPsO+Tai7dOkSpa/dVhNIiaSFiQzDvMe1y2vMzhzm7KsxcTziyDEQIqbVHjFtOijxEKPxGfLiLIiY2K5AW1UbuKQ3Gw7whDgudm3XGXCbo0iyvC8iyxNurWYYIVmchySizAlwDYFc+SHNcH8VJnD19k7wlUqEcBLXvhfVeTVOPmtjbN5ks2bPjlv2E5Dedhfv91WTKjO/mqEn5n1FwJur25emmiwryDPNODXkWdmcSEGkFLOzHZJEoSIbulFS15UOQk7kLIjS2+LCM0x4PkTjOe6YbTVXF3bAdquMMmbnIjY3Huc///wP8553/TTv+94fJ47C8sKB7zyCAhC4LVHSZpgp7jh1hPf/+fdw5uWzzC/MsbS8SFEYTpw8xutedy8vPPsqzzz1Ivk4J792k1MHT7H/4EESKVFKMRiMePrZM7zraIulFDaNxBnCsZJcS1M6PcXyTIQqFwZqtzVHj+X0epLBMGK4qTEF5cq5pmFBCgQFmvlFa2kXGhIBQijSccS1a+OmGYovAAVSKDpxB601o9GAc6/eoNM9zuVL0whxk8NHIc8jpro30cVZ0u0uWs/SavWxq85RZbpZa9N4wqgUOYKqQY4rYQMbp4hjycFDCXEsOX9xxDhV7F9q00o0SVwm9Ml6IWE/pu7K9XC2uLDLKUu70k9tlYt6Lt7pjffW6+ACCLUwtGeKajyjy+S68nsAq9xI6Y1rBJhmFYefoW+884pCs7WVc+vWGGM0kZSoKKLbiZiaUXTaijixSZLW8+OUCendm7u/uj+CH+JwSZqN9Az3r7dRuu/QZjRSBjNqxQqQkWBxOaHfy/ns538GgDe/6YdYXAjLCwe+swgKQOC2SCVozy4zHN7gjQ89yKFD+/jSF57k2tXrHDlymOW5Zd70pgdZuXiVYZrz/Csvs3Zxnc7yEWZn52jFMZ2kBRrWVjcRd97B8naLy8YwQqIEGKXIleHsxgChp1hesAldcWyYmy+4+x5BliU8/2xO2ivQGVW9eJEX1CJOc/BQRKQEWWGb8AghyFJNb3sy+9+uM6hN2QJXSOI4oYvBjDS97SFnXrpG+7XHWLk4T5xsMjdniKOEVvss3al5NtYjwNX3CaSyI+syhu+8AI7KrV+W5jnvgC6spR9HsLwUIUSbm6s5vX4LKXOEyIgjWVr3VIUPzqJ3owtPaNeu+fI44ysBO2P7TWd4nS8wqcTY+7O6jq5CMnZEG+cXVQzf9W2gzDswplwpEhvHz7LCrrOgDaNRzva2YXamTbcrSRJJ0pJEkSzDKvXzqvMH6iqFyhNSz6a+G/9ct9/UIRbf0Ee451efbJ+7LKtVygPL72F6JiKKJZ//4s/wzHO/y1/7Kz/HsaOnCQS+UwgKQGAHxmiGwx7b2+u0pqYx25c5cnCJk6cO8IXPfIkb12+xf3
k/7XiWB+6bI/3hIS+9fIkXnn2ZW/2ccavL/NwMrThmaqrD1uoGoshJZhdob+2jXdwg04JkU1MsChIJnbEmPTdglMzSnpEUeUEcG5aWDafvUwwGgldfNpiiwC5fp9FG18IQw8JiUso6iVJ27YHV1R5bWzvL/7QpyE2GMHZ1O60FUipimWC0YX11kxeehwdec4QrKzlSjoijlFkSkmSLdkczHBrE2JYqIiBJRCVYnOBx7nug6spXudzLnjhag5ASFRkWFiOmuhFXrqdc6cPyomCmq1GRG3sidl0K2FpY7SLoRa0OTPYxsB58T3hWHQDrBL+6aZCorODmBfAa8JSx/MI2RLBCH/Jck441o3IBpjzTRCqm3VHMzXRYXlI2+VG4XApRW+2e90K4h2s85UU0FYCyW3J1/GS031cKKndKdZDxfpZzwFcqTFWGaYB2W6JUzNqtR/i13/gp3vn2v81Db3g/cRxCAoFvf4ICEGiQ5WPW16+RZxqtbXvc7swMy0tzJJ2YO+44xu9+5HO8651vYXFuiX3LXR54zRad6S5nz1zg0oUbHD99J/Pzc8RJi9c8cC8nTh7lxs1V2nNzxFc6dFLoRxJzsSDq5yx1BctrML2esamHjF8zT6sDxhTE0ioBp+6QbG0prl3JYaQxhWtqYxDlyvbdqai0QG2jIK0j1tZGDAYZMzPTDAZD224Y6wPITYrUtqhQj4tqLCUULdrcvNHj1TO3OB0tcWVFkmc2gXB6OmF21paVDQeaPLNLDEspyxCGKWP3xhNi0sYvqg5/tbveaFuqJpUgjkC0DEsLcOlyxrUbkByJaKsC6TofGrcmQG3xO8le5w5Oij3Ka076JZoKgT2Xav7VtonQhqnCGH50RTcy9Yty+eU00/QHOVkZ05+bTZidiWi3Y5Si9NbUgrl6Xm5untfDhTvs/83OyeMf7wl6d2MThzdDI7XXxA9XTHpUhFuPodwfR4L9BxP6vcf5nd97go3Nn+bND3+AhRASCHybExSAQIPnX3qcTqtLpzVPpCJGw5TrN27w8DvewOraNqfuOIHJRsxMd+hMRSRqmuWF/WQ6513f8xBnXrrEvffdyfzcFO2kxf2n7+Z93/dutvs9Ttx3khuXnmFueI7tOcVqH8QVOLhUcHPTsE2E6WvWkRx/UBAnkOeGbldy4qSg37NKQL/IS1lq3dUFBUv7p0gSu0aBiqwQ0YXk1s0R/R48cO8Jtra2yiRAazUXJkeLHKNlmVzosuMFSkRMiYQrl7ZpteDknUvkeUqnmyNVzlQ3ZmZaoQvDdq9gejqySWulAHPJZ8417nIVgNpENbYkzVBWCGgruVRkmJk2HDsSc/NWzplXxhw4INm/LMos9tLpb833yiUOTXe3X4fv9w7YIQedUPe6BO5YPRDXZtdU+7XWXj6CzbjPMsNwmNPv56Rjm+TYbitmZhTtRNFuRcRx6VI3ohLWlaVfWvOVMuBNUewQ9rWZX60QWYU7Jo/dXVnwyxMbqtHEsVX1QjWh8vv0chunZyJarZgvfvlneeHF3+NH/1IICQS+vQkKQKBiY3ONy1eucPrOB2m1OuR5TmZiHn/8ed7/gXdjgOPHj3HP/XczGAwo8hQhIqaSeeamt7n7nuO85sG7OHXqIHHLip7pqS7v/3PfwyhNuXjuPFkKx7ZyMqXYiA03soM8d26Fnp5h/uhx5tqSxaV7ufjqFzh+Z0oUWUt0dg5OnpJsb0e88rJhczUvO/0atNEcOdYmadkyMRUJkrjNIOuyuQkU8Nxzz+24X+MWrjWawmTYygAbb1ciQomYKSLOvdLDiIRjJ6a5vLLNzIxhPC4wRjE7pyi0IR1rKNv0Rgpyzz9uKwRqs9QJ+nL6dpfwLW5JlBhmZg1xLIik5tUzKSafYWlZEMcFUrpWP6WYF9gyNikrK9+9XPf95r9UVq3xhFitBPiZ8HXinq8AGG0q5aXQhtEwZ3srJU0lcZIwvxDT7QpaiSpzJJpphVV8v3w8UMtoIUq/jh+/F95DE57nwfjn7+75MKWSMrnfqmauVLC2+OtxveRFd3zVo8FTGUolJEly5uYjtrae4Vd+/ad49ztCSCDw7UtQAAIVmoyDBw7QbrURQqCUYmpmhitXtrl44RIFCQvL8/yFH/5+rl+7Tn/Yx5iCSHboxtMc2r+fu0+fJGnHSFWgTcpWbwulFFEU8+qZs0xvr3Igh2wjY6u1n4NvexPXLh9nf6fL97zvnQyHfY6cOspnfi9ldeVjHDiV2Np6DYuL8NrXRkxNwROPa7bXinJ5YJhfbCPVwCoAShBFs4zHCVtbIJVCF0UzAF/imgwV5BhjkEahREQkhFUGlGSKNhfPrCGEoCi6JMk2J++wru44jllYiFldzRgO7TVkW9rcgtIqdoKkSjxzGfGl614YvHK/stxOQaQMrRYcOpwwOxtz9tyY4Sjh8FFoJboOCUCpRNTCzG/F21g6GU/wO/Fnmo+l6dr3F+GhfIBlUl+u0QbScUGa2TLNhYUpWu3Y5mBI03Df1xNwXQCp90zE+RsC3231E/Umv8dKIO+0/CcVmWZyYzPM4bv/rYPF9XbwZlJ5dVwIhsYYUSRYWIRe70l++3f+L2xu/RzvfMeP0u3MEAh8OxEUgEBFEiuOHD6IyQVaF6RpxsLiAseO3cHFS9fZt38fJ04c5f3f/26uXLmJUBJDhjCGdjRF0So4evgwSkQ2Ri81/f42H/v451BJws2Xnufe6DqxERzJDTeyHr1Ycfq193Pi1DF++IffT5pnpKLAaM2nf/4cw+4ZOgfbSGmIE8HikkFIyaCneOZpzWhgQAvanRSlChAxSkmEaLO1WbC5oWuR0BAazuFv+8YbQGPXuMeIehkiIYhURJsWF8+so80cQk2jzTYnTwpGo4JuV7GwEHPrZspoWCCF7UMQRQKhQfjWbylUG41+Kvd1PTtbimabBQlVMB8L7oklL76Y0u7E7N9nQNnVEV0HwGoMJ6AmXOhVexvhLH1dCT3/+r4CYIW/rpQW93M4LBgNC4zRJK2Y+fkpmwSpAGP78DuLWfjVChMyvW7J4/z/9tuYLNmriio8a9yP9ddWetPK3xHJaGwzjZ/C63Hsh0OEd7x7Xo1WCd7AdahAMz2tMKbF57/4P/DKq5/jh/78P+TEiXt3TigQ+BYRlgMOVMRRi+luC6nG5PmQ0XjA7MwM3/d938va2pi8yJmZiTl8dJHjx4+ilWKY9hmOe0RKIbRiaWGBwXaGFB0MMZ2pKfI05ZN/9AkGZ19AiT4bqWImgkP0WD17DqVilhcW2Dc/z8LcHEuLC7zlnQ+x74E3sPlsxmDT1p4pBVECc3OCO+6MOHkqZmY+Jp6PmZkZo6RAlYvp5EXO9RtD+n0bq25QWZ4u8Gyql115oECbwpM6AiVjYiIuvrLK1SsZr57rcvWK7VLX6+UoBfv2x6gIu6xwrslz41mTAsqFjGxsWzSsYD/mbvMXwGjK5Y8lBuh04cSJmLXVjJs3c9KsXibZ4ZrqOE+Aa/1T26umIdzscdoT9C6TX2N0USX7FdqQZ5rhsGBtPWVrW9PudFneN
8PiUocksd4MXbhV98p7dNn80r7KpgaI6lUf45Ye3rVPn3BLQrnvCk+Si8oz4J6lH66Anft2IhrCfEeoYCJHwD+tkYvg3BfldWZmFPMLMZev/AEf+rV/zlNPf4E0He9y/UDgz56gAAQqigI2Nq+R5Sv0hi8zGKxQ6IzFxQXyTLK6usnNW+sIZSiMIROSXjbgwsoltgd9hsOM/UvLXF65znioUWKKbnuJpcUlerfWeOXmKs+k9/L0as6oMMzFMLx+jY2Nbc6+eon+Vo8iTclGKdOdLne+9jSJvIfWmU3GA1O6xW1y4P6DkrtPK46dbHHoUMzcnC2nG40KpIqRyjAYQLeTTPzBt8JClJ37XCWBwRcWhU0Q9JfOERDJCGkk5164zpWVMc89H3N5pWAw1Fy/PkZgWFhQFMU0o5Eiywx5bipFwBgotLdsrpuRsO1vnQHqEuJswYINRUhha+KXlhUnT06ztia4cT1jNMptGMOFFSrhbwW6Nsb5NXD5Aq6PgpOllcA3tilPURRoXZ5bCv50XDAYFPT7miRqcfjQFAuLkla7HHWivlBKg5Smsv5FeXXhhLywf3xcnF86RaC0ru3UdJml4ecxiMpjA1T3TOWS90IWvhVfP+yJ0AJV4qZ/rn+WaZzqezEmkxDK7o/u0RqbHxFFsLyvRV58hf/y6/8dn/jkB8nzlEDgW00IAQQqpBT0eutcuX4OpTJGA7iy8hIvvnCB7dEGRk5xaeU6SsTkWZvxWLOaDfns5x7lnrtPsn/fPpYXZultbXPj5k1mZuZJomm67Vne+ba3cOXCfva99Z0srb+b0XO/Rq93ncvXrnLz+TNsb/V59ewFTtxxlDTNmVuY5Y1vez3982fZ/vhzbM0MUHd0SWLrCWi3Yf8Bm4x3TEpmZ20pXpoKoigmTWM2NsaMR6P6/oRCCIkmB7w/0r5VWb4rTI42GmVq4aGkpCUSxrngxoVNInWAx5+IuP/+lKUlzbXrdoGfubkxa6sZKrJlgXnu3O4FBo0UBq1F+czt2FWyW9W6VlRxd4EtMURb78T8nGS4L+aF51cxeorlfYZOR5XhAE8QUhmjtUvdxfuN8xa4fvmmPq909Wtt4/s2zJEwP58glBXgri2xXffAG1/4zYKFN4vasp+srIf6GRuvZ0EVnHFfj6gX9Klc8dReDxciqFpDGeP8EPitfydp6i71s6/Ned9Dc7vFmLzx3N7Ku2OXiZ6fVwhh+Pgnf5o06/Omhz/AoYOndp9UIPBnQFAAAhXtVofFxSXOX3yFQX+bSxev80d/8CQf/aMvceTYYd7yzvtZXFxCKYXJWhw8cQdXrl7k9z7yKT7VSfixv/JD/NCffxf33XeKc+fPcvzYYTAzFLnh2LGjHD52jDe97T10I8NTcc6V//YfGEfT3Fy5zMFD+3j2uZc5cnQ/KyuXQZ5gbmGOIw8+wNOfPUyyepUbszGHDyQoCUYJpqYE+/aljEa2X0FRCPLcgFGsrwquXR3S7XZZmJ/n8uXLtFQHIRSZHpXJfwqBwq4kUNqXUiCMLEsMdRmTtkJNRsre+1AwzsZcv7iKUgu89FKbe08P2b9fsbbaZWl5yPyCYnvLNrxptyPbLthYq9Yu2Wu7FWrtWZWeQekUA62tZ0BJhZSKPM8xFBw8pBgNp7h8LQUl2L9f0IptTgBuMWBhbBwB1zOhFGxG1+89q9eP8Re5YTzWDIeabrvF7EwbFVnPj/MeyNL9XfsX3PTtcsQGM5GMWNvUzsp3251TxOBUA4MwzSTH+mB3rEt4KD855QZPofGjBM6VY8rrmaaQrt38Tcu/SbPL425VB3UDJW+U8sfcXEySKJ546n/ilVc/xl/50X/LsSOnb6ucBALfTEIIIOAhmJ9b4A2ve5hTJ+8lHUvOnDlLmo05d/YcTz/xPC+9eJY0zen1t1hfX+W5Z1/k2Wde5Nc+9Dt88hNfYHVzgxOnjnDx0gppNqDVMiStFtdurTF/4BDXb97k5QtXUEdPs6UWoNWmlURcv3qd8xcu0u20WJqb4uqVKwAcv+cU8o6HmE8NW4MR17esOS0VKCVIEkOW5qzdyshSQZLAOC1YW99mY3PA2uoq169fRwpFLFskokVbTdGOuiSyRSwSJBEugBtFEbFKiGUL6ZrRSIFUklYnIWm16LS7tGWbbKhZOXuT69dzVi5PMRoLonjEaCjodCTzZVvjLNNkZShAeJ1t/Pi/1rV3WkoXFvDDAdbDEUXKWta6YP+BFjPdiAsXhmxsFYzGhW2xW+iyr0BZq6+1fV/Y9ru6cvm7dr322DzXDEcFvV5Or1+AkSzMd1hY7CAkFFqD0UhASecJqGP4lD9VFc8X9QtqN/9kjf3E/5yXwM8PqMRylVNR/uuUGOosjjq6UYcLXCgE4y2I5D3/WgGqPST1hsm2Sl7iYtnPwTu0Pkp4CoKXK9DpSGbnIm6ufoWf/4W/w0c//sGQFxD4lhAUgICHQBCxMD/HyROneP3rH+Kuu++u9l66cI2zZ85x89Yqw+GAtfWbXF65zPlXLgDwiY9/gS986Qk6Ux20EYxGA7QekKYDzl64zGA45vkXX+QPPvpxZGcefeRNJDMzPPyW13PpwiU+9kefY+XSVe696yTHDy2RSMHc3BRm/2HG24LFgebKaMQ4L4WKEhgjGY/h4sUxV6+MSBIwJqfXGzMcjNFak+c5SsZIIiSKRLboxF0SlSClLftz7mmlFJGKiWVchQyUUigl6U53QRsktjIgFhHZyHDzyiY3bkScOZPQHygKLRiNFO22ZG5ekhea0UiTF4aicLFh+7KWZ+nIdgKkjIVLae+T8vgsL8oyM2UVkrbg5KkWC7MRKxeG9HoFRVFQ5LmN4WtXo1/H9ZsJf2Xznhy2t3OytMAYQafTYnGxy8Jii+60RKgcKJAIJDZ5z7rjy2qJMnFPlq+qD0H5U1ZCv47i+1iB61nKLg4/IVX93AV/cR6XqeFnG/i1BdXAxlRKQnMKEwqJlzexM0PEm7PbM5kTYJpjNsIypcISR4L9+9sY8Sqf/Mw/5lOf+S8MhtsEAn+WBAUg4CEwZgoQJEnC3Nx8ozRqe2vIE195jkvnVxhnYwbDATJSSCGYW5zlpedf4Vc+9BGuXLlJf5jS6w8xZIzSARdWLnPjxjWuXL3C6uY6Fy7fQM4dYnM05nVvuJ8jRw/yxGPP8vzzrxBHLY4eOMx8e4p9s/Pc88C9rHOE/X2bmX6pNyDNbG+A4VCgC0m3I5meFkxNRSipGI80G7fWuLtUYJRStQWpAS0QQiGFRGEVAGPsUr2uF0Cju5wR6EKTZ3m5CBEIIUloMdgoOPfSNZ57dosnHh+xupYyHOU2CbGrmJ2VjEeC8ci2xs0y20PAb6rjfvrhZecNkFIQKStY86ygyG27Y6kk3SnJ6Xu6HFhqc+VSytaW9QJkWW6FvrGvwmgKbddPcJn+RttOi+trI27dTJEyYXqqRaejSGKBlLZToXI9EUor31rUtaC1PQ8mhNxEyN15PmydhXPzTwhpZ2kb2xvBl7q+ha8bArkU7LXZXl2rUgI8T4opUzs1vm7hKxhu
mYpcspJ545xvvfTBjfyhG9mJZOuDVMOb9e59HGBicudtjMcrpDx2xfU9YS0m6Tk+sptyb3gRlSQyQlBsM74120kqx0WqH4+rAOMMZjD1cD3lWRuA4vQkGOdMmxEzGvvmlRUgUTn4rgp9I4JPKlKbYwlHlJlk2hLBFWVNP/J8X7EEJIdKRRkaLIcnyVEghPVIDBLMg/VfKf/npAVpQDKbClYbh7EAo8h5MDh5kVGFEGMUEVy5vWY6w7tPsFHcmQfSAV7Y5meSXl4uUG68cFKyuaWEuSJAQZxUlCFEc023267WUkMJ4MqaULtDtrCDSb23e59/AudT8j9/BQOERaYLMNDm59i8s7D7krHN95PGP3tGJ1/Rj1KCXd65OMTlHceJvGeERGxne3Xid1OasrDcoFg6NAKkmr02ZpaYm92QO2tm7gkxElGc5bZsMpB29d5ES/hfMaM9mnlnZREm7cfJe7d+9w9cp8CvBBwbwBmGOOPyRot9v89f/ob/B3/85/zt/8m/9zfuzHfoyzZ88Sx/HvuO2dO3eOfOJRHlcKZhs1Fhc8zpfMZlNs0cbkBfl0ApGgfcLimwXdTpMTK8dZXlriu195yI1MEE8Nu5nmow1YnORsbe6TnlEUBxFXo2VuTBRps0+7E5GMd/FFCVGJVIKoppj6kjf2t1EqZq3XxNiws1caMB4pBaU7TLQTCKGCK6CEYyca9BcMZalRscbkFi8BPPlsBs5hjSHPC0xV0F1F6JNa46yFw/Q/HySJ1lgQnjjRlNaEYi5AeHG0SvDeI468AqoPVuN6ncREcQwCavV6mMRMRrg8fC9MeB+8c8G8J9a0ujXa7TqD4YQsz9FKoGJJs11jbaXBseOKTs9y6mSNRgNqNVk5IyrqtRZRFGNchilzytKwteG4cWNGHE9YWRsi5JCdgwNsoXH1NkuthPezGWKcU+Qjkqlib3efB8Yz6rSIm4LhwQGT+xmzG03uPdjg0sIO9URwY+cB29uW0oGvreEaJVEnZvXYIrWeZCe7RR5tQH1EWc7w0uKdRG1/iksrx3lz+3Ue3b/F8VMXOHv+LCsrS2w+3mFza2veAHyAMG8A5pjjDxEuX77Cf/n3/nccP378qYS734npdAoEkyABeCUp9gUHt1OWr/WZzfbY2z7AC0tpSpJmgqwVtKOIKLaoOqyebxE11pEjwe2vbTOceLZ2hkSmYOPONic+XGfSdJySCVeXjiMW1kh7MfWdxyg1xXtzpNuPa4KpL3jzYJskrrHQqlGWMyyVFM558BJrIpwVeGVRKsY5R7sjaTQNg4OYdqfN7nQ3+Ao4hy8NKq2T1lJikaOabWqtOnmRM5vOgk1vmZNPM2xeYgpTvW4CJzwqjkl0gpQhBKjMcoQP8krnHM66EPoDCC2J0sqjP05I4oRmq8XSyjIOjy0M1lhmkwkbjzbIZzOK6SzwBrxnsD9mOplR5CZ0FDVFox2zuKhYWXOcu9BCSEUcd+j1UrSeYX2BVAqlFXk5Q0hPs9mmzOt84V+8w+6NuxTlHuc+tsj61Q6zOOZgNGVld0An1ayP+4w6PR5NXufSmc/QmCZ01xZ46fLzPDAZb79ynZ1vXuf9t++xvBLhC8UoFyQeFhZTYlFQRmNUM6K3HhM3RbV22Gc426YwM5wLZkbl9lnGd+HW/VdZXzuNFJoTp07w7LNX+dDzL/HFL/0mvW73D/oSmeP7iHkDMMccf4gghODkyX91mEqItQ+jb+fCIDuKFXrQx20bmmuOwWAHpMULKE2OE3kw1Yk942KPVrRI2gEfFaj2lPcf7VO3nmJiGPccZSnYMVMejPZ5dv0kw1jQOL5Mv32Skd7C+EDo85XTnlKSoRtzfXeHl2vnqMUReTGoyHwCYyz7+zmNRhMQaB3hraHXizhzWvOtb2XESUqtXWOyN0FIQa1Rp9VtUUtrVcqfR2qBmEmstZRlSM1TWhHHEfksC1G63qPikHsvpURHEdYYbF4AMjROUhBFMVppnHOISJI0wu1NYRjNhkwnU2bTabgPpYnTGGdddX8lLo2ZDWdhKgCY0qEjMCVMS8NsPGI8UGzcF9y/OyXL4MzpGX/5l15EKE9eOrSsoXWKsQbnDHu7+7z2yib7r32DX3zpNNBnJ44Y2JKhnrBpDe7GgGfWNN2FFZ4587PcPniWnltlUG6wPSzZfnOXdx8NuPHtO/TFlLSTsNTTPD7IGc9gXDiimuX8cU10zqPWBF5nTHPDVD5mXNzHlD6oNZTFzY7x6Dueh2+8ybXnniVJJcdPHOPatat87sd/muXFNa6/dZOzZ8/9Xj+yc/whxLwBmGOODyLEkzG29x6tFOfPn+X5F55jeWWVh9N3+Obd/5rh3j71Rp0k0WT5DCcMMtIUdkwZ1fDSMSlHCDVhaUHxwDVQNUU53abMIM89Nx5scm5tg6SxwNJyA712hu/efxWTebys2O5OIJUHIdksd3l9o8FLx06RxI68GIP3xJEEmbG3P2X1WEIUpRgK2m3Biy92ePRoi93tBteef56Dg30eb2wQaY2UAuct1nmm4wlFnlMUJVIKtNaoWg3Z0GgtKbKcPMtwxgbBoKcy/BPoKMgiPYfuhQqlVKU08DhrmQzHOOvw1gZ/fK0oZzO8c5jS4LyjzAxYAE+tr3nmxZReL6HTVOhIUOQluzvw8H7BxsMZi4shtbHMLLuPDe3mgLyYoKKCNG2gZYPpbEqe5ziTM50W2LzGn/rzv8Rnnj/HwVtf4dv33mR7cIBfqHH+8hm++eYdxKMBzyw1ubL+LFZoNr/2TfTwbe7tpdx4/BrvPzLUGxEjGeMwZJnh0V6JsQqHoNsW+GaKqVmQU6J6QuEzymJCFQ8AeIRP2X2vxu1vbXDh6hmOnzzBzt4eSZyCFyz0lnjw8AFnT5+j3Wr9kC6IOX4/mDcAc8zxAYWoVgBeOBYXF3j5ox/m+edfYH9/gPXnubj4s1zf+mV2Hh8gIxCpB2cpigzjCgozJnOOnIysHLPQ1ohOj9Wzq2wMPJBTThQbo4L33rvJsk15eLJPdGoUIoy1xnsHyiMjcBbwIDXcGN8meqR4dm2ROErJiylCeS5cOsGtmzmj0YQ8GyKlIk4kx47XeenDHb7+1RGDwYBGq84yi4yHYw7296toH0FR5CFMx1iiKAondyEQ0uHR1WMKREApJQhP6UpsbtFK4Vz4Wu8ror/zCGHw3lKWZbDvrUJ5pBfYwpJPSpzxxKlkeS0i0pKFxYjllTrnLjTo9iS1uiBNQEmBVoJvfH3CezdmLK1F/Pm/dJaFBclkUvB4o6S/kKDVAA+Mx8HyuchzTGnJsQxVjfr5ZerdZfa95vFggh0G3/+JEsRRweZgQs1Yuo8f8OVf/yKP3nuV+u4bbB1YCmZIAcs9wf39ko39gnNLnkYUc2whodOqYWXMLBHYUwrZGUPqyG2ONQZT2iPDIhUJhhuK9z6/hax5RBRxMBwxnRXkowkfX/gEpS157Y3X+Pmf+qM/vIthjt8X5g3AHHN8EFEx5SGkBa6vr3Hy5Em8h1dffZ0TJ46z1rjIo+YJNvfeZucxLKw1QEuKb
EpR5BhtcMJgZclMFbRjSbJU45mPHGdpXPBw8xa2VKhGyvbBmGPj+7z1esxKbQKE7++tCLG6KqwDLGGX3mp2GdQk7+3vc6HXRMkIY0v6/SWKLGE6u0lZ7jMcZoAmSSKevdanUY/58hcfcvemol4PrnIST5bNKAqDsxZvHLZwFDLjkG4vFCAlqhrfB5mfPlIfCAFGa4wpUUqB95jC4EtDFEehkXGA81jrcUrgS4uK4dipiJOnmnT7Mf2FiG5X0e1K0kQQxf5IjRgaB0Eca1bXYn7uF/osLMacv5TgvaHR1qyshUyD6WyEMVDkJaWzZHhGEYwiy8weYMsRm/du0Hgtp/3oEf0spb0H9+9Jpufa2FmdhV5Ox23x7ud/hU68z3Ym2M0Ue2OD8NBqCKQyHIwLzIJmPLOoSHIwNsz6Md2rMerYBJGWFUcDvA9pikKEnytXSB6+UqCiHucvnOLY+gpJlKCThDhRnDl3nFt3b/JTn/oZzpw+84O/Dub4t8K8AZhjjg8gev3+kc5dKcXC0gKtVov79x+S5yXNVoPHNx6QDp6l2yo5GN1lcpBS72gcOcPBLoqIVDeJowjRlhBnuMkUU8LqiSX2xo9BjIlizdawZDrc58G9d2k/18Xp4HAnJNgisOkRICVIofA+IyuG3PcS9gxn2zW0EoyG7zObSYrCsbRUx5iS8cjweCMLbne9lA+9vIAxO9y6sUmWeerNGlIrkiRBAApBNs0xxuCsJ04irDU0Oy2MMYzNJJzgJdjSoCMdJgF5gTMuJAsCtvQ460KqoYLegqbXjZHK0+nHLC+mNJsRy6sRy8sxStkqj1igpEepMEVQWgY+hgCpBMZazp5LOX02RWuFNSWzzAAWKQTWOKzzFNYxxpOlCVnkyXxOaafY3CBsn4G+R77W4nHaIXrHcy1yHBuVjN55yAutIRfXUxJpuLo0ZDdT3HhsSGpNOo0cRYmOQAjNM2cSxpMJU1Onu9akcdmxfimhtujIXYkXNngXuKBqCLN/gdSKwUaJ3DnDcy9f5CMfeRklLHfvPCAqMy5fvgAI1hdPcvni5R/atTDH7x/zBmCOOT6AOHPmTGVy49FKEmlNHMfMZhlJkpBlGf3+Aleu/Dlu7V/mq/f/NmVmGVmo9aDQY6azAUmzTr3WoLfe4aA+IpqMeHzrgPPHGzQabeJ0i9nAMhMKvESUEyYzj6wbVKzx0qC8C5p8BwJNpBpYZ5nMtvBC835Rx5QNzvea1FOoNzMmW5YoatPu1IAMBxwMS6aZIUkEz73QYXmlxs5Wzv6+YTTMKQsojUMKhTHmMDMoyP6cYzqZYU0w6nHWIsLGg9LbcKovLCiodz2driQWCQvLNdKGpNuN6LQUrYai1pA0mhBpgbeusvoNOQWianJU5XiolUSqypVIhI9b45Ai6A6NMdjSY0oL3lFYx8SWjIVhEoFLAznR2RxrSrwTnOx+jHNLL1JYx87wIQ/8+2wld5iNxzy3rtgZb7K5GEHSYn82ZDApuLHj2BtLrrUVdzZmREpwYlnSST1rSzEPZh0WnzlD48QMvbRFXDPk5RTn8yNPJY/HGY93oBOFKzSPvl3nmWvPcfW5Z/iJT32GnZ3HHBwMiKIux48f58z6Ja5evPZDvBLm+LfBvAGYY44PIBqNRuUyK4h0RJqmSCmYTAJ73jnH8vIaSZzwzLGPsVf++9wc/D8oJyeZHDikLsmLIeNZjU5tiVavTXwyxr+1z8HtRwyvnETLFJM7cJJh7nHFjKaT7O0Ylk4kaFHDkuG0QVhZGeRERCrGldPqJJljlONuZjC7Jc+sLNNu1ZmMx+ztzOgtJLiWAGYIIM8dWeZQfWi3NKdPp4wnnsFBwe5OzmRckmeWyUwznQbb3dmsIKlBo2aoNyOiNA4JgQKiKEJFimZD0m4r+ss10pakngoSoYhiiVaWSvJfjb8dzgcG/KESU8onI34pBEqF/6UEqQ4zC8CWDmtc8DGorIbz3DIrLRNgKC1DkVG6kgiJNGVlIeywpqQRnWNBX2L7PY8SMTde2WaZXaJBwZsPPAdjya3tGXe2M+Jogi9KLp+psTuyxLEmd5L3HhZcOdmgnmheOqd5UBrSs5Be2ED1RqgISucp7Qzw1SQJ3KHRkZJILRlvCUZ3BP0Xu7z4/POcOX6aJNZcuHiG6WzCQmeFKxeuhpXKHB9IzBuAOeb4AKLZbIaTLgSDHGsRh4R869je3uGtN69T5AUfevElLnQ+yqPp19D1nPGoSTY6QEc5kR6S6RpJGtE9nfL4zojWowdsX++hlzrUa22mbkpLOGxZEEnNdAKuUNRrfURSsj/cpSwKpPB4bzE+mPUoGWNshitLrBbcy0vUXsTlxSV6vSYPH85YWE5ZXFzGuYcYN0Zpj4okZeEwxhEXBUnsaTc9x9ZTvEiRSIwljNELF8KCvCBSkjgReCWIlCBOFM16hFIeqSxSBJVCWZRIHEJYhKwKvxVYTxjrC0GkQwaBkCF4SMrwZykkQrijhgCqsbkQWOMReIzxOOfIrWXsSsbCUsQaWW9irEEWGboA4RxlXmKNqzIOHL3OOo9u7/PtX/siibCsyfeR3YT9fc/r92bECcxKw2hm8VPL86c1F9YT7m2XDGc53729yeUTTc4/c5b72WPGbctsWbBwLCduZiAdpTl8zQ5/ZionxYoAKbQALxk+iFk7foxnrz3LlXPPIIRiNB2SFTP2po+IkxfYP9in31uYNwEfUMwbgDnm+ACi3+9z/uIlbt54F++DU15ZlkgJo8Eeg92C7Z09Nh5vsrO9zWc/9zmO6R/njvt/U0tWmYwFja7D2pzRdJtYN0g7oE6DeMvR2M3J/QL9xhqPZu+zJAyNWDDOHbNdwWNyhvGU9skC2ShROMwsjL99NEP44AvgnMKYEqcs3lvuZLv4g5RnFs5y4nibsrzH0mIXKWOMfx+lMpR2lFpgncBZKDKLMWBc2J1rWYX6ULH4IeQDeFvF7gYCm1IG54pwWxciiw+LnfWgpEMJgVQSoSAS4si/XwkITMsw9hcIQsCvDffgPKW1CCGDP4ELUb+5tQyLghEls8hSHnIlnEAMMkxRIpTDOY+dFWjdpNnooCJBLBaRoy733rhHv3hAUWbcngq+dn3M5TMJx1cicq+IdMFnnusQy4xjx08yzmsU5jqnF2NyI1m59AKLzywwneS49jb9JY2ILN5ZnA32x+4oRCkkJR7ZHMuwypjtN8jud0kTTWkMuwe7vHPrLb7++pe4M/oGy40TpHGD2WzG42KDRqNGu9VFynkj8EHCvAGYY44PIOr1OpcuXebmjfcojCHPMpQSgEUCnV4PpSNmWcb7799gbX2dZ55/np3t98ha7+ImLYrpiFgXWOEwMseLgtZxz95GTldbGoBN25hmyoWW4ZkTbWb3Brx9z1DcKJG5on9pysXPCFQE6AK8wzodLHeFQqsI4Mgx0NiCx+Uu0ajBpfYquw9ijh0TJGlEmkTgDVo5ythjjAPvSWvyyJTGmGD5a0ww+3EenPFEUbWCqMyRni72AFIKVNUcBIng4Yk+FH1HSCoUklAo5eHYX+J8
iP5xBFMBQdWcOI8xlqK0zFzJwBQMVUkee2Qswn2VDmE8UgicNyF7wAX/Ah21uLT6aSY7Bfv3DhCyy6P7I+68+i5rtSnrPcUruyW7E8fuoOTxXoGKWlw6d4zl48u89c4GJ5/5GEtyhTPbI8b7B5Tt49RPL7LLJgtnIlxSxzA+yj4Ib0aVqqgkgipTQQaLZiQI4aFostI/wd7uHl/9za9y++5NHs3ewvVvUEt6LDSOI9EYYymLjCzfpigHLC2cDvbUc3wgMG8A5pjjAwghBBcuXATAGsvOzi7T6QStJB7BZBJy21eWFtFS8o2vf5Vet8OlY5/mO4OvUm8vMxxIIj1DqAgpBcYadGLQZ0tu3NviF9YWOXEu5mrcpekF68dPsRE5Nr7yXVbTDi9fOsvNhxsMNnZYOBEhcVhcIOAJgXAeKYM237pDCR9kxYT74i7T2QhxUOOF6EXabc/e/kOEhDK3UEyJlCKo0xzWBHmeteHE7ZzHWR8mDl7hbVW84UjG5qvXSclQ5A/ZbmHqXZ14fUXqUxItD2N4RSiCEIh/lXWw8AKJwHrPrDQMsoKpM2TCkmuHSTzo4AWAC02IVOGxWkAKSZokWGOxxpCINns3C97+/HsU25t4GXF+0XBpZUy3ppnMHLmVXFjXOGc4dfUjXH3hHJ2TMU469h5uolYFHzrzPFE+5htvfo3i+Ijo/CNOrx9n5mM29w/wpT9KRTyUKwovnyQlVq8TyiNkUDR4aylam1xeeY5mvcXN8W8SL22y3DuDmLU5vXKVWtLAmhIpS5RwzGa7jMc1ms3Vo/XIHH+4MW8A5pjjA4rnnnsOCG51W1vb7O3sksQxcRxjrUErRZKktNpt6oMhr7zyCp/ufZKe/CwH/HNivcj4YEi9BU5JrDN4Spp9wYP9x3z+rYS+cKQYhsazaCQfurrOjz8c0a7F/JnPPse7G6t8ffIakcpDkE9pQ+ofAGHPDNWJUIRRuUAiRcok8UySAV9/4y2uXTpGWmvRbC1SS7o83Hif6XQPZw/JeALtBNZWSXsIrHWUJeDBWleREKlc/kJDcGj5+8Q3wVchRSlKS8AR6bAeCI2FJWQsVARALzHOMTOOzBpmZcnQFUylIZMGocXR/eKqmYMLp2jvQWmFQAVipoG8KCjzApsLJqMRb3zrOzyn9mmfFOyPck70Izb2NTe3HFtDT1qLAcfBuOTjH36elbNd7s3eZHf/HvVzjzFRSS4z7FLM+V9chm6Npd5JarrOweZDjM2B8PoF18hD5URloVC9XoevlRQCQUxaq5GeErRSyOV9urIkShbotlboJJc4vX6BSMfgLUKEqQhWYM2YspwQx80f2HUwx+8f8wZgjjk+oDhx4jgXLl7m/ffeZXAwZHt7hzRNq3FvGIQLIanV6vR6PW7ducv119/lysc+xtd2v4WOHdk4FH5BkPlJJDo2tE84bty4i7xb43S3RV60GL19g7Pre1zplmi7T7L9Lme053qmUKTISCHFDCd8lb7ncTiE90gh8TiE8EgRkcQN0noDujnfevcmRd/S9COazrPYhkhBmqTUagtYKxhNtrDFDCUF3odCr2TQ4zsXdvzeezxhNH/o6BfG+ofs/WD9q1RMEid4DNbmOG+QBIKf9xLngwpgWpQMs5K9mSUTChcLMgp8alAKIlSVQhimEd6HXb/SEqk4mhpYaygzg8k8thBMh5ZypoldnenBBvGqojCeYS64tQ3fvWfJjCcvHEoWtGLP7j7UmjVKDKUcoDo7LHVOku1p7vi7nLp2jGNpGys8S81lBtM9NuQmEAEmGPxUJ35XrWPE0Z8P1yUCIRXSa0pf0Gg2GEU3ULElMXViUWc9fY4rJz5Ju97HWYtWFq18tVbRjKdjrNuh32/MpwAfAMwbgDnm+IAiTWtcunyFG++9w2g8Zn9/wMpKAlVUbmktrooM7nQ6LC0tceP9G5w6f5zF+CV2/D9Fyg5laUhqAiUjnNO40hClns6ZEqUVJ4WgVV9itA93Hm6wnEAcGx7cehub1tBdRRI38coy1dNgIysOm5BwGqcaBITTcigM9bRF5GrcH22xszfjvnyMdjkr+YAugkRGtOIaq70TTLMlZrMBw8EOs9kIb8NGXgmNqnwAPEGKJ5w/nDkgZUXiE4HVL6VDiAJrTdUwgJSK0jhyZ5k5y94kZ2JKMpvQahwjXWzSSiRWThhkD8nMXkgTtMFMCMLOX0mFJEgwi5mt0gY91sJs6Mmn4fv1mmc5f+ZZRBmz9fpXuPX4gL2hQUWS/UQwnjmWOoo9C5H2eFfy3Cc/xrnLJxlHWwxvvo2wyzx//I/w4oWPcvLYWVqdJl56pFDgYZDtsZRc5FX1G7y/+89xbhZ8Bo7Yk0FFEVYnAqlDVLOUilQ3sGlG4XeJZYTwEc5IGo0+PX2Klf6xwI1wJd6XlMYSaR0mPITcBmMLIp38QK+HOf7NMW8A5pjjAwopJZ/61Kf5J//tP8Jay87OHmtr61AV3dKUgCDSCq1q9Hs97k8mvPf2+1z56PPsjL8FaowpPFHiELiKwCUQypF0HVLkuJ0JqewRNepMZtCQUxyCYTZl3ynEiQVqaQvwTKMxpsyDtezhAVAAVWqgksEnAAFpXCNy0KhPqYkuY/eQqZ0wy6ChY1IlSDffYW06o1lrs7JwjMWldQb7O2zvPKLIp3hbggVbjbitDxp8VzVBQsrAF4BqZeDxQlBYx6w0lM5hvGNkCmaupPAGhCCp12i2G9TTGIVjmg8Z53vkdoTDoio3QR2JwKr3njwrcLkLEwkjMIWgyAWmCFLBZq9BNz5PPz5JsZ3wzuu32Xl/l6SveLjvOLFgqUeCK8cUjXpMEpUUuaV34eP82M99AtV1vHfzO5zq/jyfe/FP8ey5l+g0+miZUNicvekGg9kusa6z0FynsdajnrQYv73Ng52vPDX2F5UKwIM8XJEE6Z+SEQiPrllgRmlLzLhDLGog23RqS0ipKqdFg5LVFCFoJsFJBIHnEM2ryx96zN+iOeb4AOP8+fP0+gsUZcnOzh5pGpPEwRrXWouUGiklcaRpNOo06nXu33/I2Qsn6dU+xGP/z/B5jC0tKtGARwoweKQS+JZnq5hht/cwY0VaSmoxjJ3jwcSzGzdoL7VJkwbeOSKVomSEl65i6fsqyMeDl8SqhpZJ+B9NFCnShiBOInSpmRWGwg0oTFARxLpL2q6TyTqvvP0dlhtNmnFMARTeMskmeAvOWpxxRFKGgCNjUVJhcOSlo/SWonS44HiLkm1UVCe3I4yf4bxFyrCqUErgbMF4ss14uB8mGMIhI4enoOIKQiWnM8ZhS4fJoZjJkKTnPDoGqQVaeCK6dMwFpo8jbr1/k627W9y+t8WZJY1HEmvPSkfRbkpMOaO+/CLLrRzVqXHx09coG/t8/earXDv+aT7z/M9ycvkMXhWUcowSmq3xXX7tzf+GW3vf5KWTf4yffeYv00p7rLRO0Ii71U9LRfqrSJJ4UY3uRcUREDhnmZkhzjtMLtGzPo2kQza1LPfPsbK4BniMmSLIEZWRkBQC7xxxFAEOYwqg/gO+Gub4N8W
8AZhjjg8wlpaW+MQnP8U//JVf5sHDR2TZjFo9ZWdrB28tTmiM8/jSorWi2WoxGA55/HCH1StX2Cj+GYjAvNeRQx4GDIkgffORZ7zokX5C6mJSo3BekFvBVqbZLyzdKOz3pZREcUpSNPDOYm1ZFZtq7O8d1oWPaRKcq4pPLFAtS7EzDYWIYL+Ld8yKLbxT9JfWuX7vOq9vfTdI6xzgHV4EG2JX2soGQIbn6zzCgbEeZ4JHvxSKKI6opz2WOicQUrI/3GCcFdgQY1Tt813limfQUUK91iSKI5zPyYoicAQOZYkl5FNJPg6OQmlNE7UsprBYC+U0Zryfcap/jiRb4ptfeYWDe49ZWdQ8fzphsSkZzgxKeoQv0L5ArHycH/vjf4rHW4944/5vcWf6dY71TvOnP/PXuLT+AkbMuD78MjIqOd/6GDMz5ut3fo1XH/3XCKFYaZ3GuBInShJdJxYNvJNPSH+HxL9KAumdxxFcDY0p8dZjcjD7PRpxH5dFnGxd4Xj/XOBzSINWtloD2GoaIENkszVAUZEp5/jDjnkDMMccH2BIKfnkJz/FP/qH/4D9/QNee+0Nzp07i44iBhub9JZWsDacSD0QRZp6vcbG48dcev6TNGbPMovewuQSa0qEjsII23tENca3sWG8VGKUJY4M06FARw2Onz6JOxiyc3+XqNsm0SlaarRMkWKGpaLoE3bTXnmcN2H8LCQuOOSAEBRMKdwunuCpKyobXVsabt9/g6LIaTQiWuklHm1fx0uHs66aPHtULHEmyAKPptEOKMHnDhErRCQQXlCYjJ2Dx1hrmBWDQASUpmp6BDqRRKkKEkIFjimZceAMUnpsIcmGjtkQBBIUJA1AhQyCciIZ7XpMGV5DW0Ykqsnzz75AXsDXf/Ob7G/tsNTWbG7OSE9e5tM/+SwJJcu9Bo1jK+z6R1yffRl/8tucP/cX+IUX/hrnlq9x8+BVXnn0eaZig/PdlzmTCt5+/FVeuf9PsGXBuaWfY713hrc3v8l6+zQ13SIvM8BC9TMgRDAnOpJFVqoAZ114DQtJedCg4VaRNqWjjvPC2R9jpb+KkpYiHwYypxJEWleGTw7rHM4Fi+Qoin9IV8Qc/yaYNwBzzPEBx/nz51lYXMQ83uDmzdtcuniOE6dOsrOzg7MlQkUID1opamlKu93h0cOH7G4OWG4+y+3yDayTlIVBCBXGuYEjjpAOhMHGJdO+IMNhM8HxQnJltcZyJ+btvQNm+yVFYxpOzgKEUCAkwlvAH5nu2CAPQAjCGiDyKCXoNXrcl5qyMHjLUSyv94rR5B637oy5cOollrtr7O5vMS22USo85iBdO/TqBw5jikXFfK/kgc55isKAHZPJMd4AwuFxKCmIaxodSXQ10fA2FHTvHbYEbxKysaXIgidgo6dQscA6MDPBcLfAW4+3AumDK6JwoVG5cfcdlrrHOH/5FMYa9jbHZOMZ66fWuPj8aZr9Gra0PNq8zYPZb1LwBtGK5hPn/iY/eeXPcmrxItNywr3hu7y3/VUacZvFlXNkRc67m99hPN2knpzk4trLjPJdXnnwefoX/j1m+Yzdya1KGumOuBlhbR/4C6IyTbAGTCbx0wapXSONWjTsOi+d/wzL3SXazRpCBv6D1hHO2apJUsGJEYnWKWnaIormBMAPAuYNwBxzfMCxtLTEpz75aX7lV/4+u7v77O4esLq2ztraGqYsycpACpTCo7UmShK89+xs7rLSO8674zDyNwVEcYmQikM/PSojHicLZCrJ246H2sL2CHvvHlIp2tozGjbxYoq1ZSgMSqB8dDQKDtJ9gbcG66rJgPdoJWg3Fzjev8T1e6+Q20chHMeJkEznRTUhGLK795BW2mZt4SSbB47SjsM43likUMhYIr0FKzHV6kEAzgQCoLQenSq0lEGqFweSoJBVBoAGrWO0rkOpMIVjOphQTg1RI6bRrKE6U9pLUJQ5wilmBzHGCKbZiMm+I07BuQJbSryFKIbcevbHG7x268tcPvsCpy6t8tO/eI3pZEpWzriz+Q6vvftNVOqodSZ0+n369ed46fRf4OWzP0msFYNih0imNOIe2qesNz7Eie5V7u28y7ubX0ZraCXrnFl6ntu7b7G5fwOLZWt0n8lshDV5WJX4EGcs5KErIngvKDNPOfPIsonOFkijNvnYce3sJVYWlqjXNKKakgTegyeOIqSUYW1y+HoLRRwn82yADwjmDcAcc3zAIaXkU5/+DP/oH/4yucm5/2CDtbU1tBKY0qOVQmuFquxtZ1lOp9Nhe3uL4xdWSf1pCu5S5oqk7qps+0O3GMAHD3wvPSKFoic42LCM7+8yMoK8n5Ke7pC2JEU5QdgaQnmUlFgpgwbdB3c87x3OF5R2yv70PrIwTGdNnAXpFTgRNPWVRI3Khc9g2dy9TWkc3XaP5c4xNvcekIspMlLgQxiPN1SBPFUaYFOSdkKhl1JVXACBd5Xrn3QIofBGMduJaMpFjFG4GcyGGXtuyrELTVQrx5UZrlA0oz6zgzFaK4RxYExgvccSU7oqEji46qElaV0QRQrR3OPe+JvUihaPXn2fbFYy89vo3i2aaw2iKEbpiEi1+PDZP8/LF3+a7cE93rz/JS6vfoRrxz/NcusMzWSd1dZFrPVc3/gmo/weWmrOL32cJG7y3qNvk+ommZ3yzqNvMZpdDwS/yrNASJDISv0vsHko/uQpqugSixaTccaJ9jXWV45Rr0VIGWyQgwIgrEdsNR0JpkkCrSKazTZJUg8ToDn+0GPeAMwxx48Arl27xrPPvcBrr36Hm7fucPHiORCCg+GEerNDFEV02zWsqTEYjBFVdHCs6vSTZ3iU3aGYQtLw6NhxaOfuIdjaykrPLxW+Kxg0BMXMst31lHjOi4wkSSlshDUWVzqEMjhvwQf9vasc+sBhfc5wOkVIw/50nwe771AWebD2dYQC7QAvwr4aKIuMg+FjwNGstek2l9GzBKEM42wXawzeCYSCtBERJZUbD4G1773AU+Ktw5kIMxXYSUq2L5ltCvZuF2j7iGg6RQ0KaisR+pNLrJxc5mD8gCwTlFOJ9Sk+t/SX6zyYbFPkJRKJ0iH4SEceG4OKJUJb4kjSaEUI5SjLAwb5LrPkAemiptMQKBVc84R0SBHz4sk/zXMnP8Hm4A5ffPP/w8P916nrBc6uvIwWDVZbF+nVF7m9/RbXH/86zs5YaHyKZ09+irtbb3N358ucW/optg7uc+PhV7DGHjVWVIx9Kn8Ik0MxcWASVNEjEh3y3LJcP8eHr32U5cU+USTRWqKqbGTnHM47tFZoHRHFEaa0RFEwWJoHAn1wMG8A5pjjRwCtVouf/4Vf5Luvv8bB/gG3bt3l+eefYXP7gKK0KGWYZiVpFE5wSkXMpiOm4xkrjTPcnwXSXD4RSOWOPPLhkDUeDH2FcOiGZLjgmJbQOadBC0Q6BqGJo5Sp2cdZA1ZUSXyBR4AI+2fnS4p8ghElUkFWZjzev0lRjsMptTKqOZQPCn+4x5cU2ZixjIKuvtllfekUXlpuP8wgmZI0UhwlzhvA40w1Rcg0ZqBR4/
OoYo1sf8TNt3+LcmLItnJ05mlGsNDR9FoJB0XwRZiWGaXJg+mPN8xyi5tNmM4mJJMpw8kAVwI6PEYVKdImNDqeONFMxwVSW9AOlKfRlKTNFB1pvDDBKvkomM9wYfUXePn8T+O94xvX/yk3Hv1/8a7PcHpASUmr1ue5E5+mpbt8+fovsz+5gURzdvElvPd8++Y/wRvFuZWXebx7k2m2hUDhsE+If0G8h5l5TObwZQKTDpHqUpZwrHuJly5+jJPrx6nXErQOag2tNc47SmNCmJHWOOcZDac45+l2l9BRypH18xx/6DFvAOaY40cEH/nIRzl1+iy33r/BjRs3uXLlImfPnuTmrQdMfAinidoN0jSl1WqTZxOkVJxdeYbXdxr4eEKZaeKaR8XVjrga3aOoUuM8QnmSdYWLHLUeRHWBTDJmxQBnHN4ZvDeVfr7aN3t3xDo3LmeWDxEKlE/Ic8ve+CHGT4DKS7+q20c2tf7wWXryYlz9PuNhbhBSEMUSGcUYW4Sce0kgAnrQPmH4nuThVweU0/fIR7dBleQ7Bp3CSrtOveWgzFHeMZwWlIWh1o2RtZxJuQ9ALUkpaobB7hbGGCbjhFojRkqLiBxIh7UeHYEzDmNzWkuKuA5CeYQMJ2alZXgtK1mekKHVWe1/lI9d/aPoJOI33vz7fPfef4OkjcWwN9xgmg3pN1Y53r3Aw90b3N9/C5ymps9xfPECd3be5NHe61xZ/0Va9T7feu9XKM0AvMOFQQxKhEwCZzym8AiT4AYdpKkzmGastS/yky/9LGsrKySRRogQlOS9x7rwukolUVIdpSomiSaOG9QbbYSYl5QPEuat2hxz/IhgdXWVv/iX/hJRHDEZj7h+/R0uXjzDSx96BlMEd75plpMXOd4ZlJLMshn99iKnuz+Hl4EJ7s1hPnw1ez+iBFS7eTxpR9E+rogaQcdvKcnzMaWZ4bwLFsCV25zwdYRPCbsAQHisz3EU1dieKjY4EAPDIbUaA1BF11a+BADWlBTFlCwbk2V7CAqSOGGWTTGuwOFDAp/zID2JTlEDQbLrGGyOkW5AzQy5fG2dz3zu41x+5gztfkyjBbXUYcuSKFKoWJM2FFIHFcB4V+BMjEos9b5HJCXNJU9rHdKeREUQRRAnks5aTPdkRL0vkBqkUkgZg1NY46u8gsNQHk8jPcdLF/8k66sX2Zw85MbGt7A2RB/jJPuThwwmW4CndDnvbX2H7ckr4EtWO+cRQvH+xreJdYvLZz7O/e33eLT7dYydVOFJobGyBZQzgS0EFDHloAazFivJs7x09qf4xZ/4I5xYW6dRS4gjHZqTw0AlAc5blAy2ypFWKKGIoph+fzHwPOYl5QOFebs2xxw/Qnj55Y+wsLjE3vY2199+l6tXL/Pxj3+E8WTKg3uPcLayxnWWTrdDvV6j2WhyauEy13eCVM6UAqlDap44TJE7jJM/Ynw7VC3o7b0IM2zrguTPVdp/IYM7HDJD+CTYxKrwcSEB6bG5JxYN1vunmE0ySkaVzZ6n8g+ueANVM+I9QvjgNOeLcLr1OQeDCV4a0PpJIh8ebyHPS2aDjNg5atJTryWcPHmGK89f5eTpE2zfu8Edv0VjNkXmnof7MCokhYRmq46ONZnIUYkg6XhSqZA6nKRN6ZhNwDlJvQFRKkF6EE8KvPAKKSJkZbMcTv0hIjgQ6DQXjv84l059FKUTIl0jjeso2QzOg8ozNY/YHNyl11pmMN3i3vZbTCdjanqFs6sfZmPvNvc2X+Pa6V9AIrl+96uU9gAp6nhX8SqMxJcxAo3JBH5WoyPP8dyVH+fqmedotmLa7RreG4whcAWqE39o2gxaaZTXeO/J89B4dTpLKB2DiOYBQB8wzNu1Oeb4EcLKygp/5s/+OZCCLC/4zrdfZX//gE996mMcO77K3sEAay1r6yt84jOf4vLVy+RZTkP2qOk6UnvyGVgbUvXc0TGVw5l85RRnq5O+q5Lmqt+rVQMc2sNLwIIoqtNkdZpXwQRIihq1qEMr6SO9Pvo+3oVwmsP/ZJVsGAqMP5oySAnWlZW0sGoWfJUYiEBpTUSKG3qchXYn5cLV83zys5/i3KWLtBp1Fjspp/sRZxYk/aYgjiFRliyyNJoNFDFRTRO3LSoxxIkKxkmFQkeSZt/TXrREdYMX9nsUDEoqdJQQVaN/KUEq8ISwIOsnrPU/w4uXfoY4quGco1tf5vTSiwgfgw1NkHFj7u28zSQbUE96RLKOosOl9Z9gbeEs97dv0K2fYX3xHK9c/3Xubf1jsLWjMCIzjWDWhKKOm8X4aZOl+FmeO/kZLp2+yurqAqsrfZI4qmyBw+urtQr2wPmM3GQ4ZzHWoJQirdVptBZI690Q2SyiH8aP/Bz/FphPAOaY40cIQgg+97nP8Wv/9J9w69b7vPfeTX7zS1/hj/7xX+QjH30J52F7a5fTp0+gtODmO++xt38A0tKKrlLG38blMuyMK6kc4qlDOeH0rqpjtvPBTubwNk+t6g9vXj2wYLVbOfaAACkjep11drYs+4P9sGOmyqU5/DLCByrrfY72ERwG0IDDIjXBzKCSDQKVt72jpEAWAqQin+W0uh2WlhfRuom0Yxq1lLwWMxo6Hu470npM/WyMulinyAyDDcHB0BKllrRhUZEhqnukDsx9Z31lHVytSqr3AS/DyV9qhHJYF4iQrrIwRhbU9DleuvAL1OImtx++xfrSaVpJj25zBSFcGL3Lglj1ubT6cdZ658mKKY24TaIWOL36Irujx2we3ODS8R/j1oNXeffhP0GKOBT/UuDzBto18UZiS3BG0o8u8NzJz3Bq/TQL/TZSGorCk6QxWmusc4f6CaCSksYx0kuiJCGJajQaPZJaB2MMWtXmp/8PIOYTgDnm+BHDysoqf/Ev/nvBpAX49nde47VX36DT6fHRj36EeqPOvfuPcAa6rSanjq9z9sQZGn4FqUAqjy+DEZB3Amuf7KoPd/TBqx9wwW3Pu1AE/aGG3x12DP4Jh+8wlleE+9BRytTuMskP2B5uYFxRjcxDnK+ovv5p29qj+3iKn3CYxnc4LQiP24P1CCUwPqdwBoPAlY5Go04tjYg0pI0OqtZlUNa591iydyIl+8kG8mcWOPXhNRp9CemIhZMl7RWDbpQgLDKyeCy2rHIHhK+alMMiGJL1lNJh1UFwD/TCH/2rK4TmwrHPcmr9GR5svc+XX//7PNy8gfOOOEpJ4j5IgbVDrh3/Wc4fe45ZOcXhaDeWOLf+MZK4wb3tN3ju7E8xzQfcePRbFOUAbyX5OMKPF4iKRUTRwE5jXJbSlRc4t/Qhzp44x8rSAo1GgqgavbI0lGWJd0HfH0KlHFJopNd4J1EyodHoE6fNKghII+X8LPlBxLwBmGOOH0F88lOf4oUXXkJKxcH+gC/+xpf57nffYmV1jY9+7GMkScLd+w/xaJaW1lldWcMfhPQ2GQVJoC3C7liI33aarxqB4BFA1Qg8+f/wa8ThUd4ffk0VNywEQio6zVUatQ5xHIcEQVeGw/1hemBVVMVR9T/8TTzVBBy6FVLdr0YJHaSOkUTLG
DNVzLSj7EtcBrPZkMHogLwomGWG+/e2eJTtYX+8QesXOzReqCMWLZnYxyQDRJLhRXEkZfQ+cAuo8m7C0xNIIdFRHAh/Crx0WGFCMI8NqwFnnrwm7do1zh1/EYDrt7/Oo71vcnfzTab5kHrapp70ca6gEb3M2fVrjPI93t94jdwWrPQvcOXUJxhlu6z0zgCKd+//FqPZLUxZUg7bRLNVErMIWQM7jXBlxMneC3zi2s/z0rWPsLq8SL2uaTZSlNaoyjAqjuMjJz8hBFEUkcYxWipqtQb1egehYpTSQWWh57a/H1TM27Y55vgRRKfT4b//H/wP+A//h38NqSQbjx7xpS9+kWazxYULF2m1mrz22mvcvXsPEKytLrF/d4w8t45PHoE9dLMLe3bhBF5WxVgc7vh9MN45lOv5qnB78FWqII6K+U9Vq23Y6EuQIkboGp6s2nUrAl+g8qk/5B5Up9PDPYBzYTpxtF2oiGpSarSKUSIO0b5SUG80iJYF4x8DX3gym7Fxb5M3V96ic6KH6UzZv3SL7EqOlR4hC0yW46wDcZgK6I8MhpwLz1xIX0nrAqkRB0pH6DiGogypesKAExUvAjA6PHft8D6mVVtlsXeS3eEjNvZfx7mM+1tvsD/+JI1al0a8yJadce74x6knXb70yj+k1WqzvnCRxfZxjMvZGWoGo22+e/Pz7I++g5l0EZMForKFMCnWSJx1GOdYaJ/hhfOf5MyxszSbCXEMWkucD7t+cTg50eF5RVVTAD64/6Fo1HuktWaYeHhQar73/yBj3gDMMcePKJ555hn+6n/vP+D/+X//v2Cd48a77/HP9a8xm0549rlr/NRP/gTTaUiK++pXvsJod4LodIlOPMJbcBZcNUZHgiCE5AgI3gAiqASC1A+Oin81xj9CVTyBMDnw4L0jy8coC84XTGYG64uj/fnRl1cj/sAvqNwBRWUOxBOJmhTB3KiWNkl0DeEDmU0miiTyJHWNVHDscg8cuLUdduVWkOhZgZrE+CKvSI88mT5IeURDdN5x2KCE5161IQJQ1dd4E7ITqqLvQreCF8FgCemCk54T9FvH6DQWg8WxzfE+p7Q5Smj6rVXWepcZjLc4d/wFHu7c5J0H/4xrZ34W6wrqaZtylrE/2OQ77/5j7m/+M+xgFTntoUwTYeKw7/eerJzSjJf50PnPcObYadqtmFotwnmHtSHQJ5j6BCJn2NoIpJKVnDO8Ge12nzhpUpYFaVpDCDXf+3/AMW8A5pjjRxRaa37+53+BX//8v+DR/Xs473nrrbd5/HiTnZ0tzp47R7fT5f33b/DVr3yV0pbIzSbRag/kPlgRAmScRwFShNOicx55NJGvVgFP/fo9RcFTnRZD8fbSI3A4B1k5ohzm5KYkljlShKn64YnZ8+R+/GFRPqQVVE2CkMFzX+sIrWJqSZNm3EepBITFioLCFKSdFC8ttbbFOIOUHBVpKRSNhiDXGucsXlisMSHIyMsgXxQc7cUPJx2HcQnhkUogZBA45wJHgvDc3WFzJMOfvRXEUYf1xUtoGTPLx3jrEV4ifCiqzsKx5av0Oifod1f5yuu/TFZuMp7uY1xJVky4fucbfPPtf8CjR19DTs8TZx0oU2wWyH4Ij6FksX2GD1/5Ma5dvEy7VaNWS/HeIiuVxGH08uE7KqRAKYnUAlNYhIhpNBdodxZ4++03WF1bIEnTueXvjwDmDcAcc/wIY3V1lf/sf/23+F/8zf8Zw8EeUkq2Nrf5x//tr7G2vkqn3eTWrTuMRuNAmNtX5BsrJCf2cVYgHeG07z246uhbFbcjeb6gMoE5JOGLIyng4fofFyYJ4e9VsXEleV5inENogzQiRAAffuETO8BqBRCCbJyt/AfweBGY/1pH1BtN2s0endoCWWFQEVgXhUJlPU4ohLCBzS5DmI2SAuE9szysJzwajwnSuVJVagiwxqIqDTzS4XyJNYekh8OXKLgdWncom6zeBBEet3dVEyUkNb1EPW2F9sk5rJ0Ajmm+xWh2QKe1xrGVC0RRzIPH13m0cx0lOgih2Ny7zYONm7x+/TfYfnwLlV9CFjV8nuBMCFTyziCk5OrJj/GhZ15mfW2RTqdJHIWTvrXBy5/q/fDeBXtiH/T+3ltECUnSoNFcoNXskuUjnB+h9DKRrv0B/tTO8YPCvAGYY44fcVy6dJk/++f/Av+3//P/CSkEUawpi4L79+5ztyoGSqmQ1Ccc2YMa0eIipd+r9uuheKE8EonFPTmdOw61ehzy9sXR3w5/CZ/3HoSrZH7eU5qCspA4Fwh0TxR+T2sKn1olHK4bJEeTACkESmpqjRbddo9m2iRCUYoSpQUYgUIjtUQokDGoSOKFZzqYMd6bkc1m5NmUPC9RkUfGDnSJlOEx6SgijhrgPNaWlCbDVpORJ5sOf/Qcw+OSSBTO26AAcIcNkUMgMWZGlo+xrqSWtmk3jrE72qYop+wePGSlf4Y0bVCWGXcevEkzXeL585+jLCxf+c6vcufOd3CTCF2s4LMYbxJMFl6XJElZ6i5ycv00l85eZm11iXo9rnwaAmvzcEjjvcNKhxV5YH/awAvXOkYQ0e6sUK83sXaMFxMWl9dI4h5z/viPBuYNwBxz/IhDCMGf+BN/kvdv3ODLX/x8kG5J9cS4RwS9/CHT3xwosnvLqPUZRT5FRYEL4LULmnYvq0oe7v8JIc8/4QI+mY+HHf7hp/0TbT94pFBH0wPni4rwVzULTxsQHMnrnjIJcGFsHkUxcZSgfIJ0KSZ8krws8N4hpSRSGqmCe99gZ8ruxoRHt3fYfTBltJehlCaKI1qLCUsn6tS7gmG2A1FJf7lFq96nyAtG+S5FXlTTgrAOEVIgqxN+MC86bILc0YKEJ68QYCnskMlsCN5zfPUS5/Y/gr2fM832ebD9HmeOvUAUJQwGOyz3TnFm/Vlu3v0ub73zW4z397DjBsrUsbMIX+iwpkCRxAnL3TUunbvMmVMnWVjoEEUSpcKYX0qJlBJjSpQSYaVDcbTS0VphLUS6Qb+7SlqrUZoDnM8wRrK8cJooSr+vP59z/PAwbwDmmOPfAdRqNf7sn/vzvPvudR5vPITK6OVw/+ueJu0pyB6ktDurlPV7mLJExT4QxlAVFyAQ2ZQMhLynyfrwZAIun/q7OHTpAw4FfpFOqemYmlYYlVMGmv1RAqF3wQXwqJmoAoYqskEg+skQruOMJhceKSxWGGZuEiYYTjDYN4y2S7YfjLn39pDpVoT1gjhp0awfo91uY0pDNG4yvm1JLu3TavXY3nrM1mwPsySJU0FpZnjME08j+bRFQVS9CBZwobGpPA98tbYQgkBC9CWPtt9n++Aay/1TPHfuJ1jun2TvYJOl/mnqSRNrCqwtsQV8480vcOP9L2MmAmmbuJnG5hphIyA4J3ZbfU6sneLY+jpnTp+k22nSbKYER78I7x3GmCNyH8KjlCQyEZGM0CpCihgZ1+h1l1GRoCj3UQqwDWJdmxf/HzEI75++8ueYY44fZbzzznX+7t/52zx6dA/ck/LsfGCs22ol4IynfrwkPv0QK/eodzxRIoji
4PduvcHZEBksZLgPUc39/eEs/3sagsOQHxCRQEUSqWLcsE+20WL1kmKQP6Qox6Ehcf7JidrzxOLXVQ0A4KwjbUXoWBPFDRYap6lF3WDME08p5ZRsljN57Ln33Sn7jyym0JgcOq0Fup0+2czQbDQQWKQX7O4fIJRGtMec/zHD7u42s2yITiFtKlQSWP5BgRD+6XQmPHEpNUL6iklfPWPvj+SMh4E6YR2gadROcW7tk5w/8WE6jWWUlBQmAzyT6ZAHj9/n/v132Hh0g2KcoVwHaVLMTOFyifARQkgUCSuLxzh/5hxnT5+k2UxptGokcRT4GAKUUuH7A1KGxkRKgVZx4EKoCKUT4qhJu7OAkIayPCCKBFAjiRaCh8McP1KYNwBzzPHvGN555zp/57/4W2xvP6TMgWoFYOyhRC24+wkF7ecOyPQdai1PreWJU4WUEkdYH8hKChhG9ocj/6fG9Rwa+VB9ziE0qFgitabc72P3O6xckAxmjzB2ineuCiziqOiH+5dHksNQUz1JU4OQCKlp11dIdYu8mBHVHXaq2bpR8PiGYbofUUvrxHFCPW2ztLiCRHD75vt0Wk0uX75MvVHnW9/8FoPxBFE3rL64j5VjZpMZSIuMHfWefMKJ8P7Ildg6qlO2PVqthOfuq2AiAVIEJ0BXTV6cINIdOo0zNJM1lNTkZsJsNmM02CYbjvF5hLQpwtawmULYGJcHi2FnodNY4sTaKc6dOcmxYyt0uw2stTSbwdTJWhcKfUX+01oTRRpjy4qsGSOEIoprNJtdarUm1udYO0Jpj/cxabyAnMf8/khi/q7OMce/Y7h8+Qr/6X/2n/Nf/Of/GzYePcRaU8n77BFt3+MxuafcbqBWOswGA3TsUcpBmDqHX8QRESBAPF3sD9kBTzT73kv8/7+9O4+1/DwLO/59l992zrnn3H2ZfbE9M7ZDRBxDQtKSIAVE2gYh0YIKqBIVKkWViArqX6WiFahSKWqqIqUtVVsatUXwD2oRIbSsJTStnNVpjJM4cbzNjGfurPfec36/d+sf7++csVRRIDgex+f5SFGsWa7v+F7N+7zP+ywhLw/K7/R38SnQKk3S3WK6n4b5wMH8KfX9//O3hdgXEyxCjhSZdrdwsaVQDYeXC17+HDz3dKJrC1bGY8Yr65w5dZbJyoSvPPsML1+9TFNVnDx5nK2dTWZtS1lVdPs3qWvD4d0pFNPcAVBGQhcJTi0OcGXudSiolPrAJS66JOj/DHE+FyH13QtJQcyZgc7dYv/OU1yPn88p/2lJmtWUcYwOq0RnCK0hOUvyBo2hNDWjeoPV8RoXHnyIvd1N1tbHDAYVWuUbf4zzqv4cAABYU/Zb/EIeU1yUeK9o6iGj0ZimGdD5IyAvbjo8CqyurMvh/wYmX1khltDFixf56Z/5x3z4w7/Oh/7Dv8cYzXA4xNqSo6NDDg/z+7nbr5hsnOJO+gKzu1OMzaX82qq+zq8/BF/52P8K6RX5gNxDoIl98ZxJ4GcR7zv0YplM31+vFSrGvI425SmEKfYfSKc85595YJDn/2M0A7tOuF3x3Cdu8+IXIq3XWGPY2T7OxQsPEL3j6uXnSKHjxPHjbG6t8+ijj7Cxtc3ly1fZ3N7m2rUbNOse7BHdzIEC03c75Hd/gzUFSiVCCgTfj/lVPrcmBhaZgdS3BKb5lMT+J+Y1EkpFXHuEP2ow3Sq6azA0+abv8sbB2Ck0FkPJqF5lY7LL6RNnOL63zd6xTeqmRClo6pKUoCgsxpq8zpeEtSVGlzgf8oIhpVHK5ha/Yc1wuIKxms4d5j0N2qJ0zdrKKO8yEG9Y8tUVYkmdPXuWH/qhv8mZM2f50Id+kf3969w9OMB7l2fpG0V36Dk7eDvbZ97Lhz/xAbSZolYTVuX2wL49P/f4L3rLXhkHJGLM7+VKJbQpyO1yAZUsRpUMBjtsrazzcniBo+kNgnKQ4uL2n4OCPI1wfqNOiZxSj/kJQhvLwG7irg/54sf3efrTN1BqxGAw4MHzD/DWt7yFrj2k9TO+6fHHMcaitGJra5PTJ0+jbUnX5nR9VBG9eoOY8nIirfJzRDW0WFtQFFU+GFMghWlua1wM01H9wd9PAOyDln454L0YgITvgHaE7kaUboDVDSEYCAbfacJMobBYbamrhp21Ezxw6gJbm5tMJiuMJwOqssAWJu8amHdZqDzIBzRWWaxtiCGP7TUm4n3EFjWFrRkMxxgL3t3FGouPCdfBeDSRKX9LQAIAIZZYXde8973v5fHHH+eZZ57h6aefRikYDkecPn2a8XjMqZOn0EZx5lfP8QsfeT+2Olpk+rXpD7eQcp/+/OY7P7TnvfxJEVNEpdAXk1kKM8ZqzcpwzGiwxp2jm7TtXULwi1S/QvXT8/h/JgGS8lAgqw0rzQ7qcMynfvdFPvnRK6xvbDJZH3B8e4/TJ04xPbzD0dEtLjx4kccfext3p3e4c3A3v7t3gUlTU9qSG9dvMTx5hBncop3mPQja5KeOotYYo/s9BZEQXU75w+LpQ/czDmK/AyC33vVv/yhSSARnSd0A7UborkHFEpVKYmdITpG8RnnLbHpECC1nTpznTRfexKnjZ9ja3KAoDEVhsUb3AYrCFPPAKuWxviFhrCUlQwgaY0oAytJQltDUA+pmkAOCcEhRGDoXULpmNBjK4b8kJAAQQrC1tcXW1hZve9vb/thf873f9QOEGPnF3/s7aO36lHg/538xuKc/sAHdN/7PqwBSTLjYobWiNltM6j1uuVvU9YCEIaaE6zzBxb5aPh/8i2kC87w590bXamMoioah3eKFZx1PPvEytmyYrK6yt73DuXNn8O4QEhzb2+P0qTN4FTnoprgYcc6zsbZDVZQ8/dTnuaOeY+uhI27enKEM/U6ESFkrlE39ilxHioEUfT7Y539AyM8RfYEg0O9RUIQu4Wca2iFFGGPTABULNCUEQ3SK6BTBRRSG07sPsH5hg63NDTZWN9jb2WU4aPpK/oRWeVY/KRdwGmtQSpPwaGNxLqKiQamSEHIQo/vAq6ostjCkNMM5R1GWeB8oihFlMXi1v7XE65gEAEKIP5WqqvnOd/8Vfut//ReuHv43tO3QKqGSWiwMgntjb+ftb/M5/vMWuIjGmILJaBOl7hLMXW7cnjJrj0jzbYAqH1rO9Sdrn1wIQfVjgOmjCk1djDnc11z5fIQ4Ynt7lVOnTnH+3FlKqxmNKnZ3tlhb3eD4idP45BgORkyGBZvjbVZHq/yPP/x9/vdnf5edNx1x9+Amvkv55q8jqoiUK3mZUIyBGCJJxX4/QJ/p6G/MizqF/tMLrSK2JbgBNgzQsUHHCp0KkjfEpEkeXBsJPrC1tsc7H383b/mGx6irEu9anPMYo/HB431k0DR9Z0TCuY6yKPA+oDXU9ZCUFClZoETrEmNz5qIsCowG7zqODm+yujairBqOpjPqaiKH/xKSAEAI8ae2u7PH3//Rf8JP/6uf4OWD30CPQdm+KSBAUikXCDLvd3/FOtz+ZFcYfGhxXSQmT2uuo9sCkrs3Oc/0N13Tv8uH/PZPghT6bYFKYZRBJ8v
zz+zz6U/dYLw65NjxPS5eusDm2hoxtjz04DkePPcQ66tbaFvw3OWvMD1qeeDkSTYmm3z605/kQ7/yC9jTzzP115hNPdrklb9lo6nHFl3kA9cHf2+qIfPgBua9ibl9MRE6RZiW2DSiCA2FqkmxIvmC6BTea7QykFR+kzclp4+d4tve8e2cP30eqxUxONq2pe06rMlpf6NM39qXiLHLEx1JGG36cc4Ka2u8zzkYpRRVWdI0NbPZXZ599gXW11dZWx9irGU6bRkMNihs+dp+I4nXBQkAhBB/JhcffJif/Fs/x9/7wE1a8zH0KBfm35vQm1D2Fbd0ci+/0uTJeCoS0l0uX3+Gzh9izQqdu0vnHN7livrFBGDm1fLkvQPzf8cragKiV3SHiaqpGU/GGG0YNg3Xrl3h1JkdJmsrbG/tMGom3Li1z2Q45vTuGXTSfPQP/4Bf+bX/iN/5MtFcI848WieUTZQDRTMpFod/jDEP9+1rE3KmIz95EBPRJ3yrIRSY1FBQo0KFCvngV7EkOohdbo+MRLrO0QxGvPXRb+ab3/x2NlY38F3LrYMDZrMZSmuapiFGCCHlQ56A0i1KBVJo0NqilaXrAiZ4rDFYqyjLiqZp0FpxeHjIJz/5CVJKnDy5g9IQgmE43MBKpf/Skq+8EOLP7MKDl/i+b/tRPvgbn8LYlvIVE2JT38q3aBEgt+kt6spUAhOYdrdYaU6zOd7g6q3P4lybi+oCJD//YHlbX1J5myCxzxGoRAwQoieaKc36gLLKEw2PpjOuXL5K1x7y2GOPsLO5S1MOsdoyGoxoqiE39vf56P/8A576wlPozbuQ7uA7hzZg6kTRKKqRRds+++BTv46gb+NLoJLqb/spb+ALBSZVqFCCr8CVpFDiW0tyOg9KioqQAtYkqmLIqd0H+I53/yW21rcotOXo6BDXOZx32MKiMFhTYa1B6YDSEaMBDFoNqOpBLgK0DdbmNcs+JOpmQFM3TKcHPP/CC0ynLVpbLl08T4weoyc09SrGyErfZSYBgBDiq/Ldf/mv0oWWf/s7P8LKRsCWarHNL3nAQJ4LmO5d4zU5va/B+RkrdcNwsELczxXy+ZjM6fTU98+rRZHdfP1vrg8IPhK6QGTGyo5h+0ziylemNHXF3YPbqBQodMXKYI0YIndmd9jf3+cz/+czfPSJ3+dK+xRnTz3E/q1Pgb6BbUpspdBWYQpQOhHCvTkGJLUoRAxdTvXnjgSNSjZX8/ua1BUkV6BCQewshDzAp3MOYwxrKzvsbZ3g/KnznNg7wcm9EwTnmc2mi+VM1uR2Q5SlKCsKqwgx7wZISlHYkphKYjK5LkJpbFFhTYEtS5q64ujokCeffJKrV1/mscceQ7PCZKWhGqwxGIxf8+8X8fojAYAQ4qtSVzV/7X3fz7MvPMNvPf2zOQioFLbo+/UD9MNwF2/lWmusbaiKks4fMmgsWgeOpoe5uC7QH/Z6fte+N+xnXlTYH7xag+scs9kBzWrJubdUYKbsv9Dx4vMGoxO//usf4YXnLzMcNLz04hVefPl5brRXYHIDP/oMX7z7MXRVUDYNtgBtWQQgwfWff8zLkqJP/Z8p/3msVRA1yVsUNYQCFQuSLwitQUdDihofAwbD2soxHjj1EJfOP8L6eJWV0ZDhcIABXAwYndv6MHkGg9LzuQMW7zuqsiGakmZQEEOAkAf6mLLCFiXWWLQxBO/44he+jNIFw+GI8+caNsaKspowXt3F2Op+fLuI1yHZBSCE+HN5+otP8VM//+O8HH+DeiVR1v3Sm35oTz74wVjQtmA03mRrY5fLX7mGjkNGW5qr175AiproIySVi9q0zin26HNPfZwfzvfS8MEnTGEYTVZo6jHdUeLW1ZbpzZp4Zw0/TahQUlhLp29jV29hh/uUQ03VlNgGdJlQNhJc7FP++aBPgX4pUerrF/vBQ/N/DhqVTL7t+5LQWVQoUbFAhYqB3WB3cp6dtVNsTDbYWF9lMhkymaxQFUU/Gjlv6Et9JiFGRQj5X2JtSVU1+c8Z+rZHrWmakhgjShm0LjDGMhyNmE0PuXb9Ot4HPvXJT/Loo4+yvbWJVo7trS3qwfp8c5MQgAQAQohXwZNPfZq//XNvp16ZUY9Sf4N9RRGfTthSYcua1fEpHn7gzXz+M1/hxvQFmnXHwe0p3rV5oFACpQxGGxT9IUzeVZAX7fTrhTR94V3C1ppmpaYe1BALVNS4I8XR7ZaD256yLqmGUA8sykZU4dE25LY+AiFEfJuIXoMOAIvteSl3JmKswrvUH/6gMBAtsSvQqcaqmlKPGBZbrNbHOLt1kVPb52jqhsGgpK7LRbdAWRTElDcvGmOIURMCGGsxxmCtzf372pL6Xv+qqvDeY4yhKPImwKLIGYKmaei6Kc51PPfcZQ4P7nDy5EnWNzZYWRljTHF/vjHE65o8AQgh/twuPfgwP/wd/5x/85H3Y+wMqphb6VR/6Yx9IZ2NoAKFHjA71NTVCoW9Q2FKfNstBuhATrvPN//l9sE8bli9YuiQMmAqRQyJw9sz2qmnrAuKssCOCiYjQ7MZ0aafT5Ac4PE+EL3D93MGcssiKB374Tr9wa/u/Rni4nkiEZwiRodRMKp22Zs8xOntC+yunmF9tEGha1TUlLZgNGooy5JZ2+a6AcWi3VEbi9IVJN1fzjU+RLTWKKXRut++GHOGYDhcwRiLMZrbt2/x0osvcur0Seq6xhqN0QUXL15genTEYDhiIFP9xP+HBABCiD83awu+730/SCLy7377x0ixo6joR+mCsv1o3JCI3jFrW2YHgeFggOKQiCOp+IrWwUTKvX7AfCJgIvY5hUVann7GvlJ5UE8KtNNIN+tQuk93z2fk69wyaBbv/HGR2lfJ9D+Wf0t0afEx1TzT4BwJBxFKs0Og44Gtb+XtF97Dic3TDOsRlalxnWM6nS7+24QYcc710/sKtNLElPA+orQBVP5cI2hdoLSiKIv+iSNQFEV+QjGGqqowRhODgxTY3d3j+vUbXL36MmdOH8MWNUoZ1je2pMJf/IkkABBCvCqqquY73/U+fufj/5XPX/1NhhNPWZEL9lQe4ONwHBzu8+xzT3PQ3WSgm3z4Bpsr++fb8+ZTf1R+AoC+IDDlbXtAHtWbH89Rel4bkJcIhQjzIURap34bX/5fjDkwAZ1/KPbBQ+izFYskRCKGgA8tCo1Vx7mw824ePvlWKluTguf8sXOsr2xASsxmM27cvgkpUZUltrA5iNB5QI/SFpIh9J0NZalI5CcA56Eo8p7lpm6IMRK8xxiLtZamGfQFlPkT1FXJ3YMpH//EJ3j6j/6Iv/it70Sbgroe9R9HiD+Z1AAIIV5VT3/xc/yjD/4Ez93+LeqRoyjBFoAFXUCzUhJnQ+5crtg9X5OqA9qpI6QjgssHfPS5GG8x7CfO2wL7icP9ZkD6m/29FL5Ca0Xwi+m8wDwDkH8u9rPx8827X74z32mQfzUxOqado7ETNgeXeNOpd3Fu+02c2D5LZQuC79AEysLivKNrO7ou70domoaiKElR4X3CmAJbFChlAYOxGtc5tIIQHU
optMo/r43GmDzVz9j81l+WJdOjGaAYjwdMp1P292/yuc89xfVr16jqiu/49vewu7fHPGMixJ+GBABCiFfd0888xT/84N/ly9f+O8NxoByAKfIlfbRaEdoKd2vE8Us1t6fXSRGcbwk+5Pf3mHvtU+gLAP380J7f+vtDfX7eLf4aU3kW/vw2388QyCuD588JClvkd3WSRmEIMYJuCdFj05CSUzy09Q4ePfM4G+Ntjm+foTQ1IXiUagGH66Zoremco5112KLCmIoYFWVRo/tivrwxUKGUIaVIWRV5gY+2/aceUWhCjFhrMBaqekhT97UD0xlf+vKzrK2t4rqWjz/xBIPhgBgTFy9e4sSJE0wmk9fuiyveMOQJQAjxqrtw/hI/8J0/wk/9wke5s3/ExMxf72F2GCEEQpsoTIFKms61JNVX3s8P8z61HwP5xh/vzROK/TPAYv1wmm8b0PlW3+fx54GDQvVtfLkv0TlPjD4HGkBtjjEpTnJx9x2c3XmQteEOu2snqIqmn7VvMdZQlpa2dcSQsLYgpbycxxgDSWNtiQ8Ja/WiBsGYom/bA2tLjDZYq0kxEmPAGIvWhsrkj59U6Df75QE/N2/d4uqVK1Sl5eqVl4jRcenSJba3d5lMJvLWL75qkgEQQnxNzGYzfulXP8QHfunHKIcdo42IMbnFL3aGcFCzd36FUN8mFb4f+BPxLo/X9V1+8yfmAT1Av+kuLWIErcgV8gDklbfK5CeAeQGfNqlvJwwklZi2kbE9z6Q4zom1S4zrLU5tPMDKypjttT0G9RBrC2LILX/T6RRbWKqqJgZHjDNSbFEqEaNHKY1zLm/cKyuKckAIeULfvM1Pa0UCjLZ9VX6/7Egryn4dL0qzMl7hzu1bPPfCSwTnOHXyBE888QS3bt3ksbd8I2074/wDD7G7u/faf0HFG45kAIQQXxN1XfN93/2DAHzgP/0YN6YtwzVQKtHeidR1l9fl+gGxPkTZ3PduyOt2jY34zveZANXf/PvFQuS6gKDyjV4bTXABXXYEBVgICupyg6aaMDLHiZ1lb/Ige5Nz7EyOMWk2GTVjYkgM6gFJgdGatu3QSudq/RiwBqxWpOhQtCgcSvf7DnJFAlVVEoLHe9+vMp5X7udhP3lTXw5oyrJE99mBwWBASokXX3qJ9fUN7t6+zZXLV/jspz/F7u4OV65c5oXnn2dza4O9Y8dZW1tjMBjejy+neAOSAEAI8TVTVzkIUMA/+8/v5+7LLfVQMTuEFAMHBwesDAa0dy0MPB13OTyAymrKOhI6cAGsz+n/QlucC5g45mB2m/HgOBvDSxSmZm1lkyN3F1NY1upNKtOwt7fHoFphc2WHQlWUtqEqBjmIUHmcb/AeY+dLizxG5QI9Yw14R1EkSI7gA6NRw2yWcieBuTdVzxpDYQtCsqByNb+1Oj97xLw0aF7cV5YlVVVRVhVGa+7cuUPwnju3b7K+vkFT15w4cYxLly5xeDTlW97+NiaTCaura/fniyjesCQAEEJ8TdVVzfd+9w8Cin/xK++nO3AQDOjIweEBkRY3O4IZTEaP8hcefQ+ntx+EpJjNZrx47Tk2VjfpuhaKgPeOxg6p7IgT22fZXN+BYCjLAnRCG4NOhq51KAPBe+qygaRw3lGWBTG0hNBirULj0CrPHPDBYY1Ga40xCq1y7UDbOaxShOCJMeb2QqWIQFVVhABKF1hqjCn7XxNRSlPVFbaf8Ke1pq77mgEST33uszz1R09zeHjAO9/5TrRWvPjSizz8yCOcOX2WGGM/9U+q+8WrTwIAIcTXXA4CfgCAn//lH4fYoZVidpQI8Yi10Q7veesPc/HMY+xtnWZUT0gx4bzDB0dd1bk3Pgbats3LeEx+l08xp9Z98BRFQYy5f7+ucitd8GExd7+yBQpPDFPKMt/Sg85FdMZoKlXjnCPGiOs6jMmtBClGorp3k9emfyJI0LmEUiU6lf08/0hZlqBgOBzlSYKAtZaiKEHlrENKkdFoxGDQcObMKY4dO87+/g02NjbY2z2GtfLXs/jaku8wIcRroq5qvv97/gaPfcM38bP/8h/wpRu/R+dvc3sfHt55G+966/vYXD2G6zwh5AVARlu0MgSf8mhcFMOm7N/acz1ACKHvpdd454E83c8YRQhtnqNfWnyYQQooFdE6YazJ7Xgmb+Ery4Kuc7kX32q6tsM5KMqSsp+9b4ylDY4QNLooUMoAmrIakKKiKCqqqupv+RpbGG7fusFotIIxBbN2xo39fay1bG5uUpQVJMfZs+cYDIbUdc3p06elsl+8JiQAEEK8ZqwtePThb+Cf/uS/5iO//Wt86MM/Q+RLnN17hEExoWsdAFobYt/rp+argPt/1kZT6rJfFZwW6XRtNK7r+l0BHqUjCofREaU8Wnu0UoDuR+7qXGCoDEobiqLEhYM85EeBthatKsqy6ocQKWLS1M0KIcT+LV+TUkTrnI1omkFf5Jc/57adcnR0SF03PPnkZ/nyl77I2voGb37zN+Kc56WXXuAb3/JWtrd3MMYCMsVPvHYkABBCvOa2trb5nu/66+xuH+OXf/ODnNw5R1FUi4Nea00IgZRSn9aPi58jRhIJpRLGKmIMefpf6NDKoXUgRp8zAsn3qfScQUgp5U2DpkZrS+ciMSoMFh8MSiWaqkQpg4s69/+bKncchEACrG0wJvaz+Wuapu4X9JjcqRADznnadko7O2R1dcz1/at89A9+l7qqGQ6H/Zhfw6OPvpnxeHzfvg5iuckcACHEffXMl77A5csvsbd7HIgorbEmp9xjSph+LS8JYnSQOhIdSimcD8QQKYsCSIToSCn02/T6PQJKLbYHQl69q02F0poUVb9tL+J9DjiGg5oQY7+BL/fpQx5SZI2lLKvFSt5c3Jd/f0wR73IGI6XI0eE1bt+8zObOg1y9cp2PfexjfMs73snG+ibjyWTRCijE/SIBgBDivnLOcfPmDeq64mh6E9fO+uU5BSnlFHuiw2hLip4Qplibh++AWhzA1lq8d/1HzUt4lLL9DH5NTJqiqHAuoLWhbVvKqsyZAp9rB0IM2P42nyvwyzye11iKMs/lL2zRp/jzbX9eg5BSyh9HJWazA1Jo8c6xtnGCrnN0nWNtbU0q+sXrhjwBCCHuqxgDdWMZDkZorThINwghT9tLSaNVQpmWFNr+tp7vLEVRLn7/fGFQWdV4n2/vMWpsMaCshsQQ+8LBAmvzZL5cKGjygiGdp/LVtu5H82rqpsZog1IKa/Ngn3zT9wSfCD63DqZ+fXEIAUhYa2jqIYoRMeXhRqNRfb/+8wrxx5IAQAhxX6UEPnhi6qjrIUppuu6Itj3EuaNcta8MQcW+La9CKQ1J9Rv+PDF2FEWJ0prCQkr9Ep4UcV2LMcW93n6t8T726fv8V+Bg0JDSvYp/Yw263w2cUh7523WeGBNaG+ZrhVPKzwbGaAaDpm8hTNT1SG764nVPAgAhxH1ljMGoAh86qrKkaYa52l/nAzolR4oBrSJKKXzwi4M7htin53OHQYyJRMxzAqzFGEVMCa1zYaG1hgQMhw1a6/7H7KJmA
CCmSEoBHzrmWwZCiP2TQiLGhPd59n9RlH1QArHvFJDBPeLrhdQACCHuq65rgXyo2sKgtUErS4qJtpvRuRneObzvMCZv0MtbAyHFXI3vXMhb9xJoZaHv2dcalM5v+Nbk/1fKLArwtFGQIp1r6boWawtsv70vryfMB7nvq/ZTSn2Hgu9bEi3WzAMIOfTF1xcJAIQQ95VzXV7fm3IRHkSM1VhTojCE4HNVfvD40BJDPx8A8CFgjSGRMwYpJcqiyj/f3+rnN/1c9R+IKRBDIMRATJH9G9d5/vkvQTQ89OAjjMerpARKRWKMaKUpyhLnPM51aGOwxvZ1AVLJL75+SQAghLjv2nZGWVb4fkRuIvbPALpPveeDNqVEihGYX84TigQqkVIOHvJP9KuEiTjf0YUW71tAYVRBYRqgwPvAtetXsMbQ1CtMxmv5eUGBtYauc323AfmpoShkSp94w5AAQAhx34UQcM5R1xUhJkLflgepb7mLaJ2H6iuVD/YYHREHeHxs8bHDhw7vPXdu3+XooGVnd4ekAi7NCNFTFiWNXaHUE6yqiVEBirKsMaYgJcVsNmU2m6G0pioryrJY1BwI8UYi39VCiPsuT9FTzGZHFEVBWdo8mCfSj+HNhXgoD8kT6YipI6YZMTlcdPjk8d7RzjpeuvwSKmn29raxtkQHTdKJFCF4zVG8i1IHOYUfNSGOKIsRzgW894xGKxSFjOUVb2ySARBCvG7EmG/2zrWURYU2huDzGOCcBchDgZQOpOQJwRGiI6pIJJJfAyzeBZTSKKXxweNChwstR/3tXivL6soWKhmGzRrjlXWs3PLFkpEAQAjxuuNcx3R61M/br7DW5kLBGEh4YnKQEiH53PanIiF6nG8BmLZHzNopznmCj8SgKUzDxuouKhlWVlYp+0FCQiwrCQCEEK9beQlQRGudD/rY5er81OGTo3UzIuRxv0nTTltSStTlEK0sdTVAq3n7n7TpCfFKEgAIIb4O9RP6UkIr6cEX4qshAYAQQgixhGSKhRBCCLGEJAAQQgghlpAEAEIIIcQSkgBACCGEWEISAAghhBBLSAIAIYQQYglJACCEEEIsIQkAhBBCiCUkAYAQQgixhCQAEEIIIZaQBABCCCHEEpIAQAghhFhCEgAIIYQQS0gCACGEEGIJSQAghBBCLCEJAIQQQoglJAGAEEIIsYQkABBCCCGWkAQAQgghxBKSAEAIIYRYQhIACCGEEEtIAgAhhBBiCUkAIIQQQiwhCQCEEEKIJSQBgBBCCLGEJAAQQgghlpAEAEIIIcQSkgBACCGEWEISAAghhBBLSAIAIYQQYglJACCEEEIsIQkAhBBCiCUkAYAQQgixhCQAEEIIIZaQBABCCCHEEpIAQAghhFhCEgAIIYQQS0gCACGEEGIJSQAghBBCLCEJAIQQQoglJAGAEEIIsYQkABBCCCGWkAQAQgghxBKSAEAIIYRYQhIACCGEEEtIAgAhhBBiCUkAIIQQQiwhCQCEEEKIJSQBgBBCCLGEJAAQQgghlpAEAEIIIcQSkgBACCGEWEISAAghhBBLSAIAIYQQYglJACCEEEIsIQkAhBBCiCUkAYAQQgixhCQAEEIIIZaQBABCCCHEEpIAQAghhFhCEgAIIYQQS0gCACGEEGIJSQAghBBCLCEJAIQQQoglJAGAEEIIsYQkABBCCCGWkAQAQgghxBKSAEAIIYRYQhIACCGEEEtIAgAhhBBiCUkAIIQQQiwhCQCEEEKIJSQBgBBCCLGEJAAQQgghlpAEAEIIIcQSkgBACCGEWEISAAghhBBLSAIAIYQQYglJACCEEEIsIQkAhBBCiCX0fwE69Oewki27HgAAAABJRU5ErkJggg==", + "text/plain": [ + "" + ] + }, + "execution_count": 75, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dataset_raw[3]['rgb']" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-5.2444905e-01, -5.4954249e-01, 6.5035093e-01, -9.7552639e-01],\n", + " [-8.5144198e-01, 3.3849287e-01, -4.0058631e-01, 6.0087937e-01],\n", + " [ 8.9406981e-08, -7.6382309e-01, -6.4542574e-01, 9.6813846e-01],\n", + " [ 0.0000000e+00, 0.0000000e+00, -0.0000000e+00, 1.0000000e+00]],\n", + " dtype=float32)" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dataset_raw[3]['pose']" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAi0AAAIuCAYAAABzfTjcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOzdd3xc1ZXA8d+b3jQz6r1ali3bstxtDKbagCkGQmghCdmElE3dJY1NsptKSNtksymksCGBJLSEXgwYY4px700usmT1Xqe3t39MNOb5DS2AJdnn+/n487HvvHlz543kd+bec89VVFVFCCGEEGKiM4x3B4QQQggh3goJWoQQQggxKUjQIoQQQohJQYIWIYQQQkwKErQIIYQQYlKQoEUIIYQQk4LpTR6X9dBCCCGEONmUdI0y0iKEEEKISUGCFiGEEEJMChK0CCGEEGJSkKBFCCGEEJOCBC1CCCGEmBQkaBFCCCHEpCBBixBCCCEmBQlahBBCCDEpSNAihBBCiElBghYhhBBCTAoStAghhBBiUpCgRQghhBCTggQtQgghhJgUJGgRQgghxKQgQYsQQgghJgUJWoQQQggxKUjQIoQQQohJQYIWIYQQQkwKErQIIYQQYlKQoEUIIYQQk4IELUIIIYSYFCRoEUIIIcSkIEGLEEIIISYFCVqEEEIIMSlI0CKEEEKISUGCFiGEEEJMChK0CCGEEGJSkKBFCCGEEJOCBC1CCCGEmBQkaBFCCCHEpCBBixBCCCEmBQlahBBCCDEpSNAihBBCiElBghYhhBBCTAoStAghhBBiUpCgRQghhBCTggQtQgghhJgUJGgRQgghxKQgQYsQQgghJgUJWoQQQggxKUjQIoQQQohJQYIWIYQQQkwKErQIIYQQYlKQoEUIIYQQk4IELUIIIYSYFCRoEUIIIcSkIEGLEEIIISYFCVqEEEIIMSlI0CKEEEKISUGCFiGEEEJMChK0CCGEEGJSkKBFCCGEEJOCBC1CCCGEmBQkaBFCCCHEpCBBixBCCCEmBQlahBBCCDEpSNAihBBCiElBghYhhBBCTAoStAghhBBiUpCgRQghhBCTggQtQgghhJgUJGgRQgghxKQgQYsQQgghJgUJWoQQQggxKUjQIoQQQohJQYIWIYQQQkwKErQIIYQQYlKQoEUIIYQQk4IELUIIIYSYFCRoEUIIIcSkIEGLEEIIISYFCVqEEEIIMSlI0CKEEEKISUGCFiGEEEJMChK0CCGEEGJSkKBFCCGEEJOCBC1CCCGEmBQkaBFCCCHEpCBBixBCCCEmBQlahBBCCDEpSNAyQamqSjweH+9uCCGEEBOGBC0TlKqqRCIRmpqaxrsrQgghxIQgQcsE19zcTCKRGO9uCCGEEONOgpZJIBqN/lMjLq2trZx33nnMmDGDmTNn8vOf/1x3jKqqfP7zn6e6uprZs2ezffv2d6PLb8lHP/pR8vLymDVrVtrH161bh8fjYc6cOcyZM4fvfOc7J61vb6V/43ntxqxevZpp06ZRXV3ND37wA93jf/zjH8nNzU1dwzvvvHNC9S8cDnPddddRXV3N4sWLaW5uPqn9GzMwMMCKFSuYOnUqK1asYHBwMO1xRqMxdS1XrVp10vr34IMPMnPmTAwGA1u3bn3d497seo93/yoqKqirq2POnDksWLDgpPUP4Mtf/jLTp09n9uzZXHXVVQwNDaU9bryuoXiLVFV9oz9inMTjcTUQCKhr165VQ6GQunbtWjUej7+tc3R0dKjbtm1TVVVVR0ZG1KlTp6r79u3THPPkk0+qF198sZpIJNQNGzaoixYtetfew5t58cUX1W3btqkzZ85M+/gLL7ygXnrppSetPyd6s/6N57VTVVWNxWJqVVWV2tjYqIbDYXX27Nm6z/euu+5SP/OZz5zUfr2d/v3qV79SP/nJT6qqqqr33nuveu21145HV9Uvf/nL6u23366qqqrefvvt6le+8pW0xzmdzpPZrZT9+/erDQ0N6jnnnKNu2bIl7TFv5XqPZ/9UVVXLy8vV3t7ek9KnEz3zzDNqNBpVVVVVv/KVr6T9jMfzGgqdtHGJjLRMIm93xKWwsJB58+YBkJGRQW1tLe3t7ZpjHn30UT784Q+jKApLlixhaGiIzs7Od7Xfr+fss88mKyvrpLzWP+PN+jee1w5g8+bNVFdXU1VVhcVi4frrr+fRRx89aa//Zt5K/x599FFuuukmAN7//vfz/PPPo6rqSe/ra/tx00038cgjj5z0PryR2tpapk2b9obHjOfPw1vp33i78MILMZlMACxZsoS2tjbdMRP9d0rI9NCk88/muDQ3N7Njxw4WL16saW9vb6e0tDT175KSEl1gM542bNhAfX09K1euZN++fePdHY3xvnZv9fX//ve/M3v2bN7//vfT2to6ofr32mNMJhMej4f+/v6T1scx3d3dFBYWAlBQUEB3d3fa40KhEAsWLGDJkiUTLrAZ75/Ht0JRFC688ELmz5/P7373u3Hrxx/+8AdWrlypa58M1/B0ZxrvDojX5/f707ZHo1E6OjqorKx8S+fx+XxcffXV/M///A9ut/vd7OJ7at68eRw7dgyXy8VTTz3FlVdeyeHDh8e7W5PK5Zdfzg033IDVauW3v/0tN910E2vXrh3vbo2L5cuX09XVpWu/7bbbNP9WFAVFUdKe49ixYxQXF3P06FHOP/986urqmDJlynvevyuuuOJdeY134t3o3yuvvEJxcTE9PT2sWLGC6dOnc/bZZ5/UPt52222YTCZuvPHGd+11xckjQcsEpaoqjY2NhMPhtI83NzdTXl6OwfDGg2XRaJSrr76aG2+8kfe97326x4uLizXfvtva2iguLn5nnX+XvDbAuuSSS/j0pz9NX18fOTk549ir48b72r2V18/Ozk79/eabb+YrX/nKhOrf2DElJSXEYjGGh4c1fX43rVmz5nUfy8/Pp7Ozk8LCQjo7O8nLy0t73Fj/q6qqOPfcc9mxY8e7FrS8Uf/eivf65/Gd9g+OX7+8vDyuuuoqNm/e/K4GLW/Wxz/+8Y888cQTPP/882kD0/H+nRZvTqaHJihFUZg1axaJROJ1RxeOHj36hlNFqqrysY99jNraWm655Za0x6xatYq7774bVVXZuHEjHo8nNUw+3rq6ulL5DZs3byaRSLxnN7R/xnhfu4ULF3L48GGampqIRCLcd999uhUtr82xeeyxx6itrZ1Q/Vu1ahV/+tOfAPjb3/7G+eef/7qjHO+l1/bjT3/6U9qRg8HBwdSXiL6+PtavX8+MGTNOaj/fyFu53uPJ7/czOjqa+vuzzz77uivz3gurV6/mRz/6EY899hgOhyPtMRP9Ggpk9dBE9drVQ9u2bVOfeuopNRgMalYTjf399VYVvfzyyyqg1tXVqfX19Wp9fb365JNPqnfccYd6xx13qKqqqolEQv30pz+tVlVVqbNmzXrDzP932/XXX68WFBSoJpNJLS4uVu+8805N337xi1+oM2bMUGfPnq0uXrxYXb9+/Unr21vp33heuzFPPvmkOnXqVLWqqkr93ve+p6qqqv
7nf/6n+uijj6qqqqq33npr6hqee+656oEDByZU/4LBoPr+979fnTJlirpw4UK1sbHxpPZvTF9fn3r++eer1dXV6gUXXKD29/erqqqqW7ZsUT/2sY+pqqqq69evV2fNmqXOnj1bnTVrlnrnnXeetP499NBDanFxsWqxWNS8vDz1wgsvVFVVVdvb29WVK1emjkt3vSdK/xobG9XZs2ers2fPVmfMmHFS+6eqqjplyhS1pKQk9X/h2Kq1iXINhU7auERR3zhT/+Sn8QsAEokE4XCYjRs3csYZZ/DCCy+Qn5/P8PAwS5cu5dVXXwVg6dKltLa2UlVV9aZTRa918cUXs3r16veq+++Y9O+dkf69cxO9j9K/d2ai90+QdshVpocmAUVRsNlsmM1mgsGgbkqotbX1bS+H7uvre7e7+a6S/r0z0r93bqL3Ufr3zkz0/on0JGiZRGpqajAYDOzevTvt41LyXwghxKlMgpZJxmq14vV6CQQCaYtw/bMl/4UQQoiJToKWSaiiogKTyUQwGCQej+selxEXIYQQpyKp0zJJWSwWFEVh27ZtaUdcIpEIt956Ky+99JLuscsvv5wDBw6kSvxPNGeeeSaHDh2asP2rra3l6NGjE7Z/+fn5tLS0TNj+QTIPayL3b9WqVezfv3/C9nH58uUcPHhwwvZv7ty5HDlyZML2r7y8nObm5tftX0tLi+S8TFAStExiZrOZsrIy9uzZQzQa1TymqiqrVq3i+9//vu55mzZt4vHHH+eVV145WV19W/bv389LL700YfvX0tLC7t27J2z/BgcHWbFixYTtHyRvahO5fxs3buSxxx6bsH3cuXMnzz777ITtX2NjI1u2bJmw/evu7uayyy573f4tW7bsJPdIvFUyPTTJFRQUYLVa2bp167hsNCeEEEKcLBK0nAJMJhNTp07F7/cTCoXe0nMmUmXZdCby7s8AmZmZ492FN+T1ese7C2/I4/GMdxfe1ET/HZno/ZvovyMTvX8iPQlaThE5OTnYbDa2b9/+lpJwH3744ZPQq3/evffeO95deEP/93//N95deEO/+tWvxrsLb+gHP/jBeHfhTT366KPj3YU39OCDD453F97Q3XffPd5deEO//e1vx7sL4p8gQcspxGQyMWPGDAKBAIFAYLy7I4QQQryrJBH3FOP1erHb7ezatYtwOJy2Zks4HKa5uXlcNqZ7K0ZHR2lvb6e/v3+8u5LW0NAQiqIQiUTGuytpBYNBfD7fhK7X83o/mxPFRO+f3++npaUFs9k83l1Ja3BwMPVzOBH5fL7X/f21Wq0nuTfi7ZCg5RRkNBqpr69ny5YtaXczNRgMOByOCRu0mEwm7HY7drt9vLuSViAQSF3DiUhVVcLh8ITtHzChrx9M/P6ZTCYcDseEDVpGR0exWq0T9hqOrbZ8bf8+8pGPMDAwgKIoNDY2smDBAs1zcnJyNHsVffnLX+bxxx/HYrEwZcoU7rrrrrS5ZKtXr+YLX/gC8Xicm2++mVtvvfW9eVOnCQlaTlF2ux2z2Ux+fr7usebmZvLy8t7WBosnU39/P1lZWbjd7vHuSlrhcBij0Zj22k4Eg4ODRKPRCds/gKamJunfO9DZ2ZnKY5uIfD4fbreb3Nzc8e7K6woEAprP+Omnn079fdmyZWzduvUNn79ixQpuv/12TCYTX/3qV7n99tv54Q9/qDkmHo/zmc98hueee46SkhIWLlzIqlWrmDFjxrv7Zk4jE/OuJYQQQkxgF154ISZT8nv/kiVLaGtr0x2zefNmqqurqaqqwmKxcP3110/4BO+JToIWIYQQ4h34wx/+wMqVK3Xt7e3tlJaWpv5dUlJCe3v7yezaKUemh4QQQog0li9fTldXl679tttu44orrkj93WQyceONN57s7p2WJGgRQggh0lizZs0bPv7HP/6RJ554gueffz7twobi4mJaW1tT/25ra6O4uPhd7+fpRKaHhBBCiLdp9erV/OhHP+Kxxx573VVSCxcu5PDhwzQ1NRGJRLjvvvtYtWrVSe7pqUWCFiGEEOJt+uxnP8vo6CgrVqxgzpw5fOpTnwKgo6ODSy65BEguTf/lL3/JRRddRG1tLddeey0zZ84cz25PejI9JIQQQrxNR44cSdteVFTEU089lfr3JZdckgpixDsnIy1CCCGEmBQkaBFCCCHEpCBBixBCCCEmBQlahBBCCDEpSNAihBBCiElBghYhhBBCTAoStAghhBBiUpCgRQghhBCTggQtQgghhJgUJGgRQgghxKQgQYsQQgghJgUJWoQQQggxKUjQIoQQQohJQYIWIYQQQkwKErQIIYQQYlKQoEUIIYQQk4IELUIIIYSYFCRoEUIIIcSkIEGLEEIIISYFCVomsMHBwfHughBCCDFhSNAygQ0ODuL3+wkGg+PdFSGEEGLcSdAygVVVVWGz2di+fTvRaHS8uyOEEEKMKwlaJjij0ciiRYuIRqPs3bsXVVXHu0tCCCHEuJCgZRIwm804HA68Xi9+v5/h4eHx7pIQQghx0knQMomUlJTgcDg4cOAA4XB4vLsjhBBCnFQStEwyBoOBRYsWoaoqgUBAghchhBCnDdN4d0C8fQaDAZvNRiwWY+vWrcTj8fHukhBCCPGek5GWScxkMrFw4UIikQgHDhyQJF0hhBCnNAlaJjmLxYLD4cDhcBAIBBgdHR3vLgkhhBDvCQlaThHl5eXYbDb27t1LJBKRURchhBCnHMlpOYWM1XR58cUX2bVrF7FYjN7eXt1x0WiUvr4+FEUZh16+uVAoxODg4IRNMvb5fBgMhrTXdiLw+XwEg8EJ2z/gdX82J4qJ3r9wOMzAwABms3m8u5JWIBCY0F+cRkZGCIfDaT/jiXpNRZIELacYo9GIzWajpKSEvXv3MjIyojsmHo8zMjIyYYOWSCSC3+8nFouNd1fSCoVCKIqS9tpOBMFgkHA4PGH7B8d/Bieqid6/WCzG6OgoJtPE/C88FAqhquqE/T8mEAgQjUY1n/GnPvUphoaGUBSFpqYmFixYoHlOTk4Oq1evPtldFSeYmD/x4h3LycnBarUyZcoU3WN9fX1UVVVhMEzM2cFwOExJSQlut3u8u5JWS0sLRqOR4uLi8e5KWoODg/T09KT97CeK3t5e6d87MDo6mpoSnqjcbje5ubnj3Y20uru7CQQCVFZWptqee+651N+XLVvG1q1b3/AcDz74IN/61rc4cOAAmzdv1gU5YyoqKsjIyMBoNGIymd70vOKNSdAihBBCvE2zZs3ioYce4pOf/OSbHvvCCy+Qk5NzEnp16pOgRQghhHibamtrx7sLp6WJOT8ghBBCnAIUReHCCy9k/vz5/O53vxvv7kx6MtIihBBCpLF8+XK6urp07bfddhtXXHHFWzrHK6+8QnFxMT09PaxYsYLp06dz9tlnv9tdPW1I0CKEEEKksWbNmnd8jrGE/by8PK666io2b94sQcs7INNDQgghxHvA7/enqpT7/X6effZZZs2aNc69mtwkaBFCCCHepocffpiSkhI2bNjApZdeykUXXQRAR0cHl1xyCZBcWn3WWWdRX
1/PokWLuPTSS7n44ovHs9uTnkwPCSGEEG/TVVddxVVXXaVrLyoq4qmnngKgqqqKXbt2neyundJkpEUIIYQQk4IELUIIcbL19mLYtg0m8P5GQkxEErQIIcRJZHzgAey1tVgvuwx7bS3GBx8c7y4JMWlI0CKEECdLby+WT38aJRhEGRlBCQax/Ou/yoiLEG+RBC1CCHGSGFpawGzWNprNyXYhxJuSoEUIcUp4rvE5frzhx+PdjTeUKCuDaFTbGI0m24UQb0qCFiHEKeHbr3yb29ffTiQeGe+uvL7cXCJ33IFqt6O63ah2O5E77oDc3PHumRCTggQtQohJ59NPfZqKX1QQi8dSbf86719xWV1saN0wjj17c/FrriF44ADhJ54geOAA8WuuGe8uCTFpSNAihJjQWodb+cWWX2jayjxljERGeLnl5VTbpVMvpcJTwY7uHSe7i29fbi6J+fNlhEWIt0mCFiHEhPa1dV/jP1/8T9pG2lJtn1v4Oaozq1nfvj7V5rV5mZ0/m6NDR4nGo+lOJYSY5CRoEUJMGM8efZbrH75e0/bNZd/EY/XwwL4HUm1Oi5OZuTPZ37sfVVVT7QsLFjISHqEn0HPS+iyEOHkkaBFCTBiPNjzK00eeZlvHtlRbdVY1c/LnsK17myZAubT6UoKxIEeHjqba5hTMQVVUWoZlCbEQpyIJWoQQ4+Lu3Xdz1YPaDed+eMEPKcwo5N7992ra31/7fgKxAJ3+zlTbvIJ5GAwGjg0dS7VVZVbhtrhpG21DCHHqkaBFCDEuXjz2IuuOrWN39+5Um8vqYmXlStpG2wjFQqn2pcVLsRqsmmOrMquozqxmQ/sGwrEwABmWDGqzazk6eJQev0wRCXGqkaBFCPGe++uev/Lvz/27pu0XF/+CIlcRf977Z037VbVX4Y/4OTxwONVWmVlJsbuYfb37NMfW59Xji/gYDg+n2rIcWfQF+whEA+/BOxFCjCcJWoQQ77k/7/0zf9r1J/r8fak2h9nBVdOv4ujgUfwRf6p9Xv48TEYTr7S+kmpTUNjTs4e/7vsrRweP57A81/Qcf9j1B77/yvdTbcFokLv33M3l91/Olo4tAHSOdrL8r8up/309P974Y1RVJaEm+I8X/oML/3oht669NVWU7sjAEe5quov79t1HIpFInTehHv+7EGJ8SNAihHhX7eraxeb2zZq231/6e7IcWfx6269Tbaqq8tKxl9jVs0tTW+WDj32QZ5ue5TfbfpMKZh46+BC7enbRONjIjzb8KHVsp78Tk8HEI4cfYW3zWgAC0QDZ9mxUVG57+TaC0SAtIy24zC6mZ0/nb/v/xgMHHqDH38P2ru0YVAMvtLzALc/dQiwR45OrP8lzvc/xnfXf4TPPfIZ4Is6/PfdvzLlzDkv/uJT/3fK/yb63vMR3X/4u/7P5fxgJj7yXl1QI8Q+m8e6AEOLU8pHHP0JXoIv2z7djUJLfi7689st0+btY37Y+lauyp2cPe/v2EkvEeGD/AywtWYpBMXDxlIsJxULs693HS60vsXLKSi6rvoy6D9XxjRe/wa6eXfT4e8hz5rH6+tX8eOOPWdu0lqeOPMX5FefzmQWfIZKIYMLEps5NPHboMa6beR2PXPMIAPfsvocXml/giporeOaGZwDY1b2LH274IRvbN3LnJXfSeaATW4WN21+9nV9u/SWz82ZTmlHKcHiYRw89isPkYE3zGlpGWkioCZ5reo4fX/Bj7txxJ7t7dpPtyOba2mu5evrVhGIh+oP95DnyMBvNaa+ZEOKtkZEWIcQ/JZ6IE4ho80YGg4McGjqEL+Lj6SNPp9qz7dlYFAtNw02p5cx1eXVs/ehWlpQsYUfXDvqD/QB8at6nePzaxylwFfDA/mRtFqvJSk12DR+a9SFsJhsvtb6UOvdFlRdRX1BPt7+bQ/2HAPj3Rf/OZTWXsbRkKfv69mmmdq6feT1um5vnm55PtdXn17Nq6ipean2JSm8lBsXAvMJ5fGLeJzg2fIyVU1byxSVf5DvnfIevLf0aWzq38J1zvsOrN73KCze+QG12LT/a8COcFidLipeQYc7gd9t/x7df+jaX3H8JF997MSvvW8kvtv5Cs2xbCPH2SNAihPin1P2+jtyf53L1g1en2kwGE3n2POwmO3/Z+5fUDfoXF/2C/73of7EaraxrWUdcjaMoClWZVXxg5gcIxUMc6juUOo/ZaObKmis5NnyMLl9Xqj3HkUOxq5gdXcenkzLtmVRnVlPgLGB/3/5Ue6GrkDl5c4glYpqkXrPRzFmlZ3Fs5PhSaUhuAxBPxNnVvSvVtqJyBTVZNaxvO15594LKC6j0VqZybuxmO7edexsFrgJK3aV879zvcedld/L5RZ/nQN8Brp1+LfdccQ+XTLmE55uf5wev/uCfvuZCnO4kaBFCvKGWoRa8/+3V3WzPKTsHgFfaX2E4lFy9k2HN4MUPv0iVt4p2XzsH+w+mjr+i5grK3GXs6t7FQHgg1X7DjBvItGfyQMMD+CK+VPtV069CQdGc44ySMyj1lNI01ETHaAcApe5SPlr/UZaULKFxqDF1rMVoocBVwMzsmZpzAFxYeSEj4RGODR8PXDIsGUzPnq45B8CZpWfSOtyq2T36uhnXcbD/YGrVktVk5YOzPki3v5uBYPK9XVp9KatqVtEf6md23mxuWXILt55xK83DzWzt2Pqm110IoSdBixAi5ZNPfZLLH7hc01bgLEBVVTa2bWTAfzzY+PXFv2Zl5UoyLBn8367/S7WXucu4avpVOM1OHj/yeKo9w5rBiqoVhBNhDvmOj6pYTVbOKDqDY8PH6A30ptqrM6spdBXycuvLBKPBVPtl1ZdR7inn4MDxQMSgGKj2VhOOhmkfbU+1ReIRvA4vA6EBzbSMzWRjSuYUuv3dmve6oHABw6FhYonju0fXZNVgM9k0x1Z4KqjJqtGMyszKnUWRq4itnccDkpXVK0moidRS7SXFS7h4ysU81fiUrEYS4p8gQcsE1tXVpVlyKcS7aWPbRl1+xdONT2uWGgNYzBY+Pf/TvNz6Mmf9+axUu6IoXFJ9CfW59Wxo26AZJZmbP5dpWdM42H+QweBgqr0ur44cew67hnZpgoCLp1yM1+rV1GGxGC1UZ1WzvWs7o5HRVPuZpWeyrGQZA4HjARTA7PzZFLoKU6M+AFMyp1DtrcZqsGoCH4Bp2dN0q37ynfkYFAPB+PFjrSYrBa4CTYCiKAp1eXV0jnZqnl+bU0tf8Piy7kxbJvMK5rG3d2+qbeWUlRgNxtRybCHEWydBywSmKArBYJC9e/dK8CLeVWub1nLBvRfwwUc/qGm/ZdEtXFJ1ie74uflzqc6sJhaLcbDv+AjHzNyZLCpeRDQR5ckjT6bac525VGVWEYwFefro8YTcutw66vPrCcaDNA4cn4YpdBWS5ciiZbhFM2Xz0dkfZUHBgtToyZgCVwEHBw5qgq64GieqRtnbdzxAMCgGhsPDBGNBzcgMgMvsYl/vfs2Ih8PswGP1cNh3WHPs3Py5RONRzeuVuctAQTMqM79gPm0jbampK0iOwIyER1JB
k81kY37BfA70HdBdZyHEG5OgZQLLz8/H6XSSn59PMBhk9+7dEryIt+3767/PefecRyx+/Oaa5cgiy5rF9TO0OyqfXXY2ua5cvvr8VzV5F3Py53B+5fmUeEr4ny3/k7p5V3oriapRFhct5qVjL6VyPOrz6rGb7JxVfBZ7uvekpn28Ni/5znxybbma5NgKbwX5znyeaXpGM1KS58pjRu4MFJTU6iJIBkulnlLNOUwGExdVXkQkFiGeiANgNBiZmTuT+vx6zbYAAMGOKrq2z2NLg3a0ZHb+bMwG7dJkj82D1WTVBC0FrgJGI6Ps6dmTarOarMzOm60ZXSr3lGMz2TQbOy4oXEBcjdMXOD4qI4R4cxK0TAK5ubk4nU6Ki4sJBoPs2LGDeDw+3t0Sk8Sfd/+ZHT072NVzfHqjNruW+YXzKc4o1hw7LXsadpOdP+75Ix957COp9qlZU8l2ZHPRlIs4OnQ0VZU2z5lHtbeahYULiakxXm19FUiOcBS4CqgvqMditHCgNzmqYDFaMBlMOIwOmoabUkuUAc4vP58PzPgA4Xg4FXTYTDZsJhttI22aQMJqshKNRxkMHQ8OAAwGA7FETDN64ov68EV8WIyWVNtTTyn8z/+Y2L1X4fm1KkNDx8+RUBMMRrTnzbRl0h/s1xTBg+SeSGN9HZPryCWSiGjapmVP04wg5Thy8Nq89ARkfyQh3g4JWiaR7OxsnE4nlZWVhMNhAoEAAwMDb/5EcVoIRoOU/7KcCx+6UDMisKx8GZ+c80nmF85PtVlNVubmz9WMXkByeqTMXcaNs24klAjxUkuyHoqiKFR6Krmo8iJqsmr4894/p16jKKOIbHs2c/LnsKVji2bzQgWFPGceWzu3plbfLChcQE1GDXazne3d21PTKwk1Qa4zl3JPOc3Dzak+1ebUMiVrCgf7DxKNR1PtF1RcQPtou+Y9lLpLMRgMbO/enmrLd+azoHABB/oPMBwe5uc/N/LTn5ro6AAlkM2O1kM8+aSSClyqM6txmBy6JN0zis/Aa/Nq2kwGEyjaz8FlcbG5U1sRuNJTSYKE5nPJtGWyt2cvQoi3ToKWScjr9eJwOLBarbS0tOD3++nt7X3zJ4pTSiSm/Ta/s2sngWiAXFsuKsdvjgsKF1CUUaR7/vSc6YRiIT7+xMdTSaGKolDlreLS6kuZXzCfu3belRpJqPBUEIqFuHLalRwePJzK23CYHHQFulhaspS+YB+HBpOjJ9VZ1fQGezmv4jy2dW/jSP8RIDkKY8TIGcVn4La4U1Mks/NmY1AMGBQDDpMj1c/h8DBbOrdQklGSqrAL4I/6cZqduC1uzfuq8lZR4CzQtDnMDmblzuLjH/byta+Z2LLFSE+PgjGUi3Gkis2bDaxZozD6j3xfs2LGqBg15xgJj3Bk8IimLceRw4H+A5ok3+rMamqzazXTUWajmd5AL8HY8ePmF8zHbrZrcmKEEG9MgpZJzGg0MmfOHOx2O11dXfj9frq6ut78iWLS+/XWX5P/v/lc+eCVqbYMawaLixdzx/I7NDf3Sm9l2iqsuY5c+gP9PH7kcW5+8uZUu8fmYSA4wBcXf5GhyBAvt7wMJION/f37uaDiAopcRfxxzx+BZPATjoWpyaphWs40Htj3AMPhYTIsGRQ6C6nNruXS6kvZ2bMTVVUpchXRE+lhWvY0llcuZyg0REJNEFNjBKIBPFYPQ+GhVPJtXW4dZ5eeTSQR0RSPq/JWEY6HNYXmxt7XiXVZFEWhx9/DM3t3AAZiMTh82MDooINj6gaCiWF27DDw5JMKw8PgNDlpHmrWnKMur44sW5Zm6slj9TAzeyZWozXVZjQY6fZ3a6aDsuxZTM+aTo+/R3NcNB6VoEWIt0H2HjoFGAwG6urqGB4eZmBgAJ/PR0dHB4lEglAopDt+rN1gmJgxaywWIxwOp+37u0Hp68PQ0kKirAw1J+dtPz8ajb7utX2vDIWG8Fg9KEpyLmJT2yZybblcPfXqVD8yzZkUOYsYDgxjjBlT7UX2IrZFtxEIBjTBTIWrgkO9h/jSwi/xm52/4emGpzmv4jyqMqo42HuQanc1c3Pn8uD+B6nPqWeaexo9Iz30jPRwbc21/Gbnb9jetp0pmVNwGBzEIjGurLqSO3bcwd7OvcwvmE9jfyMm1USuJZed3TvZ27k3WX/FWogdO8FQkEAoQDAYxGwwk23Oprm/mUxLJkb1+Ht44egL1OfVU5RRhD/gx2gwEogGMCQMlDhKNJ+F1+QlYo/oPp8aTw2bn3Iwb0YygItGDezc7GG2ay5Bu5PecJxduxIEgwlUjweX0aU5x0hkhKP9R5nqnorNZEu1H+47TDgSZm7+3FTbnOw5xGNxzfPj0TjRSDTVZsKEUTWyr2sfM3Nmvq2fh3g8TjgcflvPOZlisRiRiP4zmCgikQjRaDRt/8Z+x8TEJEHLKcRgMDBjxgwGBwfx+/2Ew2EaGhp0x4VCIQ4ePDhhfzlHR0eJRCKYTO/+j2f2s89S/YMfoJpMKLEYR/7jP+hfseJtnSMYDKIoCkOvzd58D31j1zdY37OeCwou4Bt13wDAHXOzIm8FddSlPuOEmkDxKzQ0N1BkLkq1j0RHGBkYYcfeHdzTdA9XllxJgaOAcDzMSN8Iy3KWscG9gV9u/CW5gdzkexwIcjh+mLmmudzVfxf3b7yfxdmLaehpwDBgINOSSUY0g6d2PsXFRRdzdOAo/R391HpqiQ5HeXb3s9gH7bgCLsLhMLmmXJbZl9HZ0kmoK0QwFOSRLY8w2zub4cgwzzY/S6WrkqPDRxmwDFBgK2Dv8F7KneW4zW6mq9OJdcfY0LwBu9FOgT05/RP3xdk/vJ8c6/HgM67GebX3Vfq9/Xgt3lR7i7+FFn8LV199A3//ezGgEg4Z2NfcSSB/lFJrNaFQgu7uBO6CHLriL7OyOqj5PbH5bezavwuPxZNqyw3nokQUGgaP/641DDcQVaOEvMdvij2+Hg6ED7Awe+HxD9cHQyNDNPTpf0/fiM/no7GxccJ+8fD7/QwNDU3YaetwOEw8HicQOL531i233MLQ0BBGo5Hm5mYWLFigeU5OTg6rV68+2V0VJ5Cg5RSkKApTp06lr6+POXPm6B7ftGkT9fX1E/Y/vP3791NSUoLb7X7zg9+O3l7sP/oRSjgM//iWWvPDHxL8yEcgN/ctn6alpQWj0UhxcfGbH/xPSKgJzYhI/65+FhYt5AvLvsCcsjkALDIuYjg8rPt8F7CAHEcOueQybdq0VPuhnYcw5Zh46JWH2Dq6la0fSy5nPrznMI48B7dV3sbX132dNmcbV067kqYDTcQz4lxecjlt9jaODh7lE/WfwNJvIRQPMb9gPrH8GI8cfITMikwumXIJLouLLHsWaqHKTzb8BFeZi6JoER2jHVw29TIguZtynjOPHl8P8+bNo9BVSCAaYCQ8QoGrgPJQOW0jbdTl1VETqQHAaXGysX0jg+FBVs1dhUExpAKJYHuQXEcu1ZnVqfeqqipKt8KMnBmaEZGaaA0dvg4+e2E2bW1xNm0yAgqBjlraB11UzPNgNit4PCo
DfYNYOueQqJpLXR1Y/rHwaKB5gCJ3EVOzpqbOu7ljM32BPi6pPl7fZkpkCkcGjjCn4PjnU+gvZCQ8onluuD1Mp6+TC6Zd8FZ/PADYuXMn06dPx2azvfnB46CxsRG3203u2/i9Opm6u7sJBAJUVlam2tauXZv6+7Jly9i69Y23WnjwwQf51re+xYEDB9i8ebMuyBmzevVqvvCFLxCPx7n55pu59dZb3503cZqamHctId4DhpYWMGvrb2A2J9sniOsfup7i/y3m8898PtVW4angK2d8hWVly1Jtec68tM8v85QRiAZ07XW5dViMFj6/4PMMR4Z57uhzACwsXEhcjTM9ZzqXTLmExw8/Tpevi3PLziXLlgXAv877V/qD/dy9927sJjsuswtIlqTPceRw34H7GAoP8cKxFwCYkzeHr535NYwGI9Ozp7Og8Ph/5rPzZhOKhjBgYFvnNiLxCA6zg0g8wpGBI/gj/lRibkyNsb8vWfxtbv5cziw5E6PBqBn5mF8wH7vJrnmviqLgtXo1Gy0C2E12Wodb6Q/2s3ZtlMzMfyxVThgZMh5mwwYjfj/4/RDxeRgesLDm1UHWrUvmuQAsLFpIhiVDc95ZubNYVLRI0+aL+HSrj8wGM7t7dqdWV0Fyd+nXruoSk8esWbN46KGHOPvss1/3mHg8zmc+8xmefvpp9u/fz7333sv+/ftf93jx5iRoEaeNRFkZRKPaxmg02T4BDIeH2de3jwWFC7hq+lWp9tKMUhwWh+bYck85sURMl2DrsrjoDyWXADcPNqeKEbosLlRF5WtnfY3FRYv53Y7fEY6FNQHA+2e8P7lf0KHH6Qn0pFYBAdxUdxP7eveRIMGhgUOp1TIfmPkBTIoJRVWYkz8HSAYNM3NnYlJMHBs6xob2DZobeDAexKSYKHQVJpcMk1yFk+vMpTijmOk504nGo3isHubkz8GgGIircV449oJmqwCApqEmtndt50TBWFBXTG7sOozVezl27B+BS9wCEQcDAwZeftlAby8EAkaGAj46e8Ls2KHw2GMGGhoUDvUd0ZTkBwhEA7zU8pJmOXahq5AyT5lmk0W31U1VZpWmXsxQaIinjjylea6YHGprazWjmels3ryZ6upqqqqqsFgsXH/99Tz66KMnqYenJglaxOkjN5fIHXeg2u2objeq3U7kjjve1tTQu+n2V29n5u9m8sU1XwSgfaSdfFc+f73ir5xXfl7quBxHjmYJM0CeIw+r0aq72U3LmkY0ESWaiHLm3Wey7O7k6IzX5mVH9w5UVeX7534fVVW5/8D9FGYU0jjYiD/ix2P1cHnN5ezu3U2OPYfqzOrUqE19QT1Ztiz29ewjx56D2Zi88RdlFGExWdjStYXDA4dpHDxemj/bkU2OI4cqb1Vq1EZRFKozqxmIDOA0O1PBjMPsoMvXRftoO1s6ttA03ASQeh2byUa5pxyHWRu8TcuexmXVl+mubW1OLdmObE2boiiUe8pTwYzRCG1tEXIcOaCaQEkwNGTgpZdMdHfbsQ/NJTTqYmRE4cgReP55haED86hzaKdyMm2ZTMuelgrAIDlFdXjgsKaGjMlgYig4RNtoW6ot35nP+6a/L/U+xamlvb2d0tLS1L9LSkpob29/g2eINyNBizitxK+5huCBA4SfeILggQPEr7lmXPrRH+zn7t1347F6qM2uBZI3tZqsGs03cUiOqkRj2uDEY/VgNBjp8munQFwWF1m2LBQUFhYupMffw7bObRQ4C6jPq0dFpcxTxnUzruOF5hdoH26n0luZyv1YUbkCr83Lb3b8hsbBxlRQ4bF6mFc4j5daX8If8WuKv/37on/nyporcVlcyQDgNX3pD/YzEBzQ1DcxKAZMBhOjkVH8EX+qvcxdRr4zn8unXk5NVo3mfRkUA5m2TM2S4THPHH2G1pFWTVvHaAfPNz2vG4lqGWnRHXu40U/O9INgTc4BBYMG9u3LZEf7floiu/H7VSIR6OyEtZt6+a+/38+GTTFG/rHX4thKptfuUK0oCisqV5Bj165Oy3HkaKaXFEVhbfNaDg0cQkw8y5cvZ9asWbo/MloyfiRoEaef3FwS8+ef1BGWo4NH+chjH+Eb675BPBFnZ9dOKrwVrLtxHTfPTdZIybRl4rF6NFMKAIXOQl1ZeKPBSKW3Urd3jc1koz/Uz4HhAzx0zUPMzJ3Jba/cRlyN0zTUxM7unQBcXXs1eY48/nbwb/jDfs0Oxv9xxn8wHBomEotoElkvqrqI+rx6Xmx5UTfCY1AMhOIhmgabNO3VWdVkWbM0eRwmg4ksSxYZlgzynfmpdrvZjtlo5sjgEV5p0e40DdAb6NXs6TNmatZUzXkAStwlXD39at0KuQWFC3Q5JBajhWOrL+HsRWPBhEo8buDIurNoeOYcuroUBgcVAgED8dFc3P3LePlFM488orBtm8LrLSJb37o+db3HOM1Ozd5KAOeWn8sU75T0JxHjas2aNezdu1f354orrnhLzy8uLqa19XiQ3NbW9p4l8J8uJGgR4j2mqiofeeIjvNL6Co2DjYTjYSLxCMUZxVhMx0dVXBYXbotbUzUVklMk6UYYSjNKU9MKVz54JV967ksAnFV0Fm6zG4Ni4BcX/YLR6Ci/3vprLq66mFJ3cqjaoBj48OwP0zzcTEyNaSrmOiwOFhUt4tW2V9nQtkHzmnML5jIjZwY9/h7NKIbX5qXSU6kZgYFkgGKz2HQl7M2KGV/Ux5NHntSNhuQ6cinz6POM5hXMoza3VtfusXrSbjy4o3sHnT7tZoidvk6ePfqs7jVfbXuVn9y9i9/+9jV5MK4uuh1refZZM/v2GRgcVBkdMdDtG6B7dJDmZoXnnoOHHlIYPjoN/7Cd1+5nuqxsGfMK5mleJxAN6D7fo4NHZcfnU9TChQs5fPgwTU1NRCIR7rvvPlatWjXe3ZrUJGgR4j3QPtrO2ua1ROIRdvfsJhaL8fi1j3PvVffiMDvItGWmAogxdrOdfFe+bhPAGTkzMBlNmlLxkEw2HdsEsXGwkb/s/wstIy0MhAZo9jUDUOop5RNzPsHWzq282PJiai8hSO6UfE7ZOdy99242tm/UjJ6sqFpBTI2RbdPmhdTl1XFx9cUc6DugCxTynHmUecp0GwhWeivZ3rWdDl9Hqs1kMDG/YD6XTb1MNxritXmTeTknjOZ0+jp5/PDjuoCjN9CrOfcYl8WlqVQLkG3PptJbqXvNSm8lJe4SPvhBeOWVEBCHkBdGiwmHYccOI88/b+TYMYXB4Sh9/TGGhxWGhgw0Nyvc/8IBfnFfA48+qrB3r8LgILQOt7O1U7tstia7hlyndoSv1F1Kvks7UiQmvocffpiSkhI2bNjApZdeykUXXQRAR0cHl1ySXP5uMpn45S9/yUUXXURtbS3XXnstM2e+vUKCQkuCFiHeZb/f/nuW3b2MzzzzGVY3rqbb383cgrmaUQK3zY3dbEdVoaFBYawGl8vi0o0O2M12Mq2Zur1w5uTPYVrWNALRAOs/vJ4p3in857r/ZEb2DArthamE02tmXMPUrKlsbN/I+eXna276N8y8gR
nZM3i17VXN+bPt2Xy47sN879Xvsal9k+Z1M22ZXDTlIs3UESQ3SDwycIRnjj6jabcYLZxTeo5ml2ZI5nO0jrTq3m9CTXB44DADIe1moLmOXGbnzdYFHLU5tZpl1WOmZU3DaXbq+jK2xPq13BY3o+HkxkNz58Izz7zMsjOMEHaDkkBVob/fwIsvmtj//AJ6mnPp6YGBARgYMODoPwPaF7F+vcLf/65wzz0GXnrOQ3djAW1tEPxHvHlo4BBbO7SBjEExMBCUjU8nm6uuuoq2tjbC4TDd3d0880zy576oqIinnnoqddwll1zCoUOHaGxs5Otf//p4dfeUIUGLEO+ieCLOQwcf4sySM1n3gXVcPvVyDAYDuS7tt+t8Zz7hWJiW7hEefNDA975n4kc/MhJomaq7uVuMFobCQ2zr2gZAv7+fgcAAiqLQMtJCX6APt83Nzy/6OX2BPv5++O+0+ls1K1e+dubXODRwiB9v+jF7evek2k0GE+eVn4dBMfDisRc1r1uXV8cHZ31Qk1w7psvXxeNHHtcknwJcPOVi6vPqdccbDUbdsUDaZdsGxcDFUy7W5amYDCYsRouuDk0gGmBt81rdEud9ffvY2L5R95r7+/brAqVuf7dmastkgqefjvHt32/A5B0bxVFIJKDLuJmXGnfw4osmDh40MDCg0ulr41hsK+GwQk+PwsGDsH19Fo89rnDHHUZ+/WsD99xjwBOayfLK5ZrXjsQjaWvrCCH0JGgR4h1QVZWfbvop5//lfP7rxf+idbiVWCLG7effTn5GPoqi4La4daMSWbYsilxF7D08yvAw9PervPyywq9/7uZLt8a45x7taMI5peeQZU8uG778wcup+30dwWiQM0vOTCXpzi+Yz5XTruT+Q/dT7a7WPN9sNPMfS/+DQ/2HdMXRFhUvIqbGeL75eU27xWjhoqqLONR/iCMD2t2Np+dMZ3HRYpwmpyboCMfDHB48rCtLbzPZdEXgIBkYpduBunmomX29+3TtjYONuqkgq9FKnjNPt+pqRs4MXdE3SK6QKveUa9qqMqs4q/QsTZuiKHzx0ssYbsvhd78LAQlAga550FVPf7/C1q1GVq82sWuLm66mLDo7Ffr7FUZHDXT7+mj2HaS1TWX7doW1axWaugZY17JOs+limaeMqswqXT+FEHoStAjxDty7/15+v/P35NpzmVswl6HwECXuEopcx2/EBc4CTIpJk+uhKAo51kL2N0QZ27MtFoPoQBF7Wtv41BfCfPCDx6dr+kJ9HBxI7lz85cVfxmwwc8uaWxgIDtA5enzU4F9m/wvTMqfx6wO/1iV31ufXs6hoEd99+bu6UYmfXPATfDEfd+26S/ceMywZeG1ezW7EHquHQDTAwYGDmmXXRsVIhiUjFWCNKcooSptc2zHawepG/X4uVpNVN7UDcFbpWZqS/ZAcxanNrkVBG+gZFAOto626kZy2kTaahrSrnKLxKBvbN+quy5HBI7SPtnPjjeD3h/mv/wqBIQoZx695OKzQdiSHna8UsXatkU2bDOzfrzDUWoS5eSV9vQYGBxUiEYWZNU4KnAWavjYONmpyjYQQr0+CFiHeRDgWpqGvgfWt61nTtIb7j9zPD7f9kKeOPMXdu+9mbv5cvrDoC+Q58mjztRGLx+gc7WTdsXUMh4Y5NHAIs2JmW9c2wrEwzUPNBKIBmrtH2NG1m4QaJ2bwk0goBIYyUHtmQNRGX9/xG9us3FlM8U4hEA1wVe1V/Pfy/2Zj+0b29O4hy358SbHJaOLzcz6Px+Jhb+9ezQ3boBi4atpVeGweXmzRTgUFogEybZmUZZRpnlPgKmB+4XxGwiMc7D+Yao/Go3T7uylxl5BhyUgFZFn2LOYVzNNtMxBPxHn88OO6VVB5zjzq8up017zQVUiFt0LXPhAcSLu54MaOjbpKteFYmLaRNl0gklATxFVtsrCiKFgMFs2eT4CuqN9Xv6pysLmXr3y7FafzNY/ZB6B6NeFojJ4eAwcOGNmwSeXltpfYtCNAUxPYbJDtsZFpy9ScszqzmhWVb2/TTiFOVxK0CPE6EokE337p23zsiY9x16672Ny5mbv33M0v9/6SNW1r6PZ347a6ef/091PmLqPEXUKJq4RcVy4eq4dcRy5Wk5VcRy5xNZ4q1DYYGsQX8TPaXsBIIMiooY1+414i0QQ91g3gbYSaR/jy949wsO8gn3rqU8QTcZqHmlPnuLr2aj4060M8fPBhXjj2giZpNdeRy8qSlbzU+hJ377lb856mZU9jfsF8Htj/gCavw2F28KXFX2JJyRK2d23XjKq80voKo5FRKjwVDIWGgORIyPLK5eQ6cmkdaaUn0KM5fn+fdn8Vo8HI2WVnk+vQ5vaYDCYyLBm6xNhoPMrTR55Ovd6YWCKmq1kDUJ9XrytIZzfbOb/ifOxm7bRUmadMN1pjMpiYVzhPN8VUk1VDcYa2rkaJu4RvfmwxPT1hurpCXHBBHFPUCwdXQeI1+UgJE6ovl5DPTkuLEZstmb+yqWMTw+HjtVoC0YDumgsh0pOgRYjX8cCBB7hr110sLlrMN8/+Ju+f/n4O9h/kqqqreGbVM3xg5gcodBUyK3cWJe4Syj3llHvLybRlElWjzMydic1koz6/ntl5syl3l2M1WZlbMJfRrjzi3dOwxvJwRitwjc4nFjUQ6aiFhlUwMI05tRk8fOhh/rLvL/znuv/knPJzNP27ZcktTM2amtwT6DU5EgBzs+eyvHw57aPtuo0Da3NqmV84n99u/61mGfVQeIimoSYKXYWaIOLM0jOZlTuL0cioJtBpGmpiW+c2pmVPo9BVmGqfmTuTKq8+R8NustMX1NdT2d29W1P+H5I5OHV5dXisHk17njOP2Xmz057bF/Xp2l9b1XfMQHCA9a3rddNGm9s30zzUrGk7NnyMLR1bNG2BaIBX214lGo+SkQGPPRZleDjCi7sOcctXhsnOjpHMfwEGpkLCBCisWhXDYrRwZc2VeG3e1PkMigGz0ayb3hJC6EnQIsTrKHWXkiDB3Xvu5pbnbmHdsXXMzJ3Jv9f9OwbFgNFgxG1xYzUdrwWSacukxF2SWj47piCjAKMxmaMSi8H+/RAbLCZuDDCoNBINmenrAwI54Byg5IyNZNmyuHXprVxWfRlPHHmCZxqf0ZV7v2XxLfQF+vjeK9/T9f9909/H/t79rG5crblBzy+cz5U1VzI1ayojkZFUu4KCQTGQ58xjR9eOVIl9k8HEgf4D5Dvzqc6sTgVBHquHAleBbkol05apqzUD0DbapgtOAM4oOYPp2dN17QWugrSjD9s6t+lqxHT4OtjZtVN3bCwe0wV0LouLPGeebul0qadUNxKUbc/WjbSYDWYyLBkYDdol6GFjH//2FR8tLTH8/gg+X5jfP/ssn7h1PzfdFOUfZTzY3bNbs8TZbrZTn1evO58QQk+CFiFeR4YlA5NionWklabhJrpGu5IJn/+42RkUA1n2LF0xtVx7ru5mG4wGUyXdDx+G5maFaFTBFq4Afy7xOHR1/ePXcaiCP//HFUQTyeJqf77izywrW8af9/6ZW
TmzNK+Xacvki4u/SCQe0Y0I2M12vrTkS7zY8iIPNjyYavdH/KxvW8+qmlWMhEZSQUhRRhHTc6ZjMphYVLQIf9RPNB4lFAsxEBggoSYIxoKpqY0sexZ5Dm3uCiSnv7Z1btNdg5qsGpYUL9Edr6Lq9gMC2Ne7L1U877U8No9uNVapu5TzKs7THTstRzsKBMlVUVOzpuqOLXAW6KaSXBaXbun12CjQicHamaVnkm0/XoxPUWDV3KX89BtV/PrX2iTs1wZMkXiExw8/njbQE0JoSdAixAn6g/20Drdy3/77OKv0LO645A6MBiN3772bLR1bWN+5nhc7XmTdsXUMhgZZ07SGnd072dq5lVfbXqXd186TR57k2PAxtnZspXWkFYfJgdPkZH9nE2t2NNI7FKSPBhIxA8fsj9IzGCSSCMM174PZd9NpfYkdXTuAZD7I7efejtvq5pbnb9ElnC4pWUKVt4ofbfgRgcjxeh8eq4cKbwUfn/txmgab2NOTrM/iMDs4q/QsXGZXKgAYC4ReOPYChwcOYzaa6Q/2MxIZwW11c3b52cmRJaubadnTUq/xUstLHB08qulPlj2LS6sv1ex6PKZpqElX2XcwNMjRoaO66ZoZOTOoy9Un6VZnVuOyuHTt3f5uXRXd3kBv2qXTr7a9Sseodul0Q3+DLvAbDA3yzNFndAHYyy0v62q97Ovdp5uK80V8jEa0o26z82ZrknEtRgsXVFygS9AVQuhJ0CLEP4RjYT63+nOc9+fz+PqLX2dN0xpahlt44dgLnFd+HmXuMoyKkWxrNvn2fMo8ZZS6S3GYHZS5yyj3lFPpreTcsnMpcBaQ68ilwFVApi0Tr93LQHCQ3Q1+ututBIJxIokI5kAphmAurd0BcHZD2UvYrvwisXiM6sxqWodbOdR/CLfNzdfP+jp2k52XW1/W9f3GWTdyTvk53PrCrakRGoADfQewGW1YTdZU8TRFUXCYHbSPtpPnzKM/2J/ahbk+r54KTwWQzH1JJBKMhEdoGmpKuz/OgqIF6Vf5hAbSFpPr9fdqpqQgmTh8bvm5uukas9FMOB7mRG0jbakA7LWODh7VJATD8YJ0Jyr3lOuK5lV5q5iRO0PTlmnLZHHxYl0AVumt1O3gbDPZdMe1j7brprI2tm/ULbkeCA7okpGFEHr6r0JCnKa+98r32NWziy8u/iIVGRVc8fcruLrmar551jfJsmdRl1tHIBZgun06RqOR4sxiAtEAA8GBtHVJHGYHDrMDSE4zzMlYzuPrQ9jCZYSj4A3X0+OHkWgMf8wHw1Pgzs3U3HY1d++9m2A8yIVVF+KxJZNRa7Jq+MDMD/CLLb8gz5HHtTOuTb1emaeMBUULUFD4474/siovuSnb2HTM3IK5+CI+GgcbmZI5hcHgYGrJcpm7jEg8wkBwgGx7Nh2+jlQeRzgexmAw4DA70t783RY3g6FB3fvvC/SRUBO6HJFFxclib01ob9o9/h6sJqsm8TYcC/Nyy8ucXX62piCey+JKm/9xZumZurZMW2baEYwT81QguSKKNAt40tWLKcoo0k0PTcnU79R84o7SABWeCtxWt6atfbSdDGuG7noJIbRkpEWc1oZCQ/xs08+4f//9bOzYyJfO+BI3zb6JfFc+FoOFwcggTrMTRVHIdebqVni4zC6sBu2mfIWuQjr9nZqkU78fNm0P0B8YJBIBgwFGR1VCIejePw1GksXo/uV9pfzf5f9HPBFnV/cusm3ZmhvceeXnsaJyBVs6t7C+dX2qPRgNMhgcZFHRIobDw2zrT5b8VxSFrZ1bGQoPoShKavqkwlvB4uLFQHJEwx/10+PvYTg8zK7uXalv/SXuErLt2eQ789Pe6Lv8XWzu2Kxrn5Y9jdoc/Y7MvoiPY8PHdO3d/m4Gg9qcDqvJyoqqFboKvl6bV5enAqQCyNdKqAlNUvGYhr4G3bRRb6CXF1teTLsD9IlTSVs7t+qSog/2HeTwwGFNW6evUzeN5LV5dQHgsrJlErAI8RZI0CJOW3t79vK+v72PBw48wEstL1HqLuWyKZcBkO3M5rKaywjEAtz81M08efhJrEarLmhxW91s6tqkuynWZtdqEjj371cItk5HjRvxR/2MjioctT7My5WXEBh0Q8lGQOWXv4wzI2cG3zr7W2zr2sY3XvyG5rxmo5kPzf4Q2fZs1reuT00zOC1OVk5ZycKihczPn8/q9tWpaQmL0YLJYMJlcTE1ayo7unbgi/jY17uP9tF2ILmUeGrWVHwRHxdPuTjtqMqGtg20jbRp2gpdha9bGO3o4FHdlIc/6tcFJ5As559umklB0Z0jnoizuWOzLleky9dFy3CL7vlWk1U3MlOUUUSJu0TTluvI5YySM3TTVEuKl+i2GpiePT01jTYmy5GlG3EKxUK64na7u3drCvUB7OnZI5smCvEWyPSQOC0l1AQ/3PBDit3F/OqiX7G+bT27u3djMCTjeLfFTXVWNf+28N944sgT/Pfm/ybTmkmxu5hB1yB5jjy6Dd30BfoodBby7NFnqc+vp8vfRYGzgK0dW9nUvom6vDrUgSn8fdMhPJHFHA6/SmEkh5FoD52WrYRyXoLLPwprf8Dnvt6EP+LBaDBS5injM/M/w39v/G/Wt63nzJLjUx89/h7OKz+PJw4/wTNHn+Ejsz+CzWSjZbiFTl8n55WeR04iB0VRCMfC1Ocf38DQaDCmlilbjVZNDkZcjROIBugP9hOJR3QjK2WeMs3qmDHBWBAVVTMioqoqPYEevDav5kae78wn35nPxmbtRoYJNUHbSJuu1P+hgUPE1bimNovRYCTLnoXNqF1BlG7/HkVRmJEzQ9d+4vTMmBPPCcm8mHgirgl8XBaXbkQm3UhJpbdS1zanYI5ux26zwaybbhJC6EnQIk4ru7p38bPNP8NsMNMf6ueuS+/CbXUna29Yj990GwcbWd+6noa+BqZmTuWqmqtoGWkh256NQTHgsXrIdmRjN9sZ6RtBURTynfmYjWa8Ni9nl58NKvR0Wdi71Q2+QmIRE/mB8wiEwgRHPYR2fgmqrDDzAcxT13Hph2YRjs8kFo3RH+znfdPfx4HeA3z3le9y7xX3kmlP5maMrd5ZULiALz7/Re7efTefmPcJPDZPsiaJCvn2fLp8XdhNdpwWJzu6dnBR1UUoikKhq5D9ffvxWD2a0SCL0UJNdg2tI60MBAd0QUu66SGAluEW4mpcU45fUZS0y5shOWXii2kLwfmjflpHWynKKNIEUq9dqfRaJ1a0HZMuv6h1uBWz0UyBqyDV1uPvoXm4Wbeh4iutr+iScff07CHDmqGpuNvQ30AkHmFO/pxUW/NQMyPhEWbnHw+wunxdBGIBTbG9YCyIQTFogrzpOfo6NUIIPQlaxGnj5ZaX+eLzX6TCU0GJu4Sl7qXku5I3bbvJrtmF+MZHbyQWj3Ht9Gt5f+37cVqSyZhPHH4CS8BCpbuSYk/yJl7qLuXwwGGcFmfquAxLBn/Yej/+I3MI9ZbijFQyOgqJiJP+2DFGjs6kv9UErd+F/qmU3HQ7Wzo+xLLSZQCpvXuWFC/BYDDw
uWc/x/9d+n+pQnZbOraQbc/mq2d8lXt238P9++/nuhnXYTPaGB5O1lGZkTODodAQvqiP2XmzNdMeNVk1BKNBnjr8FItLFmtGUErdpZS6S3XXbyg0xNbOrZxXfp5m1OHEm/yYQDTAYGhQF+z0B/sJxrXLnjMsGZrRpDFjoxwnahtpI67GNbs1h2IhdnTv4IziM1IJ0K8n05ap238IkquhXGaXJlG4Lq9ONzJSnVmt61eOI0e3FNugGDCcMAvfOtyKyWDSBCrbu7ZT6CpMm6sjhDhOxiPFaSEcC/OzzT/j/Irzuf+q+6nLraPU85obs4LmJua0OImrcRKJRCoQgeQN7MRy8SPhkVRuSKptBEwt5xLpqCEYhFBI4RXrf3HQ8Cjxo2exv2FsakHl8qob+NCc61nfup4NbRs05zm/8nxuWXwL55QllzOPbYxY6Coky55FoauQuvy6VMC1q2cXB/qTS5MVRUFFTa3i8UWO99tkMBGIBXBZXLpS+ZC8sZ64VNdj9TAzd2balTt9gT7dTXwkPEKvX7/seVbuLHKt+qkUX8RHIBrQtIVjYda1rNO1W4wWrEZtArTNZOO88vN0AUupp1QzygLJ3KB0AYLL7NLltJgMJt3GiRajBbPRrGlzWVy6ZdR5zjxdrs6svFm6kZU8Rx5eq1fXHyGElgQt4rSwoX0DDpODbyz9BoqiYDaacVuO5zWUuJJJmWN5Cn+98q/MyJ3BL7f9kp9u+mkqwdNkMOlqcRRlFFGbU5taJeLzwYYNCkeOhdgZXE04rOD3qzSaH2FfzvfZZv8pasE2QCUnR+W+e+N85YyvkOfI4xdbf8Hunt2pc6uobGrfxEWVF1HoKuRX235FKBaixF2CioqqqqycspJLqi9hV/cupmVNY0b28ZGPLHsWBc4Cnm58mq0dWzX9znfmc3b52WmLwAViAU29FyA1tZTOgf4DuhopBa4C5hTM0R2bUBP0hHt07a0jrbpEWqvJyryCebpAJM+ZpwtExs59Ytl+f8Svq7irqirbu7ZrNi6EZIn9E7caODJwRFcXpnmome1d2zVtHaMdurYefw+H+rWrjAZDg7qk2xJ3ia4arxBCT4IWcVpoHWlldv5sXNbk8L3D5NB8U8535WM2HC9mVuYu438v/F+unnE1zUPNfPWFr/Lrrb8mEo/QMNjAMy3PcHTgKA19Dezo2sG+vn28eOxFjnZ38/tnNvLStl56gl2Ywtm0Bg/QFTrG3P2PQe8s4hWPgvcwmIKs2dxIl6+LSDzCR2Z/hIWFC/nttt+yq3sXqqrSMdqBx+Yhz5XHR+s/SsdoB08deYpoPMorLa/Q5U9WYDUqRrw2LyaDiU5/J6H48RUrFqOFhYULWVS0SHdD7w308nzT87qk0hM3QRzTNNSUqtT7WmeVnJX2+IHggG5llT/qpyfUo6syW5tTmza3I12dlVgixuGBw7r3s693ny7oCMfDjIS1Be0URSHHnoPDpA2GqjKrdNNZld5K3d5Ipe5SXYJv2n2KjGbN3lQA/YF+3caRG9s3pt3KQAihJUGLOC0EY0FNnkaGJUOzWsNsMGNUjKlvwC+3vMz9++9nZ9dOzEYzU7xTiCViNPQ1YDPYMCiG1HSSw+xgceFi1IiDV1+20X00B2PchTJaSDhoZiDcS2DIzaHdOfDAX2GoBpb+mK//+lUynObUdIvD7GBJ8RK8Ni//t/P/GAwNMhweZlr2NOKJODmOHL519rd4+ODDrG9bz8VTLk4FCoqiUO4pp2m4ib19exkMH19WrCgKhRmF7O7dnap8Oybbns3M3Jm6KRGAQ/2HdCX3C5wFmjyS13rt9NOYjtEOXWXcDEsGszyz0o7wpKsKe3TwqK7f8UScYCyoC1qmZU/TL0W2ZzEzd6buvGWeMt0UT4YlQ7evkdlo1o2CGA1GXf6K1WTV7VOUacvUXa/qrGpNUi8kc2QKnPqRIyGElgQt4rRgMVg0y1ztZjv9wf7Uv40GI0UZRbSNtvH99d/n5idupi/Qx4qKFXyk7iP851n/yecXfZ6V1Supz63HYrAwNXsq03OmMy17GsM9bnbsUmk8YsY8UkNwxMk+HmZoKEF8sIzDu3LoacmC4Qp49E5qcqdx2H0nw+Fhch25WE1WpmRNIaEmuHXprZR7y/n+q99nZu5MRiOjPNf0HPFEHJfFxU8u+Am1ObX4oj4O9h/UjJJMz57O2cVnYzHo66xYjVZd3olBMVDgKtCNtKiqmtww8YQpIrvZrludA8kRm62dW3XnmZU3K239lWgiqqtfEolHeLX1Vd3ITLYjW1cy32qyMjtvti7wsZlsukAEkiuWTnzvLcMtdPu6NW0dox267Qo6fZ26qbW+QB+vtr2qaesP9rOtc5uu7cSaLIOhQc3PHiQLzqXrtxBCS4IWcVpwWpyaxM2pWVOTCZavuclWeCpwmp20j7YTjodZULiA+oJ6Dg0e0gzx59nzKMs4Xk/kyBGFdWucWJouY2gkQiCgMOJLsD3j+6wvvZa9h3y0doXhH8mct3zezMav/h6P1cMfdv2BIwPJUQSH2cGZpWeSYc3gprqbmJY1jW++9E1y7DlcWHVhakQm35VPQk2wt3svvf5ezTSLyWAinAjTMNSQStodM7dgbtoqtU1DTWzt1N6UFUVhbsHctPVM9vXu0xWZy3PmsbRkadoRmxODEICOUIdu/x2L0cLCooWaxGdIJgB7bV7dOYZCQ7pz9/h72N29W9M2Ns124p5HJoMJk1Eb9HisHl29lVxHrq4GTJY9Szda4ra4dUXozAazbuRmJDyiy2nZ0rFFpoeEeAskaBGnBa/VqwlQTAYTVqNVM70QjofZ0L6Bny7/KcurlvOrbb/ir3v/ykhkhFj8eGDgNDvpC/Wxu+MQ69crPPywgZYWaA8f5GhsA8PDMDpsZPr+O4n5XQyc8SlwJ0vX//u/h/judxMEYgHOLT8Xk2Livv330TzUDMCx4WOsaVpDjiOHlVNWkmnL5FfbfkUsHtOUvy90FbK0dCkFrgJdbojX6sVtdrO3b6/upt7l69JN+RRnFFOTrb0BA0TjUd0OzpAMUNIFERajRTfSEolH2Nq5VTd1VGovTVuD5cSAZawf+3v363Zw7vH36G7+mbbM1HLxMYqiML9wvi43piijSFcsz2lx6lYAmQwm3eiSQTHojku3Isltdeumh8o95UzNmqppm5U363Xr4AghjpOgRZwWDIqB3uDx3Ipseza+qI+9vXtTbdOzp7OkeAmheIg7L72TX138K3Kdufx2+2/54GMfZCQ8gi/iwxf1ERj08sjqIZ56LsiBvgP0+UbwDVtw9J9BW/gAHX0BmnZUw/33Q8IIZ/yEKz+9ic/e2sVgaJAeXw9XTruSD9Z9kNHIKH/a8yc2tm+kyFVEpjUzNW10ZsmZtIy00NDfoJvKMBqM+KLJcvyvDRYMioG67DqKXEW6cvydvk4GQtobvcVoSbvsORQPMRwe1uWN5DpydfkcAEcGj+imVixGC2eUnJG+fkmaCrCtw62pkafXHms2mnWjODXZNdpl66ArIjcmlojppod8ER/d/m5
d24nvIRgNsqNrhyZoCsVCbOvcphnN8kf87OjaoXmd4fCwbj+iodCQLtiyGq1SEVeIt0B+S8RpYWrWVEyKdjpobv5czQ6+iqLQOtJK60grnb5OOnwdzMiZwSfqP0Gxq5jnjj7Hffvv4+Ete1i9Lszappc42H+Avkg7g34//ZF2joZ20BTcxY4dBkIhoHc2vPolcmftRznrx/QF+jAqRsxGM93+bhp6G/jiki8ST8R59OCjhONhXBYXvf5eEmoCr83L15Z+jcrMSl2Je4AqbxUDoQEah7QrZqKJKAPBAd2NcG7B3LTf6Bv6GnS5FxmWDOYWzNWdQ1VVdnfv1o2elGSUpE3STbePkaqqbG7frLt5e+1esh3a0Q+jwZiazjtRusTdI4NHdPsSNQ420jSsnY4KxoKMhPRTRq8tMgjJ/JkcR47m9a1GKwWuAs17s5lsZNuzNXVsDIpBV5gu3fTQts5tuo0VhRB6ErSI04LL4sJqsmqClkg8wuFB7bfgs0vP5j/W/gcfePgDPHf0OULREG6bm8KMQs4sPZNraq+hMHgu6rGluAbOxDw8A1v7cgY7PWyqvJZ9+d+i7cWVxAIu6JoLKPz+yyt5+OafkmXL4i/7/sJIeISqzCrsJjsz82eS58jjS0u+RJ4zj++88h1MRhN9wT7sZjtTs6bitXlJJJJ785xY88NtdXN22dkUOYvo8R+vfaKqKrFETFcUDdInpRa7i9MGM8FoULf6R1EUMqwZumDEZrKlnd7pGO1gb89eTZuiKEzJmqKbZsqwZKRd4jwUGmIoNKRp6wv0pV1+bVJMuiq0Vd4qKj3afYByHblMzdZO09hMNl3isEExUOou1Yz0KIpCcUaxpm1sz6gT38+J+TBlnjKqs7TbENTn18vqISHeAglaJrBjx44RiUTw+fRLScXbk2XPomO0g4aBhlTbvIJ5uMwuzYhB+2g7G9o2YDVa+d653+OGWTdw9fSr+dd5/0q+Mx+P1cPZy8LkZhtQ2hfR6e+gv99Af2cGtCxCtXbAys+CvR+XS2VoKMwHPqBQl1/Hx+d+nFgixp92/4nW4VbcVjdTM6cSiUdwW93cMPMGpmVO45699+j2xMl3JTca3NWzS5dgOxIeoaG/gS5/V2oKw2K0UJ9fn3Z0omW4haHwkKYtw5KRdspnKDykCYbGVHor046gNPQ16IKcHEcOxW59QDS2j9OJjgwc0eXdjIRHdLVWchw5mo0Ux1R4K3TBk9loTlvJdzCk3XFaVVVahls0eUKqqnJo4JCmKq+qquzv269pi8aj7Ovdp5lG8kV8qXylMcPhYV0AZjKY0iYxCyG0JGiZwLKzk8PkR44cwefzsX37diKRSGpvGfH2jCW+jkmoCfxRv2aKoSa7hutmXkdfsI8bH7mR+/ffz0BogP19+3mx5UUALBa44gofUUsv3ZFG9u9XaGgwwV/WwIavQvYBSq77Ae2dfsz/WMU6GhmlJ9DDt5d9G3/Un0q+PTRwiI1tyR2PC1wFvK/2fbgtbhr6Gmgd1q4mKfWUsrxyOYFYQDP9MTYtMTtvtmbZrD/ip6GvgRMtLl6cdrfm/b37dVMUha7CtDVOQrEQTYNNunavzauZcoNkAJVu9GQwNKirvwKkvXmXecrSTo+lqyI7GhlNOypzYq5KIBrgYN9BTZChouKL+jRbOiiKotuFWVEUnCanJig0KAbsJrvmuISa0C0bHwwO6vKKtnRs0W2bIITQk6BlAnO5XFgsFubMmYPL5WLatORqi5aWFnw+H36/n8bGRmKxGPG4fvM3oRWOhTWjAEaDkenZ03WJkh+b8zG+fc63mZE7g2eOPsOX1nyJrZ1bqc+vTx1TXR3j6hXF+PdcxIgvDqZg8s+um7jnX75L7bl7+Mrar9AX6KNpqIlMWyZTM6eCAp9Z8BkGg4P8reFvdPu6mZU7i8HQIB2jHWTaMrly6pV0+7rZ07tHtyzYF/axvXO7ZlWQ2+qm3KvPJRnbdyidE0dDAHKduZqtDV573U4c3YklYoTjYd1qoQJXQdrNCpuHmnU3ZbPBrKtICzAlc0raYOTE0RdIjsqc+F5Gw/qgxW116wI1h9nB4uLFmkDPoBiYkTNDt69RpbdSt3S53FuuGW0yGoxUZVZpRnTcVrdupVCFt0Kz6zMkc41OXI0khNCToGUScTqdWCwW6urqcLlc2O12HA4HsViMTZs24ff7OXToELFYjGg0+uYnPM3MyJ3BQHBAM0phMph0N6N5BfOYkTODf1/473zrrG+xcspKPFYPjx18jMcPPc66jnXs7N3JLxIziV9zFSz8BeTv4OpPb2F9wz4q8nJYWLiQA70H+NHGH7Gvdx8JNUHjYCPrjq3DoBi4ZsY1GBUjd++9m0AsQOdoZ2pKQjEk8z1m587WFSFzWpzMyJ2B25ocjRkLSrp8XbqRBJfFlXYH5nAsTENfg24TwlxHbtqclGPDxzTLrcfOPT1num5UJJaI0TjYqMuZSVf0zWVx6eqaQHIU58Rpm1AslHZqzGPz4DJrp7WKMop0eSkWo0W3FPr1DIeHdcvIe/w9uuvVOtKqCR4TaoKjg0d1q4w6Rjs0zxuNjOqSmM0GKSwnxFuhn/AWk4bBYKCwsJCmpiaWLl3K+vXr8Xq9tLW1sWPHDvx+Pxs3btQ9z+/3s3nz5nHo8VsTCoUYHBzEaNTnILwTcTVO+3A7O/p2aCrG7h3eS7O1mXxbsmjb4x2Ps2MoeUyRvYhCeyEZxgxiaoywJYxVtWKwGBgODBPOX8elq8r5zPx6TCYINSVv1vMS85hbPJc/H/szT/U+Raw9RpYli0A4wLG+ZABQF61jNDbKvzz4L9xYdiPTM6bTwfEb3DH/MZr9zYRzw7oVKHE1zkBkgCHrEAC+mC+5RNk6TDweJxqNMjg4SDAeZDAySJH9hKJnmNndoS3CNvaa2dZsXKbjgcBYYDSgaKc0wvEwkUSEDHNGqi2WiNER6qDL3qXr82uN/WwOR4exGCzYjcdHVgYiA/hjfkod2uXM0USUHa36xNt0IomI5jNWVZXOUCc51hxNe6OvkSxLFpmW49NXTf4mzFGz5nenLdBGhjkDj/n40vD2QDteixenKRnoxdU4bYE22uxtqdfwxXz0h/spdx4fCesIdqCgUGgvTD1v78healw1mutwomA8iFExYjFYCAaD7NixY8LmwYTDYYxGI42NjW9+8DiIxWIkEgm6u48veb/11lsZHh7GYDDQ0tLCggULNM/Jyclh9erVJ7ur4gQStJxCFEUhLy8Pm83GokWL2LBhA0uWLNEdt2nTJhYuXIjBMDEH2vbv309JSQlut36q4p1y97gZCA6wpPz4dXF0O8h15lLoKuQb677BXa13ccuiWzir7Czm5c/DZj4+EnOw/yAMg8vqovGcRr6y9iusaXqOdaqbb57xTc1rHew/yM/n/5z/3fq/NJgbuGraVSzLWkYwFkxNoZwTP4epDVNZXrlcV4m1yl9FPBEnpsawGCzku5L72jQPNROOhzkz+8y073FwcJCenh6mTZvGUCiZSJuueFwgGtBN5RQNF5HjyNG1q6
qqu0F2+joZDY+mPfeJVFWl09dJgasAg2Jg48aNLFmyhMbBRtxWt+69v1W+iA9fxKepzTJWa2VewbzUVI2qqjQNNVGcUaypblwbrsVldmmmdJawJNW/k2VOZE7aUa7XOth3EJvJRoGrgFe3v8rc2XOx2Wxv+Jzx0tjYiNvtJjf3n/tc32vd3d0EAgEqK4+vKFu3bl3q78uWLWPr1q1pnnncwMAA1113Hc3NzVRUVPDAAw+QmanP3TIajdTV1QFQVlbGY4899u68idPUxLxrCfEeyXfm66YOpudM58jAEcKxMItLFuM0O3mu+Tn+tOtPdPq1ialdvi5Go8npJZPRxH8v/28+NPtDbGjfwM82/Sw1fZFQE/T4ezAZTXxx8RexGq08eOBBnm9+XrNnjdlo5oaZN9Ay3MLzzc9rXivPmUdhRiEOswOb2ZaanvDavGkTabt8Xbol0V6bN21QMRIeYWvnVt00TpmnLG1Oyr7efbSPtmvaCl2Fac8diUd00zuxRIyB0ICuqu2UzClpA5Z0uThtI226qZZYIqbbw8hlcTE7b7YmEFEUharMKt2Oyx6rJ+2qopPtzQIWSCaJl3vLGQwNMhAZIKEmaBpqet28JfHe+sEPfsAFF1zA4cOHueCCC/jBD36Q9ji73c7OnTvZuXOnBCzvAglaxGklz5nH/r79uuTbBMn/+C+vvpx1H1zHkqIljERG+MaL3+BLz3+Jje0beabxGSwGC9nW4wGDoih8ecmXuXXprezs3sntG26nx9+DQTGwrGwZmbZMHGYHH5vzMXIcOTx++HHK3GUMh5MrwNpH24kn4qkS+MeGj9E63Iov4iMSj/BKyyv0+nsxG8zs691HLBHDa/OmTdq0m+1p9wryR/y6VUFuq5vFRYt1N2xVVWkeatYFF2WeMvIc+pyQeCKuu2kOBAd01XvNRjOzcmfpggZAF3QA7O/br1tq7ba6ybBmaNq8Nm/aDRnTvc5kNzbSVeAqYIprCtFElKHQUGq7hROXhIv31qOPPspNN90EwE033cQjjzwyvh06Tcj0kDjtTM2aSpHreI6HxWhhfsF8moabmJ49nVJPKd8997u0jbTxTNMzZJgzsJvsfOz5j9E92s2KkhV8ataniLviBGNBnGYnnb5Oziw9k1daXuG7L3+XS6ZeQr4zn2NDxzAbzVw29TIqPZXk2nP57LOf5aOzP8o1tdfQOdqJx+ohz5mHyWBKJWg6Lc7k6hqLgyx7Fg6zg7kFc1NLbI8NHyMUC2n27/FYPXisHgYHtaMcwViQ4dCwbl+csWJ7r532SagJQrEQcTWOmePJoemCIUiu3rGZbJrVSwWugrSl9BNqglgipllxE4gG2Ne7T/PeACo9lboVRK/Xh9NBuuk5q9HK3IK5QLJGTFyNc3jgMBmWjLTXX7y7uru7KSxM/k4VFBRo8mNeKxQKsWDBAkwmE7feeitXXnnlSezlqUeCFnHaybHnsKZpDRdVXZT6Rj4UGqJztJPp2dMJx8Ls6t6V3FwwFiTLmsWO7h3csvAWfrH1F2zo3kC+M58bvTfisrpIqAlm580mw5LBkqIlPHroUdY2r2VVzSpm5s5MTX9cUHlB6sZsM9lQFIUFRclkP5fFhaqqDIYGsZlsDIQGyLJnMa9gXqrfr72pZ9oydStcILmUuX2kHTvHb/g5jpy0IzP9wX6aBptSfYB/LAPPmZ72uh3qP0RhRiEZluOjHRXeirTTK2NLoV97o+32dzMUGtLsNO0wO9IWwXsr0yWnix5/D+2j7cwtmJvMcYoaueSSeXi9CnV1ChUVKvn5MygoSJAzxcf82WYO9B3AarTqqvGKt2f58uV0dXXp2m+77TbNvxVFed2k6GPHjlFcXMzRo0c5//zzqaurY8qUKe9Jf08HErSI006GJUNX0bXEXUKeM4/vr/8+Txx+gqrMKs4uPZtydzkmxcScgjkUOgv58OwP81/P/BdPtT5FXmYeXzvza6lz9AZ6sZlsfG7h53jo4EM8cvARbpx1I2XmZFG0sRvz1bVX0zbSxq7uXalvypAMZMbqiYwlruY6cukY7cAf9Wtu9q836mAymLAZ9cmZ4ViY0cioJnjJtGViyNLPEKuqSk+gh3xnvqbdZXFpVt7A60/DHBk8gs1o02xoWOAsIMeuD57SVdYVx+U4clLXeXPHZl55dCrDowUMjyioKgwNQWamisdj4DxDOdmLE8QTcUwGEzu6dmAxWpiSOQWzIX1VYPH61qxZ87qP5efn09nZSWFhIZ2dneTlpV9SX1ycrAZdVVXFueeey44dOyRoeQckp0WcdowGI1n2LDa0b9C0N/Q3cN/++zg6eJQzS87khlk3ML9gPl67N1lczJysNXJz7c2sqljFnp49fPfl76aSbweCAwwEB/DavNww4wZWTV3Fjzf+mD/v+TPtI9okVo/VQ4Jk7ZaxujFem5fijGJsJhtWk5WR8AiReIQse5YugABoH2nXJd5m2jLTHjsYGtQVYTMohrSVaoOxIJ2+Tt1ITlFGUdog5fDAYV0xt1J3aWq10xhFUXS1WsSbMyiG1C7cs/Jmcf8fCyD3AGQeRTHGMFj9WK0KVis4/pFDnefMI8ueRaW3knJPOds6t3Fw4CDto+1pN5kUb9+qVav405/+BMCf/vQnrrjiCt0xg4ODhMPJ/x/6+vpYv349M2boayeJt06CFnFasplseK1eTVtdbh2PXfMYK6tX8sihR7jo3ouo+10df93zV44NHS+uZjaa+dSsT/H1M79Ob7CXW9bcQsdoB9Oyp6WmVuxmO+eUn8PXln6NLZ1bWN+2XvNaiqJgMViIxqMk1AQj4RGGQkN0+bp4pfUVgtEgNVk1DAQHsJlsZNmzdO/BbXPrdkSGZIDSFdQOaRe4CjQjNa899sTAx2F2MCd/jm7KJqEmN208MfHWbXXrVhzZTDYZQXkPZFgyONZshL7pMFiFNb+ZkPsAZmuMqLkXh0P72XhtXlwWFwsKFzAtaxqHBg7RMtzCru5dsuroHbr11lt57rnnmDp1KmvWrOHWW28FYOvWrdx8880AHDhwgAULFlBfX895553HrbfeKkHLOyTTQ+K0lG3PTq26GJv3VxSFYCzIZxd8lpm5M7l3z718+fkvc+/+exmKDHHPqntSz48n4rSMtPDxOR9nbfNaPvvMZ/nWsm8Rjie/VZV5yojFY5S6Szm//PxkbY3WV6nOqiYYCzIYHGROwRw2tG0gEo/QG+wly5bF9OzpGBQDI+ER7GY7voiPaDxKj7+HwdCgZh+g1+aWvBXxRJxwPKwJMBwmx+tONUXjUc3ISDwRJxANEE/EMRiPf99JN7Ij3rmEmuBQ/yHKPeWapORAAPhHknRGrIoCNU7U0k3IdgSz5Qxahtt0+zSNjZCdV34eo5FRtndu5/DAYcKxMLPz9ZtOijeXnZ3N888/r2tfsGABd955JwBLly5lz549J7trpzQZaRGnrUA0oFsmmufMoyijCJvJxkfmfITGzzZy6ZRLaehr4Itrvsjmjs1s7NrIwcGDGBQDPf4ezig5gzNLzuTWF27lldZXCEQDh
GNhArEAZoOZuYVzMSkm9vXvw2K0UOAsoDKzknAsjKIoTM+ezrll55JtyyYYC7K0ZCmFGYUYFAPTc6ZjN9vJsmelXRHS7e/mcL92+XamLZMCe/pjjw4d1bRZTda05x0IDrC3d6+mzWw0U5NdI1M8J4mCgs1kw2Qw0TLc8ppl5McTPi1mBZPBiEstpCRxFjHjaGpl2eGBw7q9oSAZ7J5Tfg4ZluSquL8d+JtmmlKIiUxGWsRpqyqzCn/Ez0h4JDXa4DA5eHD/g3QHuvFYPCiKwrkV55LryGVjx0Z6A71ckHsBtc5a8gry6A/0U+YpY0HhAs4uO5u/7vsrGdYM6vLqKLMnv+3WZNVQk1XDrLxZ+KN+/BE/xe5i4ok42fZsjAYjRoMRmzl5g+oP9nNs6BjzCo+vHLKZbLo9kgCcZqdmV+Ex/pifHn+PZr+dooyitKMio5FRRsOjmn2AxurLiPGjKEqqBo3VaMVsNCf3osr2Q38NiqJiNoPZDCYTmIwKGWYPi8qW4Y8kP/98Rz4tIy3MzJ2pW90y9nmbjMmg6GD/QUoySt61kRdDfz+GlhYSZWUwQSvjislHghZxWjvQf4BYIsaS4mTJ9vrf1TMUGuJD9R/i0imXUuopJdOWyVBoiG1d21jTtIY1bWvItmdTN6WOxsFGcqI5uCwuFhYtxGF28Ne9f+WWNbfwx8v/qHmt0fAo4XiYDEsGPf4echw5mh2AxwKMWCKWdiPBbn83vf5eZuXNSrW5LC5cFpfu2Ggiii/qIw/tigajwUhCTWgCHVXV7watKEraIEmMj7Gk5tHIKEQTYAxD/j4M5lrAhNGYDFzGOC1Oziw9k9HIKIFogG5/N52+Ts1qtTFjybpjK9oODRxiUdEi3RTT2+F56imKv/UtFIsFolEid9xB/Jpr/unzCTFGpofEaW1+wXwWFS1K/bs/3I8/4ef5pud5tulZ1h1bx6H+Q3htXlZOWcn3z/0+Nd4aHj76MPfvv59FRYs0oxkzc2fymQWf4bLqy4jFY5rh+UAsgNPiJNuRTftoO/6oH1VV2dG1g+Hw8PHaJih4bV4gWT8mGA0C/1gZ5Eq/MqhxULsxndfipcqrr9HR6etM7p/0Gm6rmxJ3ydu8cuK91BvoTbvKJ8OSASMloBoxxJyYjUYGbTsIMYSiQCymP35R8SIsRgt2s509PXvY07NHt32DQTFQ5inj4ikXk23P5t7997K6cTWJxD+RrNvbS8m3voUhFEIZGUEJBrH8679Cb++bP1eINyFBizitKYpCQ18Du7p3AXDwkwc5o/AMBoID3Lf/Pl5pe4UfbfwRqxtX0+PvYX3bej5c82GWFixld/dufrLxJ2zt2KopOV+UUcSV065kXcs6moaaUkuN3VY3NpONYDS5YWIoFmJt01riiThGxcimjk2EYiE6fZ3s7dnL5vbNdPo6UyX/LUZL2n16zAYzDpN+KiccCyenE14jz5FHmfuf/wYtTo6+QB8j4REC0UDq89dImDAM1qBgxGHwYDFY6TEkCyKmk2VPJnnnOfPIc+Tx6KFHaRxo1C2DNxvNnFdxHh+c9UEO9B7gZ5t/Rl+g72313dDSgmo6YRDfbMbQ0vK2ziNEOhK0iNNevis/VeI+x5nDczc+xw/O+wEFzgKCkSCfW/A5Lp96Odn2bGpzaokmopxdfDYXVV1EKBrihxt+yJHBI7QOt9LQ18D+3v1s69zGgb4DOMwOHjn4CL6wj4HgAEcGjtA22oaqqrgtbsq8ZdRk1+CyuJiaNRWbyUZxRjH1+fWUecqozanVJMqOhEfY1b1LM4LjsrgozCjUva+RyAiDQW1Jf6PBKNVmJ4HanFpyHDn0B/rp9nWjquprgofkZ6+qyT+ZTMFmsmMnCzXsYkfXDlpHWtOeN9+ZT74rn7NKzyIQC7C9czs7unbo9n8qzijm4/M+jj/i5xsvfgNf2PeW+54oK0M5ccgnGk3mtgjxDknQIk572fZsDIohtYOwoihcP+t6vrb0awyEBvjOK9/he+u/x7pj6zg8cJh7Dt3DkcEjxImzcspKzig+g//Z9D9s69xGjiOHXGcu8wvnc9Psm8hx5HBBxQUkSFDoKqTMU4bX6mV6znSsJitTs6bitrrZ17svNR0wVoQt3aoep9lJnjNPl1Tpj/g1tWQAch25VGdVv0dXTZwMpZ5SarJrGImMcGjgUDL3yDYMSuIfQYtKNAoGg0KWsRRDLLnvkNfmZWP7RpqHmtPWY8lz5jErdxbnV5xPt7+bl1tf5kDfAU1BQZfFxb8t+jf29e7jp1t++tY7nZtL27e/TcJmQ3W7Ue12InfcIcm44l0hibjitBaKhVjwfwuo8dbwzXO/ycH+g9y37z5MJhMuk4tp2dMwYGBr51bCsTAfrf8oFRkVVHurqS6rRlEUFpcs5pdbfonRZNSUyXdZXDQNNVHuKafb3000HqXcU552j5J8Z76u7spYvktVZlUqx8VoMOo2PhSnjnQbI0KygvLSkqXJf+Tug4Ea4lEnCUUhkbAQiyWfEw5DjrUQsxnKPeV4rV5WN65mTv4c3Fa3Jml7LDi+eMrFHB44zL7efezu3s3i4sVUeCuIJ+K80PIC3b5uBv2Duj69keGVK1GWLyfX75fVQ+JdJUGLOK2N7aWzqWsTP9rwI7Z1bqM/2M/1M67nmtprqMurw2Qwcaj/EH/Y/Qfu2nUXS71LcZgdmpvLh+o+RCgWYkPbBhYULsBsNBNPxBkJjxBLxCjOSO4/srtnd+o1X7sxYboNDRVFocRdoqssG41HOTRwiOrM6lTRMKfFKdM+p4B9ffvIsee84S7Nho4zSMRNULQZn8lFjFISBpVEwkYoBH4/eL2kgtsFhQtQUXm68WmWVy4nnojrft6mZk2lOKOYF4+9yOee/RyKqmAxWmgeaWZuwVy+dc633vZ7SWRnk5iefvNNIf5ZMj0kTmsGg4E9n9jDT1f8lKNDR7GZbNTm1HJ06Ci3v3o7H3rsQ1z38HUcHTrK18/8OjmOHNa0reE3e3/DYOj4t0+PzYPX5iXHkUNfsI/2kXbMRjNzC+Zqlg5XeCoocZdgMppIqAkC0QCdvk4AXmp5iR1dO4jGo+zo2kEwGiShJnQrg0wGE5m2TCnydgoqc5eR48ih09fJ4YHDaY9xOf/x33bnfKLdUxk2HaRPPUiMEP5I4B8Vc4/Lc+aR78zn0upLaRluoaG/gT09e1Kr0sY4zA6Wli5lTv4cDAYDLouLB656gPvfdz8em+e9eLtCvG0StIjTnqIoXDfjOv72vr+xomoFpZ5SVlSu4DvnfIcvL/kyV0+/mqnZU/HavHxi7ieYkzuH3mAvP9zwQ815OnwdDIWGGAmPYDKYeLn1ZULREE8deSq158vhgcO4LC56/D10+7vxR/2pzQaLXEUUOAswG82pqrz5znwURdGsClEUhaKMorRF5cTk5ra6MRlMeKwesu3ZhGIhdnXv0uSazJiRAFRQjfhGDTh8s8iK1dEbP8xR3z56h4L4IvrEWYfZQX1+PWeWnEm3v5tdPbvY3bNbc4zH6uG753yX31/ye+xmO7/e
/uu0VXWFGC/yv5447SQSCe7bex+xE1Y4FLuL+a+z/ovzK87nyNAR7tx1J6F4iBtm3oDT7Ezt5lziLOGGqTdgVIzs6trFxvaNNPQ3cGz4GHnOPDpGO3Bb3bQMtTAaGcVldmE32SlwFaQqnM7KmUXHaAdZtqzURobVWdUUZhSiqmoqWFEUheKM4rQbJopTl8PsIMuehdlgxmvzYlSMHO4/zGBokJtvjqaOGxpSQDUSjxnIUWeQFZ7HztYjHOw/yEBwIG3woigKyyuXU+gsZDg8zN8a/kbHaIcmOMl15nLd9Ot4/NDj7OmRvXPExCE5LeK00zTUxKdWf4pvvfwt/nrVX5lXcLxcflyNU5dbx6KCRfx1/1/55ZZf8rcDfyPbms2U7Cnk2nNpH22nwl1BSUYJ7aPtWEwW5hXMI9+Zj8vsIpFIYDfbubHuRnoDvXhsHkrdpUBypRKAy+qiOKMYo8Go6Vunr5Oh0JBmR+ZMW+ZJuCpiPLUMt5DjyNFtnWA0GCn3lAPJGipGxUjdme3gdIC/EJ8PYjEluYooYsRmVfEE65ido7K9ZzMuiwun2UmBq0BX4bjcW05RRhEN/Q08fvhx8hx51ObUMhIeYXv3dh5ueJiEmkibbyXEeJGgRZx2pmRN4Xvnfo8/7/kzH3/y43xy3ie5ceaNOC1Onj7yND/f8nPKvGVM9U5lYeFCdvXswma0McU7hfr8enrNvRiNRhbkLiAUDaEoCq0jreQ6conEI4xERlKl8nPsOWRYMmgZThbWGiuNblAMFLgKdKtFch25acvyi1NbTI2RUBOMhEeIq/G0gerYKJ3bMwRKAhx9BM0hQqECwmEFq1VFVaG/X2F0FBYXLwbgxWMvEkvECMVDzMzR7kFkNpqpy6tjauZU/u25f+P7679PhjUDu9lOqbuUr5/19bRbSggxXiRoEaelzy74LP8y+1/45bZf8uThJ9nbs5ePzfkYTx55kob+BsLxMF6rl0JXIQsLF9Iy0sJjhx9ja9dWVuWtApIl0q1GK4FoAJPBRDwRxx/3U59fn3qdsT18xjZkPNGunl0UuYrIc+YRT8TpD/aT78yn199LQk2Q78rnUP8hMu2ZaavhilPD2JYLbSNtxBIxvFYvg6HBtNOCZZ4ybDEDIXMvmAK0Dwxhyx8l01hCOAyjozAwAFn/eOo55efQF+hjT+8eDvYfxGaypQKgMTazjZ9c8BMeaHiAQ32HWFa+jEurL32v37YQb5vktIjTltPi5KtnfJUfnPcDPDYPv9n2Gy6ouIA7L7mTK6ddSam7lJHICL3BXgpcBczJn4NRMWr2benx99A81Ey5pxyDwUB/oJ9YIsb+3v2p8usJNYHJYMJr89I81Ew4FqbL18Wh/kNUeitpHmpOlevvHO3U9TPXmYvHKqs3Tgcl7hIqvBUMhYc42H8QVVXT7kFUX++D0WLor6F/OEgwMcRgaJBhtY1gENratLVechw5nFd+Hoqi0B/s5+kjT+vO67K6+Gj9R1lRtYJfb/01q4+sfk/fqxD/DBlpEaeFUCzEsruXcV7Zefzwgh9qhshrc2v50pIv8Wzjs6xvW4/FaOHCqgupz6/HaXam8gziiTitI620t7djs9goppgSd0lqBKQ/0E9tTi1rmtcQjUcZDY+S48xJVtv1dTA1cyqb2jdx4ZQLsZvtqKh4rB5qc2txmp0YFEPqm3Wu8/ioiuS0nLqC0SB2s13XnmnLZEnxEhRFYUvHFqZkTtHUbvnv/z7EWWcldyYfbCnBPLUQv7mVeHwAh2piX2uMBb4iXCfMNE7LnkYoFsJsNPPIwUcoyiiiLq9OExRfUHkBt62/jV9t+xUXV1/83rxxIf5JErSI04IBA33BPu7Zew+to618c9k3NcXdPFYP18y4hsXFi1nTtIbHDz/O5vbNXFx9MfMK5hGNR4klYhweOMzBnoNUZFZw7757mVcwj13duyjNKGU4MozX5qXAWUBxRjEqKhajBY/VQ0lGCYqisKJqRSoIGbtRnFgJV5weAtEAe3r2ML9wPiaD/r/iscB6bsFc7CY7u7t3k2HNoNJbSfIhFTAQiUBfH5Q7S8nOKGEkfJh4d5RtRxuZUe3WTSvaTDZm580m35nP9q5kwm3zUDMOq4NAOMDOnp00Dzdz3Yzr3vuLIMTbJEGLOC1YTBYaP93I3bvv5p499/DVtV/lmtpruLzmcs23zDJPGR+u+zCPHHqExw8/TvO2ZrLt2czMm4nL4iIUC1HmKcNhcmC2m3FYHNTn1ROOh1lQlKyEOzZacmzoGAECeKyeVCE4WbosxjjMDuYWzMVkMNHQ14DH5km7RcPYSF+Zpwyr0crRwaM0+5pZsWIOzz2XHKVpazOQn5/A5wO3oQabP0FD0w7KSo00DjQyt2BuqnrymHxnPiunrGRt01r+1vA3TAYTs3NnM79wPp9b+DkuqLjgvb8IQrxNErSI04ZBMfCR+o+wqmYVf9n7F55qfIrtXdt5f+37U/u6NA42cvbdZ5PryGVR0SJiiRj+qJ8Lyi/Aa/NiN9tpPtbMUHSIOVVz6PH3EFSC1GTX6PaMybS/s2mdvkAfmbZMookoHb4OKj2VafelEZPX2BYN+a587CY7/cF+hkPDVGVW6Y4d238q056J1+Ll6z9s5LmdKvTW0d1tYHQ0jsMBFouK0ahg6JmLSw3TTRdHB48SioeYWzBXd97zK89natZU7tlzDyjw+YWflxVsYsKSRFxxSmseasYX1hbYyrJn8dkFn+WbZ36TIlcR9++/n19t/RXd/m6MBiNGg5FIIkK7r53GoUZ2du/kU09/im1d2wAIxoK0+9qJJ+LkOfOoya6hZbiFDW0b6Av0pV7HbXW/7qohgIHgALFEDF/ElyrVf2TwCM1DzQC0j7YzEhlBQcGoGF/3PGLyy7RlYjPZMBlM2Ew24ok4TUNNaavRZtqSQUtJvgNLwg0Z7SSyDtDRYcTvh0BAJRaD3l6VY0ctLClZgtfuxW6y88jBR+j2d+t2fi71lPKvC/6VY0PH+Orar56sty3E2yYjLeKUtvwvyxmODHPbObfx0TkfTeUOKIrCtNxpFLoL2dSxiXXH1vHl57/Mh+s+zJ5P7MFutDMUGsIf8xOKhWgcaKTCXcGx4WPYjDbm5M7RFIabkjkFt9WtG4Lv9HVS6Crk6OBRPDYPoViIbn838wrmcXToKEWuIjKsGambSIGzIFWe/7VLp8cKjInJrz/YTzAapMRdonvMY/XgsSZ/ToZDw8TdcfoD/WTbs3V5L5m2TH78Xza+8B+jEHVwbKSJbDWCNVCN3a5iMCjs3asyfXpy88QCZwEJEmzp2ILVaGVm7kxNDRaP1YPT4uShgw/xg9APyLBJrpWYeCRoEae075/7fX625Wfcsf0O1jSv4d8W/VtqKgiSoyHLK5azuGgxzzc/z3NNz3Fk8AgzcmZwdtnZQDJhssffg9fm5ejQUQxRA/6En9hQDKfFidVkTZVLbx9px2620zrcytyCuezu3o3X6iWWSBYPy7HnpG4+CwoXpPqRkZW8Qciw/KnPZDBhNphRVTUV1J447Wcz2Zh
TMAeAY8PHUFUVr82L2WDWBMs33wxf+EI2kEck0UpXhxGlYg+JUDb5hiKam6G5GWbNShYxnJEzg1J3Kft69/FSy0usb1uPP+JnMDxIKBqiabiJmTkzcVnl51BMTBK0iFPatTOv5f0z3s/zTc9z5847+d/N/8v2ru1cWn0pld5KIDnq4ra6ubLmSmblzmJ963r29OxhWekyFEVJbTTXG+wlw5LBntE9HB45TFWkCn/Uz6LiRfQF+qjyVqEoCoWuQixGC06Lk4umXARATXZNqk/5pvxxuRZiYhgbTQnHwvQGesm2Z6MoCiaDKe0mmIuKFgGwuWMzWfYspninaB7/0pdi/OQnBhgtofOASmZugBESBJ0vUDy8hA0brFRVgeMfOwRkWDJYUryEAmcBe3r3YMDAkpIl5NnzmJ47nenZ00/sghAThgQt4pRnUAysqFrB0pKlPHHkCZ5vfp49vcmg5IYZN6S+uSqKwtSsqRQ5i/hbw9/47Y7foqgKikHBZrQxEhlhauZUbEYbs7Nnk5eZh1ExUpNVQ01WMigZq6WRbhXIOxGIBojEI6lkTDH5WU3W1BTgru5deG3eN5wGnJM/B5PBxM7unTT5mphOMrj49rcT/OQnCcBIOKzQ1zCdjOoIqrOHHutR1jf7mb5tAcuWafNjKrwVfHHxF/l7w985OnSU5RXL0yYACzGRSCKuOOU8dOAhlvxxCQ29DZp2p8XJdTOu47vnfJcLKy9kd9duvvHiN9jQvkGzG+5l91/GF579Ar/Z+hueOPIE2zq20Rfsw2q0MqdgDtMzpzMjawZ1eXXMzJ35rvQ5Go+mKpS2DrcSiAYYDg+zvWs7qqoyGhllMDT4rryWGB8j4RFdAuyY2pxaSt2ldIx20NDXkPYYi9GCQTFQnVVNsb2YPb17ODRwCICf/SxCsm6LSlubgeEBM6ae+RgCeYSHMrlz499oaO/QVHOGZFL6v9T/CwoKX37+ywSjwXfzLQvxrpOgRZxy2n3tHO47zCUPXMI3X/wmPf4ezeP5znyunn41/zLnXyhxl/DowUe5a9ddvHDsBQAum3oZM3NnMj1nOuWecmJqjOHwMKqqku88PrVjUAz/1BLkgeAAw+FhQrEQO7p2EEvEaB5u5sjAEQBC8RCxRIwMSwYVngoURSHfmZ+azhKTU+NgI0OhobSPjQUkbqubHEcOvojvdYOXsT2vCl2F5Dny2N65nYuuO4bdHgcUVFWhsVFheBjCg3kYR6qJtc7l/pd2sfbYWtpH2zkyeIR9vftoGW6hdaSVQlchG9o28Njhx967CyDEu0Cmh8Qp53MLP8d1M67jRxt+xPau7XziqU9wQcUFfLT+ozgtztRxtTm15DnzaBlqYUP7Bv6fvfuOk+uqD/7/udN72Z3Z3otWu+q92JIsy1UYY4MBA8ZgWgwxOA8tzgM/AiEJkMDDQ0IglDw4EMcWNrZl3KswtnpvW7Sr7TtbZnd6n7n398dkxxpmBBhUVvJ5++WXtPfeuXPuzGjvd875nu95bfg1NtZu5HPrPscdi+9gPDye676fik4xHh4nkU6QyCQwqU2/tw3BRBCVpCKejuOL+6i31/Pq0KusrV6LN+pFo9JQb6/HrrejltQ0O5tz38JbS1pz5xHF6C4fS8qXoJJUuUURf3fRQsgmYlt0llzPn6IoDAT+Z22r38l3cRldGAwGyi3lmLVmfvz8y3zwhiUQKSMcVjE6qqBSKSiKmlrnPFaUVOLQHee14dfYM7aH8fA4vriP6Vh2vayWkha2Nm29EC+FIPzJRNAiXJbKzGV8+5pvMxwcZtvJbez37Oek9yTXNl7LlsYtuSq4pcZSSgwltLnaODJxhH5/PyOhEdZWr83rVXGb3Fi0Fjq9nYSiIZp02bH/2UURJ8IT2PQ2BgODlJpKCSaCqCU1VdYqZFlGp9ax0L0Qs9acl5Q7m0MgIRVNwhQuH7Pvr91gR1Zk4uk44+HxswYv813zc4trVpgriKajlBhKCnr3qq3VACyoL+O9H/Cz7YVj0L+FYFAilZJYsiTDhz+coaHBCKyizFJGKBGivaSdTfWbsOqs2PV2TLrfH4gLwlwgghbhsjEeHqdEX4JOq8ttq7XV8vm1n6dnpofHuh7jpYGXODF1go11G9lUvwkgN0Oo2dlMOBkmnAwTSUZQFCW3mF0kGaHP38dEeIJwNEwgFeBY/BhOgxMJCYPGgElros5eh0VnodnRnLu5zK79cuaCd3+KjJxBkkRwc6mbXWsqnAwTS8dQFIVAIoBdby8ISPQaPetq1iErMr8d/i0rKldg0xUvWNhW2sa//VOMZ1YkCVbtpaFdz99/vo2NG/OPq7fX88757+TR7kf5z2P/yYcXf5hK67lNHBeE80UELcJlY+39a4mkIty37j4+verTuRLpAPNK5vGZVZ+h39/Pa8Ov8eLAi/xm+DfcMu8WOlwdaFQaPGEPJcYSbmq9ie1d2+n2daNVackoGfxxPw32BmpsNejUOmrMNZS6S6mxZguEnevy+tFUFJPWRDARJBAPUGuvZTAwiEpSFf1mLsxNk5FJEukEtfbagn0WnYV2VzsAJ70naS1pLVjccJZKUrGlYQuSJPFs37MYk4UrQwMYtUY8R1vwhD34435OTD1CKHE9Fp0l7zNaYizhrsV38dXffpXPv/h5fn7zz7Eb7EXPKQhziVSsTPQZfu9O4fyRZZm+vj66urqwWCyEw+GCP4Gz7pv902w2F5w7EokU3T5XxONxdDodKtWb61F4ZPgRnh1/lpSSosxQxrtr3s0i+yKM6vxf8OF0GE/MQ1+4j0OBQ1zlvgq33k2LpQUAWZHZ+tpWZGRcWhdl+jLUKjU2rY0rXFew0bERSZLQarV/9rXKikwkHcGqtTIRn0ArabHr7JwInqDV0oqsyITTYcoN5aTlNJL0h0v6ZzIZ0uk0er3+9x53Mc31z+C5al8kHSGjZLBoLPhSPkp1pUWPkxUZlaTidPg0Zo2ZcsPZa/mE02HS8TSd8U6WOpcWfL5nZZQM4/FxesO92LQ2msxN9IR68Ca9aKXsZ3cwPMjjnsf5SONHuLX61j/7emclEgnUajUazdz8XpxOp7PDtro3vtjcd999BALZ4d6ZmRnq6/Onn7tcLv7qr/6Ke++9l0wmw8c+9jHuu+++vGMSiQR33nknBw4coLS0lG3bttHQ0HDer+cyVfSboAha5ihZlkkkEuzevZv169ezc+fOgj+Bs+5bu3Ytu3btYu3atQXn3rNnD6tWrXrTQcGFcvLkSWpqarDZzr5uz9koisLrw6/z3yf+m6noFBXmCt7e+nY21W8qKLE/HBzm+NRxTBoT9fZ66u31uW+j3979bWLJGGklTZOjCYPGwJrqNVRYKpgcmySUDqG2qZnv+uMLcaXlNGOhMersdZycOonT6MzeqHynWVezjvHwODq1jhJjCclMMq+n6M3w+XxMTk7S1tb2Jz3+Qti9e3fRz+Zcca7bF01F6Z7uZlHZooJy/Geaic1g0BjwxX1k5Ax19rqixx0+fBhLtQWzwUzXTBeb6zef9ZzBRJCD4wfxRr1MRC
YYCY0wFZ1iMDBIKpNCJal4+J0PU2oqHlD9Kfr6+rDZbLjdxXuOLraJiQmi0SiNjcVn5G3YsIEDBw7kbctkMsybN48XXniBmpoaVq1axYMPPkhHR0fumB/84AccPXqUf//3f+ehhx7iscceY9u2bef1Wi5jRYOWuRkGC8KfSJIkrqy7ktXVq9kzuoc9o3v4VfeveGXoFdZWr+WmlptQq9Rk5AxukxubzkaTo4leXy96tR6zzoxNb+O2+bdh1plxGpxoVBoURckFNJFUBJPWhMlYPHExmAhi0VkY8A9g09vwxrzEUjEaHY2MhEaosdXgNDqx6CxYdVbKTGVAfs7LnxqwCHOTSWvKrbB8bPIYLpOraAHC2dliwUQQVOCP+4mn40XzoWqsNai0KurT9ZycOkkikyi6irNNb+Oq+qs4MXWCQCLAmso13NByAwaN4Rxf5eVt7969tLS00NSUTZ6//fbb2b59e17Qsn37dr761a8CcNttt3HPPffk/e4Q/nwiaBEuaRPhCW586EZumXcLX7ryS7nqtjq1jg11G1hZuZIubxevDL3Cq0Ov0unt5MraK9FI2ZLpV9ReAWRvFtOxaXqme6iz15HOpFFJqrwFFgHi6Tidvk6Wli2l1FhKLBVjIjKBVW+le7qbens9J6ZO0OJsIZaOYdQaqbHWkJJTOAyO3LpHZ96wxC+0y4usyPjiPkqNxXsuamw1WLQWJiOTyIpcNCCZ3TYSHMEX9+E0OIln4rlZb7N0ah1NziamolOkM2me7XuWpeVLi55zgXsBDoODB088yD/v/mc+s/IzIo/lTRgdHaW29o3cpJqaGvbs2XPWYzQaDXa7nenpaVwu1wVt6+Vsbo4PCMIfKaNkGI+M88ODP2Trtq083vU4oWQot9+oNbKschl3L7+bOxbegdvk5mdHfsZTfU/lraKs1+ipslZRYizhlYFX0KizQc1s7ZRkJsnukd2cmDqBQW3g1bFXeX34dV4bfo2JyAR9M324TC5sehub6jbR5GxigXsBlZZKLDoLToPzT77GtJzOFSUbCY4wHh7/k88lnH+hZIjR0ChnG3p3Gpxo1VpkRSYtp8nImbMWnaux1bCobBEDgQE6vZ25x/wut8lNpbWSRkcjo6FRfjv0WxLpRMFx1dZqPrHsE4yHx/nLZ/+yoEKuIMx1oqdFuKRVWasY/vQwv+r6FY91PcZPjvyEp/ueZmPdRm5suTH3bXd2SnMyk6TaUg0ShFNhtGptbgrxofFDNDoauXX+rWhVWj7w+AfQq/VYdBaCiSDhVBi7wc46xzoa7A2sqV6DoijoNXqOTR6jylL1Z6/SHElG0Kg0qFVquqe7aXG24Iv7mInP4DA4sOgsYsrzHGfX21lcthiAnukeSk2lRXtdZntDRkOjjIZGcwsjFtNW2oasyBydOEpKTqGleBJ4W2kbgUSA0eAoD5x4gE21m6h31PODAz9gLDyGUWPErDEzE5nhpcGX2Dmykw11G87BVV/+qqurGR4ezv08MjJCdXV10WNqampIp9MEAgFKS89drpAgghbhMqBWqXlPx3u4ed7NHBk/wgv9L7BjaAcHJw5yZc2VLC1fisPgYDIyiVql5oaWGxgODjMTm2HAP0CDowFZkTFrzejUOnRqHT3TPfy699doVVoa7Y24zW7mu+ZTba2m3dZOo70Rg8bAUGAIq96a12vzxwokApg0JiKpCGPhMTpcHQwEBjBpTDQ4GrDqrGhUGiosFbkbnFgw8dJiN9ixaC25dYeKvX/V1mqqLFUEE0GOTh5lbfXaosm6KklFu6sdWZH5ZfcvkWYkllQVfu7sejs2lw232c0L/S/QNd1FS0kLFq2F41PH6Yn0MJWYYn3NetZWz91k6Llm1apVnDp1iv7+fqqrq3nooYf47//+77xjbr75Zv7zP/+TdevW8cgjj3D11VeL4d9zTAQtwmXDoDGwpmYNC8sWctp/ml2ju9g7tpddo7vQqrTc1HpTLqek1lZLKpMinonzysArqCQV62vWY9Jmk2vnlc7j0EcPEYwHsxVsJXK9KENDQ7nnlP7nvz8knAyTklNoVBq6p7tZXrGc3pleSo2llJvLMWmyz9vh6sj9kjvbzBHh0jFbVXkyOklaTp816JQkCYvOQqOjEUVRODx+mAXuBWjV+T0qszPg6sx1lJvKebbvWdZUrykYfpQkCbfJzfs63sfLAy9z2neaZZXL+MjSj5z7i3yL0Gg0fP/73+f6668nk8nwkY98hAULFvCVr3yFlStXcvPNN/PRj36UD37wg7S0tFBSUsJDDz10sZt92RFBi3DZMevMLCpbRJOjib6ZPh44+QDJTJJHux4lnApzXeN1AGjVWnaP7mZx2WJKjaWc8J5gkXtRLmm23FKOL+7DqDXmEnxnDYeGCXvDtLnypxVn5AyesIfBwCBLy5fSPd1NibGEeCZOLBXLJUOqJBXLKpblhnpmC8aJb2WXprScpmu6i2ZHc66K8plqbdnkzMnIJCPBEZZXLi84RiWpqLZWk5bTqFVqFEWhz9dHo6OxYEjQqXNi19tpdjYzEZ7gxOQJrqy7suCckiSxpXELapWabSe34Ql5uKn1JvE5+xNt3bqVrVvz12f6u7/7u9zfDQYDDz/88IVu1luKCFqEy5ZapWYwOMi7578bg8bAcGiYEn12SmkgEeC07zRNzibcJjc6tY41VWsYj4wTiAfo9/dTYixhVdWqvHNm5AyTsUlcRhdui5twMkwoGWI6No1dZ+fY1DEqLZVE01H8CT+V1kpMWlPerI8WZ7aI3Z+am+KP+0nLaVwmMSNhrtCoNLiMLvQaPf64H51al+u1O1OJsQSVpCIjZ+j19dLsbC4YCtKoNCwqW0Q8HWc0OEqVpYpYOlaweKYkSbSWtGaHGbUmnuh5guUVy6myVuU+WyPB7BT7q+qvwm108+UdX+aE9wT3rc8viiYIlwoRtAiXpUgywnOnn6PCUsGyimWoVWoaHY1MRaf4zdBvgOxw0uxic5C9Cdj1dqKpKKWmUsza/Iqo3qiXfl8/vx74NUtdS4mH4tnp0XKaCnMFU9GpXC/P7ysg9maMh8dxmVzZ5MrQKIvLFpPMJHOzmoS5YzbvyBv1YtaZiwYtGpUGl8lFKpPKvY+BRACrzloQxBo0BjbWb2Q6Ns3rw6+ztWUrElJBr59db8eut5OSU/TM9HBq5hSLyxejklTc+8K9aNVayk3lmLQmuqa7OOY9JoIW4ZIlghbhshNOhnni1BO4jC7WVK3J/ZI368xklAzDweFsPosSJ5wMo6Bg1VlJy2kiyQi+uC870yidZM3P1pCRM1h1VoKJICqVCr2kx2108/bmt1Nnr0NCYiw8RmtJK8lMsiBg8YQ9lJvL/2DPynRsmlJjKcOBYTJKhgZHA5ORSQwaAw69I5c7U2YuOz8vnHBOtJRke9ImIhME4oG8Vb1nadVaFpUtAmD36G5aS1qLFpuD7ErkW1u2ctp/muHgMFsathQ9rtnZjNvk5tjUMZ7re47F5Yv58dYf85uh3zDgG6Db102tvZblFYVDU4JwqRBBi3BZycgZH
ul8BIvOwtUNV+d9K42n4xzwHMBhcLCsYhmxVIzJyCTRdBS3yc3xyeO4zW5WVq4EwBPxcNp3GqPaSHt9O831zaysXEmNUoPL5KLGkV0sMZlJZleYNpQU5DNk5AzeqBe73p73zTuUDGHVWRnwD6BVa7Hr7RyfPM76mvVY9JZcjY/F5Ytzj/nd4QHh4vOEPbiMroKEWSDXeyIrMuPhcaqsVUXPsb5mPRqVhkPjh6iwVBQNXjQqDU2O7FDm3tG9DIQGmE/hEhI2vY311etxGVzsHN1JlaWKm1puEhWWhcuGCFqEy8qvT/2aycgk72p/V16PRyqT4uX+l6m0VrK0fCmQLTxX76gnlUnxSNcjLC1bmvuWDFBvr2fqs1P0+nqZjk6zpnoNkJ09NJvIeNp3GpvexorKFQVtGQuNYdFZaC1pBbLDBlPRKVqcLbw+/Drra9Zj0BhQSSosOgsb67ILMTrVf3ohOuHCURSFYCKITWcrGrSYtCZMWhORZARP2EOZuazosOHstnJzOQ69g57pHsrMZQUzjTQqDU6DkyZnE2FtmKdPP801zddg0+ev0SVJEm2uNuwGO491P8bhPYe5c/GdZ+3JEYRLiahSJVw2uqa7eOjkQ2yo34BVZ0VRFE7NnOJ7e7/H51/8PHaDnUVli/JmToSTYQYCAyx0L6SlpKXg5nNs8hilhtKiXeqyIucVp5s1HZsmlAxxYuoEA/5sJdMTUydQSSrUkhqtWsvVDVdj09uosFTkhnvezIyOVCaFL+57My+PcI5JkkRbaRtmnZnTvtOMhcaKHmfWmVlRuQK1pGbv2F5mYjNFj6uyVmHUGknKSdJymqHAUNHqty6TC6feSYujhe7pbvaN7curvtvr6+WlgZfQqDR8ePGHiWfi3PvcvXhCnnNz4YJwEYmeFuGy8a/7/pWeqR4iiQjf3fNdOqc7SaQTKIrC8orlrKpalfdNN5aKMRwcxh/3s65mXd650nKacDKMRtKgUWsKgpnRyChjnrFcvsKxyWNISCQyCTwhD2qVmg5XBwatAZcxO8tHrVJnp8Z6u97U6tCzC65l5Ayn/aept9cTSASYjk7/WcsDCOdOqakUtaQmkU4QSUWKDuVJkkStrRa73s5IcAS7wY5VZy04bqF7IQCvDLyCXqPHpDUVHCdJEvNK5hGSQ4wER/hl5y+5vul67Ho7A/4BHjzxID9N/RS1Sk0qneLlwZf521f/lh+/7cfn5wUQhAtEBC3CZaPOVscTiSf461f+moWuhaysWomsyJQaS7mt/ba8cf20nObE1AmiqSgb6zcWnGswMMhIcIRN9Zty23wxH6+PvM5znc/hT/pZVL2IsdAYDoMDBYWF7oUkMglaSlpIZpK4Te68c6bldG7dmT8kkU7gi/uosFTQNd2FRWeh2lqNWlIjkS0c9rvnFy6MVCZV8B7OTmn3hD34Yr6z5h/NDtH4E9lp0TqVrmhvHcDmhs34436eP/08N7XcVPQ4t8lNiaEEh97Bw50P01baxobaDWyq28TJ6ZO8NvQahyYOsbR8qSgsJ1wWRNAiXDb+ev1fc/fyu1Gr1Fh0FnYO7+TVoVe5tuNaDBoDkB3SSaQTHBw/iKzIRQtyHR4/jN1g58ra7L4lP16CJ+rBprNh09tQK2o6nB0sKlvEVXVXYdAYkCQpNyR0Re0VBavxJtIJDowfYHnF8oLeEVmRiaVimHVmTkydoMJSQUbOMB2bpsJSQZ2tLnfDanI2nadXT/hjZOQMRyaP0F7ajllnLthfaanM1ulJRemZ7mFR2aKCKcrwRm/K7tHduEyuXO2e3+UwOLix+Ub6/H1MRia5qv6qgmPUKjWNzkbeb34/T/U9xS9P/pKtLVtZUraEJWVvfnkJQZjLRNAiXFbshmywMB7Orvx856I7KbeUk8wkeX3kdZ4//Tz+mJ93zn8nV9ZeWfDN9cTUCWx6G3a9PXez0Wl1VJgruL39dq6ou4JGGlGr1XmLpU1Fp7BoLSwpX1KQbBlOhjFoDCwuW4xBYyAtp9GoNJz2nUZRFMKpMLF0jLXVa7Hr7Rg1RkxaUy7XpdjNUbg41Co1C1wLMGqNeKNeDBpD0UUydWodTqMTlaRiMjKJy+Qq2puytHwpOrWOo5NHcRldRWcYmbQmmhxNlJvLean/pVz15N9l1pm5bf5tbDuxjf+77/9yx8I7aCttK3qsIFyqRNAiXJa+u/e72eRXVPzj6//ISe9JTBpTrtjcqqpVedOTFUXBE/YwE5thYdnCvN6QfXftYyQ4gl6jJyNnSM4kC55vLDRGibEkV679TL0zvbnnarA38NLAS2yo20BGyZDMJCkzleWq29bYat7UdRYbqhDOr9n3MpgMoqAUDVo0Kg319vpsMrjvFBqVpuiQ0WwPYImhBJPWRJ+vj3JzecE5DRoDBo2BeaXz8Mf97J3eS5vyRkCSzCT5zp7v4Da5ubbhWgxaA1/+zZd57/z3clvHbefy8gXhohJBi3BZMmvNeCIevvDyF1hWuYwtDVuos9ZxbOoY72h7R8F00snoJDtHdvL21rfnFqU703BwmFpbLTW2GoZm3lgwcSY2w0RkIrfK82zPSSKdYNA/SFJJEk1GWWJbwmBgEL1Gz7qadahQoVVpc9Oh3wxf3IderUev1v/eoQrh/GpyZIfqPCEPkXSk6BCPJElcUXMFAEcmjlBhqcgtonim2WC1d6YXiy67KnSxXpdaWy0lhhIGDANs69rGlqYtVNuqc8sIvD78Og93PoxVZ2X36G4Ojh8UQYtwWRFBi3BZ+sqGr/CRJR/BorPgMDgYC43xi2O/YHPD5txNI5qK0jXdhdvo5vWR13l3+7sL8g+molMcHj/MlsYted37GTlDz0wPwUQQyK7xctp3GqfRSed0J1fWXIkv4aPCVMH80vk4Dc5cb4rT4CScDCNTWIo/mUmiklS5IabZmUO+uA9fzEeTs4mZ2AxmrZkKS0VuqEI4f6KpKMPBYVpLWosO8Vj1VvQafa4k/9lmdLlMLqw6K56wp6DY4KyVVSuJpqK80P8CV+uuRqfWFQTRZp2ZWnMtrZWt7PXsxTJtYV31Oj6+7ON8bOnH6Jru4vnTz5PJZGgtffNBsSDMZSJoES5bs99eZUXmX/b9C02OJlZXrSaVSbHt5DYOjB8AYG3VWrY0bCkIWMbD44QSIeaVzkMlqfCEPPyy85ccGDrARHQCv+yn0d7IBxd9kHg6jsPgoM5WR4O9gYHAAFfUXFE0oOjydlFtq859Uz/TYGCQZCZJpaWSEmMJe0b30Framr15qbM3r2Znc+54EbCcfzq1LlfddjYf6UyzQzmTkUmGAkOsrFpZ9Dyz61z1TPegKEqusODvMmlN3Nx6M13TXYyERrim4ZqiNXwa7A1UOar47dBv+fWpX7OpbhM1thraXe20u9q5d/W9f+6lC8KcI4IW4bL3aNejPNP7DH+/6e/5l73/wq7RXeg1eq5vvJ6knGSBewFuc/704ZnYDIFEgLScptXeyo0P3shrI69h1VupM9XR7mznLxb8BasrV1NprcwLeBRFQafWnXWtIaPGiEZ645/e
7HpFBzwHaHG2EEgEmIhMUGIsyVY21dtRSaqiNT2E80+j0uSGao5MHqHeVl90he0ycxluk5tEOkH3TDcdro6iFXCXViwF4LWh16i2VdPoaCw4ZrYOS42thhf7X8wFI79Lr9ZzY8uNvDb8Gj878jPW1qzl2sZr/8wrFoS5SwQtwmUvmUkyHhnnr3f8NSsrV3JN4zW8e/67ebT7UdxmNwvcC/KOT2VSdHm7MGqNLKtYBsD7Ot6HTq3jG5u/QdqfZiQ6wo1tNxZ8A+6e7iaVSbGwbGFBOwb8A2SUDM3O5mwvTjJELB1jJDRCk6OJ8fA4C90LcRrfGF4QxePmlvbSdowaI9OxafRqfUHCrCRlV2E2aoxISAQSgYLp77M63B3Y9LZssFrSUnCcWqXGqrOyuHwx3qiXZ/ue5fqm6/M+c9/b9z0GA4MsLl9Mnb2OX578Jd3ebu5Zdc+5v3hBmANE0CJc9t6/8P0sr1iOUZNda0hRFH56+KdoVVqua7wOSZKQFZltJ7dxY/ON7Pfsp8RQkgtYAO5ccid3LrmTw+OHGQ4Ns7x8eUHAMhwcpspShYLyu03g1Mwp7Ho7hycOo1FpSKaTqFVqGhwNqCU1FeYKtrZsLRiiGgoM5X3Tn6UoCrF0DJPWxFBgCJWketMzj4Szi2VieKPegh6V2TwUX9yHWWM+68yh1pJW0nKa/Z79rKhYUZD4DW8sgGnUGEHJBrzFpiiXm8ux6+2YtWYeOvkQ66rXISvZfKg7Ft7BzpGd/PrUr+nz9+EJeXi061Funnczdfa6P/dlEIQ5RwQtwlvCmWXzn+17lu6Zbr645otIksSzvc+y/dR2TFoTOpWOCmtF0Z6So5NHcRqdGB2FeSRpOc1p32mWlC/BprcxHBzGpDVxbPIY5eZynj/9PLe03cKiskU4Dc7cVFdFURgNjWLWmYv2qtgN9rxhJk/YQ4mhhGAyyHBwmOUVyyk1lp6Ll0g4Q1JOEklGig4DAbmZQp6Qh3gmXnSIR6PSsKluEypJxaHxQ7SUtBQd4utwdxBIBBgLj1FtrSajZAp6XQwaAw2OBlSSigPjBxj3jlOTqKHKXsVt7bfxrvnvYu/YXh44/gCekCeXPyMIlxsRtAhvKYqi8K1d32J1xWoe6nyI45PHUSSFK2quoM3ZhjfuZaF7YV7Jf8iu5qxChUlrQtJk1xh68tSTHJ04Sq+/l3fPfzcWnYWu6S6CiSB2vZ0SYwklhhLcJjd3LbmrYDXeUDKErMisrlpd0Gvjj/sZD48z3zUfb9TLdHSaBkcDvpgPnVqH2+TOBTliuvO5Z9faqXfUE06GGQmO0FbaVjQZ1qg1olVrycgZEplEwYyg2ZyW2YB4NDRKlaWq4Fx2vZ3N9Zs5NnkMX8zHmuo16NS6guPq7HU4DA5+Pvhznj79NFuat1Brq0WSJNZUr8mtRC4IlysRtAhvKbIiMxAYoNfXy5KyJdzSdgvvbn83p2ZOcWzyGFtbthZ8yw0nw/TM9LCsYlk2kXdgF4FUALVGTbWlmkprJRPRCSx6C83OZiSk3Df0E1Mn8MV9ReuxeKPegm/VswvumbVmen29lJnLUKvUuSCqylpFIpMAKJrkKZxbOrUOq96aW7Dyd4fvZod9hgJDTEWnWFG5ouh52krbSGVSdHo7seqsBQHsrIXuhciKzJOnnqTD3UGzs7kgoXsiMoFL78JtdfPA8QdYUraEG1tu/PMvVhAuAeK3nvCWolapefH9LzIeHmddzTrUKjXj4XGeO/0cm+s3U2GpIJQM8eueX2PQGLip9Sa292znhuYbiKaifG/f99CpdGyu3sztS24nnAqzwL2AlZX501xTmRSd051FZ5Bk5AyDgcFcdz9Ar68Xh95BJBVh39g+NjdsZnXVahwGBypJletVkRUZWS6s7yL8+QKJACaNKa/CsE6tyw7ZyBn2ju1lYdnCokM8tbZaqqxVuZlf80rmFRyjVWu5pvEa/HE/L5x+gc0Nmws+G5IkoZbUbKrfxEBggBf7XyyYjt/p7WTb4DaqIlXotDoeOvkQPTM9Yoqz8JYgghbhLaelpIWWkmxOwmxSbrW1mnXV69gxuIPHux9Hq9Zy2/zb2N6znbXVaykxlFBqLGXHHTswRU2Y9WZOZ05zVdlVRXNKFBQUJfv/74qmopz0nqTMVMbro6+zvmZ97rh6ez06tQ6L1pJXVCwjZxgJjlBrr8375j1bfC6VSXHKd+qsC+8Jf9h4eByHwVG0Yq1apWaBewFWnZWJyAROgzNvCFGSJDSSJu/9TqQTRasr2/Q22krbmI5NMx2dpsPdUXCMw+BgvmY+DoODXxz/Bdc3XU+FuQJJkrh53s3opnWc5CTPDjzLqelTPN7zOCsrVnJF3RXn6NUQhLmpeCEJQXiL+Pmxn/Ns77OYtWbufuZu/nX/v3JlzZV8bePXmInPUGospcHekMstWF65HJ1aR1+gD7fJXfRbd5e3iyMTR1hSvgStWktaTjMSHMET9vBY12P0+fuw6+1Y9BYWly3GorXQWtJKMpOk09tJpaWy4GYnKzLRdJSMnEFWZMLJMADHp45zeDw7I8lpcIohoz9DW2kb5eZyZmIzhFKhgv2zQzqTkUn8cX/RczgMDuaVzGM4OMzesb1Fj1FJKursdUhISJLEaGgUX9xXcJxBY6DeVs/1TdezY3AHLw68SCQZAaDCWMGnln2KJ979BJ9f+3m2Nm8969CUIFxORNAivKXtHd1L10wX39z1TSrMFXx7y7d5V/u72Dmyk0gqwtrqtahVamKpGJDt2ZiKTTEeHaelpAWdWsdYaIxHuh5hz+gehgJDlBhKMGqMTEYmef708/T7+jk2dQybzkaVpYpKSyUb6jYAUGmpzAVEDoMjt7LzmbxRL9F0FIfBQUbJMBGZoGe6BwCX0YVGpUGSJCotlWctaCf88eLpOAk5cdb9i8oWUWYuo3eml6noVNFjaqw1rKhcgTfqpcvbVfSYMnMZ7a52RoOjTEWm8IQ9BcfMvq83td5ELBVje892JiITuf0GjYG/XPmXPHjrgxi0hjd5pYJw6RFfy4S3tO9e91021G1gU/0mKi2VADx/+nmOTBzhriV3YdKaeKr3KR488SD3rb8Pp97Jvql93NRwE0+eepJ9Y/sYi4yhkTRcUXsFRyaOYNQYqbXXZuuvWCpocjbR4GjgteHXWFm1sqB3RlEUXht5jQWuBQVDTb64j2OTx2h3teOL+dCoNFRaKikzleGP+3GZXFRaKy/Y63U5URSFeDpesBRClbWKIX02sTaUDBVdbgGy9VW0Km2uovGZAaMkSZi0JmLpGCpJRTQVJS2niybgrq5ezUxshhcHXuS6xuswa80FK3fvG9tHz3QPpaZSfnLwJ9Sn65nP/IJzCcLlTgQtwluaTq3j9gW3534eCY7wysAr3NhyI+FkmJ8e/imDgUHe2/FeXEYXOwZ3EEgEWPPIGix6C03OJha7F9PoaGQkOMKNTTdSbinPK+wVT8cJJUO0lbZh0eYXI8vIGVJyivml2fyF6dg0pcZSDk8cxqqzEk/HaXA0UGGpoMJSkXucWqV
mNDSKy+QqyMHwJ/y54mPC2fniPgYDg3lFBM+kV+tRtNkcldncoTNV27K1UA56DmIz2IrmE5UaSyk1lnJi6gTRVJRVVauKPleJsYTb5t/GywMvY9PZaHe35wW3VdYqfjP0Gw73HyaQCPBi8EXGdeN84Yov/EnXLgiXKhG0CMIZvv7br+OPZWd3DAQHaHY08+Urv4zb5GZ7z3baXe3s798PEry99e1cVX8VLSUtJNIJSo2l1NvrC25uY6ExJqOTrK1eW/B8/YF+hgJDNNgbMGvN7PfsZ2PtRqqt1SiKQiQZocHekPeYjJwhno4XLD8A2aGkA+MHqFfXn9PX5XJUYizBqrOiKAqT0UnKTGV5751Nb8Omt+EJe/CEPCyvXF70PAvcC9CoNAwFhjDrzEUTsxe4F6AoCrtGd1FmKstb9HKWSlKxoXYDnd5OXu5/mdXVq3O9f/NL5/O1jV/j9eHX+Y8j/8Hukd0c2nmIDyz+QEG1ZEG4nImgRRDO8PrI60xGJgnLYe5eejdvn/d2FEXhyVNPolfrWeheyIfnf5jVlat5z4r3kJEznPKdwp/254p8nanT24lNb8sLWPzxbE/ISe9JVJKKeSXz8MV9mLQmrm28FpWkwh/3o9foWVy+uKCNE5EJJiOTLC5fjIREWk5zwnuCVmcraknN8vLlpIKp8/5aXQ60ai2JdAJP2IND7yg626fMVIZZayYtp/FGvXk9XkDuMfF0HL1GX3QlaMgOGTXYG9Cr9ewZ3cPyiuUFw0Cz77nT6OTJU0+ytHwpKypX5Iaerqi9gitqr+Anhp/QKXfmghpBeKsQQYsgnOGhdz5Ez3QPN7fejEad/efxaPejTIYn+eDiDzIQGOAHJ36ATW/jPbyH4eAwnVOd3NJ2CwAv9L/AVXVXoVVr8cV9GNQGJCQmI5OEkiFCyRBqSU1KTpGW05QaS6mx1eTWDZq9OcXSMVSqwqTaaCqKQWNgUdkiTs2cQqPS0OxspsJcwXRsmkgqQoWhgsngZO4xiXSCjJIpqNb6VpTKpHKJy7P0Gj1Ly5cCMOgfLMgRUqvU2PQ2pqJTjIZGC4KWWfNKs7VZfjv0W6qt1TQ5C3NhKi2VJDNJJCn7mUCioOS+SlJRb6/n6oarebr3aUaCI7yt5W3oNG9MsV7lWsUH53+waJVeQbiciaBFEM6w0L2Qhe431h3q9nazZ3QPty+4nWf7nuWhkw+xyLaIj7Z/lPHwOK8Nv8a1jdfy4MkH+c3gb0gpKZocTdj1dp469RQalYY2VxsT4QlWVq1EI2lwmVz0zPSwqW5TwU0nno5zauYUHa6OvIJiM7EZJEni2MQxpmPT3NR6U95U7HJzOQoKGTlDOBjOO+dUdIp4Op6rTfNW1uvrxa63Fx1SURSFaDpKKlO8l8ptcuM2uQkmsus+FRueA1hSvgSTxsThicPMK5lXECzq1DpWV62mc6qTWDpGOBlmXsm8gs/CSwMvcXD8IIcmDtE9080HFnyAWnvtn3jlgnB5EEHLZSaTyZBOpzl9+jTxeJzu7u6CY+LxOD09PXP2W5rf7yeTyaDT6f7wwefZPa/fg16l5x/H/xFFUthSuYVVllWMeEZ44dgLHPQe5P/t/38Y1UbqLfWsLFnJI3sfYTIxyXzbfBwmBzqfjiZVE/4RPwDdqW5GIiMY/caC9yCZSTIUGUIzo0FCojfYS425hsnYJCklRb2lHrfk5nTv6dxjFEXhmO8Y9ZZ67Do78XicWCxGd3c3GSWDWlKjKArdU4WfhYvlbJ/N8y0jZwhIASYzk2hUGrSq/OEZFSpGpkfomelBfUyNTVc42yeWjjEZn0QzoyElpwrOAdn35HTgNJmp7JpEpYbCPBcVKkjDS5MvMWAdoNxYnl3x+X9cabwSvUPPf5/+b/YN7eP+/ffzvqb3cXvr7YTDYfr6+tBo5uavcL/fTzAYZGZm5mI3pahoNEoqlSKZTBbsMxjE1PG5bG5+4oU/WjqdJp1O09PTQyQSYc+ePaRSKYxGIxqNhrKyInU/vF7cbnfR4Ye5IBqN4nQ6MZsv7kKAiqJwPHgcgJubbube5fdSaa7EM+5hx/gOft73c8Zj49Saa6mx1bCiZgUukwt72s4q4yoWuhYW3NC6fd0E5SAb5m0oeL5TvlPEpBjNVc2MhkZZVrYMv9ZPma0MbVzLWGSM6vLiq/fqHXpsOhsqSUUoFGI0OkqJpYSx0BjzHPMKplnPxGdw6B0Xra7L1NRU0c/mhdLn70OtVlNmLd4G64iV6opq9Gp90deonnrGwmMMBgdZX7G+6DnKy8sZC4/RP9NPe1n7WdvSWNXIjuEdjEljNJobqTC/Mfx0c/nNbGnbwv858H/42Ymf8aPeH/GZKz5DIBDA5XLNicC+mFQqhdlsxuFwXOymFOXz+YjH43mfwQ984AP4fNkif729vaxcmb80h8vl4q/+6q+49957yWQyfOxjH+O+++7LO+b+++/nC1/4AtXV2X+n99xzDx/72MfO89W8tYig5RKjKAoTExPE43EymQz79+8nnU5jt9sxmUysX7+enTt3UllZycDAAE6ns+AcGo0Gp9M5Z4MWj8eDzWbDZiu+qNyF9Mt3/hKT1pRXbXTbiW2ElBBf2fgV/t+R/0eZuYwPLfoQW1u3MuAfoNPbydr6tQXDAqOhUapd1bTr2/MWSRwNjaIoChldhrSUprG8EZvNRom1hLUlawkkApQ6S5knzStI3BwLjeGL+VhQsQBv1MtEdIJyazmlllKq3dVUuaswa815PToZOcNQcogKS8VFWyF69jN4vsmKTCqTKkiwXeFYgSRJBBIBgIJFMivNlVS5q9g/tp9Ka2VB3gmAw+GgPlVPIpMgmAjS6GgsOMbpdNJR08GB8QNoVJpc7szvutV5a7YWS6gHySAx3/VGDRYnTv7Pjf+Hz6z7DJORSZxOJzqdDrvdPmd7BWZmZrBYLBfkPf5TJJPZvKIz2/f000/n/r5hwwb279+f95hMJsO8efN44YUXqKmpYdWqVdx88810dOQvw/De976X73//++f3At7C5uZdS8iRZZmxsTFisRg7d+4kGo0SDAbRaDSYTCbWrl2LwWCgvLx8zg73XMo21G3IC1i6vF0c9R6l0dbIce9xWkpa+Jv1f8PW1q0E4gFeHXqVlVUrC8q8y4qMN+IllUmRkTP4434Ojh/k1Mwpgokgrw+/Tpm5jKvqr0Kv0eflXPT5+hgLj+UFLIqikEgnKDGWMB2bxhP2YNfbKTVlhyFScoqx8BgWnSXvczG7Ns6S8iUXLWC5kCYiE/T5+wq2z74mgXgAf8x/1se3lrZSYa5gPDxOMpM/lCBJEhadhbScJp6Ok5bTZORM0edqdjZTZani5YGXc9WVf9eqqlWc8J7gp4d/ykv9L5GW03n7GxwNrK5e/YcuWThP9u7dS0tLC01NTeh0Om6//Xa2b99+sZv1liOCljnsxIkTxONx4vE4Op2OtWvXYjabaW1tRaPRiCDlIvj8C5/n+Mxx7u+6n2QmyTev/iZLK5aSltM8c/oZ4uk4f/fbv+NvXvmb3GMURWHbyW14Y1
6molP0+/vp9/dTZami1FiKWqVma8tWGh2Necm3iqLQ7+9nadnS3Lf4QCKQm6L7q65f4Yv7WFy+GLfJjVatxW1yA2DRWIrWCxkNjdIz01OwPZwMMx2bPtcv10VXYa6g1dlKRs4QShauJ1Rnr6PeUY836mU8PF6w3663o1apGQuPnfX1KTeX0+5q58jEEY5OHi16jNPgpNRYitPgpN/fz4B/oOhxS8uWcsJ7gr9/7e/521f/9qxrHAkX3ujoKLW1byRC19TUMDo6WnDcr371KxYvXsxtt93G8PDwhWziW4IIWuawBQsWYDKZaGpqQq1Wz9nhnLcKWZY5MHmA3mAv6yrW8e0t36bUWIqsyPzzrn/mweMP8nDXw5yaPkWZuYxD44fYNbqL/zz6n4yGRqm11VJqKmVF5QqWVSyjwlKBQWPAE/KgVWsLcicSmQRj4THimTinfaeRFZl+Xz/jkXEqLZVcWXslJYYSHAZHXl2Qbl83STlJibGk4BoqLZVFhzHi6TihROFN/VInSRJatRZvzEu/v/+sx52tl2TW8orlVFoqOTR+KDek9LsWuBfQ4ergoOdg0TWJ1Co1yyqWoZJUeKNeDo8fLjhma+tWfnDDD6i31/Pfx/+bBT9awIunX/zDFyrMCW9/+9sZGBjg6NGjXHvttXzoQx+62E267Ii7oCD8kVQqFS+87wWeuP4JPtL+kVyvyHUPXsc/7fonZqIzyIqMVW+lwdbAUGCIYDxIqbGUDy78IPNK5+WVeveEPRwcP8im+k0YNPm5Cd6ol8MTh3Eb3dmbXMxLWk6ztGIpFp2FXaO7qLXVFi2GZtPZ0KmyCZonpk7kVhA+OXWSmfhMwXNFU1FKjCU0OBrO5ct10czEZnLDYLPKzeUsci8ilUnlLTg4q8JSQbWtmuHgMH2+wuGkWRadBZ1Kl1tl+0wGjQG9Ro9Ba8j1khUz3zWfKmsVvriPHYM7CoaBGh2N/OztP+Njyz5GNBXlv47/1x9z2cJ5Vl1dnddzMjIykku4nVVaWopen/03+bGPfYwDBw5c0Da+FYigRRDehIVlC/NWYh70D9I304caNf6kH6fByZeu+BJ3LL6DjfUbycgZ2lxtlFvy1wcKJ8OEE2GqLG/kroSSIRLpBCenTtI93Q1K9tu5QWNgddVqdGodiqJg09lodDQWDA/6435Oek8CMB7LDnWUm8tz03ZLTaUFCaeQzZkpdiMf8A9ccsMTaTnNYGCQWLowb0SSJILJYLao21lYddbcatrFel5aS1pRUNgxuOOsuSkdrg7Scpp+fz+xVKzoeaqsVayuWk00FeWfdv0TgXh+740kSXzpii8x87kZ7r/5/j9w1cKFsGrVKk6dOkV/fz/JZJKHHnqIm2++Oe8Yj+eNlbqfeOIJ2tvPPmtM+NOI2UOC8Ge494V7MWqNtJS08K62d/HRpR9Fq9aSyqR4pu8ZrDoriXSCB088yPsWvC/3uInwBBPRCa6svZIB/wAGjYGBwABWnZVAPIBKpSq6VlHndCeJdCK3yJ+iKKTlNFq1lqMTR7HqrVQaKokZsjdUl8lFKBliJDhCu6v4L9DfLWQ3y6AxFK1BMpdpVJrcazMRmcBtcucNu80uYBhLxZiOTecqEc9yGBwADEYHcUw7ihaQM2lNXNN4DZFUhNP+00WPqbJWUWWt4oX+Fyg3l7O4rHA5BrPOTIerg2/v/jZ7R/fyxXVfZH1t/vRpkbc2d2g0Gr7//e9z/fXXk8lk+MhHPsKCBQv4yle+wsqVK7n55pv5l3/5F5544gk0Gg0lJSXcf//9F7vZlx0RtAjCn6FnuodQMsRHr/xo3mrR/3nsP3mq9ylcBhdWg5V11ety+w6OH+Tk1Ek63B0cHD+IXq1HrVKzqnIVU9Ep7Hp70Zk9/rifOlsdEhLhZBijxshoaDS3MnSJsYQF7gX4/f681aS1Km1e0bJZgUSAU9OnChYCzMgZJqOTRde1Odu6OnNNRs7gCXuwaC1FX8tEJlF0iGdWrbGW1pJWxsPjGLXGgh4qg8ZANBXNvRcqSVV0mYQra65kMDDIM73PcGPLjQX7GxwN/PRtP+WvXvgrPvXsp7i28Vq+dfW3RP7aHLV161a2bt2at+3v/u7vcn//xje+wTe+8Y0L3ay3lLn/20cQ5rADHz2AWlLn1oUJJoJ8a+e3eKznMZaXL2csPMY7qt/BxrqNDPgH+O3wb4mn47iMrtzqzWcmzPb6eik3lxf0AAAcnzwOElxZeyX7Pftxm9zUWGtwm9yEkiEMGkPBN/Px8DiyIhcNQKw6K/WO+qIJwNOxacpMZXk9MJFkhM7pTpaWL51TgctkZBKT1oRF90agplapWVq+FEVRGA2NUmmpzLtOh8GBw+BgJjbDVHSKttK2vHNqVVp0ah0zsRlssq3osFqJsYQSYwl7x/aiVWlzPTxnMmqNNDoa0Wv0PNr1KJvqNxXM6mpyNrHt1m188plP8tPDP+WRrkfov+fsScOC8FYmwnlB+DMYtcZcwPLywMvc8NANPNb9GLW2WgLJAAvLFmJSm9g3to+h4BDxdJwlZUvocHfkbnqzdo3uotHRSGtJa95zBBIBXu5/mXZXO5IkZYeHypdRb69HrVJn818gL9cGsr0NGTlDWk5zZPIIkWQkt88T8jAYGMxNkT7zMWpJzUL3woIho9nhjLkUsADEUrGCGiqzMkoGb9RLIp0oul+r1mJQn71AW4e7gxpbDXtH9xadEg2wsnIlC1wLeP7080VnFuk1ehrsDbS72nl9+HWOTh4tSBQ2ao3cf/P9bG3ZmlcRVxCEfCJoEYRz5L6X7+PI5BGGQkN0TnVSZ6vjhqYb2FS/iRuab0Cv1lNqLKV7ppuvv/b1vCCi39+PRWvBorOgKAqKojAYGKR7uptYKsZoeBSNWsMVNVeg1+jzAgqn0ZnXyxBPxwF4dfBVPGEPdfY6lpTlF5Mzao0Fpf0BRkIj9Pp6C7ZPx6Y57T9dMAQSSoaYCBcm8V5I9Y56SowljIfHC2qxaFQalpQvQafW0efrK0iKne1tGgoMcWj80Fmfo9xSjl1vZzhYWHdDJanQaXQ0OBqYic5wauZUwTGSJNHuamdx2WJ+dPBH3PHEHXhCnoLjHrjlAXbdteuPvXRBeMsRQYsgnCM/3vpjlpctx6Fz8L1rv8e/3fBvbG7YTI2thsHAIMenjhNLxXh1+FU+u+azuSAimUky4B8go2SQFZlDE4c4MnkEo8aIWqXmhPcE72l/T8EQRSQZ4aX+lyg3l6NX64mlYiiKwhHvEYLJIGWWMurt9QB5U6P7fH0oKLhMroJrqLXVFgyVAOjV+rw8mVlpOU0iU7wX40KLpWMF04dnyYpMLB0joxSvxVJmLqPOXkckGSma61JvrycpJznpPXnWVaDnlcwDKft+FutNgWwOy8eWfgx/3M9tj97G68Ovv4krFARBBC2CcI4srVjKbz/0W0buHeHW9lsBmIpO8aVXvsRfPvuX9Pv7+e3Ib/n40o+zvGI5iqIQT8d5pPMRvDEvKknFUGCI1pJWWpwtlBpLkZCosdagU+cvjDc7a6jSm
s3VGA2N0untRJIk5jvn44l6aCttw212F7RTJamQKJyVcmLqBJ6wp2D4Zzo2TUbJFAw/JdIJzFozdfa6vO2zSxWcL2OhMYYCQwXbGx2NOA1OhgJDBYGHVq1loXshaTnNscljyIqct9+gMVBqLGUoMHTW+ip2vZ3rm65nMDB41l6ZRkcjtbZaRoIjdE135fWmzVpWsYyHbnkInUrHHdvv4H89/7/+2EsXhLc8EbQIwnmgKArbe7bziac/wf6x/QSTQfZ59nFz680sci/i6MRRjk8d57t7votGpeHg+EH8cT9Lypdg1Vmx6Cz44j46vZ20OFsKEmz3ju1l1+guEukEY6Exam21LCpbBJCbKfS7gUk6k+Y3g7+hylKVm9p7piprVdHS/8FEsGhNkqHgEKOhwjLmA/6BotvPFZveht1QmBg7K62kz9qjolFpsgnLRYI2gHZ3O4vKFjEUHSpa1Ray9W4qLBV0T3cXDUpsehtbW7YyEhzh4ETx6rhWvZXn3vcc5aZyfnzkx/zowI/Oej2CILxBBC2CcI4lM0nu2H4HP9j/A66ovoJ94/sYCgxxfeP1KIrCM33PEEvHiKailJnLGAgMoFVr8xJwfXEfe0b3cH3T9bn8lVgqRp+vj6noFEPBIVZVrWJJ+RIaHA25cvWTkUm6Zrpoc7TlHhdOhvHH/aTkFNPx6YLhHFmROTR+KHdDP1MgEaDeXk+FpTA5tNnZnJsBdaZGZ2PR2U/nikVnwa63MxgYLBocNTmasOlsdHkLezp0al12KnNknAOes1crlf7nv2IJvk6Dk0pLJd6ol5n4zFnL+l/bdC2SIvH86ef56qtfLQj8dBoduz+ym79e89e8f+H7/4grFwRBBC2CcI59+rlP80zvM1Rbq0nLaVwmFzc23sjCsoUsLFvIlsYtrKxcSdd0F6+PvM5AYIC7Ft+Vm5ackTN0TnVSY6tBo9IwGZnkxNQJ4pk4/riftJym2dFMqbG0YLrymTVFJiITpOU0gXiAwcAgg4FB3tn2Tmx6W95jVJKKEmNJ0Vk0PdM9RXsKTk6dxBv1FvQA9fp6CSaCBe06NXMqt5zAm5XMJIs+1mFwFFzLrNkgrljRPMgWmau2VpNIJ4r2ltSaajFrzTzT98xZ67lcUXsF/riffWP7CoabZl1ZdyVV5ip2juzkzl/fmTespSjw3HPw0zu+Tm25i1TxVBlBEM4gghZBOMdunXcrSyqW0OxoZmHZQp5733P8+KYfc23jtdmhDb2dbSe38UzvMyTSCe5afFfeIoZd013s9+zn1aFX2Tu2F7VKjV6jx2lwYtVb6Z3pLSgIB9Az08OR8SO54aGx0BiBRIBqWzX19vqiN9ZwMszRyaPU2erQqvOr32bkDCsqV1BuLi94nNvsLjrEZNVZC3prIBtgmLWFRd7+GP64v2jpfbvejlVnpdvbjSdcOBOn2dkMwH7P/oIpzzq1jgpLBf3+/tyU8d9l1Bq5qv4qIqnIWY9Z6F7I2uq1PHTiobMGZZsbN/MPV/0Dk5FJbn3kVp48eJCFCzVYLHre+U4D09NqUikV//APv/dlEAQBUVxOEM65G1pu4IaWG/K2RZIRvrXrW/jiPu5YcAdff+3rGDVGvnfd91hVtQrILlx4wHOAnSM72TW6ixuabqDOXpcrPe+L+4in4rSUtBQ850hwhEwmg9vsZmJmgnLKc8XOPGEPvpiPDndHweM0Kk3RarmxVIw9o3tYW7M2LwiRFZlTM6dosDcUJAePhkYpM5UVBD+joVHKzeV5Cb4ZJUOvr5cGe8MfrPtSZi6jzFxGPB0nJacKpmq7zK6i1wDZ4MRtcqNVa5EVuaAHqK20DYXs4oY2va0gp8eutxNNRZEVmenYNA69I6/3RpIkLDoLm+o3cWDsANW26qLLJayqWsX7zP/K507eznvHN4P7W9A/m4CrAApPPKHlq18V3S2C8PuIoEUQzrMjE0f47t7vYtKa+Ozqz/KpZz7FWGiM7133PdpK2xgLjXFw/CC19lpe6H8BX9zH/NL53LXkrtxUZUVR8IQ8RNIRFlveWMdmJDiC0+Dk6ORRkpkkt7TdQpmqjMnJN3omtCptXn2VjJyha7oLm85GOBUuepM1ao0sq1xW0GtytgRWRVGy1WP1trygRVZkAokADr0DjS7/141G0pz1fMVMRaaIp+NYS/ODltlA49D4Iaqt1XmznFSSinp7PZ6whz5fH1fWXpl/PVI2dyWWip018Km0VFJpqeTp3qeZXzqfJmdTwTHV1urc6+qL+fDFs0Fio6ORRx+Fj3xESyq1CmwvwSdWgbOPbLDyhmhUdHwLwh8ighZBOI9+cugn/PzYz7mx+Ub+avVfoZbUfHPLNznoOYhFb6Hb241Ba2CBewED/gGOTx6n3FzOp9Z+Kq+2ynBwmG2d2/jk8k8iKzJd3i6anc1Mx6YZC4/R4Gigw1XYk9I93U0ik2Bx2WK8US/BZJAmRxNOgxO9Wk9aKaxrMhGZoN/fX7BgY1pOMxoaLVrHJZKK5GYvnSmVSRW0S1EUwukwDY6G/HMkI8zEZ7Dr7QwFh1jgWpCXM1NrrwWyBe00kgajNj/IqLHV4DQ4i66PVG4uz60XlJJTBTVvOtwdRFNRnu59mk11mwquA+C6puvo8/Xx2vBrBcEPQJ29Dr1Gz87hnWw/tR3fToW93/xHgkNNMBucBRvh297cYyQJLBaFBQtkPvSh8zdNXBAuFyK0F4Tz5NT0Kf73K/8btUpNs6OZbSe28R9H/oMTkydodjbT4epgfe16VlWtQq1S842d32AmPsMdi+7Iq32SyqR4pPMRPCEP23u2k0wniWfiyIpMW2kbVp31rEM8Vr2VZDqbyKpT6zBpsj0usiIzEZnIy6WZVWosLTorKJFO4I16C2qwxNNxDngO5Crxzkpmkuzz7CuYNRNLxxiLjRUUaVPIVgI2a82UmcrOusKxJ+QpmhzsNrlJy2leHXqVaCqat08lqXAanAwHh+n3Fa/DYtKaWFK+hGg6ymSiMIdGo9JQaamk2lrNS/0vFZ0GXm4uZ0vDtQw+fxMv7vUQXH8PlJwuPJcGamoUPvKRFC+8kOTZZ1PceWfxZF5BEN4geloE4TyxG+xUWisxqU0MBAZYUr6E9tJ2ys3lHJg4QHtpe+7GfNNDNzEaGeVfr/1Xrqi9IneO6dg0L55+ke2ntvOhRR9ibfVadBodyyuyibhHJ4/ijXq5uuHqvOdOySme7H2SOlsdVdYqVJIKm96Wm21j1BoL8jsg26MzFZ3KnX9WWk4TSoZYUbmi4DEGjYH1NesLcll0ah0rK1cWDDGZtCbabe0Fw0gKSi5YKzOXMRmZZMA3QI29hiprVe7YeaXzgOxCiU6DM+88Bo2BlZUr0av1+OI+nAZn3nPP9hKdmjlFibGkIIel2lrNgH+AQDJAMpMsyNux6W2YtCa8US/HJo9RZ6/Lmw6+ezds3VpCInEHLJRg01fgjqvhv56BmQXodArl5TJ33JHmrrtkKitBLOgsCH88EbQIwnlSZi7j6MeP5m2LpWL84OAP2O/ZT7OjOXfDc5vcNDmb
cvU6Or2dlBpK+fnxn7NzeCfNzmY+vOTDeYFGz3QPz/c9z2fXfja3bTo2zWBgkJnoDDaHjWXly3ILOs46OXWSRCbBsopleKNenAZnLrnUZXQV3KgBvFEvg4HBgnot/rif41PHWV+zPm97NBWle7q7YMgonAzTPd2dV/wtI2c4NHEIFFhZtTK33aKzUGouRavKD4ZmecIeFJSC2U02vY3R0CiDgcGCdp35nLIioyhKQY9Og6MBj8XDU71PsaZqTV7ABNkel1VVq9jv2U/ndCexVIxAMsAvfgH//rdryA0FHX8faMLwtnvg3bdT+cQRPvCBNJ/8ZIby8uzQkCAIb44IWgThAjntO803dn4jOxS0+Rt5AcDz73+ePn8fh8YPISsyKTmFTq1jx+AOosko377m23kBSzKT5GdHfsZoeBRZkRkJjgDZSqsAKSVFk6OpIGCJpqLY9DaU/0kCHY+M5+q09Pp68cV8udlMs1KZFBqVhjXVawquyaa30VrSWtBrY9AYcJlcqKX8OikmrSnbmyEN5LYpKNj19tw6SbPUkhqXyYUv5iMRTOQK1vX6erHqrCwpX4KsyAz4B6iz1+W1odpaTaWlkqnoFIqiFCxBMN81n5nYDE/3Ps31zdcX5MBIksSmuk1MRaY4Ej3CkvIlBde+snIlRyaOcGTyCH///SE6p7rBbYGpBWQDFxWqo3+BPtHKpmXV/MOTCebNEz0rgvDnEEGLIFwAJ6dOcu8L99LibOGftvwTVp2VVCZF13QXgUSAtJzGF/Oxuno1GpWGcnM5Nz54Iye9J/nG5m8UzFh5/vTzHPMe49a2W5mITOR6R5wGJ8dTx5mITXBj6Y15j0nLaTqnOpmKTeE2uam11bLQvTC3z2VwUWIoKWi7J+xhLDxWsMDidGyaAf9AwZBRPB1nwD9AW2lbXi/GmdsHGMhtDyVDxFIxUpkU09FpKq2VufMHEgHKzeV504xLjaW5QnjJTJKp6BQ2vQ2nwZn3fCpJRTARBCgIWmZfqxWVK3L5MbPF/WaVGEtIZBJICYn9nv2sqFhR0Cuz0LWE226xMqaOQKsfNn8Znvk3VJFq1Gpobpb5m/uu5KabZAyF5WsEQXiTRNAiCBfAXzz9F4yGR/mLpX/Bk6eeJJlJEklGKDeX0+BooMpalXfTlBWZ3aO7qbXX5pV4T2aSjIZG+ebOb7KhdgM3Nt2IUWvM5W7sHt3NgckDbCp5YwZMRs6gVqnZObyTWCrGdc3XFaxA3OXtIpgMFgynxNNx9Bp9wUwiAIvWUjQYUBSF2f9+d0rz7E1/Ojmda5dFa8FtdpOUk0TSb1SnrbJW5Q3NHJs8hlFjpNRUmps5NJvD8srgK7hMLhaXLc57vtkCc/vG9tHoaMwLvCRJosJSQZe3C0mScBldBXk5lZZKDBoDo6FRjk0dY17JvLwcnZYWHZOTHWA3g20Q2p6AWz+Ibvt2br7exNe+lqIufz1JQRD+DKKjUhAugEZHIyaNiZcGX2IiMkGTo4lb2m5Bo9Zg0poKvuWrJBUvfeAlDnwkuz7OVHQKf9zPruFdfPaFz6JRa/jMqs9QZa3KBSypTIqfHP4Jo6FRTBpTrgLuAc8BRoOjuE3uXFLumT0XgUQAu8HOiorCJNuJyASD/sGC7d6ol05vJ7W22rztyUyS8cg47a72vOGajJxhMjKZS4QNJANE01Hi6TiDwUEqLZU4DU5anC1Mx6bpnu7m2OSxvHO7TC5UkopoMn9mkCRJLHAtwKh+YwZVJBlhwD+Q+7nEWIJRYySUDBVcy3zXfCotlTzW81jRGUFOg5Or6q9iODDMAc8BAokAXdNdVG14lcnJ/wnKAvWw+3PgWQ51r+H+zNv4t38TAYsgnGuip0UQLoCfv+PneT9HkhG+vefbTEQm8qY3n2lZxTLGI+PMxGaIp+MYNUae7H2SA+MH+PY1384No8x64PgDjARGuPfKe4mH4+wY3MH6mvXMd81nKDDEcHCYG1tuLHie6eg0hyYOsb5mfV7wFEwEkZBYXb264DEmranoSsvxdJyZ2Az19vq8oCWWjjERmaDaWo1apabJ0oRRY2QslJ36fGZCbCQZQafWFQxHlZvLmY5Oc8p3igpLRS7wOjV9CofRQWtpKyemTmDX2wtmDTU7m+nydjEQGOCG5vxqxZBdfmBLwxa6prtw6B0F+yVJ4m2tb+PF/hd5ffh17v6kikDt4zBph5Fsro8qZafqyI+o2hDi/euuxmQqOI0gCH8m0dMiCBdYOBnmcy99jn5/P59b8zmWlS/L2z8SHOG07zS/7Pxl7pv/isoV1Fhr+Mmhn2DT23hP+3vyHnPad5r/Ov5fbGnYwnBoGIPawJLyJRg0Bl4eeJmdIztZV7Mu7zGpTIoXT7+IJ+xhS8OWgnyWUDJUtB7KVHSKE1MnCmq8pOU0M7EZVlSuKEjMHQmOsLR8aV4Pz3RsmlMzp2h3teflipQYS3LDZ2fWfhkODjMWHqPR0YgkSaTlbGE8u8GeW9eoxFiCRWchlAwVJPZq1Voa7A2cmDrBRGQib58kSZQaS7HpbUTTUYYiQxRzTeM1fOFeO9PRaQhVwqavgnUEjQaammR+/O8yL3/qF3x8+V1FHy8Iwp9HBC2CcAEpisJHn/oo4+FxvrH5GzQ7m7N5KsFRDo4f5OHOh+nz9RFNRVlfs55GRyML3AsAMOvMvH3e29n+ru2586XlNIFEgC++/EViqRifW/s5FpRmjy81luIJe3ii54m8KrD+uJ/emd7cMJHL5MJhcORV4J2MTDIRnihal8WitRT0gkC2Kq4n7CkoPhdOhtk9ursgADJrzZi0JnwxX14xOAkpl0R7ZtBSa6ulw9VBpaWSUzOnODZ5jJnYDBqVJrdMQaWlEo2k4YFjDxQsstjoaKSttA2VpCIjZ5iJzXB04mguWReyPTIWnQVfysfxqeMF13jffTCwYwv460AbAVUS3vaXzO9I8ItfJNm0SRFTmQXhPBLDQ4JwAf361K95ZeAVPrDgA+wa2UUkFSGUDGHVWam2VNPh6qDD1VG0GqxapeYX7/gFkB2GmYxMkpST/HDfD9k3to9/3vLPqFVq7Ho7k2Rv2L849gsi6Qi3tt3KZGQSBQWT1oRWrWUiMsFMfIar6q8qeC4JqaBMPmSDmeNTx9lcvzlve0bOMBWdYm312oK2p+U0H1r8IRSUvIJvGSXDupp19Mz0oJE0NDmbkBUZb8xLi7MFSZKIJCP0+nqx6+2Mh8dzdV+aHE2k5TTd093Y9DZKjG/0Eln0Fj6+/OMkMgk8YQ+VlkpmYjOMh8fpcHfQ7mqnZ6aHY1PHWFK2JG/16dmVmutN9fRO95LOpFlcvpijk0f5rxeP8MN//YvsgQPXQdoCJV+Hlueo3/wXLF7877/3vRcE4c8neloE4QJaXrGcJkcTPTM9nJg6gcPgYHPdZk77T7NvfB8L3AvOWr4essXpDngOMBmZZCQ0QoO9gfuP349WpeU9HflDRt3T3bzY/yJrq9Zi1BrJKBnSchqb3sYjnY/wwPEH2Fi3Me/5JiOTHJs4xqvDrzKvZF7B86tV6lx
AcaZ4Os54aJyUnC3N7416iaaiJNIJdo/uJpwKMxOboW+mD8hW7N07tpdQMsS8knk0OZuYik4RTUWZjEzmhn7SSppUJoVNZ8vr3dGqtejUOqbj0wXF8E5MnSCcDBNMBJmJzQDZQnVO4xt5Lq3OVha7F/Pa0Gt4Y2+sBaSW1GhVWhw6B9c1X0f3TDfbTm7DO5Pih0/uhgW//J8jFaTRdZQf+ycaLPO4umPpWd8zQRDOHdHTIggXUI2tht137c79nJEzfGvnt+iZ6eF/r//fRR8TSUaQkdk7tpc6ax1pOU21tZo6e/bvHaUdfGHtFwoe95XffAW1pObahmtJpBO56cN7RvdwYuoEb2t9Wy7p1BP2YNVZkSSJ8cg4a6vX5uWfQDYx95WBV3hby9vytsuKzGn/aVZVrcoFEP64H1mR0aq1XF1/NTqNDqvOSqmxlHg6TlJOsrl+c66om6zInJo5Rbm5PC8nxq63Y9QY2TG4g/ml8/OedzQ0ilPvxKa30T3dTVtpG5kMaGKVeMZM+L0GAuEU21PP0uKuZPPiNx4vSRKyIlNuKWcyMkk8FafeUU8klZ2GPsQQJq2Jd8x7B/9+4N/57NcyEF0Oy38KU+0QK8FlqOKn32jl6qv3/sH3XRCEc0MELYJwkSiKwpd2fIleXy/fuvpbuZoikB1SiSQjHJk8ggoVkiTRYG+g2lpNa2lr7jiNSsNvP/TbvPP2+nt5ZeAVXht5jS+v/zILyxfm7d/esx2DxsCqylVMRaeosFQQSUbo9HayuGwxKkmFVWctaOtgYJAtDVsKho0URSGaipKW0+jJ5sW0lLSQkTM80vUIm+o25ar/SpJEv7+fV6depSPakZutpJJUrK1ey4v9L5KRM7n1hWbpVLq8cv4zM/DQT+v4/g8amImGUKwyjBvAPAmaJARm67sYwLkYHH3g94KvBVBAktGYS3jgP6opXXOSyehkLj9odrhIURRUkoqJF99HMPIU6IMQs8LVf4Nhx7/xhc+m2LQpv96NIAjnlxgeEoSL5Pv7v88L/S/w2dWfpdnZjKIonJo5xetDr/NU71O8NvIaEhIrq1ayrmYdzc7mgsUHz3Rs8hi+uI9YOsY/Hf8nVKj4xPJP5B3z+vDrHJnIlqWvslYxGholmopSba3mwZMP8sDxB3CZXLmFFWcpikIgHsiV/z/TPs8+TBoT8Uz+Ks+9vl6qLdUFawPV2erY6NqYS56dFUlFaCtpI5rOr8MyGhplbc1avv31MtyrdmN2pKltSfC1/9zN9LQaJeKC8WVQvRtsQyCf2UOkQMQFcTuo46ALZrcZZ0g7OvnoR/V0uDuw6+30zPSg1+ix6W3smdnDF1/+IqPBMf7lp3449DEIVYG9H5pfYPltT/Pxj8uo8zujBEE4z0RPiyBcJNt7tuONetk1uot9nn1EUhHMWjNj4THuXHRntiCd9g8X+zjgOUCFpQKVpEJCYpFrER3ODt614F15QzwZOcP39n6PtJzOlsaX1Ll1hn5y+CcY1UYWuBawqGwR0VQUX8xHta0agFcGX6HCUpGX8ArZXBa73o5WpUWW5bx907Fpamw1RFIR1JI610NzaPwQk8lJDo8fZmXlSsy6bM/G0cmjVFmqWFq+lNHQKDa9jRefsvLR/y9MYlyGmAkc9SBrQVHB1HxMBhUmUwadDmRLOYppGk0qjSEmI8vZmTwxaz9y0oEsSyRbn0HV9W7iCSfpybVIeujthXkt8xgKDPFC/wskM0kW2RZxxHeErf/7IZj8W0AFR+8CWYP6uq/wqfc3oCtcV1IQhPNMBC2CcJF8Yd0X+OnhnxJMBlnkXkSTo4mXB14mkAjgMDjOGrCkMimmY9OMhEZQSSpcJhdmrZlqazbA8MV8/OSKn9DWlq0+KysyyUyS/zryX7w+8jqfXf1Zbp53cy6ZNplJsnN4J+Xmctxmd7aonCQRSAT48as/5oamGzBqjDQ5mgrasnt0Nw69g/aKdtJymsHAIHW2OoaDwzQ6Gqm0VNLl7UKr1tLsbCaUDDHfNR/VqIo1tWtIySlCyRAqVCwpW4JFZwHgjk8E2PuyGQhAah4kbICCLlqPZd4xqDiAc/ptmJHRarOLEKpMdnzWo2iVIHZfHWq1gixDhnkoaR2yApFEKYFlj2NIGVBNLUIJVrFzp4qWFpmN9Rupd9Sze2Q308Fpbpv/HrY9+22o2wxDVwFgH/kAv7zxFq5cUnzlaUEQzi8RtAjCRXJ90/Vc33R97uevvvpVume6+ftNf58LQM6kKAoHxg+gVqmZjExmpxcjFQzlzMrImWwNmPAo3qiXf9z1jyQzST6+/ON5xd8eOvEQE9EJNtVtwqazMRmZZF7pPDoTnYSTYXwxHzX2mrxclqHAUC4RdkFZti5MKpMinAwjKzKxVIyMkq3XMt+VTYD1Rr0MBYdQSapcjZh+fz+RVISMkkEjabjn3Ss4cEANrACDDxpfRDO5ilKHBatVwmAAra4DSWNEqhvCkbShVWnRaiGuC1EhXw+yhqmW5zHF2pEVmYhmhJLwemRZwpG2Yta0ELT0opfiaEwK8Tj0TPfwm6Hf8OHFH8asNfOd7u/wf/8+DjoTrP0uDG1CrYabPtCHod4L5BcEFAThwhBBy2XK7/eTSqUYHR0t2JdMJhkbG/u9U2svpkgkwtTUFKFQ4Toxc0EgEEClOrfpYP/V/V/sGNzBF5d9EUPUwGg0+77Jiszp4GlmEtlS/maNmWZ7Mwt0Cwh7wwCEyH+dguEgE74JJrsnmYnPsNy9nHK5nHc2vhOz1kzIG8o9RlZkHj3+KIFYAEPSgC6qQ4eO0dFRfn3i1+gUHQPjA3QYOhgdHSUjZ3i8/3E2VG5gMjKJWWNmIvNGdVkHDp499izBRJCN1RsZjWSvI5qOsnt8N/WWepodzZxOnWZ0dBQjRpSUwl/94wQ7HtwIsgZQABmDfQaj3kGpswyTKYHBoKDXK8Qt3Wg1EpJlGl3Ej1FjRK1WETZ2IScTmCnDnq5E1nlRy1pcqSbU2giZTDYR2KRREdfFSZccRq2J4vNVcLj/ME+deoql5qVUmau41nUthxY+yVTgCBjCoI5SW6vmU3dIWNO2ov+uLqR4PM74+Dha7dzs8QmFQqRSKZLJ5MVuSlHBYJBkMln0fdSJcb85TQQtlxmv10skEmFwcBBFUchkMkWPy2QyczZomW332dp+sc3mbpyr9imKwv1d97O0dCkLnAuYikzRH+pnIjqBJ+qh0dZInaWOBnMDpYbSvDacKS2nGY+OE4lF6A/2c13ldVQaKlFkBTVqPrPoMwXt/vHxH/Oa5zVubbiVNe41uX2jkVH6/H00WrILPUaT0dxQzsGpg8x3zKfX18t8+3xiyVhuqnNGzuAJe6gwVOQ9j17S02prZSg0hNvgzr3HJ05ouO0TOmjoA91KiGsxGjPY7TIadwKj1YyubA96SrHJdRgMYDGa0ag02FT1TLsOEUeHQ6mlUmlDZ7AAaYxSDV71Sbz6g7jiqzDF28lksq+1FjOmzApCqhkCZfsZp5G1hnIcOgfberbxmcWfwaQ28S+33M2PT6o4Mj
[... remainder of base64-encoded PNG data omitted: the output is the 3D scatter plot produced by the cell below, with camera origins in red, near-bound ray samples (t_n = 0.5) in blue, and far-bound ray samples (t_f = 2.5) in green ...]",
+      "text/plain": [
+       "
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# Check the rendering volume (visualize the camera positions and ray bounds)\n",
+    "\n",
+    "fig = plt.figure(figsize=(10, 10))\n",
+    "ax = fig.add_subplot(111, projection='3d')\n",
+    "ax.set_xlim(-2, 2)\n",
+    "ax.set_ylim(-2, 2)\n",
+    "ax.set_zlim(-2, 2)\n",
+    "ax.view_init(elev=90, azim=90)\n",
+    "\n",
+    "for data in dataset_raw[::16]:\n",
+    "    o, d = camera_params_to_rays(f, cx, cy, data['pose'], width, height)\n",
+    "\n",
+    "    # Camera origin / focal point (red)\n",
+    "    o_x, o_y, o_z = o[0, :1].T\n",
+    "    if np.abs(o_z) > 0.5:\n",
+    "        continue\n",
+    "    ax.scatter(o_x, o_y, o_z, c='red')\n",
+    "\n",
+    "    # Near rendering bound (blue)\n",
+    "    t_n = .5\n",
+    "    x_n, y_n, z_n = (o + d * t_n)[::16, ::16].reshape(-1, 3).T\n",
+    "    ax.scatter(x_n, y_n, z_n, c='blue', s=0.1)\n",
+    "\n",
+    "    # Far rendering bound (green)\n",
+    "    t_f = 2.5\n",
+    "    x_f, y_f, z_f = (o + d * t_f)[::16, ::16].reshape(-1, 3).T\n",
+    "    ax.scatter(x_f, y_f, z_f, c='green', s=0.1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 78,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Flatten every image into per-pixel rays (o_list/d_list/C_list avoid shadowing the imported os module)\n",
+    "o_list = []\n",
+    "d_list = []\n",
+    "C_list = []\n",
+    "\n",
+    "for data in dataset_raw:\n",
+    "    pose = data['pose']\n",
+    "    rgb = data['rgb']\n",
+    "    # Vectorize the camera parameters into rays (the format NeRF consumes)\n",
+    "    o, d = camera_params_to_rays(f, cx, cy, pose, width, height)\n",
+    "    C = (np.array(rgb, dtype=np.float32) / 255.)[:, :, :3]\n",
+    "\n",
+    "    o = o.reshape(-1, 3)\n",
+    "    d = d.reshape(-1, 3)\n",
+    "    C = C.reshape(-1, 3)\n",
+    "\n",
+    "    o_list.append(o)\n",
+    "    d_list.append(d)\n",
+    "    C_list.append(C)\n",
+    "\n",
+    "o_all = np.concatenate(o_list)\n",
+    "d_all = np.concatenate(d_list)\n",
+    "C_all = np.concatenate(C_list)\n",
+    "\n",
+    "dataset = {'o': o_all, 'd': d_all, 'C': C_all}\n",
+    "\n",
+    "# Save so the preprocessing can be skipped next time\n",
+    "np.savez('./dataset.npz', **dataset)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 85,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# For second and later runs (dataset already prepared)\n",
+    "_dataset = np.load('./dataset.npz')\n",
+    "dataset = {'o': _dataset['o'], 'd': _dataset['d'], 'C': _dataset['C']}"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 86,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "n_epoch = 10\n",
+    "batch_size = 2048"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 87,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Training setup\n",
+    "nerf = NeRF(t_n=0., t_f=2.5, c_bg=(1, 1, 1))\n",
+    "loss_func = NeRFLoss(nerf)\n",
+    "\n",
+    "optimizer = torch.optim.Adam(\n",
+    "    loss_func.parameters(),\n",
+    "    lr=3e-4, betas=(0.9, 0.999), eps=1e-7)\n",
+    "\n",
+    "loss_func.cuda('cuda:0')\n",
+    "None"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 88,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # Load a training snapshot\n",
+    "# checkpoint = torch.load('./train.state')\n",
+    "\n",
+    "# loss_func.load_state_dict(checkpoint['model_state_dict'])\n",
+    "# optimizer.load_state_dict(checkpoint['optimizer_state_dict'])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 89,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # Change the learning rate\n",
+    "# for pg in optimizer.param_groups:\n",
+    "#     pg['lr'] = 3e-5"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 90,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "epoch: 1\n"
+     ]
+    },
+    {
+     "ename": "RuntimeError",
+     "evalue": "mat1 and mat2 must have the same dtype",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mRuntimeError\u001b[0m                              Traceback (most recent
call last)",
+      "[traceback frames abridged, originally ANSI-colored: loss = loss_func(o, d, C) -> NeRFLoss.forward -> volume_rendering_with_radiance_field() -> _rgb_and_weight() -> NeRF.forward (h = F.relu(self.layer0(e_x))) -> F.linear(input, self.weight, self.bias) in torch/nn/modules/linear.py, line 114]",
+      "\u001b[0;31mRuntimeError\u001b[0m: mat1 and mat2 must have the same dtype"
+     ]
+    }
+   ],
+   "source": [
+    "n_sample = dataset['o'].shape[0]\n",
+    "\n",
+    "for e in range(1, n_epoch+1):\n",
+    "    print('epoch: {}'.format(e))\n",
+    "    perm = np.random.permutation(n_sample)\n",
+    "    sum_loss = 0.\n",
+    "    sum_loss100 = 0.\n",
+    "\n",
+    "    for i in range(0, n_sample, batch_size):\n",
+    "        # Cast each batch explicitly to float32: a float64 batch reaching\n",
+    "        # nn.Linear raises the 'mat1 and mat2 must have the same dtype' error above.\n",
+    "        o = dataset['o'][perm[i:i+batch_size]].astype(np.float32)\n",
+    "        d = dataset['d'][perm[i:i+batch_size]].astype(np.float32)\n",
+    "        C = dataset['C'][perm[i:i+batch_size]].astype(np.float32)\n",
+    "\n",
+    "        loss = loss_func(o, d, C)\n",
+    "        sum_loss += loss.item() * o.shape[0]\n",
+    "        sum_loss100 += loss.item()\n",
+    "\n",
+    "        if (i // batch_size) % 100 == 99:\n",
+    "            print(sum_loss100 / 100.)\n",
+    "            sum_loss100 = 0.\n",
+    "\n",
+    "        loss_func.zero_grad()\n",
+    "        loss.backward()\n",
+    "        optimizer.step()\n",
+    "\n",
+    "    print('sum loss: {}'.format(sum_loss / n_sample))\n",
+    "\n",
+    "    # save state.\n",
+    "    torch.save({\n",
+    "        'model_state_dict': loss_func.state_dict(),\n",
+    "        'optimizer_state_dict': optimizer.state_dict()\n",
+    "    }, './epoch_{}.state'.format(e))\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Save a training snapshot\n",
+    "torch.save({\n",
+    "    'model_state_dict': loss_func.state_dict(),\n",
+    "    'optimizer_state_dict': optimizer.state_dict()\n",
+    "}, './train.state')\n",
+    "# wandb.finish()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Evaluation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ind = 200\n",
+    "pose = dataset_raw[ind]['pose']\n",
+    "rgb = dataset_raw[ind]['rgb']\n",
+    "\n",
+    "# Rendering at 512 x 512 takes a while, so render at half size\n",
+    "view = {\n",
+    "    'f': f / 2,\n",
+    "    'cx': cx / 2,\n",
+    "    'cy': cy / 2,\n",
+    "    'height': height // 2,\n",
+    "    'width': width // 2,\n",
+    "    'pose': pose\n",
+    "}\n",
+    "\n",
+    "# original size.\n",
+    "# view = {\n",
+    "#     'f': f,\n",
+    "#     'cx': cx,\n",
+    "#     'cy': cy,\n",
+    "#     'height': height,\n",
+    "#     'width': width,\n",
+    "#     'pose': pose\n",
+    "# }\n",
+    "\n",
+
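    "# nerf(view) volume-renders every pixel of the view through both the coarse\n",
+    "# and the fine network, so even this half-resolution render takes a while.\n",
+    "\n",
+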
"C_c, C_f = nerf(view)\n", + "\n", + "# show.\n", + "plt.figure(figsize=(15, 5))\n", + "\n", + "plt.subplot(1, 3, 1)\n", + "plt.title('coarse')\n", + "plt.imshow(C_c)\n", + "\n", + "plt.subplot(1, 3, 2)\n", + "plt.title('fine')\n", + "plt.imshow(C_f)\n", + "\n", + "plt.subplot(1, 3, 3)\n", + "plt.title('original')\n", + "plt.imshow(rgb)\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure(figsize=(15, 15))\n", + "\n", + "for ind, a in enumerate(np.linspace(-np.pi, np.pi, 16)):\n", + " c = np.cos(a)\n", + " s = np.sin(a)\n", + "\n", + " # y軸回り\n", + " R = np.array([[c, 0, -s, 0],\n", + " [0, 1, 0, 0],\n", + " [s, 0, c, 0],\n", + " [0, 0, 0, 1]], dtype=np.float32)\n", + "\n", + "# # z軸回り\n", + "# R = np.array([[c, -s, 0, 0],\n", + "# [s, c, 0, 0],\n", + "# [0, 0, 1, 0],\n", + "# [0, 0, 0, 1]], dtype=np.float32)\n", + "\n", + " _view = view.copy()\n", + " _view['pose'] = R @ view['pose']\n", + "\n", + " C_c, C_f = nerf(_view)\n", + "\n", + " plt.subplot(4, 4, ind+1)\n", + " plt.imshow(C_f)\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "images = []\n", + "\n", + "for ind, a in enumerate(np.linspace(-np.pi, np.pi, 65)[:-1]):\n", + " print(ind, end=', ')\n", + "\n", + " c = np.cos(a)\n", + " s = np.sin(a)\n", + "\n", + " # y軸回り\n", + " R = np.array([[c, 0, -s, 0],\n", + " [0, 1, 0, 0],\n", + " [s, 0, c, 0],\n", + " [0, 0, 0, 1]], dtype=np.float32)\n", + "\n", + "# # z軸回り\n", + "# R = np.array([[c, -s, 0, 0],\n", + "# [s, c, 0, 0],\n", + "# [0, 0, 1, 0],\n", + "# [0, 0, 0, 1]], dtype=np.float32)\n", + "\n", + " _view = view.copy()\n", + " _view['pose'] = R @ view['pose']\n", + "\n", + " C_c, C_f = nerf(_view)\n", + "\n", + " image = Image.fromarray((C_f * 255.).astype(np.uint8))\n", + " images.append(image)\n", + "\n", + "# APNG版\n", + "images[0].save('./greek.png', save_all=True, append_images=images[1:],\n", + " duration=125, loop=0)\n", + "# GIF版\n", + "images[0].save('./greek.gif', save_all=True, append_images=images[1:],\n", + " duration=125, loop=0)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/README.md b/README.md index 27b89ba71..113d3f46d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# NeRF-pytorch +# NeRF-pytorch-360 [NeRF](http://www.matthewtancik.com/nerf) (Neural Radiance Fields) is a method that achieves state-of-the-art results for synthesizing novel views of complex scenes. 
Here are some videos generated by this repository (pre-trained models are provided below): diff --git a/configs/A302.txt b/configs/A302.txt new file mode 100644 index 000000000..948f03808 --- /dev/null +++ b/configs/A302.txt @@ -0,0 +1,16 @@ +expname = TUT-A302 +basedir = ./logs +datadir = ./data/indoor/A302 +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 1024 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/KE101-afrotated.txt b/configs/KE101-afrotated.txt new file mode 100644 index 000000000..9082afe67 --- /dev/null +++ b/configs/KE101-afrotated.txt @@ -0,0 +1,16 @@ +expname = TUT-KE101-rotatedAdd +basedir = ./logs +datadir = ./data/indoor/KE101 +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 128 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/KE101-plain.txt b/configs/KE101-plain.txt new file mode 100644 index 000000000..abd5b2d8a --- /dev/null +++ b/configs/KE101-plain.txt @@ -0,0 +1,16 @@ +expname = TUT-KE101-nerf +basedir = ./logs +datadir = ./data/indoor/KE101 +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 128 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/KE101.txt b/configs/KE101.txt new file mode 100644 index 000000000..71be51d98 --- /dev/null +++ b/configs/KE101.txt @@ -0,0 +1,16 @@ +expname = TUT-KE101 +basedir = ./logs +datadir = ./data/indoor/KE101 +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 1024 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/LAB-old.txt b/configs/LAB-old.txt new file mode 100644 index 000000000..ea43d1e7b --- /dev/null +++ b/configs/LAB-old.txt @@ -0,0 +1,16 @@ +expname = TUT-LAB-nerf +basedir = ./logs +datadir = ./data/indoor/LAB +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 128 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/LAB.txt b/configs/LAB.txt new file mode 100644 index 000000000..af332cd52 --- /dev/null +++ b/configs/LAB.txt @@ -0,0 +1,16 @@ +expname = TUT-LAB +basedir = ./logs +datadir = ./data/indoor/LAB +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 1024 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/Living-4Point.txt b/configs/Living-4Point.txt new file mode 100644 index 000000000..ff116cc08 --- /dev/null +++ b/configs/Living-4Point.txt @@ -0,0 +1,19 @@ +expname = LivingPoint-360-5Frame-x4 +basedir = ./logs +datadir = ./data/LivingPoint-360-5Frame-x4 +dataset_type = blender + +no_batching = True + +use_viewdirs = True +white_bkgd = True +lrate_decay = 500 + +N_samples = 64 +N_importance = 128 +N_rand = 1024 + +precrop_iters = 500 +precrop_frac = 0.5 + +half_res = True diff --git a/configs/Sakura360.txt b/configs/Sakura360.txt new file mode 100644 index 000000000..c1df42b19 --- /dev/null +++ b/configs/Sakura360.txt @@ -0,0 +1,19 @@ +expname = sakura-360 +basedir = ./logs +datadir = ./data/sakura-360 +dataset_type = blender + +no_batching = True + +use_viewdirs = True +white_bkgd = True +lrate_decay = 500 + +N_samples = 64 
+N_importance = 128 +N_rand = 1024 + +precrop_iters = 500 +precrop_frac = 0.5 + +half_res = True diff --git a/configs/apartment_0.txt b/configs/apartment_0.txt new file mode 100644 index 000000000..cc480c109 --- /dev/null +++ b/configs/apartment_0.txt @@ -0,0 +1,16 @@ +expname = apartment +basedir = ./logs +datadir = ./data/indoor/apartment_0 +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/doll-large.txt b/configs/doll-large.txt new file mode 100644 index 000000000..10298b264 --- /dev/null +++ b/configs/doll-large.txt @@ -0,0 +1,16 @@ +expname = TUT-doll-360Padd-L +basedir = ./logs +datadir = ./data/indoor/doll +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 1024 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/fisheye-living.txt b/configs/fisheye-living.txt new file mode 100644 index 000000000..4c25e5dd2 --- /dev/null +++ b/configs/fisheye-living.txt @@ -0,0 +1,19 @@ +expname = fisheye-living +basedir = ./logs +datadir = ./data/living-fisheye +dataset_type = blender + +no_batching = True + +use_viewdirs = True +white_bkgd = True +lrate_decay = 500 + +N_samples = 64 +N_importance = 128 +N_rand = 1024 + +precrop_iters = 500 +precrop_frac = 0.5 + +half_res = True diff --git a/configs/hikage-doll-360-np-arrange.txt b/configs/hikage-doll-360-np-arrange.txt new file mode 100644 index 000000000..37819476f --- /dev/null +++ b/configs/hikage-doll-360-np-arrange.txt @@ -0,0 +1,16 @@ +expname = TUT-hikage-doll-360-np-arr +basedir = ./logs +datadir = ./data/outdoor/hikage-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/hikage-doll-360-np.txt b/configs/hikage-doll-360-np.txt new file mode 100644 index 000000000..e0c267e8a --- /dev/null +++ b/configs/hikage-doll-360-np.txt @@ -0,0 +1,16 @@ +expname = TUT-hikage-doll-360-np +basedir = ./logs +datadir = ./data/outdoor/hikage-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/hikage-doll-360.txt b/configs/hikage-doll-360.txt new file mode 100644 index 000000000..5f36c4ee7 --- /dev/null +++ b/configs/hikage-doll-360.txt @@ -0,0 +1,16 @@ +expname = TUT-hikage-doll-360 +basedir = ./logs +datadir = ./data/outdoor/hikage-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/hikage-doll-nerf.txt b/configs/hikage-doll-nerf.txt new file mode 100644 index 000000000..aeae25ac1 --- /dev/null +++ b/configs/hikage-doll-nerf.txt @@ -0,0 +1,16 @@ +expname = TUT-out-doll-nerf +basedir = ./logs +datadir = ./data/outdoor/hikage-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/hikage-doll-nerfclip.txt b/configs/hikage-doll-nerfclip.txt new file mode 100644 index 000000000..00835b943 --- /dev/null 
+++ b/configs/hikage-doll-nerfclip.txt @@ -0,0 +1,16 @@ +expname = TUT-hikage-doll-NeRFClip +basedir = ./logs +datadir = ./data/360-nerf_FOR_DIFF/doll-hikage_x5 +dataset_type = blender + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/home_living.txt b/configs/home_living.txt new file mode 100644 index 000000000..5a34ecfb8 --- /dev/null +++ b/configs/home_living.txt @@ -0,0 +1,19 @@ +expname = HomeLiving +basedir = ./logs +datadir = ./data/nerf_synthetic/living +dataset_type = blender + +no_batching = True + +use_viewdirs = True +white_bkgd = True +lrate_decay = 500 + +N_samples = 64 +N_importance = 128 +N_rand = 1024 + +precrop_iters = 500 +precrop_frac = 0.5 + +half_res = True diff --git a/configs/in-doll-addroted-360-arr.txt b/configs/in-doll-addroted-360-arr.txt new file mode 100644 index 000000000..87b228817 --- /dev/null +++ b/configs/in-doll-addroted-360-arr.txt @@ -0,0 +1,16 @@ +expname = TUT-in-doll-360-radd +basedir = ./logs +datadir = ./data/indoor/doll-rotateadd +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/room.txt b/configs/indoor.txt similarity index 60% rename from configs/room.txt rename to configs/indoor.txt index 90d3c3256..9e91356a9 100644 --- a/configs/room.txt +++ b/configs/indoor.txt @@ -1,7 +1,7 @@ -expname = room_test +expname = indoor_360 basedir = ./logs -datadir = ./data/nerf_llff_data/room -dataset_type = llff +datadir = ./data/indoor/apartment_0/train +dataset_type = synth360 factor = 8 llffhold = 8 diff --git a/configs/local-360nop.txt b/configs/local-360nop.txt new file mode 100644 index 000000000..cddd45f16 --- /dev/null +++ b/configs/local-360nop.txt @@ -0,0 +1,16 @@ +expname = TUT-local-360noP +basedir = ./logs +datadir = ./data/indoor/local +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 128 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/local-nerf.txt b/configs/local-nerf.txt new file mode 100644 index 000000000..ec9b68523 --- /dev/null +++ b/configs/local-nerf.txt @@ -0,0 +1,16 @@ +expname = TUT-local-nerf +basedir = ./logs +datadir = ./data/indoor/local +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 128 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/office_1.txt b/configs/office_1.txt new file mode 100644 index 000000000..ffe9ce080 --- /dev/null +++ b/configs/office_1.txt @@ -0,0 +1,16 @@ +expname = office +basedir = ./logs +datadir = ./data/indoor/office_1 +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/office_2.txt b/configs/office_2.txt new file mode 100644 index 000000000..9e2784833 --- /dev/null +++ b/configs/office_2.txt @@ -0,0 +1,17 @@ +expname = office_nomal_2 +basedir = ./logs +datadir = ./data/indoor/office_1 +dataset_type = synth360 + +factor = 1 +llffhold = 8 + +N_rand = 1024 +N_samples = 1532 +N_importance = 32 + + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git 
a/configs/out-doll-360-arrange.txt b/configs/out-doll-360-arrange.txt new file mode 100644 index 000000000..a1ee23985 --- /dev/null +++ b/configs/out-doll-360-arrange.txt @@ -0,0 +1,16 @@ +expname = TUT-out-doll-360-arr +basedir = ./logs +datadir = ./data/outdoor/out-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/out-doll-360-np.txt b/configs/out-doll-360-np.txt new file mode 100644 index 000000000..1f46a98ca --- /dev/null +++ b/configs/out-doll-360-np.txt @@ -0,0 +1,16 @@ +expname = TUT-out-doll-360-np +basedir = ./logs +datadir = ./data/outdoor/out-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/out-doll-360.txt b/configs/out-doll-360.txt new file mode 100644 index 000000000..6d8177015 --- /dev/null +++ b/configs/out-doll-360.txt @@ -0,0 +1,16 @@ +expname = TUT-out-doll-360 +basedir = ./logs +datadir = ./data/outdoor/out-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/out-doll-nerf.txt b/configs/out-doll-nerf.txt new file mode 100644 index 000000000..c1f8a88d1 --- /dev/null +++ b/configs/out-doll-nerf.txt @@ -0,0 +1,16 @@ +expname = TUT-out-doll-nerf +basedir = ./logs +datadir = ./data/outdoor/out-table +dataset_type = synth360 + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/out-doll-nerfclip.txt b/configs/out-doll-nerfclip.txt new file mode 100644 index 000000000..58d82f0b9 --- /dev/null +++ b/configs/out-doll-nerfclip.txt @@ -0,0 +1,16 @@ +expname = TUT-out-doll-NeRFClip +basedir = ./logs +datadir = ./data/360-nerf_FOR_DIFF/doll-out_x5 +dataset_type = blender + +factor = 8 +llffhold = 8 + +N_rand = 1024 +N_samples = 64 +N_importance = 64 + +use_viewdirs = True +raw_noise_std = 1e0 + +no_batching = True \ No newline at end of file diff --git a/configs/waterbskt.txt b/configs/waterbskt.txt new file mode 100644 index 000000000..29694da20 --- /dev/null +++ b/configs/waterbskt.txt @@ -0,0 +1,19 @@ +expname = waterbskt-tape-360-3frame +basedir = ./logs +datadir = ./data/waterbskt-tape-360-3frame +dataset_type = blender + +no_batching = True + +use_viewdirs = True +white_bkgd = True +lrate_decay = 500 + +N_samples = 64 +N_importance = 128 +N_rand = 1024 + +precrop_iters = 500 +precrop_frac = 0.5 + +half_res = True diff --git a/load_blender.py b/load_blender.py index 99daf8f1a..e79049a3c 100644 --- a/load_blender.py +++ b/load_blender.py @@ -54,7 +54,9 @@ def load_blender_data(basedir, half_res=False, testskip=1): skip = testskip for frame in meta['frames'][::skip]: - fname = os.path.join(basedir, frame['file_path'] + '.png') + + # fname = os.path.join(basedir, frame['file_path'] + '.png') + fname = os.path.join(basedir, frame['file_path'] ) imgs.append(imageio.imread(fname)) poses.append(np.array(frame['transform_matrix'])) imgs = (np.array(imgs) / 255.).astype(np.float32) # keep all 4 channels (RGBA) @@ -81,7 +83,10 @@ def load_blender_data(basedir, half_res=False, testskip=1): imgs_half_res = np.zeros((imgs.shape[0], H, 
W, 4))
         for i, img in enumerate(imgs):
-            imgs_half_res[i] = cv2.resize(img, (W, H), interpolation=cv2.INTER_AREA)
+            img_half_res = cv2.resize(img, (W, H), interpolation=cv2.INTER_AREA)
+            img_half_res_RGBA = cv2.cvtColor(img_half_res, cv2.COLOR_BGR2RGBA)
+            img_half_res_BGRA = img_half_res_RGBA[:, :, [2, 1, 0, 3]]
+            imgs_half_res[i] = img_half_res_BGRA
         imgs = imgs_half_res
         # imgs = tf.image.resize_area(imgs, [400, 400]).numpy()
diff --git a/load_llff.py b/load_llff.py
index 98b791637..f7a002b6c 100644
--- a/load_llff.py
+++ b/load_llff.py
@@ -60,8 +60,9 @@ def _minify(basedir, factors=[], resolutions=[]):
 def _load_data(basedir, factor=None, width=None, height=None, load_imgs=True):
-    
+    # width, height, and load_imgs are optional
     poses_arr = np.load(os.path.join(basedir, 'poses_bounds.npy'))
+    # poses: drop the last 2 values of each row, reshape into [-1, 3, 5] (one 3x5 matrix per image), then transpose to axis order (1, 2, 0)
     poses = poses_arr[:, :-2].reshape([-1, 3, 5]).transpose([1,2,0])
     bds = poses_arr[:, -2:].transpose([1,0])
@@ -70,7 +71,7 @@ def _load_data(basedir, factor=None, width=None, height=None, load_imgs=True):
     sh = imageio.imread(img0).shape
     sfx = ''
-    
+    # downsample the source images
     if factor is not None:
         sfx = '_{}'.format(factor)
         _minify(basedir, factors=[factor])
@@ -106,10 +107,11 @@ def _load_data(basedir, factor=None, width=None, height=None, load_imgs=True):
         return poses, bds
     def imread(f):
-        if f.endswith('png'):
-            return imageio.imread(f, ignoregamma=True)
-        else:
-            return imageio.imread(f)
+        # if f.endswith('png'):
+        #     return imageio.imread(f, ignoregamma=True)
+        # else:
+        #     return imageio.imread(f)
+        return imageio.imread(f)
     imgs = imgs = [imread(f)[...,:3]/255. for f in imgfiles]
     imgs = np.stack(imgs, -1)
diff --git a/load_synth360.py b/load_synth360.py
new file mode 100644
index 000000000..3fcb06f59
--- /dev/null
+++ b/load_synth360.py
@@ -0,0 +1,106 @@
+import os
+import numpy as np
+import cv2
+import torch
+def _load_data(basedir):
+
+    imgdir = os.path.join(basedir, 'images')
+
+    def imread(f):
+        return cv2.cvtColor(cv2.imread(f), cv2.COLOR_BGR2RGB)
+
+    poses = []
+    imgs = []
+    with open(os.path.join(basedir, 'poses.txt')) as f:
+        for line in f.readlines():
+            line = line.rstrip()
+            line = line.split(" ")
+            print(len(line))
+            pose = np.array([
+                [line[1], line[2], line[3], line[10]],
+                [line[4], line[5], line[6], line[11]],
+                [line[7], line[8], line[9], line[12]],
+                [0, 0, 0, 1]
+            ])
+            poses.append(pose)
+            img = imread(os.path.join(imgdir, line[0]+".png"))/255.
+            img = cv2.resize(img, (640, 320))
+            # img = cv2.resize(img, (320, 160))
+            imgs.append(img)
+
+    imgs = np.array(imgs).astype(np.float32)
+    poses = np.array(poses).astype(np.float32)
+
+    return poses, imgs
+def normalize(x):
+    return x / np.linalg.norm(x)
+
+def viewmatrix(z, up, pos):
+    vec2 = normalize(z)
+    vec1_avg = up
+    vec0 = normalize(np.cross(vec1_avg, vec2))
+    vec1 = normalize(np.cross(vec2, vec0))
+    m = np.stack([vec0, vec1, vec2, pos], 1)
+    return m
+def poses_avg(poses):
+
+    hwf = poses[0, :3, -1:]
+
+    center = poses[:, :3, 3].mean(0)
+    vec2 = normalize(poses[:, :3, 2].sum(0))
+    up = poses[:, :3, 1].sum(0)
+    c2w = np.concatenate([viewmatrix(vec2, up, center), hwf], 1)
+
+    return c2w
+def render_path_spiral(c2w, up, rads, focal, zdelta, zrate, rots, N):
+    render_poses = []
+    rads = np.array(list(rads) + [1.])
+    hwf = c2w[:,4:5]
+
+    for theta in np.linspace(0., 2.
* np.pi * rots, N+1)[:-1]:
+        c = np.dot(c2w[:3,:4], np.array([np.cos(theta), -np.sin(theta), -np.sin(theta*zrate), 1.]) * rads)
+        z = normalize(c - np.dot(c2w[:3,:4], np.array([0,0,-focal, 1.])))
+        render_poses.append(np.concatenate([viewmatrix(z, up, c), hwf], 1))
+    return render_poses
+# def load_synth360_data(basedir):
+#     poses, images = _load_data(basedir)
+
+#     bds = np.array([images.shape[1], images.shape[2], None])
+
+#     return images, poses, bds
+def load_synth360_data(basedir):
+    train = basedir+'/train/'
+    test = basedir+'/test/'
+    t_poses, t_images = _load_data(train)
+    l_poses, l_images = _load_data(test)
+    images = np.concatenate([t_images,l_images],0)
+    poses = np.concatenate([t_poses,l_poses],0)
+    bds = np.array([images.shape[1], images.shape[2], None])
+
+    # Convert back into the NeRF coordinate convention
+    # poses = np.concatenate([poses[:, 1:2, :], -poses[:, 0:1, :], poses[:, 2:, :]], 1)
+    # poses = np.moveaxis(poses, -1, 0).astype(np.float32)
+    # imgs = np.moveaxis(images, -1, 0).astype(np.float32)
+    # images = imgs
+
+    # bds = np.moveaxis(bds, -1, 0).astype(np.float32)
+
+    # print(images.shape,poses.shape)
+    i_test = np.array([i for i in range(t_images.shape[0],l_images.shape[0]+t_images.shape[0])])
+    # i_test = l_images.shape
+    # This block only builds a spiral render path for visual checks, so it does not need to be exact
+    c2w_path = poses_avg(poses)
+    up = normalize(poses[:, :3, 1].sum(0))
+    tt = poses[:,:3,3] # ptstocam(poses[:3,3,:].T, c2w).T
+    rads = np.percentile(np.abs(tt), 90, 0)
+    dt = .75
+    # close_depth, inf_depth = np.ravel(bds).min()*.9, np.ravel(bds).max()*5.
+    # mean_dz = 1./(((1.-dt)/close_depth + dt/inf_depth))
+    focal = 1
+    zdelta = images.shape[1]*.9 * .2
+    N_rots = 2
+    N_views = 120
+    render_poses = render_path_spiral(c2w_path, up, rads, focal, zdelta, zrate=.5, rots=N_rots, N=N_views)
+    render_poses = np.array(render_poses).astype(np.float32)
+    #
+    return images, poses, bds, render_poses, i_test
\ No newline at end of file
diff --git a/run_nerf-new-np.py b/run_nerf-new-np.py
new file mode 100644
index 000000000..a7d302b4c
--- /dev/null
+++ b/run_nerf-new-np.py
@@ -0,0 +1,950 @@
+import os, sys
+import numpy as np
+import imageio
+import json
+import random
+import time
+from load_synth360 import load_synth360_data
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from tqdm import tqdm, trange
+
+import matplotlib.pyplot as plt
+
+from run_nerf_helpers_new_np import *
+
+from load_llff import load_llff_data
+from load_deepvoxels import load_dv_data
+from load_blender import load_blender_data
+from load_LINEMOD import load_LINEMOD_data
+
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+np.random.seed(0)
+DEBUG = False
+
+
+def batchify(fn, chunk):
+    """Constructs a version of 'fn' that applies to smaller batches.
+    """
+    if chunk is None:
+        return fn
+    def ret(inputs):
+        return torch.cat([fn(inputs[i:i+chunk]) for i in range(0, inputs.shape[0], chunk)], 0)
+    return ret
+
+
+def run_network(inputs, viewdirs, fn, embed_fn, embeddirs_fn, netchunk=1024*64):
+    """Prepares inputs and applies network 'fn'.
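+    Flattens the [..., 3] sample points, positionally embeds them with embed_fn
+    (plus the broadcast viewing directions via embeddirs_fn when given), runs
+    'fn' over netchunk-sized slices through batchify, and reshapes the flat
+    outputs back to the input ray layout.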
+ """ + inputs_flat = torch.reshape(inputs, [-1, inputs.shape[-1]]) + embedded = embed_fn(inputs_flat) + + if viewdirs is not None: + input_dirs = viewdirs[:,None].expand(inputs.shape) + input_dirs_flat = torch.reshape(input_dirs, [-1, input_dirs.shape[-1]]) + embedded_dirs = embeddirs_fn(input_dirs_flat) + embedded = torch.cat([embedded, embedded_dirs], -1) + + outputs_flat = batchify(fn, netchunk)(embedded) + outputs = torch.reshape(outputs_flat, list(inputs.shape[:-1]) + [outputs_flat.shape[-1]]) + return outputs + + +def batchify_rays(rays_flat, chunk=1024*32, **kwargs): + """Render rays in smaller minibatches to avoid OOM. + """ + all_ret = {} + for i in range(0, rays_flat.shape[0], chunk): + ret = render_rays(rays_flat[i:i+chunk], **kwargs) + for k in ret: + if k not in all_ret: + all_ret[k] = [] + all_ret[k].append(ret[k]) + + all_ret = {k : torch.cat(all_ret[k], 0) for k in all_ret} + return all_ret + + +def render(H, W, K, chunk=1024*32, rays=None, c2w=None, ndc=True, + near=0., far=1., + use_viewdirs=False, c2w_staticcam=None, + **kwargs): + """Render rays + Args: + H: int. Height of image in pixels. + W: int. Width of image in pixels. + focal: float. Focal length of pinhole camera. + chunk: int. Maximum number of rays to process simultaneously. Used to + control maximum memory usage. Does not affect final results. + rays: array of shape [2, batch_size, 3]. Ray origin and direction for + each example in batch. + c2w: array of shape [3, 4]. Camera-to-world transformation matrix. + ndc: bool. If True, represent ray origin, direction in NDC coordinates. + near: float or array of shape [batch_size]. Nearest distance for a ray. + far: float or array of shape [batch_size]. Farthest distance for a ray. + use_viewdirs: bool. If True, use viewing direction of a point in space in model. + c2w_staticcam: array of shape [3, 4]. If not None, use this transformation matrix for + camera while using other c2w argument for viewing directions. + Returns: + rgb_map: [batch_size, 3]. Predicted RGB values for rays. + disp_map: [batch_size]. Disparity map. Inverse of depth. + acc_map: [batch_size]. Accumulated opacity (alpha) along a ray. + extras: dict with everything returned by render_rays(). 
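+
+    Example (hypothetical tensors, mirroring how a training script would batch rays):
+        batch_rays = torch.stack([rays_o, rays_d], 0)   # [2, N_rand, 3]
+        rgb, disp, acc, extras = render(H, W, K, chunk=1024*32,
+                                        rays=batch_rays, **render_kwargs_train)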
+ """ + if c2w is not None: + # special case to render full image + rays_o, rays_d = get_rays(H, W, K, c2w) + else: + # use provided ray batch + rays_o, rays_d = rays + + if use_viewdirs: + # provide ray directions as input + viewdirs = rays_d + if c2w_staticcam is not None: + # special case to visualize effect of viewdirs + rays_o, rays_d = get_rays(H, W, K, c2w_staticcam) + viewdirs = viewdirs / torch.norm(viewdirs, dim=-1, keepdim=True) + viewdirs = torch.reshape(viewdirs, [-1,3]).float() + + sh = rays_d.shape # [..., 3] + if ndc: + # for forward facing scenes + rays_o, rays_d = ndc_rays(H, W, K[0][0], 1., rays_o, rays_d) + + # Create ray batch + rays_o = torch.reshape(rays_o, [-1,3]).float() + rays_d = torch.reshape(rays_d, [-1,3]).float() + + near, far = near * torch.ones_like(rays_d[...,:1]), far * torch.ones_like(rays_d[...,:1]) + rays = torch.cat([rays_o, rays_d, near, far], -1) + # save_tensor_to_npz(rays,"rays_all_info") + if use_viewdirs: + rays = torch.cat([rays, viewdirs], -1) + + # Render and reshape + all_ret = batchify_rays(rays, chunk, **kwargs) + for k in all_ret: + k_sh = list(sh[:-1]) + list(all_ret[k].shape[1:]) + all_ret[k] = torch.reshape(all_ret[k], k_sh) + + k_extract = ['rgb_map', 'disp_map', 'acc_map'] + ret_list = [all_ret[k] for k in k_extract] + ret_dict = {k : all_ret[k] for k in all_ret if k not in k_extract} + return ret_list + [ret_dict] + + +def render_path(render_poses, hwf, K, chunk, render_kwargs, gt_imgs=None, savedir=None, render_factor=0): + + H, W, focal = hwf + + if render_factor!=0: + # Render downsampled for speed + H = H//render_factor + W = W//render_factor + focal = focal/render_factor + + rgbs = [] + disps = [] + + t = time.time() + for i, c2w in enumerate(tqdm(render_poses)): + print(i, time.time() - t) + t = time.time() + rgb, disp, acc, _ = render(H, W, K, chunk=chunk, c2w=c2w[:3,:4], **render_kwargs) + rgbs.append(rgb.cpu().numpy()) + disps.append(disp.cpu().numpy()) + if i==0: + print(rgb.shape, disp.shape) + + """ + if gt_imgs is not None and render_factor==0: + p = -10. * np.log10(np.mean(np.square(rgb.cpu().numpy() - gt_imgs[i]))) + print(p) + """ + + if savedir is not None: + rgb8 = to8b(rgbs[-1]) + filename = os.path.join(savedir, '{:03d}.png'.format(i)) + imageio.imwrite(filename, rgb8) + + + rgbs = np.stack(rgbs, 0) + disps = np.stack(disps, 0) + + return rgbs, disps + + +def create_nerf(args): + """Instantiate NeRF's MLP model. 
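+    Builds the coarse model (and a fine model when N_importance > 0), wires the
+    positional-encoding embedders into network_query_fn, reloads the newest
+    checkpoint unless no_reload is set, and returns
+    (render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer).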
+ """ + embed_fn, input_ch = get_embedder(args.multires, args.i_embed) + + input_ch_views = 0 + embeddirs_fn = None + if args.use_viewdirs: + embeddirs_fn, input_ch_views = get_embedder(args.multires_views, args.i_embed) + output_ch = 5 if args.N_importance > 0 else 4 + skips = [4] + model = NeRF(D=args.netdepth, W=args.netwidth, + input_ch=input_ch, output_ch=output_ch, skips=skips, + input_ch_views=input_ch_views, use_viewdirs=args.use_viewdirs).to(device) + grad_vars = list(model.parameters()) + + model_fine = None + if args.N_importance > 0: + model_fine = NeRF(D=args.netdepth_fine, W=args.netwidth_fine, + input_ch=input_ch, output_ch=output_ch, skips=skips, + input_ch_views=input_ch_views, use_viewdirs=args.use_viewdirs).to(device) + grad_vars += list(model_fine.parameters()) + + network_query_fn = lambda inputs, viewdirs, network_fn : run_network(inputs, viewdirs, network_fn, + embed_fn=embed_fn, + embeddirs_fn=embeddirs_fn, + netchunk=args.netchunk) + + # Create optimizer + optimizer = torch.optim.Adam(params=grad_vars, lr=args.lrate, betas=(0.9, 0.999)) + + start = 0 + basedir = args.basedir + expname = args.expname + + ########################## + + # Load checkpoints + if args.ft_path is not None and args.ft_path!='None': + ckpts = [args.ft_path] + else: + ckpts = [os.path.join(basedir, expname, f) for f in sorted(os.listdir(os.path.join(basedir, expname))) if '.tar' in f] + + print('Found ckpts', ckpts) + if len(ckpts) > 0 and not args.no_reload: + ckpt_path = ckpts[-1] + print('Reloading from', ckpt_path) + ckpt = torch.load(ckpt_path) + + start = ckpt['global_step'] + optimizer.load_state_dict(ckpt['optimizer_state_dict']) + + # Load model + model.load_state_dict(ckpt['network_fn_state_dict']) + if model_fine is not None: + model_fine.load_state_dict(ckpt['network_fine_state_dict']) + + ########################## + + render_kwargs_train = { + 'network_query_fn' : network_query_fn, + 'perturb' : args.perturb, + 'N_importance' : args.N_importance, + 'network_fine' : model_fine, + 'N_samples' : args.N_samples, + 'network_fn' : model, + 'use_viewdirs' : args.use_viewdirs, + 'white_bkgd' : args.white_bkgd, + 'raw_noise_std' : args.raw_noise_std, + } + + # NDC only good for LLFF-style forward facing data + if args.dataset_type != 'llff' or args.no_ndc: + print('Not ndc!') + render_kwargs_train['ndc'] = False + render_kwargs_train['lindisp'] = args.lindisp + + render_kwargs_test = {k : render_kwargs_train[k] for k in render_kwargs_train} + render_kwargs_test['perturb'] = False + render_kwargs_test['raw_noise_std'] = 0. + + return render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer + + +def raw2outputs(raw, z_vals, rays_d, raw_noise_std=0, white_bkgd=False, pytest=False): + """Transforms model's predictions to semantically meaningful values. + Args: + raw: [num_rays, num_samples along ray, 4]. Prediction from model. + z_vals: [num_rays, num_samples along ray]. Integration time. + rays_d: [num_rays, 3]. Direction of each ray. + Returns: + rgb_map: [num_rays, 3]. Estimated RGB color of a ray. + disp_map: [num_rays]. Disparity map. Inverse of depth map. + acc_map: [num_rays]. Sum of weights along each ray. + weights: [num_rays, num_samples]. Weights assigned to each sampled color. + depth_map: [num_rays]. Estimated distance to object. 
+ """ + raw2alpha = lambda raw, dists, act_fn=F.relu: 1.-torch.exp(-act_fn(raw)*dists) + + dists = z_vals[...,1:] - z_vals[...,:-1] + dists = torch.cat([dists, torch.Tensor([1e10]).expand(dists[...,:1].shape)], -1) # [N_rays, N_samples] + + dists = dists * torch.norm(rays_d[...,None,:], dim=-1) + + rgb = torch.sigmoid(raw[...,:3]) # [N_rays, N_samples, 3] + noise = 0. + if raw_noise_std > 0.: + noise = torch.randn(raw[...,3].shape) * raw_noise_std + + # Overwrite randomly sampled data if pytest + if pytest: + np.random.seed(0) + noise = np.random.rand(*list(raw[...,3].shape)) * raw_noise_std + noise = torch.Tensor(noise) + + alpha = raw2alpha(raw[...,3] + noise, dists) # [N_rays, N_samples] + # weights = alpha * tf.math.cumprod(1.-alpha + 1e-10, -1, exclusive=True) + # torch.cumprod:要素をその一個前の値と掛け合わせる. + weights = alpha * torch.cumprod(torch.cat([torch.ones((alpha.shape[0], 1)), 1.-alpha + 1e-10], -1), -1)[:, :-1] + rgb_map = torch.sum(weights[...,None] * rgb, -2) # [N_rays, 3] + + depth_map = torch.sum(weights * z_vals, -1) + disp_map = 1./torch.max(1e-10 * torch.ones_like(depth_map), depth_map / torch.sum(weights, -1)) + acc_map = torch.sum(weights, -1) + + if white_bkgd: + rgb_map = rgb_map + (1.-acc_map[...,None]) + + return rgb_map, disp_map, acc_map, weights, depth_map + + +def render_rays(ray_batch, + network_fn, + network_query_fn, + N_samples, + retraw=False, + lindisp=False, + perturb=0., + N_importance=0, + network_fine=None, + white_bkgd=False, + raw_noise_std=0., + verbose=False, + pytest=False): + """Volumetric rendering. + Args: + ray_batch: array of shape [batch_size, ...]. All information necessary + for sampling along a ray, including: ray origin, ray direction, min + dist, max dist, and unit-magnitude viewing direction. + network_fn: function. Model for predicting RGB and density at each point + in space. + network_query_fn: function used for passing queries to network_fn. + N_samples: int. Number of different times to sample along each ray. + retraw: bool. If True, include model's raw, unprocessed predictions. + lindisp: bool. If True, sample linearly in inverse depth rather than in depth. + perturb: float, 0 or 1. If non-zero, each ray is sampled at stratified + random points in time. + N_importance: int. Number of additional times to sample along each ray. + These samples are only passed to network_fine. + network_fine: "fine" network with same spec as network_fn. + white_bkgd: bool. If True, assume a white background. + raw_noise_std: ... + verbose: bool. If True, print more debugging info. + Returns: + rgb_map: [num_rays, 3]. Estimated RGB color of a ray. Comes from fine model. + disp_map: [num_rays]. Disparity map. 1 / depth. + acc_map: [num_rays]. Accumulated opacity along each ray. Comes from fine model. + raw: [num_rays, num_samples, 4]. Raw predictions from model. + rgb0: See rgb_map. Output for coarse model. + disp0: See disp_map. Output for coarse model. + acc0: See acc_map. Output for coarse model. + z_std: [num_rays]. Standard deviation of distances along ray for each + sample. 
+ """ + N_rays = ray_batch.shape[0] + rays_o, rays_d = ray_batch[:,0:3], ray_batch[:,3:6] # [N_rays, 3] each + viewdirs = ray_batch[:,-3:] if ray_batch.shape[-1] > 8 else None + bounds = torch.reshape(ray_batch[...,6:8], [-1,1,2]) + near, far = bounds[...,0], bounds[...,1] # [-1,1] + + t_vals = torch.linspace(0., 1., steps=N_samples) + if not lindisp: + z_vals = near * (1.-t_vals) + far * (t_vals) + else: + z_vals = 1./(1./near * (1.-t_vals) + 1./far * (t_vals)) + + z_vals = z_vals.expand([N_rays, N_samples]) + + if perturb > 0.: + # get intervals between samples + mids = .5 * (z_vals[...,1:] + z_vals[...,:-1]) + upper = torch.cat([mids, z_vals[...,-1:]], -1) + lower = torch.cat([z_vals[...,:1], mids], -1) + # stratified samples in those intervals + t_rand = torch.rand(z_vals.shape) + + # Pytest, overwrite u with numpy's fixed random numbers + if pytest: + np.random.seed(0) + t_rand = np.random.rand(*list(z_vals.shape)) + t_rand = torch.Tensor(t_rand) + + z_vals = lower + (upper - lower) * t_rand + + pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None] # [N_rays, N_samples, 3] + + + + raw = network_query_fn(pts, viewdirs, network_fn) # raw = run_network(pts) + # save_tensor_to_npz(raw,"") + rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest) + + if N_importance > 0: + + rgb_map_0, disp_map_0, acc_map_0 = rgb_map, disp_map, acc_map + + z_vals_mid = .5 * (z_vals[...,1:] + z_vals[...,:-1]) + z_samples = sample_pdf(z_vals_mid, weights[...,1:-1], N_importance, det=(perturb==0.), pytest=pytest) + z_samples = z_samples.detach() + + z_vals, _ = torch.sort(torch.cat([z_vals, z_samples], -1), -1) + pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None] # [N_rays, N_samples + N_importance, 3] + + run_fn = network_fn if network_fine is None else network_fine +# raw = run_network(pts, fn=run_fn) + raw = network_query_fn(pts, viewdirs, run_fn) + + rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest) + + ret = {'rgb_map' : rgb_map, 'disp_map' : disp_map, 'acc_map' : acc_map} + if retraw: + ret['raw'] = raw + if N_importance > 0: + ret['rgb0'] = rgb_map_0 + ret['disp0'] = disp_map_0 + ret['acc0'] = acc_map_0 + ret['z_std'] = torch.std(z_samples, dim=-1, unbiased=False) # [N_rays] + + for k in ret: + if (torch.isnan(ret[k]).any() or torch.isinf(ret[k]).any()) and DEBUG: + print(f"! 
[Numerical Error] {k} contains nan or inf.") + + return ret + + +def config_parser(): + + import configargparse + parser = configargparse.ArgumentParser() + parser.add_argument('--config', is_config_file=True, + help='config file path') + parser.add_argument("--expname", type=str, + help='experiment name') + parser.add_argument("--basedir", type=str, default='./logs/', + help='where to store ckpts and logs') + parser.add_argument("--datadir", type=str, default='./data/llff/fern', + help='input data directory') + + # training options + parser.add_argument("--netdepth", type=int, default=8, + help='layers in network') + parser.add_argument("--netwidth", type=int, default=256, + help='channels per layer') + parser.add_argument("--netdepth_fine", type=int, default=8, + help='layers in fine network') + parser.add_argument("--netwidth_fine", type=int, default=256, + help='channels per layer in fine network') + parser.add_argument("--N_rand", type=int, default=32*32*4, + help='batch size (number of random rays per gradient step)') + parser.add_argument("--lrate", type=float, default=5e-4, + help='learning rate') + parser.add_argument("--lrate_decay", type=int, default=250, + help='exponential learning rate decay (in 1000 steps)') + parser.add_argument("--chunk", type=int, default=1024*32, + help='number of rays processed in parallel, decrease if running out of memory') + parser.add_argument("--netchunk", type=int, default=1024*64, + help='number of pts sent through network in parallel, decrease if running out of memory') + parser.add_argument("--no_batching", action='store_true', + help='only take random rays from 1 image at a time') + parser.add_argument("--no_reload", action='store_true', + help='do not reload weights from saved ckpt') + parser.add_argument("--ft_path", type=str, default=None, + help='specific weights npy file to reload for coarse network') + + # rendering options + parser.add_argument("--N_samples", type=int, default=64, + help='number of coarse samples per ray') + parser.add_argument("--N_importance", type=int, default=0, + help='number of additional fine samples per ray') + parser.add_argument("--perturb", type=float, default=1., + help='set to 0. for no jitter, 1. 
for jitter')
+    parser.add_argument("--use_viewdirs", action='store_true',
+                        help='use full 5D input instead of 3D')
+    parser.add_argument("--i_embed", type=int, default=0,
+                        help='set 0 for default positional encoding, -1 for none')
+    parser.add_argument("--multires", type=int, default=10,
+                        help='log2 of max freq for positional encoding (3D location)')
+    parser.add_argument("--multires_views", type=int, default=4,
+                        help='log2 of max freq for positional encoding (2D direction)')
+    parser.add_argument("--raw_noise_std", type=float, default=0.,
+                        help='std dev of noise added to regularize sigma_a output, 1e0 recommended')
+
+    parser.add_argument("--render_only", action='store_true',
+                        help='do not optimize, reload weights and render out render_poses path')
+    parser.add_argument("--render_test", action='store_true',
+                        help='render the test set instead of render_poses path')
+    parser.add_argument("--render_factor", type=int, default=0,
+                        help='downsampling factor to speed up rendering, set 4 or 8 for fast preview')
+
+    # training options
+    parser.add_argument("--precrop_iters", type=int, default=0,
+                        help='number of steps to train on central crops')
+    parser.add_argument("--precrop_frac", type=float,
+                        default=.5, help='fraction of img taken for central crops')
+
+    # dataset options
+    parser.add_argument("--dataset_type", type=str, default='llff',
+                        help='options: llff / synth360 / blender / LINEMOD / deepvoxels')
+    parser.add_argument("--testskip", type=int, default=8,
+                        help='will load 1/N images from test/val sets, useful for large datasets like deepvoxels')
+
+    ## deepvoxels flags
+    parser.add_argument("--shape", type=str, default='greek',
+                        help='options : armchair / cube / greek / vase')
+
+    ## blender flags
+    parser.add_argument("--white_bkgd", action='store_true',
+                        help='set to render synthetic data on a white bkgd (always use for dvoxels)')
+    parser.add_argument("--half_res", action='store_true',
+                        help='load blender synthetic data at 400x400 instead of 800x800')
+
+    ## llff flags
+    parser.add_argument("--factor", type=int, default=8,
+                        help='downsample factor for LLFF images')
+    parser.add_argument("--no_ndc", action='store_true',
+                        help='do not use normalized device coordinates (set for non-forward facing scenes)')
+    parser.add_argument("--lindisp", action='store_true',
+                        help='sampling linearly in disparity rather than depth')
+    parser.add_argument("--spherify", action='store_true',
+                        help='set for spherical 360 scenes')
+    parser.add_argument("--llffhold", type=int, default=8,
+                        help='will take every 1/N images as LLFF test set, paper uses 8')
+
+    # logging/saving options
+    parser.add_argument("--i_print", type=int, default=100,
+                        help='frequency of console printout and metric logging')
+    parser.add_argument("--i_img", type=int, default=500,
+                        help='frequency of tensorboard image logging')
+    parser.add_argument("--i_weights", type=int, default=10000,
+                        help='frequency of weight ckpt saving')
+    parser.add_argument("--i_testset", type=int, default=50000,
+                        help='frequency of testset saving')
+    parser.add_argument("--i_video", type=int, default=50000,
+                        help='frequency of render_poses video saving')
+
+    return parser
+
+def save_tensor_to_npz(tensor, file_path):
+    """
+    Write a tensor out to an NPZ file.
+
+    Args:
+        tensor (torch.Tensor or numpy.ndarray): Tensor to write out.
+        file_path (str): Path of the output file.
+    """
+    tensor_data = tensor.tolist()
+    np.savez(file_path, tensor_data)
+
+def train():
+
+    parser = config_parser()
+    args = parser.parse_args()
+
+    # Load data
+    K = None
+    if args.dataset_type == 'llff':
+        images, poses, bds, render_poses, i_test = load_llff_data(args.datadir, args.factor,
+                                                                  recenter=True, bd_factor=.75,
+                                                                  spherify=args.spherify)
+        hwf = poses[0,:3,-1]
+        poses = poses[:,:3,:4]
+        print('Loaded llff', images.shape, render_poses.shape, hwf, args.datadir)
+        if not isinstance(i_test, list):
+            i_test = [i_test]
+
+        if args.llffhold > 0:
+            print('Auto LLFF holdout,', args.llffhold)
+            i_test = np.arange(images.shape[0])[::args.llffhold]
+
+        i_val = i_test
+        i_train = np.array([i for i in np.arange(int(images.shape[0])) if
+                        (i not in i_test and i not in i_val)])
+
+        print('DEFINING BOUNDS')
+        if args.no_ndc:
+            near = np.ndarray.min(bds) * .9
+            far = np.ndarray.max(bds) * 1.
+
+        else:
+            near = 0.
+            far = 1.
+        print('NEAR FAR', near, far)
+
+    elif args.dataset_type == 'synth360':
+        images, poses, bds, render_poses, i_test = load_synth360_data(args.datadir)
+        # print(i_test)
+        hwf = poses[0,:3,-1]
+        poses = poses[:,:3,:4]
+        print(poses.shape)
+        print('Loaded 360', images.shape, render_poses.shape, hwf, args.datadir)
+        # if not isinstance(i_test, list):
+        #     i_test = [i_test]
+
+        # if args.llffhold > 0:
+        #     print('Auto LLFF holdout,', args.llffhold)
+        #     i_test = np.arange(images.shape[0])[::args.llffhold]
+        # print(":",i_test)
+        i_val = i_test
+        i_train = np.array([i for i in range(0,i_test[0])])
+
+        # i_train = np.array([i for i in np.arange(int(images.shape[0])) if
+        #                 (i not in i_test and i not in i_val)])
+        print(i_train)
+        print('DEFINING BOUNDS')
+        if args.no_ndc:
+            near = np.ndarray.min(bds) * .9
+            far = np.ndarray.max(bds) * 1.
+
+        else:
+            near = 0.
+            far = 1.
+        print('NEAR FAR', near, far)
+
+    elif args.dataset_type == 'blender':
+        images, poses, render_poses, hwf, i_split = load_blender_data(args.datadir, args.half_res, args.testskip)
+        print('Loaded blender', images.shape, render_poses.shape, hwf, args.datadir)
+        i_train, i_val, i_test = i_split
+
+        near = 2.
+        far = 6.
+
+        if args.white_bkgd:
+            images = images[...,:3]*images[...,-1:] + (1.-images[...,-1:])
+        else:
+            images = images[...,:3]
+
+    elif args.dataset_type == 'LINEMOD':
+        images, poses, render_poses, hwf, K, i_split, near, far = load_LINEMOD_data(args.datadir, args.half_res, args.testskip)
+        print(f'Loaded LINEMOD, images shape: {images.shape}, hwf: {hwf}, K: {K}')
+        print(f'[CHECK HERE] near: {near}, far: {far}.')
+        i_train, i_val, i_test = i_split
+
+        if args.white_bkgd:
+            images = images[...,:3]*images[...,-1:] + (1.-images[...,-1:])
+        else:
+            images = images[...,:3]
+
+    elif args.dataset_type == 'deepvoxels':
+
+        images, poses, render_poses, hwf, i_split = load_dv_data(scene=args.shape,
+                                                                 basedir=args.datadir,
+                                                                 testskip=args.testskip)
+
+        print('Loaded deepvoxels', images.shape, render_poses.shape, hwf, args.datadir)
+        i_train, i_val, i_test = i_split
+
+        hemi_R = np.mean(np.linalg.norm(poses[:,:3,-1], axis=-1))
+        near = hemi_R-1.
+        far = hemi_R+1.
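+        # Illustrative numbers for the hemisphere bounds above: if the camera
+        # origins sit at an average distance of 4.0 from the scene origin
+        # (hemi_R = 4.0), samples are taken between near = 3.0 and far = 5.0.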
+
+    else:
+        print('Unknown dataset type', args.dataset_type, 'exiting')
+        return
+
+    # Cast intrinsics to right types
+    print("hwf", hwf)
+    _, _, focal = hwf
+    # H, W, focal = hwf
+    # H and W come from the loaded images; focal is hardcoded to 1 here
+    # (the commented-out line above would instead use the dataset's focal).
+    H, W, focal = images.shape[1], images.shape[2], 1
+    H, W = int(H), int(W)
+    hwf = [H, W, focal]
+
+    if K is None:
+        K = np.array([
+            [focal, 0, 0.5*W],
+            [0, focal, 0.5*H],
+            [0, 0, 1]
+        ])
+
+    if args.render_test:
+        render_poses = np.array(poses[i_test])
+
+    # Create log dir and copy the config file
+    basedir = args.basedir
+    expname = args.expname
+    os.makedirs(os.path.join(basedir, expname), exist_ok=True)
+    f = os.path.join(basedir, expname, 'args.txt')
+    with open(f, 'w') as file:
+        for arg in sorted(vars(args)):
+            attr = getattr(args, arg)
+            file.write('{} = {}\n'.format(arg, attr))
+    if args.config is not None:
+        f = os.path.join(basedir, expname, 'config.txt')
+        with open(f, 'w') as file:
+            file.write(open(args.config, 'r').read())
+
+    # Create nerf model
+    render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer = create_nerf(args)
+    global_step = start
+
+    bds_dict = {
+        'near' : near,
+        'far' : far,
+    }
+    render_kwargs_train.update(bds_dict)
+    render_kwargs_test.update(bds_dict)
+
+    # Move testing data to GPU
+    render_poses = torch.Tensor(render_poses).to(device)
+
+    # Short circuit if only rendering out from trained model
+    if args.render_only:
+        print('RENDER ONLY')
+        with torch.no_grad():
+            if args.render_test:
+                # render_test switches to test poses
+                images = images[i_test]
+            else:
+                # Default is smoother render_poses path
+                images = None
+
+            testsavedir = os.path.join(basedir, expname, 'renderonly_{}_{:06d}'.format('test' if args.render_test else 'path', start))
+            os.makedirs(testsavedir, exist_ok=True)
+            print('test poses shape', render_poses.shape)
+
+            rgbs, _ = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test, gt_imgs=images, savedir=testsavedir, render_factor=args.render_factor)
+            print('Done rendering', testsavedir)
+            imageio.mimwrite(os.path.join(testsavedir, 'video.mp4'), to8b(rgbs), fps=30, quality=8)
+
+            return
+
+    # Prepare raybatch tensor if batching random rays
+    N_rand = args.N_rand
+    use_batching = not args.no_batching  # default here is no_batching=True (so use_batching=False)
+    # non-default path: random ray batching across all images
+    if use_batching:
+        # For random ray batching
+        print('get rays')
+        rays = np.stack([get_rays_np(H, W, K, p) for p in poses[:,:3,:4]], 0)  # [N, ro+rd, H, W, 3]
+        print('done, concats')
+        rays_rgb = np.concatenate([rays, images[:,None]], 1)  # [N, ro+rd+rgb, H, W, 3]
+        rays_rgb = np.transpose(rays_rgb, [0,2,3,1,4])  # [N, H, W, ro+rd+rgb, 3]
+        rays_rgb = np.stack([rays_rgb[i] for i in i_train], 0)  # train images only
+        rays_rgb = np.reshape(rays_rgb, [-1,3,3])  # [(N-1)*H*W, ro+rd+rgb, 3]
+        rays_rgb = rays_rgb.astype(np.float32)
+        print('shuffle rays')
+        np.random.shuffle(rays_rgb)
+
+        print('done')
+        i_batch = 0
+
+    # Move training data to GPU
+    if use_batching:
+        images = torch.Tensor(images).to(device)
+    poses = torch.Tensor(poses).to(device)
+    if use_batching:
+        rays_rgb = torch.Tensor(rays_rgb).to(device)
+
+
+    # N_iters = 200000 + 1
+    # N_iters = 100000 + 1
+    N_iters = 1000000 + 1
+
+    print('Begin')
+    print('TRAIN views are', i_train)
+    print('TEST views are', i_test)
+    print('VAL views are', i_val)
+
+    # Summary writers
+    # writer = SummaryWriter(os.path.join(basedir, 'summaries', expname))
+
+    start = start + 1
+    for i in trange(start, N_iters):
+        time0 = time.time()
+
+        # Sample random ray batch
+        if use_batching:
+            # Random over all images
+            batch = rays_rgb[i_batch:i_batch+N_rand]  # [B, 2+1, 3*?]
+            batch = torch.transpose(batch, 0, 1)
+            batch_rays, target_s = batch[:2], batch[2]
+
+            i_batch += N_rand
+            if i_batch >= rays_rgb.shape[0]:
+                print("Shuffle data after an epoch!")
+                rand_idx = torch.randperm(rays_rgb.shape[0])
+                rays_rgb = rays_rgb[rand_idx]
+                i_batch = 0
+
+        else:
+            # Random from one image
+            img_i = np.random.choice(i_train)
+            target = images[img_i]
+            target = torch.Tensor(target).to(device)
+            pose = poses[img_i, :3,:4]
+
+            if N_rand is not None:
+                rays_o, rays_d = get_rays(H, W, K, torch.Tensor(pose))  # (H, W, 3), (H, W, 3)
+                # print(H,W,K)
+                if i < args.precrop_iters:
+                    dH = int(H//2 * args.precrop_frac)
+                    dW = int(W//2 * args.precrop_frac)
+                    coords = torch.stack(
+                        torch.meshgrid(
+                            torch.linspace(H//2 - dH, H//2 + dH - 1, 2*dH),
+                            torch.linspace(W//2 - dW, W//2 + dW - 1, 2*dW)
+                        ), -1)
+                    if i == start:
+                        print(f"[Config] Center cropping of size {2*dH} x {2*dW} is enabled until iter {args.precrop_iters}")
+                else:
+                    # This only builds the grid of pixel coordinates; no rays are cast yet.
+                    coords = torch.stack(torch.meshgrid(torch.linspace(0, H-1, H), torch.linspace(0, W-1, W)), -1)  # (H, W, 2)
+                    # coords = torch.stack(torch.meshgrid(torch.linspace(0, np.sin(H-1), H), torch.linspace(0, np.sin(W-1), W)), -1)  # (H, W, 2)
+
+                coords = torch.reshape(coords, [-1,2])  # (H * W, 2)
+                # print('coords', coords.shape[0], N_rand)
+                # Pick a random subset of pixels from the grid to cast rays through
+                select_inds = np.random.choice(int(coords.shape[0]), size=[N_rand], replace=False)  # (N_rand,)
+                # long() casts to int64
+                select_coords = coords[select_inds].long()  # (N_rand, 2)
+                rays_o = rays_o[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
+                rays_d = rays_d[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
+
+                batch_rays = torch.stack([rays_o, rays_d], 0)  # stacked ray info (origins, directions)
+                target_s = target[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
+
+
+        #####  Core optimization loop  #####
+        rgb, disp, acc, extras = render(H, W, K, chunk=args.chunk, rays=batch_rays,
+                                        verbose=i < 10, retraw=True,
+                                        **render_kwargs_train)
+
+
+
+        # print("rays_o", rays_o)
+        # print("rays_d", rays_d)
+        #
+        optimizer.zero_grad()
+        img_loss = img2mse(rgb, target_s)
+        trans = extras['raw'][...,-1]
+        loss = img_loss
+        psnr = mse2psnr(img_loss)
+
+        if 'rgb0' in extras:
+            img_loss0 = img2mse(extras['rgb0'], target_s)
+            loss = loss + img_loss0
+            psnr0 = mse2psnr(img_loss0)
+
+        loss.backward()
+        optimizer.step()
+
+        # NOTE: IMPORTANT!
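+        # A quick worked example of the decay schedule below, assuming the
+        # defaults lrate = 5e-4 and lrate_decay = 250 (decay_steps = 250000):
+        #   step 0:      lr = 5e-4 * 0.1 ** (0 / 250000)      = 5.0e-4
+        #   step 125000: lr = 5e-4 * 0.1 ** (125000 / 250000) ≈ 1.6e-4
+        #   step 250000: lr = 5e-4 * 0.1 ** (250000 / 250000) = 5.0e-5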
+ ### update learning rate ### + decay_rate = 0.1 + decay_steps = args.lrate_decay * 1000 + new_lrate = args.lrate * (decay_rate ** (global_step / decay_steps)) + for param_group in optimizer.param_groups: + param_group['lr'] = new_lrate + ################################ + + dt = time.time()-time0 + # print(f"Step: {global_step}, Loss: {loss}, Time: {dt}") + ##### end ##### + + # Rest is logging + if i%args.i_weights==0: + path = os.path.join(basedir, expname, '{:06d}.tar'.format(i)) + torch.save({ + 'global_step': global_step, + 'network_fn_state_dict': render_kwargs_train['network_fn'].state_dict(), + 'network_fine_state_dict': render_kwargs_train['network_fine'].state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + }, path) + print('Saved checkpoints at', path) + + if i%args.i_video==0 and i > 0: + # Turn on testing mode + with torch.no_grad(): + rgbs, disps = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test) + print('Done, saving', rgbs.shape, disps.shape) + moviebase = os.path.join(basedir, expname, '{}_spiral_{:06d}_'.format(expname, i)) + imageio.mimwrite(moviebase + 'rgb.mp4', to8b(rgbs), fps=30, quality=8) + imageio.mimwrite(moviebase + 'disp.mp4', to8b(disps / np.max(disps)), fps=30, quality=8) + save_tensor_to_npz(rays_o,f'{basedir}/{expname}/{i}_ray_o') + save_tensor_to_npz(rays_d,f'{basedir}/{expname}/{i}_ray_d') + save_tensor_to_npz(batch_rays,f'{basedir}/{expname}/{i}_batch_rays') + save_tensor_to_npz(coords,f'{basedir}/{expname}/{i}_coords') + save_tensor_to_npz(target_s,f'{basedir}/{expname}/{i}_target_s') + print(f"extras:{extras}") + save_tensor_to_npz(extras['raw'],f'{basedir}/{expname}/{i}_extras') + # save_tensor_to_npz(extras,f'{basedir}/{expname}/{i}_extras') + save_tensor_to_npz(rgb,f'{basedir}/{expname}/{i}_rgb') + if args.use_viewdirs: + render_kwargs_test['c2w_staticcam'] = render_poses[0][:3,:4] + with torch.no_grad(): + # rgbs_still, _ = render_path(render_poses, hwf, args.chunk, render_kwargs_test) + rgbs_still, _ = render_path(render_poses, hwf, K,args.chunk, render_kwargs_test) + render_kwargs_test['c2w_staticcam'] = None + imageio.mimwrite(moviebase + 'rgb_still.mp4', to8b(rgbs_still), fps=30, quality=8) + + if i%args.i_testset==0 and i > 0: + testsavedir = os.path.join(basedir, expname, 'testset_{:06d}'.format(i)) + os.makedirs(testsavedir, exist_ok=True) + print('test poses shape', poses[i_test].shape) + with torch.no_grad(): + render_path(torch.Tensor(poses[i_test]).to(device), hwf, K, args.chunk, render_kwargs_test, gt_imgs=images[i_test], savedir=testsavedir) + print('Saved test set') + + + + if i%args.i_print==0: + tqdm.write(f"[TRAIN] Iter: {i} Loss: {loss.item()} PSNR: {psnr.item()}") + """ + # print(expname, i, psnr.detach(), loss.detach(), global_step.cpu().detach().numpy()) + print('iter time {:.05f}'.format(dt)) + + with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_print): + tf.contrib.summary.scalar('loss', loss) + tf.contrib.summary.scalar('psnr', psnr) + tf.contrib.summary.histogram('tran', trans) + if args.N_importance > 0: + tf.contrib.summary.scalar('psnr0', psnr0) + + + if i%args.i_img==0: + + # Log a rendered validation view to Tensorboard + img_i=np.random.choice(i_val) + target = images[img_i] + pose = poses[img_i, :3,:4] + with torch.no_grad(): + rgb, disp, acc, extras = render(H, W, focal, chunk=args.chunk, c2w=pose, + **render_kwargs_test) + + psnr = mse2psnr(img2mse(rgb, target)) + + with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_img): + + 
tf.contrib.summary.image('rgb', to8b(rgb)[tf.newaxis])
+                tf.contrib.summary.image('disp', disp[tf.newaxis,...,tf.newaxis])
+                tf.contrib.summary.image('acc', acc[tf.newaxis,...,tf.newaxis])
+
+                tf.contrib.summary.scalar('psnr_holdout', psnr)
+                tf.contrib.summary.image('rgb_holdout', target[tf.newaxis])
+
+
+            if args.N_importance > 0:
+
+                with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_img):
+                    tf.contrib.summary.image('rgb0', to8b(extras['rgb0'])[tf.newaxis])
+                    tf.contrib.summary.image('disp0', extras['disp0'][tf.newaxis,...,tf.newaxis])
+                    tf.contrib.summary.image('z_std', extras['z_std'][tf.newaxis,...,tf.newaxis])
+        """
+
+        global_step += 1
+
+
+if __name__=='__main__':
+    torch.set_default_tensor_type('torch.cuda.FloatTensor')
+
+    train()
diff --git a/run_nerf-new.py b/run_nerf-new.py
new file mode 100644
index 000000000..f4c361115
--- /dev/null
+++ b/run_nerf-new.py
@@ -0,0 +1,952 @@
+import os, sys
+import numpy as np
+import imageio
+import json
+import random
+import time
+from load_synth360 import load_synth360_data
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from tqdm import tqdm, trange
+
+import matplotlib.pyplot as plt
+
+from run_nerf_helpers_new import *
+
+from load_llff import load_llff_data
+from load_deepvoxels import load_dv_data
+from load_blender import load_blender_data
+from load_LINEMOD import load_LINEMOD_data
+
+import json
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+np.random.seed(0)
+DEBUG = False
+
+
+def batchify(fn, chunk):
+    """Constructs a version of 'fn' that applies to smaller batches.
+    """
+    if chunk is None:
+        return fn
+    def ret(inputs):
+        return torch.cat([fn(inputs[i:i+chunk]) for i in range(0, inputs.shape[0], chunk)], 0)
+    return ret
+
+
+def run_network(inputs, viewdirs, fn, embed_fn, embeddirs_fn, netchunk=1024*64):
+    """Prepares inputs and applies network 'fn'.
+    """
+    inputs_flat = torch.reshape(inputs, [-1, inputs.shape[-1]])
+    embedded = embed_fn(inputs_flat)
+
+    if viewdirs is not None:
+        input_dirs = viewdirs[:,None].expand(inputs.shape)
+        input_dirs_flat = torch.reshape(input_dirs, [-1, input_dirs.shape[-1]])
+        embedded_dirs = embeddirs_fn(input_dirs_flat)
+        embedded = torch.cat([embedded, embedded_dirs], -1)
+
+    outputs_flat = batchify(fn, netchunk)(embedded)
+    outputs = torch.reshape(outputs_flat, list(inputs.shape[:-1]) + [outputs_flat.shape[-1]])
+    return outputs
+
+
+def batchify_rays(rays_flat, chunk=1024*32, **kwargs):
+    """Render rays in smaller minibatches to avoid OOM.
+    """
+    all_ret = {}
+    for i in range(0, rays_flat.shape[0], chunk):
+        ret = render_rays(rays_flat[i:i+chunk], **kwargs)
+        for k in ret:
+            if k not in all_ret:
+                all_ret[k] = []
+            all_ret[k].append(ret[k])
+
+    all_ret = {k : torch.cat(all_ret[k], 0) for k in all_ret}
+    return all_ret
+
+
+def render(H, W, K, chunk=1024*32, rays=None, c2w=None, ndc=True,
+           near=0., far=1.,
+           use_viewdirs=False, c2w_staticcam=None,
+           **kwargs):
+    """Render rays
+    Args:
+      H: int. Height of image in pixels.
+      W: int. Width of image in pixels.
+      K: array of shape [3, 3]. Camera intrinsics (pinhole) matrix.
+      chunk: int. Maximum number of rays to process simultaneously. Used to
+        control maximum memory usage. Does not affect final results.
+      rays: array of shape [2, batch_size, 3]. Ray origin and direction for
+        each example in batch.
+      c2w: array of shape [3, 4]. Camera-to-world transformation matrix.
+      ndc: bool. If True, represent ray origin, direction in NDC coordinates.
+      near: float or array of shape [batch_size].
Nearest distance for a ray. + far: float or array of shape [batch_size]. Farthest distance for a ray. + use_viewdirs: bool. If True, use viewing direction of a point in space in model. + c2w_staticcam: array of shape [3, 4]. If not None, use this transformation matrix for + camera while using other c2w argument for viewing directions. + Returns: + rgb_map: [batch_size, 3]. Predicted RGB values for rays. + disp_map: [batch_size]. Disparity map. Inverse of depth. + acc_map: [batch_size]. Accumulated opacity (alpha) along a ray. + extras: dict with everything returned by render_rays(). + """ + if c2w is not None: + # special case to render full image + rays_o, rays_d = get_rays(H, W, K, c2w) + else: + # use provided ray batch + rays_o, rays_d = rays + + if use_viewdirs: + # provide ray directions as input + viewdirs = rays_d + if c2w_staticcam is not None: + # special case to visualize effect of viewdirs + rays_o, rays_d = get_rays(H, W, K, c2w_staticcam) + viewdirs = viewdirs / torch.norm(viewdirs, dim=-1, keepdim=True) + viewdirs = torch.reshape(viewdirs, [-1,3]).float() + + sh = rays_d.shape # [..., 3] + if ndc: + # for forward facing scenes + rays_o, rays_d = ndc_rays(H, W, K[0][0], 1., rays_o, rays_d) + + # Create ray batch + rays_o = torch.reshape(rays_o, [-1,3]).float() + rays_d = torch.reshape(rays_d, [-1,3]).float() + + near, far = near * torch.ones_like(rays_d[...,:1]), far * torch.ones_like(rays_d[...,:1]) + rays = torch.cat([rays_o, rays_d, near, far], -1) + # save_tensor_to_npz(rays,"rays_all_info") + if use_viewdirs: + rays = torch.cat([rays, viewdirs], -1) + + # Render and reshape + all_ret = batchify_rays(rays, chunk, **kwargs) + for k in all_ret: + k_sh = list(sh[:-1]) + list(all_ret[k].shape[1:]) + all_ret[k] = torch.reshape(all_ret[k], k_sh) + + k_extract = ['rgb_map', 'disp_map', 'acc_map'] + ret_list = [all_ret[k] for k in k_extract] + ret_dict = {k : all_ret[k] for k in all_ret if k not in k_extract} + return ret_list + [ret_dict] + + +def render_path(render_poses, hwf, K, chunk, render_kwargs, gt_imgs=None, savedir=None, render_factor=0): + + H, W, focal = hwf + + if render_factor!=0: + # Render downsampled for speed + H = H//render_factor + W = W//render_factor + focal = focal/render_factor + + rgbs = [] + disps = [] + + t = time.time() + for i, c2w in enumerate(tqdm(render_poses)): + print(i, time.time() - t) + t = time.time() + rgb, disp, acc, _ = render(H, W, K, chunk=chunk, c2w=c2w[:3,:4], **render_kwargs) + rgbs.append(rgb.cpu().numpy()) + disps.append(disp.cpu().numpy()) + if i==0: + print(rgb.shape, disp.shape) + + """ + if gt_imgs is not None and render_factor==0: + p = -10. * np.log10(np.mean(np.square(rgb.cpu().numpy() - gt_imgs[i]))) + print(p) + """ + + if savedir is not None: + rgb8 = to8b(rgbs[-1]) + filename = os.path.join(savedir, '{:03d}.png'.format(i)) + imageio.imwrite(filename, rgb8) + + + rgbs = np.stack(rgbs, 0) + disps = np.stack(disps, 0) + + return rgbs, disps + + +def create_nerf(args): + """Instantiate NeRF's MLP model. 
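+    A sketch of the contract, inferred from the code below: returns
+    (render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer),
+    where the test-time kwargs reuse the same networks but disable perturb
+    and raw_noise_std.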
+ """ + embed_fn, input_ch = get_embedder(args.multires, args.i_embed) + + input_ch_views = 0 + embeddirs_fn = None + if args.use_viewdirs: + embeddirs_fn, input_ch_views = get_embedder(args.multires_views, args.i_embed) + output_ch = 5 if args.N_importance > 0 else 4 + skips = [4] + model = NeRF(D=args.netdepth, W=args.netwidth, + input_ch=input_ch, output_ch=output_ch, skips=skips, + input_ch_views=input_ch_views, use_viewdirs=args.use_viewdirs).to(device) + grad_vars = list(model.parameters()) + + model_fine = None + if args.N_importance > 0: + model_fine = NeRF(D=args.netdepth_fine, W=args.netwidth_fine, + input_ch=input_ch, output_ch=output_ch, skips=skips, + input_ch_views=input_ch_views, use_viewdirs=args.use_viewdirs).to(device) + grad_vars += list(model_fine.parameters()) + + network_query_fn = lambda inputs, viewdirs, network_fn : run_network(inputs, viewdirs, network_fn, + embed_fn=embed_fn, + embeddirs_fn=embeddirs_fn, + netchunk=args.netchunk) + + # Create optimizer + optimizer = torch.optim.Adam(params=grad_vars, lr=args.lrate, betas=(0.9, 0.999)) + + start = 0 + basedir = args.basedir + expname = args.expname + + ########################## + + # Load checkpoints + if args.ft_path is not None and args.ft_path!='None': + ckpts = [args.ft_path] + else: + ckpts = [os.path.join(basedir, expname, f) for f in sorted(os.listdir(os.path.join(basedir, expname))) if '.tar' in f] + + print('Found ckpts', ckpts) + if len(ckpts) > 0 and not args.no_reload: + ckpt_path = ckpts[-1] + print('Reloading from', ckpt_path) + ckpt = torch.load(ckpt_path) + + start = ckpt['global_step'] + optimizer.load_state_dict(ckpt['optimizer_state_dict']) + + # Load model + model.load_state_dict(ckpt['network_fn_state_dict']) + if model_fine is not None: + model_fine.load_state_dict(ckpt['network_fine_state_dict']) + + ########################## + + render_kwargs_train = { + 'network_query_fn' : network_query_fn, + 'perturb' : args.perturb, + 'N_importance' : args.N_importance, + 'network_fine' : model_fine, + 'N_samples' : args.N_samples, + 'network_fn' : model, + 'use_viewdirs' : args.use_viewdirs, + 'white_bkgd' : args.white_bkgd, + 'raw_noise_std' : args.raw_noise_std, + } + + # NDC only good for LLFF-style forward facing data + if args.dataset_type != 'llff' or args.no_ndc: + print('Not ndc!') + render_kwargs_train['ndc'] = False + render_kwargs_train['lindisp'] = args.lindisp + + render_kwargs_test = {k : render_kwargs_train[k] for k in render_kwargs_train} + render_kwargs_test['perturb'] = False + render_kwargs_test['raw_noise_std'] = 0. + + return render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer + + +def raw2outputs(raw, z_vals, rays_d, raw_noise_std=0, white_bkgd=False, pytest=False): + """Transforms model's predictions to semantically meaningful values. + Args: + raw: [num_rays, num_samples along ray, 4]. Prediction from model. + z_vals: [num_rays, num_samples along ray]. Integration time. + rays_d: [num_rays, 3]. Direction of each ray. + Returns: + rgb_map: [num_rays, 3]. Estimated RGB color of a ray. + disp_map: [num_rays]. Disparity map. Inverse of depth map. + acc_map: [num_rays]. Sum of weights along each ray. + weights: [num_rays, num_samples]. Weights assigned to each sampled color. + depth_map: [num_rays]. Estimated distance to object. 
+ """ + raw2alpha = lambda raw, dists, act_fn=F.relu: 1.-torch.exp(-act_fn(raw)*dists) + + dists = z_vals[...,1:] - z_vals[...,:-1] + dists = torch.cat([dists, torch.Tensor([1e10]).expand(dists[...,:1].shape)], -1) # [N_rays, N_samples] + + dists = dists * torch.norm(rays_d[...,None,:], dim=-1) + + rgb = torch.sigmoid(raw[...,:3]) # [N_rays, N_samples, 3] + noise = 0. + if raw_noise_std > 0.: + noise = torch.randn(raw[...,3].shape) * raw_noise_std + + # Overwrite randomly sampled data if pytest + if pytest: + np.random.seed(0) + noise = np.random.rand(*list(raw[...,3].shape)) * raw_noise_std + noise = torch.Tensor(noise) + + alpha = raw2alpha(raw[...,3] + noise, dists) # [N_rays, N_samples] + # weights = alpha * tf.math.cumprod(1.-alpha + 1e-10, -1, exclusive=True) + # torch.cumprod:要素をその一個前の値と掛け合わせる. + weights = alpha * torch.cumprod(torch.cat([torch.ones((alpha.shape[0], 1)), 1.-alpha + 1e-10], -1), -1)[:, :-1] + rgb_map = torch.sum(weights[...,None] * rgb, -2) # [N_rays, 3] + + depth_map = torch.sum(weights * z_vals, -1) + disp_map = 1./torch.max(1e-10 * torch.ones_like(depth_map), depth_map / torch.sum(weights, -1)) + acc_map = torch.sum(weights, -1) + + if white_bkgd: + rgb_map = rgb_map + (1.-acc_map[...,None]) + + return rgb_map, disp_map, acc_map, weights, depth_map + + +def render_rays(ray_batch, + network_fn, + network_query_fn, + N_samples, + retraw=False, + lindisp=False, + perturb=0., + N_importance=0, + network_fine=None, + white_bkgd=False, + raw_noise_std=0., + verbose=False, + pytest=False): + """Volumetric rendering. + Args: + ray_batch: array of shape [batch_size, ...]. All information necessary + for sampling along a ray, including: ray origin, ray direction, min + dist, max dist, and unit-magnitude viewing direction. + network_fn: function. Model for predicting RGB and density at each point + in space. + network_query_fn: function used for passing queries to network_fn. + N_samples: int. Number of different times to sample along each ray. + retraw: bool. If True, include model's raw, unprocessed predictions. + lindisp: bool. If True, sample linearly in inverse depth rather than in depth. + perturb: float, 0 or 1. If non-zero, each ray is sampled at stratified + random points in time. + N_importance: int. Number of additional times to sample along each ray. + These samples are only passed to network_fine. + network_fine: "fine" network with same spec as network_fn. + white_bkgd: bool. If True, assume a white background. + raw_noise_std: ... + verbose: bool. If True, print more debugging info. + Returns: + rgb_map: [num_rays, 3]. Estimated RGB color of a ray. Comes from fine model. + disp_map: [num_rays]. Disparity map. 1 / depth. + acc_map: [num_rays]. Accumulated opacity along each ray. Comes from fine model. + raw: [num_rays, num_samples, 4]. Raw predictions from model. + rgb0: See rgb_map. Output for coarse model. + disp0: See disp_map. Output for coarse model. + acc0: See acc_map. Output for coarse model. + z_std: [num_rays]. Standard deviation of distances along ray for each + sample. 
+ """ + N_rays = ray_batch.shape[0] + rays_o, rays_d = ray_batch[:,0:3], ray_batch[:,3:6] # [N_rays, 3] each + viewdirs = ray_batch[:,-3:] if ray_batch.shape[-1] > 8 else None + bounds = torch.reshape(ray_batch[...,6:8], [-1,1,2]) + near, far = bounds[...,0], bounds[...,1] # [-1,1] + + t_vals = torch.linspace(0., 1., steps=N_samples) + if not lindisp: + z_vals = near * (1.-t_vals) + far * (t_vals) + else: + z_vals = 1./(1./near * (1.-t_vals) + 1./far * (t_vals)) + + z_vals = z_vals.expand([N_rays, N_samples]) + + if perturb > 0.: + # get intervals between samples + mids = .5 * (z_vals[...,1:] + z_vals[...,:-1]) + upper = torch.cat([mids, z_vals[...,-1:]], -1) + lower = torch.cat([z_vals[...,:1], mids], -1) + # stratified samples in those intervals + t_rand = torch.rand(z_vals.shape) + + # Pytest, overwrite u with numpy's fixed random numbers + if pytest: + np.random.seed(0) + t_rand = np.random.rand(*list(z_vals.shape)) + t_rand = torch.Tensor(t_rand) + + z_vals = lower + (upper - lower) * t_rand + + pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None] # [N_rays, N_samples, 3] + + + + raw = network_query_fn(pts, viewdirs, network_fn) # raw = run_network(pts) + # save_tensor_to_npz(raw,"") + rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest) + + if N_importance > 0: + + rgb_map_0, disp_map_0, acc_map_0 = rgb_map, disp_map, acc_map + + z_vals_mid = .5 * (z_vals[...,1:] + z_vals[...,:-1]) + z_samples = sample_pdf(z_vals_mid, weights[...,1:-1], N_importance, det=(perturb==0.), pytest=pytest) + z_samples = z_samples.detach() + + z_vals, _ = torch.sort(torch.cat([z_vals, z_samples], -1), -1) + pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None] # [N_rays, N_samples + N_importance, 3] + + run_fn = network_fn if network_fine is None else network_fine +# raw = run_network(pts, fn=run_fn) + raw = network_query_fn(pts, viewdirs, run_fn) + + rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest) + + ret = {'rgb_map' : rgb_map, 'disp_map' : disp_map, 'acc_map' : acc_map} + if retraw: + ret['raw'] = raw + if N_importance > 0: + ret['rgb0'] = rgb_map_0 + ret['disp0'] = disp_map_0 + ret['acc0'] = acc_map_0 + ret['z_std'] = torch.std(z_samples, dim=-1, unbiased=False) # [N_rays] + + for k in ret: + if (torch.isnan(ret[k]).any() or torch.isinf(ret[k]).any()) and DEBUG: + print(f"! 
[Numerical Error] {k} contains nan or inf.") + + return ret + + +def config_parser(): + + import configargparse + parser = configargparse.ArgumentParser() + parser.add_argument('--config', is_config_file=True, + help='config file path') + parser.add_argument("--expname", type=str, + help='experiment name') + parser.add_argument("--basedir", type=str, default='./logs/', + help='where to store ckpts and logs') + parser.add_argument("--datadir", type=str, default='./data/llff/fern', + help='input data directory') + + # training options + parser.add_argument("--netdepth", type=int, default=8, + help='layers in network') + parser.add_argument("--netwidth", type=int, default=256, + help='channels per layer') + parser.add_argument("--netdepth_fine", type=int, default=8, + help='layers in fine network') + parser.add_argument("--netwidth_fine", type=int, default=256, + help='channels per layer in fine network') + parser.add_argument("--N_rand", type=int, default=32*32*4, + help='batch size (number of random rays per gradient step)') + parser.add_argument("--lrate", type=float, default=5e-4, + help='learning rate') + parser.add_argument("--lrate_decay", type=int, default=250, + help='exponential learning rate decay (in 1000 steps)') + parser.add_argument("--chunk", type=int, default=1024*32, + help='number of rays processed in parallel, decrease if running out of memory') + parser.add_argument("--netchunk", type=int, default=1024*64, + help='number of pts sent through network in parallel, decrease if running out of memory') + parser.add_argument("--no_batching", action='store_true', + help='only take random rays from 1 image at a time') + parser.add_argument("--no_reload", action='store_true', + help='do not reload weights from saved ckpt') + parser.add_argument("--ft_path", type=str, default=None, + help='specific weights npy file to reload for coarse network') + + # rendering options + parser.add_argument("--N_samples", type=int, default=64, + help='number of coarse samples per ray') + parser.add_argument("--N_importance", type=int, default=0, + help='number of additional fine samples per ray') + parser.add_argument("--perturb", type=float, default=1., + help='set to 0. for no jitter, 1. 
for jitter')
+    parser.add_argument("--use_viewdirs", action='store_true',
+                        help='use full 5D input instead of 3D')
+    parser.add_argument("--i_embed", type=int, default=0,
+                        help='set 0 for default positional encoding, -1 for none')
+    parser.add_argument("--multires", type=int, default=10,
+                        help='log2 of max freq for positional encoding (3D location)')
+    parser.add_argument("--multires_views", type=int, default=4,
+                        help='log2 of max freq for positional encoding (2D direction)')
+    parser.add_argument("--raw_noise_std", type=float, default=0.,
+                        help='std dev of noise added to regularize sigma_a output, 1e0 recommended')
+
+    parser.add_argument("--render_only", action='store_true',
+                        help='do not optimize, reload weights and render out render_poses path')
+    parser.add_argument("--render_test", action='store_true',
+                        help='render the test set instead of render_poses path')
+    parser.add_argument("--render_factor", type=int, default=0,
+                        help='downsampling factor to speed up rendering, set 4 or 8 for fast preview')
+
+    # training options
+    parser.add_argument("--precrop_iters", type=int, default=0,
+                        help='number of steps to train on central crops')
+    parser.add_argument("--precrop_frac", type=float,
+                        default=.5, help='fraction of img taken for central crops')
+
+    # dataset options
+    parser.add_argument("--dataset_type", type=str, default='llff',
+                        help='options: llff / synth360 / blender / LINEMOD / deepvoxels')
+    parser.add_argument("--testskip", type=int, default=8,
+                        help='will load 1/N images from test/val sets, useful for large datasets like deepvoxels')
+
+    ## deepvoxels flags
+    parser.add_argument("--shape", type=str, default='greek',
+                        help='options : armchair / cube / greek / vase')
+
+    ## blender flags
+    parser.add_argument("--white_bkgd", action='store_true',
+                        help='set to render synthetic data on a white bkgd (always use for dvoxels)')
+    parser.add_argument("--half_res", action='store_true',
+                        help='load blender synthetic data at 400x400 instead of 800x800')
+
+    ## llff flags
+    parser.add_argument("--factor", type=int, default=8,
+                        help='downsample factor for LLFF images')
+    parser.add_argument("--no_ndc", action='store_true',
+                        help='do not use normalized device coordinates (set for non-forward facing scenes)')
+    parser.add_argument("--lindisp", action='store_true',
+                        help='sampling linearly in disparity rather than depth')
+    parser.add_argument("--spherify", action='store_true',
+                        help='set for spherical 360 scenes')
+    parser.add_argument("--llffhold", type=int, default=8,
+                        help='will take every 1/N images as LLFF test set, paper uses 8')
+
+    # logging/saving options
+    parser.add_argument("--i_print", type=int, default=100,
+                        help='frequency of console printout and metric logging')
+    parser.add_argument("--i_img", type=int, default=500,
+                        help='frequency of tensorboard image logging')
+    parser.add_argument("--i_weights", type=int, default=10000,
+                        help='frequency of weight ckpt saving')
+    parser.add_argument("--i_testset", type=int, default=50000,
+                        help='frequency of testset saving')
+    parser.add_argument("--i_video", type=int, default=50000,
+                        help='frequency of render_poses video saving')
+
+    return parser
+
+def save_tensor_to_npz(tensor, file_path):
+    """
+    Write a tensor out to an NPZ file.
+
+    Args:
+        tensor (torch.Tensor or numpy.ndarray): Tensor to write out.
+        file_path (str): Path of the output file.
+    """
+    tensor_data = tensor.tolist()
+    np.savez(file_path, tensor_data)
+
+def train():
+
+    parser = config_parser()
+    args = parser.parse_args()
+
+    # Load data
+    K = None
+    if args.dataset_type == 'llff':
+        images, poses, bds, render_poses, i_test = load_llff_data(args.datadir, args.factor,
+                                                                  recenter=True, bd_factor=.75,
+                                                                  spherify=args.spherify)
+        hwf = poses[0,:3,-1]
+        poses = poses[:,:3,:4]
+        print('Loaded llff', images.shape, render_poses.shape, hwf, args.datadir)
+        if not isinstance(i_test, list):
+            i_test = [i_test]
+
+        if args.llffhold > 0:
+            print('Auto LLFF holdout,', args.llffhold)
+            i_test = np.arange(images.shape[0])[::args.llffhold]
+
+        i_val = i_test
+        i_train = np.array([i for i in np.arange(int(images.shape[0])) if
+                        (i not in i_test and i not in i_val)])
+
+        print('DEFINING BOUNDS')
+        if args.no_ndc:
+            near = np.ndarray.min(bds) * .9
+            far = np.ndarray.max(bds) * 1.
+
+        else:
+            near = 0.
+            far = 1.
+        print('NEAR FAR', near, far)
+
+    elif args.dataset_type == 'synth360':
+        images, poses, bds, render_poses, i_test = load_synth360_data(args.datadir)
+        # print(i_test)
+        hwf = poses[0,:3,-1]
+        poses = poses[:,:3,:4]
+        print(poses.shape)
+        print('Loaded 360', images.shape, render_poses.shape, hwf, args.datadir)
+        # if not isinstance(i_test, list):
+        #     i_test = [i_test]
+
+        # if args.llffhold > 0:
+        #     print('Auto LLFF holdout,', args.llffhold)
+        #     i_test = np.arange(images.shape[0])[::args.llffhold]
+        # print(":",i_test)
+        i_val = i_test
+        i_train = np.array([i for i in range(0,i_test[0])])
+
+        # i_train = np.array([i for i in np.arange(int(images.shape[0])) if
+        #                 (i not in i_test and i not in i_val)])
+        print(i_train)
+        print('DEFINING BOUNDS')
+        if args.no_ndc:
+            near = np.ndarray.min(bds) * .9
+            far = np.ndarray.max(bds) * 1.
+
+        else:
+            near = 0.
+            far = 1.
+        print('NEAR FAR', near, far)
+
+    elif args.dataset_type == 'blender':
+        images, poses, render_poses, hwf, i_split = load_blender_data(args.datadir, args.half_res, args.testskip)
+        print('Loaded blender', images.shape, render_poses.shape, hwf, args.datadir)
+        i_train, i_val, i_test = i_split
+
+        near = 2.
+        far = 6.
+
+        if args.white_bkgd:
+            images = images[...,:3]*images[...,-1:] + (1.-images[...,-1:])
+        else:
+            images = images[...,:3]
+
+    elif args.dataset_type == 'LINEMOD':
+        images, poses, render_poses, hwf, K, i_split, near, far = load_LINEMOD_data(args.datadir, args.half_res, args.testskip)
+        print(f'Loaded LINEMOD, images shape: {images.shape}, hwf: {hwf}, K: {K}')
+        print(f'[CHECK HERE] near: {near}, far: {far}.')
+        i_train, i_val, i_test = i_split
+
+        if args.white_bkgd:
+            images = images[...,:3]*images[...,-1:] + (1.-images[...,-1:])
+        else:
+            images = images[...,:3]
+
+    elif args.dataset_type == 'deepvoxels':
+
+        images, poses, render_poses, hwf, i_split = load_dv_data(scene=args.shape,
+                                                                 basedir=args.datadir,
+                                                                 testskip=args.testskip)
+
+        print('Loaded deepvoxels', images.shape, render_poses.shape, hwf, args.datadir)
+        i_train, i_val, i_test = i_split
+
+        hemi_R = np.mean(np.linalg.norm(poses[:,:3,-1], axis=-1))
+        near = hemi_R-1.
+        far = hemi_R+1.
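+        # For reference, the LLFF-style holdout near the top of this function
+        # works out as follows: with 40 input images and llffhold = 8,
+        # i_test = [0, 8, 16, 24, 32], i_val is the same set, and i_train is
+        # the remaining 35 indices.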
+
+    else:
+        print('Unknown dataset type', args.dataset_type, 'exiting')
+        return
+
+    # Cast intrinsics to right types
+    print("hwf", hwf)
+    _, _, focal = hwf
+    # H, W, focal = hwf
+    # H and W come from the loaded images; focal is hardcoded to 1 here
+    # (the commented-out line above would instead use the dataset's focal).
+    H, W, focal = images.shape[1], images.shape[2], 1
+    H, W = int(H), int(W)
+    hwf = [H, W, focal]
+
+    if K is None:
+        K = np.array([
+            [focal, 0, 0.5*W],
+            [0, focal, 0.5*H],
+            [0, 0, 1]
+        ])
+
+    if args.render_test:
+        render_poses = np.array(poses[i_test])
+
+    # Create log dir and copy the config file
+    basedir = args.basedir
+    expname = args.expname
+    os.makedirs(os.path.join(basedir, expname), exist_ok=True)
+    f = os.path.join(basedir, expname, 'args.txt')
+    with open(f, 'w') as file:
+        for arg in sorted(vars(args)):
+            attr = getattr(args, arg)
+            file.write('{} = {}\n'.format(arg, attr))
+    if args.config is not None:
+        f = os.path.join(basedir, expname, 'config.txt')
+        with open(f, 'w') as file:
+            file.write(open(args.config, 'r').read())
+
+    # Create nerf model
+    render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer = create_nerf(args)
+    global_step = start
+
+    bds_dict = {
+        'near' : near,
+        'far' : far,
+    }
+    render_kwargs_train.update(bds_dict)
+    render_kwargs_test.update(bds_dict)
+
+    # Move testing data to GPU
+    render_poses = torch.Tensor(render_poses).to(device)
+
+    # Short circuit if only rendering out from trained model
+    if args.render_only:
+        print('RENDER ONLY')
+        with torch.no_grad():
+            if args.render_test:
+                # render_test switches to test poses
+                images = images[i_test]
+            else:
+                # Default is smoother render_poses path
+                images = None
+
+            testsavedir = os.path.join(basedir, expname, 'renderonly_{}_{:06d}'.format('test' if args.render_test else 'path', start))
+            os.makedirs(testsavedir, exist_ok=True)
+            print('test poses shape', render_poses.shape)
+
+            rgbs, _ = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test, gt_imgs=images, savedir=testsavedir, render_factor=args.render_factor)
+            print('Done rendering', testsavedir)
+            imageio.mimwrite(os.path.join(testsavedir, 'video.mp4'), to8b(rgbs), fps=30, quality=8)
+
+            return
+
+    # Prepare raybatch tensor if batching random rays
+    N_rand = args.N_rand
+    use_batching = not args.no_batching  # default here is no_batching=True (so use_batching=False)
+    # non-default path: random ray batching across all images
+    if use_batching:
+        # For random ray batching
+        print('get rays')
+        rays = np.stack([get_rays_np(H, W, K, p) for p in poses[:,:3,:4]], 0)  # [N, ro+rd, H, W, 3]
+        print('done, concats')
+        rays_rgb = np.concatenate([rays, images[:,None]], 1)  # [N, ro+rd+rgb, H, W, 3]
+        rays_rgb = np.transpose(rays_rgb, [0,2,3,1,4])  # [N, H, W, ro+rd+rgb, 3]
+        rays_rgb = np.stack([rays_rgb[i] for i in i_train], 0)  # train images only
+        rays_rgb = np.reshape(rays_rgb, [-1,3,3])  # [(N-1)*H*W, ro+rd+rgb, 3]
+        rays_rgb = rays_rgb.astype(np.float32)
+        print('shuffle rays')
+        np.random.shuffle(rays_rgb)
+
+        print('done')
+        i_batch = 0
+
+    # Move training data to GPU
+    if use_batching:
+        images = torch.Tensor(images).to(device)
+    poses = torch.Tensor(poses).to(device)
+    if use_batching:
+        rays_rgb = torch.Tensor(rays_rgb).to(device)
+
+
+    # N_iters = 200000 + 1
+    # N_iters = 100000 + 1
+    # N_iters = 1000000 + 1
+    N_iters = 10000000 + 1
+
+
+    print('Begin')
+    print('TRAIN views are', i_train)
+    print('TEST views are', i_test)
+    print('VAL views are', i_val)
+
+    # Summary writers
+    # writer = SummaryWriter(os.path.join(basedir, 'summaries', expname))
+
+    start = start + 1
+    for i in trange(start, N_iters):
+        time0 = time.time()
+
+        # Sample random ray batch
+        if use_batching:
+            # Random over all images
+            batch = rays_rgb[i_batch:i_batch+N_rand]  # [B, 2+1, 3*?]
+            batch = torch.transpose(batch, 0, 1)
+            batch_rays, target_s = batch[:2], batch[2]
+
+            i_batch += N_rand
+            if i_batch >= rays_rgb.shape[0]:
+                print("Shuffle data after an epoch!")
+                rand_idx = torch.randperm(rays_rgb.shape[0])
+                rays_rgb = rays_rgb[rand_idx]
+                i_batch = 0
+
+        else:
+            # Random from one image
+            img_i = np.random.choice(i_train)
+            target = images[img_i]
+            target = torch.Tensor(target).to(device)
+            pose = poses[img_i, :3,:4]
+
+            if N_rand is not None:
+                rays_o, rays_d = get_rays(H, W, K, torch.Tensor(pose))  # (H, W, 3), (H, W, 3)
+                # print(H,W,K)
+                if i < args.precrop_iters:
+                    dH = int(H//2 * args.precrop_frac)
+                    dW = int(W//2 * args.precrop_frac)
+                    coords = torch.stack(
+                        torch.meshgrid(
+                            torch.linspace(H//2 - dH, H//2 + dH - 1, 2*dH),
+                            torch.linspace(W//2 - dW, W//2 + dW - 1, 2*dW)
+                        ), -1)
+                    if i == start:
+                        print(f"[Config] Center cropping of size {2*dH} x {2*dW} is enabled until iter {args.precrop_iters}")
+                else:
+                    # This only builds the grid of pixel coordinates; no rays are cast yet.
+                    coords = torch.stack(torch.meshgrid(torch.linspace(0, H-1, H), torch.linspace(0, W-1, W)), -1)  # (H, W, 2)
+                    # coords = torch.stack(torch.meshgrid(torch.linspace(0, np.sin(H-1), H), torch.linspace(0, np.sin(W-1), W)), -1)  # (H, W, 2)
+
+                coords = torch.reshape(coords, [-1,2])  # (H * W, 2)
+                # print('coords', coords.shape[0], N_rand)
+                # Pick a random subset of pixels from the grid to cast rays through
+                select_inds = np.random.choice(int(coords.shape[0]), size=[N_rand], replace=False)  # (N_rand,)
+                # long() casts to int64
+                select_coords = coords[select_inds].long()  # (N_rand, 2)
+                rays_o = rays_o[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
+                rays_d = rays_d[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
+
+                batch_rays = torch.stack([rays_o, rays_d], 0)  # stacked ray info (origins, directions)
+                target_s = target[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
+
+
+        #####  Core optimization loop  #####
+        rgb, disp, acc, extras = render(H, W, K, chunk=args.chunk, rays=batch_rays,
+                                        verbose=i < 10, retraw=True,
+                                        **render_kwargs_train)
+
+
+
+        # print("rays_o", rays_o)
+        # print("rays_d", rays_d)
+        #
+        optimizer.zero_grad()
+        img_loss = img2mse(rgb, target_s)
+        trans = extras['raw'][...,-1]
+        loss = img_loss
+        psnr = mse2psnr(img_loss)
+
+        if 'rgb0' in extras:
+            img_loss0 = img2mse(extras['rgb0'], target_s)
+            loss = loss + img_loss0
+            psnr0 = mse2psnr(img_loss0)
+
+        loss.backward()
+        optimizer.step()
+
+        # NOTE: IMPORTANT!
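+        # For intuition on the metrics above, assuming the usual helper
+        # definition mse2psnr(x) = -10 * log10(x) (consistent with the
+        # commented-out PSNR computation in render_path): an MSE of 0.01
+        # corresponds to 20 dB PSNR and an MSE of 0.001 to 30 dB. When a
+        # coarse head is present ('rgb0' in extras), its MSE is simply added
+        # to the fine loss before backprop.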
+        ###   update learning rate   ###
+        decay_rate = 0.1
+        decay_steps = args.lrate_decay * 1000
+        new_lrate = args.lrate * (decay_rate ** (global_step / decay_steps))
+        for param_group in optimizer.param_groups:
+            param_group['lr'] = new_lrate
+        ################################
+
+        dt = time.time()-time0
+        # print(f"Step: {global_step}, Loss: {loss}, Time: {dt}")
+        #####           end            #####
+
+        # Rest is logging
+        if i%args.i_weights==0:
+            path = os.path.join(basedir, expname, '{:06d}.tar'.format(i))
+            torch.save({
+                'global_step': global_step,
+                'network_fn_state_dict': render_kwargs_train['network_fn'].state_dict(),
+                'network_fine_state_dict': render_kwargs_train['network_fine'].state_dict(),
+                'optimizer_state_dict': optimizer.state_dict(),
+            }, path)
+            print('Saved checkpoints at', path)
+
+        if i%args.i_video==0 and i > 0:
+            # Turn on testing mode
+            with torch.no_grad():
+                rgbs, disps = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test)
+            print('Done, saving', rgbs.shape, disps.shape)
+            moviebase = os.path.join(basedir, expname, '{}_spiral_{:06d}_'.format(expname, i))
+            imageio.mimwrite(moviebase + 'rgb.mp4', to8b(rgbs), fps=30, quality=8)
+            imageio.mimwrite(moviebase + 'disp.mp4', to8b(disps / np.max(disps)), fps=30, quality=8)
+            save_tensor_to_npz(rays_o,f'{basedir}/{expname}/{i}_ray_o')
+            save_tensor_to_npz(rays_d,f'{basedir}/{expname}/{i}_ray_d')
+            save_tensor_to_npz(batch_rays,f'{basedir}/{expname}/{i}_batch_rays')
+            save_tensor_to_npz(coords,f'{basedir}/{expname}/{i}_coords')
+            save_tensor_to_npz(target_s,f'{basedir}/{expname}/{i}_target_s')
+            print(f"extras:{extras}")
+            save_tensor_to_npz(extras['raw'],f'{basedir}/{expname}/{i}_extras')
+            # save_tensor_to_npz(extras,f'{basedir}/{expname}/{i}_extras')
+            save_tensor_to_npz(rgb,f'{basedir}/{expname}/{i}_rgb')
+            if args.use_viewdirs:
+                render_kwargs_test['c2w_staticcam'] = render_poses[0][:3,:4]
+                with torch.no_grad():
+                    # rgbs_still, _ = render_path(render_poses, hwf, args.chunk, render_kwargs_test)
+                    rgbs_still, _ = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test)
+                render_kwargs_test['c2w_staticcam'] = None
+                imageio.mimwrite(moviebase + 'rgb_still.mp4', to8b(rgbs_still), fps=30, quality=8)
+
+        if i%args.i_testset==0 and i > 0:
+            testsavedir = os.path.join(basedir, expname, 'testset_{:06d}'.format(i))
+            os.makedirs(testsavedir, exist_ok=True)
+            print('test poses shape', poses[i_test].shape)
+            with torch.no_grad():
+                render_path(torch.Tensor(poses[i_test]).to(device), hwf, K, args.chunk, render_kwargs_test, gt_imgs=images[i_test], savedir=testsavedir)
+            print('Saved test set')
+
+
+
+        if i%args.i_print==0:
+            tqdm.write(f"[TRAIN] Iter: {i} Loss: {loss.item()}  PSNR: {psnr.item()}")
+        """
+            # print(expname, i, psnr.detach(), loss.detach(), global_step.cpu().detach().numpy())
+            print('iter time {:.05f}'.format(dt))
+
+            with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_print):
+                tf.contrib.summary.scalar('loss', loss)
+                tf.contrib.summary.scalar('psnr', psnr)
+                tf.contrib.summary.histogram('tran', trans)
+                if args.N_importance > 0:
+                    tf.contrib.summary.scalar('psnr0', psnr0)
+
+
+            if i%args.i_img==0:
+
+                # Log a rendered validation view to Tensorboard
+                img_i=np.random.choice(i_val)
+                target = images[img_i]
+                pose = poses[img_i, :3,:4]
+                with torch.no_grad():
+                    rgb, disp, acc, extras = render(H, W, focal, chunk=args.chunk, c2w=pose,
+                                                        **render_kwargs_test)
+
+                psnr = mse2psnr(img2mse(rgb, target))
+
+                with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_img):
+
+                    tf.contrib.summary.image('rgb', to8b(rgb)[tf.newaxis])
+                    tf.contrib.summary.image('disp', disp[tf.newaxis,...,tf.newaxis])
+                    tf.contrib.summary.image('acc', acc[tf.newaxis,...,tf.newaxis])
+
+                    tf.contrib.summary.scalar('psnr_holdout', psnr)
+                    tf.contrib.summary.image('rgb_holdout', target[tf.newaxis])
+
+
+            if args.N_importance > 0:
+
+                with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_img):
+                    tf.contrib.summary.image('rgb0', to8b(extras['rgb0'])[tf.newaxis])
+                    tf.contrib.summary.image('disp0', extras['disp0'][tf.newaxis,...,tf.newaxis])
+                    tf.contrib.summary.image('z_std', extras['z_std'][tf.newaxis,...,tf.newaxis])
+        """
+
+        global_step += 1
+
+
+if __name__=='__main__':
+    torch.set_default_tensor_type('torch.cuda.FloatTensor')
+
+    train()
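+
+# The .npz dumps written by save_tensor_to_npz above can be loaded back for
+# offline inspection. A minimal sketch (the path is illustrative; np.savez
+# stores an unnamed array under the key 'arr_0'):
+#   import numpy as np
+#   data = np.load('./logs/expname/10000_rgb.npz', allow_pickle=True)
+#   rgb = np.asarray(data['arr_0'])
+#   print(np.shape(rgb))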
diff --git a/run_nerf.py b/run_nerf.py
index bc270be86..a06d05f85 100644
--- a/run_nerf.py
+++ b/run_nerf.py
@@ -4,6 +4,7 @@
 import json
 import random
 import time
+from load_synth360 import load_synth360_data
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
@@ -18,7 +19,7 @@
 from load_blender import load_blender_data
 from load_LINEMOD import load_LINEMOD_data
 
-
+import json
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 np.random.seed(0)
 DEBUG = False
@@ -119,6 +120,7 @@ def render(H, W, K, chunk=1024*32, rays=None, c2w=None, ndc=True,
         near, far = near * torch.ones_like(rays_d[...,:1]), far * torch.ones_like(rays_d[...,:1])
     rays = torch.cat([rays_o, rays_d, near, far], -1)
+    save_tensor_to_npz(rays,"rays_all_info")
     if use_viewdirs:
         rays = torch.cat([rays, viewdirs], -1)
@@ -216,7 +218,7 @@ def create_nerf(args):
     if args.ft_path is not None and args.ft_path!='None':
         ckpts = [args.ft_path]
     else:
-        ckpts = [os.path.join(basedir, expname, f) for f in sorted(os.listdir(os.path.join(basedir, expname))) if 'tar' in f]
+        ckpts = [os.path.join(basedir, expname, f) for f in sorted(os.listdir(os.path.join(basedir, expname))) if '.tar' in f]
     print('Found ckpts', ckpts)
     if len(ckpts) > 0 and not args.no_reload:
@@ -292,6 +294,7 @@ def raw2outputs(raw, z_vals, rays_d, raw_noise_std=0, white_bkgd=False, pytest=F
     alpha = raw2alpha(raw[...,3] + noise, dists)  # [N_rays, N_samples]
     # weights = alpha * tf.math.cumprod(1.-alpha + 1e-10, -1, exclusive=True)
+    # torch.cumprod: running product -- each element is multiplied by everything before it
     weights = alpha * torch.cumprod(torch.cat([torch.ones((alpha.shape[0], 1)), 1.-alpha + 1e-10], -1), -1)[:, :-1]
     rgb_map = torch.sum(weights[...,None] * rgb, -2)  # [N_rays, 3]
@@ -381,8 +384,9 @@ def render_rays(ray_batch,
     pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None] # [N_rays, N_samples, 3]
 
-#     raw = run_network(pts)
-    raw = network_query_fn(pts, viewdirs, network_fn)
+
+    raw = network_query_fn(pts, viewdirs, network_fn) # raw = run_network(pts)
+    # save_tensor_to_npz(raw,"")
     rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest)
     if N_importance > 0:
@@ -530,6 +534,16 @@ def config_parser():
     return parser
 
+def save_tensor_to_npz(tensor, file_path):
+    """
+    Write a tensor out to an NPZ file.
+
+    Args:
+        tensor (numpy.ndarray): tensor to write out
+        file_path (str): output file path
+    """
+    tensor_data = tensor.tolist()
+    np.savez(file_path,tensor_data)
 
 def train():
@@ -566,6 +580,36 @@ def train():
             far = 1.
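+            # (With NDC the rays are re-parameterized so that t in [0, 1] spans
+            #  the near plane out to infinity, hence near=0., far=1. here;
+            #  see ndc_rays in run_nerf_helpers.py.)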
     print('NEAR FAR', near, far)
+    if args.dataset_type == 'synth360':
+        images, poses, bds, render_poses, i_test = load_synth360_data(args.datadir)
+        # print(i_test)
+        hwf = poses[0,:3,-1]
+        poses = poses[:,:3,:4]
+        print(poses.shape)
+        print('Loaded 360', images.shape, render_poses.shape, hwf, args.datadir)
+        # if not isinstance(i_test, list):
+        #     i_test = [i_test]
+
+        # if args.llffhold > 0:
+        #     print('Auto LLFF holdout,', args.llffhold)
+        #     i_test = np.arange(images.shape[0])[::args.llffhold]
+        #     print(":",i_test)
+        i_val = i_test
+        i_train = np.array([i for i in range(0, i_test[0])])
+
+        # i_train = np.array([i for i in np.arange(int(images.shape[0])) if
+        #                 (i not in i_test and i not in i_val)])
+        print(i_train)
+        print('DEFINING BOUNDS')
+        if args.no_ndc:
+            near = np.ndarray.min(bds) * .9
+            far = np.ndarray.max(bds) * 1.
+
+        else:
+            near = 0.
+            far = 1.
+        print('NEAR FAR', near, far)
+
     elif args.dataset_type == 'blender':
         images, poses, render_poses, hwf, i_split = load_blender_data(args.datadir, args.half_res, args.testskip)
         print('Loaded blender', images.shape, render_poses.shape, hwf, args.datadir)
@@ -602,13 +646,16 @@ def train():
         hemi_R = np.mean(np.linalg.norm(poses[:,:3,-1], axis=-1))
         near = hemi_R-1.
         far = hemi_R+1.
-
+
     else:
         print('Unknown dataset type', args.dataset_type, 'exiting')
         return
 
     # Cast intrinsics to right types
-    H, W, focal = hwf
+    print("hwf", hwf)
+    _, _, focal = hwf
+    # H, W, focal = hwf
+    H, W, focal = images.shape[1], images.shape[2], 1
     H, W = int(H), int(W)
     hwf = [H, W, focal]
 
@@ -637,7 +684,7 @@ def train():
         file.write(open(args.config, 'r').read())
 
     # Create nerf model
-    render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer = create_nerf(args)
+    render_kwargs_train, render_kwargs_test, start, grad_vars, optimizer = create_nerf(args)
     global_step = start
 
     bds_dict = {
@@ -673,7 +720,8 @@ def train():
 
     # Prepare raybatch tensor if batching random rays
     N_rand = args.N_rand
-    use_batching = not args.no_batching
+    use_batching = not args.no_batching  # default is no_batching=True (i.e. use_batching=False)
+    # Non-default path
     if use_batching:
         # For random ray batching
         print('get rays')
@@ -698,7 +746,10 @@ def train():
         rays_rgb = torch.Tensor(rays_rgb).to(device)
 
 
-    N_iters = 200000 + 1
+    # N_iters = 200000 + 1
+    # N_iters = 100000 + 1
+    N_iters = 1000000 + 1
+
     print('Begin')
     print('TRAIN views are', i_train)
     print('TEST views are', i_test)
@@ -734,7 +785,7 @@ def train():
 
         if N_rand is not None:
             rays_o, rays_d = get_rays(H, W, K, torch.Tensor(pose))  # (H, W, 3), (H, W, 3)
-
+            # print(H,W,K)
             if i < args.precrop_iters:
                 dH = int(H//2 * args.precrop_frac)
                 dW = int(W//2 * args.precrop_frac)
@@ -746,21 +797,33 @@ def train():
                 if i == start:
                     print(f"[Config] Center cropping of size {2*dH} x {2*dW} is enabled until iter {args.precrop_iters}")
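+                # e.g. (illustrative, with the default precrop_frac=0.5 and the
+                # 320x640 images seen in tmp.log):
+                #   dH = int(160 * 0.5) = 80, dW = int(320 * 0.5) = 160,
+                # so early iterations draw rays only from a centered 160x320 crop.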
             else:
+                # This branch just builds the full (H, W) grid of candidate pixel coordinates.
                 coords = torch.stack(torch.meshgrid(torch.linspace(0, H-1, H), torch.linspace(0, W-1, W)), -1)  # (H, W, 2)
-
+                # coords = torch.stack(torch.meshgrid(torch.linspace(0, np.sin(H-1), H), torch.linspace(0, np.sin(W-1), W)), -1)  # (H, W, 2)
+
             coords = torch.reshape(coords, [-1,2])  # (H * W, 2)
-            select_inds = np.random.choice(coords.shape[0], size=[N_rand], replace=False)  # (N_rand,)
+            # print('coords',coords.shape[0],N_rand)
+            # Randomly select N_rand rays out of the coordinate grid
+            select_inds = np.random.choice(int(coords.shape[0]), size=[N_rand], replace=False)  # (N_rand,)
+            # .long() casts the coordinates to int64 for indexing
             select_coords = coords[select_inds].long()  # (N_rand, 2)
             rays_o = rays_o[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
             rays_d = rays_d[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
-            batch_rays = torch.stack([rays_o, rays_d], 0)
+
+            batch_rays = torch.stack([rays_o, rays_d], 0)  # ray info: stacked origins and directions
             target_s = target[select_coords[:, 0], select_coords[:, 1]]  # (N_rand, 3)
 
+
         #####  Core optimization loop  #####
         rgb, disp, acc, extras = render(H, W, K, chunk=args.chunk, rays=batch_rays,
                                                 verbose=i < 10, retraw=True,
                                                 **render_kwargs_train)
+
+
+        # print("ray_o",rays_o)
+        # print("ray_d",rays_d)
+        #
         optimizer.zero_grad()
         img_loss = img2mse(rgb, target_s)
         trans = extras['raw'][...,-1]
@@ -807,13 +870,22 @@ def train():
             moviebase = os.path.join(basedir, expname, '{}_spiral_{:06d}_'.format(expname, i))
             imageio.mimwrite(moviebase + 'rgb.mp4', to8b(rgbs), fps=30, quality=8)
             imageio.mimwrite(moviebase + 'disp.mp4', to8b(disps / np.max(disps)), fps=30, quality=8)
-
-            # if args.use_viewdirs:
-            #     render_kwargs_test['c2w_staticcam'] = render_poses[0][:3,:4]
-            #     with torch.no_grad():
-            #         rgbs_still, _ = render_path(render_poses, hwf, args.chunk, render_kwargs_test)
-            #     render_kwargs_test['c2w_staticcam'] = None
-            #     imageio.mimwrite(moviebase + 'rgb_still.mp4', to8b(rgbs_still), fps=30, quality=8)
+            save_tensor_to_npz(rays_o,f'{basedir}/{expname}/{i}_ray_o')
+            save_tensor_to_npz(rays_d,f'{basedir}/{expname}/{i}_ray_d')
+            save_tensor_to_npz(batch_rays,f'{basedir}/{expname}/{i}_batch_rays')
+            save_tensor_to_npz(coords,f'{basedir}/{expname}/{i}_coords')
+            save_tensor_to_npz(target_s,f'{basedir}/{expname}/{i}_target_s')
+            print(f"extras:{extras}")
+            save_tensor_to_npz(extras['raw'],f'{basedir}/{expname}/{i}_extras')
+            # save_tensor_to_npz(extras,f'{basedir}/{expname}/{i}_extras')
+            save_tensor_to_npz(rgb,f'{basedir}/{expname}/{i}_rgb')
+            if args.use_viewdirs:
+                render_kwargs_test['c2w_staticcam'] = render_poses[0][:3,:4]
+                with torch.no_grad():
+                    # rgbs_still, _ = render_path(render_poses, hwf, args.chunk, render_kwargs_test)
+                    rgbs_still, _ = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test)
+                render_kwargs_test['c2w_staticcam'] = None
+                imageio.mimwrite(moviebase + 'rgb_still.mp4', to8b(rgbs_still), fps=30, quality=8)
 
         if i%args.i_testset==0 and i > 0:
             testsavedir = os.path.join(basedir, expname, 'testset_{:06d}'.format(i))
@@ -827,8 +899,8 @@ def train():
 
         if i%args.i_print==0:
             tqdm.write(f"[TRAIN] Iter: {i} Loss: {loss.item()}  PSNR: {psnr.item()}")
-        """
-            print(expname, i, psnr.numpy(), loss.numpy(), global_step.numpy())
+        """
+            # print(expname, i, psnr.detach(), loss.detach(), global_step.cpu().detach().numpy())
             print('iter time {:.05f}'.format(dt))
 
             with tf.contrib.summary.record_summaries_every_n_global_steps(args.i_print):
@@ -867,7 +939,7 @@ def train():
                     tf.contrib.summary.image('rgb0', to8b(extras['rgb0'])[tf.newaxis])
                     tf.contrib.summary.image('disp0', extras['disp0'][tf.newaxis,...,tf.newaxis])
                     tf.contrib.summary.image('z_std', extras['z_std'][tf.newaxis,...,tf.newaxis])
-        """
+        """
 
         global_step += 1
 
diff --git a/run_nerf_helpers.py b/run_nerf_helpers.py
index bc6ee779d..2e71076d8 100644
--- a/run_nerf_helpers.py
+++ b/run_nerf_helpers.py
@@ -90,6 +90,9 @@ def __init__(self, D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=
             self.feature_linear = nn.Linear(W, W)
             self.alpha_linear = nn.Linear(W, 1)
             self.rgb_linear = nn.Linear(W//2, 3)
+            # self.r_linear = nn.Linear(W//2, 1)
+            # self.g_linear = nn.Linear(W//2, 1)
+            # self.b_linear = nn.Linear(W//2, 1)
         else:
             self.output_linear = nn.Linear(W, output_ch)
 
@@ -110,8 +113,13 @@ def forward(self, x):
             for i, l in enumerate(self.views_linears):
                 h = self.views_linears[i](h)
                 h = F.relu(h)
             rgb = self.rgb_linear(h)
+            # Split_RGB (experimental per-channel heads, currently disabled)
+            # r = self.r_linear(h)
+            # g = self.g_linear(h)
+            # b = self.b_linear(h)
+            # rgb = torch.cat([r,g,b],-1)
             outputs = torch.cat([rgb, alpha], -1)
         else:
             outputs = self.output_linear(h)
@@ -151,6 +158,10 @@ def load_weights_from_keras(self, weights):
 
 # Ray helpers
 def get_rays(H, W, K, c2w):
+    # rays_d: each ray's direction (theta, phi, omega) -- already in world coordinates
+    # rays_o: each ray's xyz position, also in world coordinates
+    # dirs: take the W,H pixel indices, arrange them into arrays, and fold in the lens intrinsics;
+    # per ray, theta is scaled by the focal length along W, phi by the focal length along H, and omega is set to 1
     i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))  # pytorch's meshgrid has indexing='ij'
     i = i.t()
     j = j.t()
@@ -171,6 +182,34 @@ def get_rays_np(H, W, K, c2w):
     rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d))
     return rays_o, rays_d
 
+# def get_rays(H, W, K, c2w):
+#     # rays_d: each ray's direction (theta, phi, omega) -- in world coordinates
+#     # rays_o: each ray's xyz position, also in world coordinates
+#     # dirs: pixel indices arranged into arrays with the lens intrinsics folded in
+#     # theta is scaled by the focal length along W, phi by the focal length along H, omega is 1
+#     i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))  # pytorch's meshgrid has indexing='ij'
+#     i = i.t()
+#     j = j.t()
+#     # print(i,j)
+#     dirs = torch.stack([(i-K[0][2])/K[0][0]*torch.sin(i*np.pi), -(j-K[1][2])/K[1][1]*torch.sin(j*np.pi), -torch.ones_like(i)], -1)
+#     # Rotate ray directions from camera frame to the world frame
+#     # x_rotate =c2w[:3,:3][]
+#     rays_d = torch.sum(dirs[..., np.newaxis, :] * c2w[:3,:3] , -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+#     # Translate camera frame's origin to the world frame. It is the origin of all rays.
+#     rays_o = c2w[:3,-1].expand(rays_d.shape)
+#     return rays_o, rays_d
+
+
+# def get_rays_np(H, W, K, c2w):
+#     i, j = np.meshgrid(np.arange(W, dtype=np.float32), np.arange(H, dtype=np.float32), indexing='xy')
+#     dirs = torch.stack([(i-K[0][2])/K[0][0]*np.sin(i*np.pi), -(j-K[1][2])/K[1][1]*np.sin(j*np.pi), -torch.ones_like(i)], -1)
+#     # print(i,j)
+#     # dirs = np.stack([(i-K[0][2])/K[0][0], -(j-K[1][2])/K[1][1], -np.ones_like(i)], -1)
+#     # Rotate ray directions from camera frame to the world frame
+#     rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3,:3], -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+#     # Translate camera frame's origin to the world frame. It is the origin of all rays.
+# rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d)) +# return rays_o, rays_d def ndc_rays(H, W, focal, near, rays_o, rays_d): # Shift ray origins to near plane diff --git a/run_nerf_helpers_new.py b/run_nerf_helpers_new.py new file mode 100644 index 000000000..53f754da0 --- /dev/null +++ b/run_nerf_helpers_new.py @@ -0,0 +1,295 @@ +import torch +# torch.autograd.set_detect_anomaly(True) +import torch.nn as nn +import torch.nn.functional as F +import numpy as np + + +# Misc +img2mse = lambda x, y : torch.mean((x - y) ** 2) +mse2psnr = lambda x : -10. * torch.log(x) / torch.log(torch.Tensor([10.])) +to8b = lambda x : (255*np.clip(x,0,1)).astype(np.uint8) + + +# Positional encoding (section 5.1) +class Embedder: + def __init__(self, **kwargs): + self.kwargs = kwargs + self.create_embedding_fn() + + def create_embedding_fn(self): + embed_fns = [] + d = self.kwargs['input_dims'] + out_dim = 0 + if self.kwargs['include_input']: + embed_fns.append(lambda x : x) + out_dim += d + + max_freq = self.kwargs['max_freq_log2'] + N_freqs = self.kwargs['num_freqs'] + + if self.kwargs['log_sampling']: + freq_bands = 2.**torch.linspace(0., max_freq, steps=N_freqs) + else: + freq_bands = torch.linspace(2.**0., 2.**max_freq, steps=N_freqs) + + for freq in freq_bands: + for p_fn in self.kwargs['periodic_fns']: + embed_fns.append(lambda x, p_fn=p_fn, freq=freq : p_fn(x * freq)) + out_dim += d + + self.embed_fns = embed_fns + self.out_dim = out_dim + + def embed(self, inputs): + return torch.cat([fn(inputs) for fn in self.embed_fns], -1) + + +def get_embedder(multires, i=0): + if i == -1: + return nn.Identity(), 3 + + embed_kwargs = { + 'include_input' : True, + 'input_dims' : 3, + 'max_freq_log2' : multires-1, + 'num_freqs' : multires, + 'log_sampling' : True, + 'periodic_fns' : [torch.sin, torch.cos], + } + + embedder_obj = Embedder(**embed_kwargs) + embed = lambda x, eo=embedder_obj : eo.embed(x) + return embed, embedder_obj.out_dim + + +# Model +class NeRF(nn.Module): + def __init__(self, D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=[4], use_viewdirs=False): + """ + """ + super(NeRF, self).__init__() + self.D = D + self.W = W + self.input_ch = input_ch + self.input_ch_views = input_ch_views + self.skips = skips + self.use_viewdirs = use_viewdirs + + self.pts_linears = nn.ModuleList( + [nn.Linear(input_ch, W)] + [nn.Linear(W, W) if i not in self.skips else nn.Linear(W + input_ch, W) for i in range(D-1)]) + + ### Implementation according to the official code release (https://github.com/bmild/nerf/blob/master/run_nerf_helpers.py#L104-L105) + self.views_linears = nn.ModuleList([nn.Linear(input_ch_views + W, W//2)]) + + ### Implementation according to the paper + # self.views_linears = nn.ModuleList( + # [nn.Linear(input_ch_views + W, W//2)] + [nn.Linear(W//2, W//2) for i in range(D//2)]) + + if use_viewdirs: + self.feature_linear = nn.Linear(W, W) + self.alpha_linear = nn.Linear(W, 1) + self.rgb_linear = nn.Linear(W//2, 3) + # self.r_linear = nn.Linear(W//2, 1) + # self.g_linear = nn.Linear(W//2, 1) + # self.b_linear = nn.Linear(W//2, 1) + else: + self.output_linear = nn.Linear(W, output_ch) + + def forward(self, x): + input_pts, input_views = torch.split(x, [self.input_ch, self.input_ch_views], dim=-1) + h = input_pts + for i, l in enumerate(self.pts_linears): + h = self.pts_linears[i](h) + h = F.relu(h) + if i in self.skips: + h = torch.cat([input_pts, h], -1) + + if self.use_viewdirs: + alpha = self.alpha_linear(h) + feature = self.feature_linear(h) + h = torch.cat([feature, input_views], 
-1)
+
+            for i, l in enumerate(self.views_linears):
+                h = self.views_linears[i](h)
+                h = F.relu(h)
+            rgb = self.rgb_linear(h)
+            # Split_RGB (experimental per-channel heads, currently disabled)
+            # r = self.r_linear(h)
+            # g = self.g_linear(h)
+            # b = self.b_linear(h)
+            # rgb = torch.cat([r,g,b],-1)
+            outputs = torch.cat([rgb, alpha], -1)
+        else:
+            outputs = self.output_linear(h)
+
+        return outputs
+
+    def load_weights_from_keras(self, weights):
+        assert self.use_viewdirs, "Not implemented if use_viewdirs=False"
+
+        # Load pts_linears
+        for i in range(self.D):
+            idx_pts_linears = 2 * i
+            self.pts_linears[i].weight.data = torch.from_numpy(np.transpose(weights[idx_pts_linears]))
+            self.pts_linears[i].bias.data = torch.from_numpy(np.transpose(weights[idx_pts_linears+1]))
+
+        # Load feature_linear
+        idx_feature_linear = 2 * self.D
+        self.feature_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_feature_linear]))
+        self.feature_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_feature_linear+1]))
+
+        # Load views_linears
+        idx_views_linears = 2 * self.D + 2
+        self.views_linears[0].weight.data = torch.from_numpy(np.transpose(weights[idx_views_linears]))
+        self.views_linears[0].bias.data = torch.from_numpy(np.transpose(weights[idx_views_linears+1]))
+
+        # Load rgb_linear
+        idx_rbg_linear = 2 * self.D + 4
+        self.rgb_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_rbg_linear]))
+        self.rgb_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_rbg_linear+1]))
+
+        # Load alpha_linear
+        idx_alpha_linear = 2 * self.D + 6
+        self.alpha_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_alpha_linear]))
+        self.alpha_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_alpha_linear+1]))
+
+
+
+# Ray helpers
+def get_rays(H, W, K, c2w):
+    # rays_d: each ray's direction (theta, phi, omega) -- in world coordinates
+    # rays_o: each ray's xyz position, also in world coordinates
+    # dirs: here the pixel indices are mapped linearly to +/-180 degrees instead of
+    # being scaled by the focal lengths; omega is set to 1
+    i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))  # pytorch's meshgrid has indexing='ij'
+    i = i.t()
+    j = j.t()
+    dirs = torch.stack([(i/W-1/2)*360, -(j/H-1/2)*360, -torch.ones_like(i)], -1)
+    # i, j = torch.meshgrid(torch.linspace(0, THETA-1, THETA), torch.linspace(0, PHI-1, PHI))  # pytorch's meshgrid has indexing='ij'
+    # i = i.t()
+    # j = j.t()
+    # dirs = torch.stack([(i-K[0][2])/K[0][0], -(j-K[1][2])/K[1][1], -torch.ones_like(i)], -1)
+
+    # Rotate ray directions from camera frame to the world frame
+    rays_d = torch.sum(dirs[..., np.newaxis, :] * c2w[:3,:3], -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+    # Translate camera frame's origin to the world frame. It is the origin of all rays.
+    # print(rays_d,c2w[:3,-1])
+
+
+    rays_o = c2w[:3,-1].expand(rays_d.shape)
+    # add rotate: scale the origin component-wise by the ray direction (experimental)
+    rays_o = rays_d*rays_o
+    # print(rays_d,c2w[:3,-1])
+    return rays_o, rays_d
+
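+
+# A possible spherical alternative for 360-degree inputs (a sketch only, not
+# used by the functions above, which keep the linear +/-180 mapping): map each
+# pixel (i, j) to longitude/latitude and build a unit direction vector.
+# def get_rays_spherical(H, W, c2w):
+#     i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))
+#     i, j = i.t(), j.t()
+#     lon = (i / W - 0.5) * 2 * np.pi    # [-pi, pi)
+#     lat = -(j / H - 0.5) * np.pi       # +pi/2 (top) .. -pi/2 (bottom)
+#     dirs = torch.stack([torch.cos(lat) * torch.sin(lon),
+#                         torch.sin(lat),
+#                         -torch.cos(lat) * torch.cos(lon)], -1)  # unit vectors
+#     rays_d = torch.sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
+#     rays_o = c2w[:3, -1].expand(rays_d.shape)
+#     return rays_o, rays_d
+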
+def get_rays_np(H, W, K, c2w):
+    i, j = np.meshgrid(np.arange(W, dtype=np.float32), np.arange(H, dtype=np.float32), indexing='xy')
+    dirs = np.stack([(i/W-1/2)*360, -(j/H-1/2)*360, -np.ones_like(i)], -1)
+
+    # dirs = np.stack([(i-K[0][2])/K[0][0]*np.pi, -(j-K[1][2])/K[1][1]*np.pi/2, -np.ones_like(i)], -1)
+    # Rotate ray directions from camera frame to the world frame
+    rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3,:3], -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+    # Translate camera frame's origin to the world frame. It is the origin of all rays.
+    # rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d))
+    # rotation ADDED
+    # rotated_p = np.dot(rays_d,c2w[:3,-1])
+    rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d))
+    rays_o = rays_d*rays_o
+    return rays_o, rays_d
+
+# def get_rays(H, W, K, c2w):
+#     # rays_d: each ray's direction (theta, phi, omega) -- in world coordinates
+#     # rays_o: each ray's xyz position, also in world coordinates
+#     # dirs: pixel indices arranged into arrays with the lens intrinsics folded in
+#     # theta is scaled by the focal length along W, phi by the focal length along H, omega is 1
+#     i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))  # pytorch's meshgrid has indexing='ij'
+#     i = i.t()
+#     j = j.t()
+#     # print(i,j)
+#     dirs = torch.stack([(i-K[0][2])/K[0][0]*torch.sin(i*np.pi), -(j-K[1][2])/K[1][1]*torch.sin(j*np.pi), -torch.ones_like(i)], -1)
+#     # Rotate ray directions from camera frame to the world frame
+#     # x_rotate =c2w[:3,:3][]
+#     rays_d = torch.sum(dirs[..., np.newaxis, :] * c2w[:3,:3] , -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+#     # Translate camera frame's origin to the world frame. It is the origin of all rays.
+#     rays_o = c2w[:3,-1].expand(rays_d.shape)
+#     return rays_o, rays_d
+
+
+# def get_rays_np(H, W, K, c2w):
+#     i, j = np.meshgrid(np.arange(W, dtype=np.float32), np.arange(H, dtype=np.float32), indexing='xy')
+#     dirs = torch.stack([(i-K[0][2])/K[0][0]*np.sin(i*np.pi), -(j-K[1][2])/K[1][1]*np.sin(j*np.pi), -torch.ones_like(i)], -1)
+#     # print(i,j)
+#     # dirs = np.stack([(i-K[0][2])/K[0][0], -(j-K[1][2])/K[1][1], -np.ones_like(i)], -1)
+#     # Rotate ray directions from camera frame to the world frame
+#     rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3,:3], -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+#     # Translate camera frame's origin to the world frame. It is the origin of all rays.
+#     rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d))
+#     return rays_o, rays_d
+
+def ndc_rays(H, W, focal, near, rays_o, rays_d):
+    # Shift ray origins to near plane
+    t = -(near + rays_o[...,2]) / rays_d[...,2]
+    rays_o = rays_o + t[...,None] * rays_d
+
+    # Projection
+    o0 = -1./(W/(2.*focal)) * rays_o[...,0] / rays_o[...,2]
+    o1 = -1./(H/(2.*focal)) * rays_o[...,1] / rays_o[...,2]
+    o2 = 1. + 2. * near / rays_o[...,2]
+
+    d0 = -1./(W/(2.*focal)) * (rays_d[...,0]/rays_d[...,2] - rays_o[...,0]/rays_o[...,2])
+    d1 = -1./(H/(2.*focal)) * (rays_d[...,1]/rays_d[...,2] - rays_o[...,1]/rays_o[...,2])
+    d2 = -2.
* near / rays_o[...,2] + + rays_o = torch.stack([o0,o1,o2], -1) + rays_d = torch.stack([d0,d1,d2], -1) + + return rays_o, rays_d + + +# Hierarchical sampling (section 5.2) +def sample_pdf(bins, weights, N_samples, det=False, pytest=False): + # Get pdf + weights = weights + 1e-5 # prevent nans + pdf = weights / torch.sum(weights, -1, keepdim=True) + cdf = torch.cumsum(pdf, -1) + cdf = torch.cat([torch.zeros_like(cdf[...,:1]), cdf], -1) # (batch, len(bins)) + + # Take uniform samples + if det: + u = torch.linspace(0., 1., steps=N_samples) + u = u.expand(list(cdf.shape[:-1]) + [N_samples]) + else: + u = torch.rand(list(cdf.shape[:-1]) + [N_samples]) + + # Pytest, overwrite u with numpy's fixed random numbers + if pytest: + np.random.seed(0) + new_shape = list(cdf.shape[:-1]) + [N_samples] + if det: + u = np.linspace(0., 1., N_samples) + u = np.broadcast_to(u, new_shape) + else: + u = np.random.rand(*new_shape) + u = torch.Tensor(u) + + # Invert CDF + u = u.contiguous() + inds = torch.searchsorted(cdf, u, right=True) + below = torch.max(torch.zeros_like(inds-1), inds-1) + above = torch.min((cdf.shape[-1]-1) * torch.ones_like(inds), inds) + inds_g = torch.stack([below, above], -1) # (batch, N_samples, 2) + + # cdf_g = tf.gather(cdf, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2) + # bins_g = tf.gather(bins, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2) + matched_shape = [inds_g.shape[0], inds_g.shape[1], cdf.shape[-1]] + cdf_g = torch.gather(cdf.unsqueeze(1).expand(matched_shape), 2, inds_g) + bins_g = torch.gather(bins.unsqueeze(1).expand(matched_shape), 2, inds_g) + + denom = (cdf_g[...,1]-cdf_g[...,0]) + denom = torch.where(denom<1e-5, torch.ones_like(denom), denom) + t = (u-cdf_g[...,0])/denom + samples = bins_g[...,0] + t * (bins_g[...,1]-bins_g[...,0]) + + return samples diff --git a/run_nerf_helpers_new_np.py b/run_nerf_helpers_new_np.py new file mode 100644 index 000000000..461a1fab1 --- /dev/null +++ b/run_nerf_helpers_new_np.py @@ -0,0 +1,286 @@ +import torch +# torch.autograd.set_detect_anomaly(True) +import torch.nn as nn +import torch.nn.functional as F +import numpy as np + + +# Misc +img2mse = lambda x, y : torch.mean((x - y) ** 2) +mse2psnr = lambda x : -10. 
* torch.log(x) / torch.log(torch.Tensor([10.])) +to8b = lambda x : (255*np.clip(x,0,1)).astype(np.uint8) + + +# Positional encoding (section 5.1) +class Embedder: + def __init__(self, **kwargs): + self.kwargs = kwargs + self.create_embedding_fn() + + def create_embedding_fn(self): + embed_fns = [] + d = self.kwargs['input_dims'] + out_dim = 0 + if self.kwargs['include_input']: + embed_fns.append(lambda x : x) + out_dim += d + + max_freq = self.kwargs['max_freq_log2'] + N_freqs = self.kwargs['num_freqs'] + + if self.kwargs['log_sampling']: + freq_bands = 2.**torch.linspace(0., max_freq, steps=N_freqs) + else: + freq_bands = torch.linspace(2.**0., 2.**max_freq, steps=N_freqs) + + for freq in freq_bands: + for p_fn in self.kwargs['periodic_fns']: + embed_fns.append(lambda x, p_fn=p_fn, freq=freq : p_fn(x * freq)) + out_dim += d + + self.embed_fns = embed_fns + self.out_dim = out_dim + + def embed(self, inputs): + return torch.cat([fn(inputs) for fn in self.embed_fns], -1) + + +def get_embedder(multires, i=0): + if i == -1: + return nn.Identity(), 3 + + embed_kwargs = { + 'include_input' : True, + 'input_dims' : 3, + 'max_freq_log2' : multires-1, + 'num_freqs' : multires, + 'log_sampling' : True, + 'periodic_fns' : [torch.sin, torch.cos], + } + + embedder_obj = Embedder(**embed_kwargs) + embed = lambda x, eo=embedder_obj : eo.embed(x) + return embed, embedder_obj.out_dim + + +# Model +class NeRF(nn.Module): + def __init__(self, D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=[4], use_viewdirs=False): + """ + """ + super(NeRF, self).__init__() + self.D = D + self.W = W + self.input_ch = input_ch + self.input_ch_views = input_ch_views + self.skips = skips + self.use_viewdirs = use_viewdirs + + self.pts_linears = nn.ModuleList( + [nn.Linear(input_ch, W)] + [nn.Linear(W, W) if i not in self.skips else nn.Linear(W + input_ch, W) for i in range(D-1)]) + + ### Implementation according to the official code release (https://github.com/bmild/nerf/blob/master/run_nerf_helpers.py#L104-L105) + self.views_linears = nn.ModuleList([nn.Linear(input_ch_views + W, W//2)]) + + ### Implementation according to the paper + # self.views_linears = nn.ModuleList( + # [nn.Linear(input_ch_views + W, W//2)] + [nn.Linear(W//2, W//2) for i in range(D//2)]) + + if use_viewdirs: + self.feature_linear = nn.Linear(W, W) + self.alpha_linear = nn.Linear(W, 1) + self.rgb_linear = nn.Linear(W//2, 3) + # self.r_linear = nn.Linear(W//2, 1) + # self.g_linear = nn.Linear(W//2, 1) + # self.b_linear = nn.Linear(W//2, 1) + else: + self.output_linear = nn.Linear(W, output_ch) + + def forward(self, x): + input_pts, input_views = torch.split(x, [self.input_ch, self.input_ch_views], dim=-1) + h = input_pts + for i, l in enumerate(self.pts_linears): + h = self.pts_linears[i](h) + h = F.relu(h) + if i in self.skips: + h = torch.cat([input_pts, h], -1) + + if self.use_viewdirs: + alpha = self.alpha_linear(h) + feature = self.feature_linear(h) + h = torch.cat([feature, input_views], -1) + + for i, l in enumerate(self.views_linears): + h = self.views_linears[i](h) + h = F.relu(h) + rgb = self.rgb_linear(h) + # Split_RGB + # r = self.r_linear(h) + # g = self.g_linear(h) + # b = self.b_linear(h) + # rgb = torch.cat([r,g,b],-1) + outputs = torch.cat([rgb, alpha], -1) + else: + outputs = self.output_linear(h) + + return outputs + + def load_weights_from_keras(self, weights): + assert self.use_viewdirs, "Not implemented if use_viewdirs=False" + + # Load pts_linears + for i in range(self.D): + idx_pts_linears = 2 * i + 
self.pts_linears[i].weight.data = torch.from_numpy(np.transpose(weights[idx_pts_linears]))
+            self.pts_linears[i].bias.data = torch.from_numpy(np.transpose(weights[idx_pts_linears+1]))
+
+        # Load feature_linear
+        idx_feature_linear = 2 * self.D
+        self.feature_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_feature_linear]))
+        self.feature_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_feature_linear+1]))
+
+        # Load views_linears
+        idx_views_linears = 2 * self.D + 2
+        self.views_linears[0].weight.data = torch.from_numpy(np.transpose(weights[idx_views_linears]))
+        self.views_linears[0].bias.data = torch.from_numpy(np.transpose(weights[idx_views_linears+1]))
+
+        # Load rgb_linear
+        idx_rbg_linear = 2 * self.D + 4
+        self.rgb_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_rbg_linear]))
+        self.rgb_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_rbg_linear+1]))
+
+        # Load alpha_linear
+        idx_alpha_linear = 2 * self.D + 6
+        self.alpha_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_alpha_linear]))
+        self.alpha_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_alpha_linear+1]))
+
+
+
+# Ray helpers
+def get_rays(H, W, K, c2w):
+    # rays_d: each ray's direction (theta, phi, omega) -- in world coordinates
+    # rays_o: each ray's xyz position, also in world coordinates
+    # dirs: pixel indices mapped linearly to +/-180 degrees; omega is set to 1
+    i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))  # pytorch's meshgrid has indexing='ij'
+    i = i.t()
+    j = j.t()
+    dirs = torch.stack([(i/W-1/2)*360, -(j/H-1/2)*360, -torch.ones_like(i)], -1)
+    # print(dirs)
+    # i, j = torch.meshgrid(torch.linspace(0, THETA-1, THETA), torch.linspace(0, PHI-1, PHI))  # pytorch's meshgrid has indexing='ij'
+    # i = i.t()
+    # j = j.t()
+    # dirs = torch.stack([(i-K[0][2])/K[0][0], -(j-K[1][2])/K[1][1], -torch.ones_like(i)], -1)
+
+    # Rotate ray directions from camera frame to the world frame
+    rays_d = torch.sum(c2w[:3,:3] * dirs[..., np.newaxis, :] * torch.inverse(c2w[:3,:3]), -1)  # mirrors the np.linalg.inv form in get_rays_np below
+    # Translate camera frame's origin to the world frame. It is the origin of all rays.
+    rays_o = c2w[:3,-1].expand(rays_d.shape)
+    return rays_o, rays_d
+
+
+def get_rays_np(H, W, K, c2w):
+    i, j = np.meshgrid(np.arange(W, dtype=np.float32), np.arange(H, dtype=np.float32), indexing='xy')
+    dirs = np.stack([(i/W-1/2)*360, -(j/H-1/2)*360, -np.ones_like(i)], -1)
+
+    # dirs = np.stack([(i-K[0][2])/K[0][0]*np.pi, -(j-K[1][2])/K[1][1]*np.pi/2, -np.ones_like(i)], -1)
+    # Rotate ray directions from camera frame to the world frame
+    rays_d = np.sum(c2w[:3,:3] * dirs[..., np.newaxis, :] * np.linalg.inv(c2w[:3,:3]), -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
+    # Translate camera frame's origin to the world frame. It is the origin of all rays.
+    rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d))
+    return rays_o, rays_d
+
+# def get_rays(H, W, K, c2w):
+#     # rays_d: each ray's direction (theta, phi, omega) -- in world coordinates
+#     # rays_o: each ray's xyz position, also in world coordinates
+#     # dirs: pixel indices arranged into arrays with the lens intrinsics folded in
+#     # theta is scaled by the focal length along W, phi by the focal length along H, omega is 1
+# i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H)) # pytorch's meshgrid has indexing='ij' +# i = i.t() +# j = j.t() +# # print(i,j) +# dirs = torch.stack([(i-K[0][2])/K[0][0]*torch.sin(i*np.pi), -(j-K[1][2])/K[1][1]*torch.sin(j*np.pi), -torch.ones_like(i)], -1) +# # Rotate ray directions from camera frame to the world frame +# # x_rotate =c2w[:3,:3][] +# rays_d = torch.sum(dirs[..., np.newaxis, :] * c2w[:3,:3] , -1) # dot product, equals to: [c2w.dot(dir) for dir in dirs] +# # Translate camera frame's origin to the world frame. It is the origin of all rays. +# rays_o = c2w[:3,-1].expand(rays_d.shape) +# return rays_o, rays_d + + +# def get_rays_np(H, W, K, c2w): +# i, j = np.meshgrid(np.arange(W, dtype=np.float32), np.arange(H, dtype=np.float32), indexing='xy') +# dirs = torch.stack([(i-K[0][2])/K[0][0]*np.sin(i*np.pi), -(j-K[1][2])/K[1][1]*np.sin(j*np.pi), -torch.ones_like(i)], -1) +# # print(i,j) +# # dirs = np.stack([(i-K[0][2])/K[0][0], -(j-K[1][2])/K[1][1], -np.ones_like(i)], -1) +# # Rotate ray directions from camera frame to the world frame +# rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3,:3], -1) # dot product, equals to: [c2w.dot(dir) for dir in dirs] +# # Translate camera frame's origin to the world frame. It is the origin of all rays. +# rays_o = np.broadcast_to(c2w[:3,-1], np.shape(rays_d)) +# return rays_o, rays_d + +def ndc_rays(H, W, focal, near, rays_o, rays_d): + # Shift ray origins to near plane + t = -(near + rays_o[...,2]) / rays_d[...,2] + rays_o = rays_o + t[...,None] * rays_d + + # Projection + o0 = -1./(W/(2.*focal)) * rays_o[...,0] / rays_o[...,2] + o1 = -1./(H/(2.*focal)) * rays_o[...,1] / rays_o[...,2] + o2 = 1. + 2. * near / rays_o[...,2] + + d0 = -1./(W/(2.*focal)) * (rays_d[...,0]/rays_d[...,2] - rays_o[...,0]/rays_o[...,2]) + d1 = -1./(H/(2.*focal)) * (rays_d[...,1]/rays_d[...,2] - rays_o[...,1]/rays_o[...,2]) + d2 = -2. 
* near / rays_o[...,2] + + rays_o = torch.stack([o0,o1,o2], -1) + rays_d = torch.stack([d0,d1,d2], -1) + + return rays_o, rays_d + + +# Hierarchical sampling (section 5.2) +def sample_pdf(bins, weights, N_samples, det=False, pytest=False): + # Get pdf + weights = weights + 1e-5 # prevent nans + pdf = weights / torch.sum(weights, -1, keepdim=True) + cdf = torch.cumsum(pdf, -1) + cdf = torch.cat([torch.zeros_like(cdf[...,:1]), cdf], -1) # (batch, len(bins)) + + # Take uniform samples + if det: + u = torch.linspace(0., 1., steps=N_samples) + u = u.expand(list(cdf.shape[:-1]) + [N_samples]) + else: + u = torch.rand(list(cdf.shape[:-1]) + [N_samples]) + + # Pytest, overwrite u with numpy's fixed random numbers + if pytest: + np.random.seed(0) + new_shape = list(cdf.shape[:-1]) + [N_samples] + if det: + u = np.linspace(0., 1., N_samples) + u = np.broadcast_to(u, new_shape) + else: + u = np.random.rand(*new_shape) + u = torch.Tensor(u) + + # Invert CDF + u = u.contiguous() + inds = torch.searchsorted(cdf, u, right=True) + below = torch.max(torch.zeros_like(inds-1), inds-1) + above = torch.min((cdf.shape[-1]-1) * torch.ones_like(inds), inds) + inds_g = torch.stack([below, above], -1) # (batch, N_samples, 2) + + # cdf_g = tf.gather(cdf, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2) + # bins_g = tf.gather(bins, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2) + matched_shape = [inds_g.shape[0], inds_g.shape[1], cdf.shape[-1]] + cdf_g = torch.gather(cdf.unsqueeze(1).expand(matched_shape), 2, inds_g) + bins_g = torch.gather(bins.unsqueeze(1).expand(matched_shape), 2, inds_g) + + denom = (cdf_g[...,1]-cdf_g[...,0]) + denom = torch.where(denom<1e-5, torch.ones_like(denom), denom) + t = (u-cdf_g[...,0])/denom + samples = bins_g[...,0] + t * (bins_g[...,1]-bins_g[...,0]) + + return samples diff --git a/run_tensorboad.sh b/run_tensorboad.sh new file mode 100644 index 000000000..a69a8d73d --- /dev/null +++ b/run_tensorboad.sh @@ -0,0 +1 @@ +python -m tensorboard.main --logdir=tmp & \ No newline at end of file diff --git a/tmp.log b/tmp.log new file mode 100644 index 000000000..22d1558bb --- /dev/null +++ b/tmp.log @@ -0,0 +1,16046 @@ +13 +13 +13 +13 +13 +13 +13 +(7, 3, 4) +Loaded 360 (7, 320, 640, 3) (120, 3, 5) [ 0.29668272 0.01365371 -0.48893556] ./data/outdoor/out-table +[0 1 2] +DEFINING BOUNDS +NEAR FAR 0.0 1.0 +hwf [ 0.29668272 0.01365371 -0.48893556] +Found ckpts ['./logs/TUT-out-doll-360-np/010000.tar', './logs/TUT-out-doll-360-np/020000.tar'] +Reloading fr[TRAIN] Iter: 100 Loss: 0.03718850761651993 PSNR: 17.348209381103516 +[TRAIN] Iter: 200 Loss: 0.037885963916778564 PSNR: 17.283662796020508 +[TRAIN] Iter: 300 Loss: 0.037001751363277435 PSNR: 17.276391983032227 +[TRAIN] Iter: 400 Loss: 0.042241133749485016 PSNR: 16.77484703063965 +[TRAIN] Iter: 500 Loss: 0.04326743632555008 PSNR: 16.838102340698242 +[TRAIN] Iter: 600 Loss: 0.04291911423206329 PSNR: 16.70254898071289 +[TRAIN] Iter: 700 Loss: 0.03859862685203552 PSNR: 17.377765655517578 +[TRAIN] Iter: 800 Loss: 0.03993350639939308 PSNR: 17.070907592773438 +[TRAIN] Iter: 900 Loss: 0.03877441585063934 PSNR: 17.099754333496094 +[TRAIN] Iter: 1000 Loss: 0.0318710133433342 PSNR: 17.93689727783203 +[TRAIN] Iter: 1100 Loss: 0.03252512216567993 PSNR: 17.97999382019043 +[TRAIN] Iter: 1200 Loss: 0.03865733742713928 PSNR: 17.185861587524414 +[TRAIN] Iter: 1300 Loss: 0.03172677755355835 PSNR: 18.067113876342773 +[TRAIN] Iter: 1400 Loss: 0.03531119227409363 PSNR: 17.470670700073242 +[TRAIN] Iter: 1500 Loss: 0.030862607061862946 PSNR: 18.234819412231445 
+[TRAIN] Iter: 1600 Loss: 0.03298935666680336 PSNR: 17.998964309692383
[... ~16,000 similar "[TRAIN] Iter" lines elided: over iterations 1600-30800 the loss drifts down from ~0.033 to ~0.016 and PSNR climbs from ~18 dB to ~21 dB, with checkpoints saved at ./logs/TUT-hikage-doll-360-np/010000.tar, 020000.tar and 030000.tar ...]
30900 Loss: 0.016961224377155304 PSNR: 21.329160690307617 +[TRAIN] Iter: 31000 Loss: 0.016410985961556435 PSNR: 20.881925582885742 +[TRAIN] Iter: 31100 Loss: 0.020520014688372612 PSNR: 20.32178497314453 +[TRAIN] Iter: 31200 Loss: 0.017558500170707703 PSNR: 21.216201782226562 +[TRAIN] Iter: 31300 Loss: 0.02010577917098999 PSNR: 20.217050552368164 +[TRAIN] Iter: 31400 Loss: 0.01910696178674698 PSNR: 20.312036514282227 +[TRAIN] Iter: 31500 Loss: 0.023981356993317604 PSNR: 19.588560104370117 +[TRAIN] Iter: 31600 Loss: 0.017364712432026863 PSNR: 21.273508071899414 +[TRAIN] Iter: 31700 Loss: 0.01950421929359436 PSNR: 20.31558609008789 +[TRAIN] Iter: 31800 Loss: 0.017885442823171616 PSNR: 20.92841339111328 +[TRAIN] Iter: 31900 Loss: 0.01804618164896965 PSNR: 20.923248291015625 +[TRAIN] Iter: 32000 Loss: 0.02134586125612259 PSNR: 20.026212692260742 +[TRAIN] Iter: 32100 Loss: 0.018961183726787567 PSNR: 20.48720932006836 +[TRAIN] Iter: 32200 Loss: 0.018454188480973244 PSNR: 20.93279266357422 +[TRAIN] Iter: 32300 Loss: 0.01626761443912983 PSNR: 21.371152877807617 +[TRAIN] Iter: 32400 Loss: 0.01666271686553955 PSNR: 21.244731903076172 +[TRAIN] Iter: 32500 Loss: 0.01856037601828575 PSNR: 20.733945846557617 +[TRAIN] Iter: 32600 Loss: 0.01736113429069519 PSNR: 21.200450897216797 +[TRAIN] Iter: 32700 Loss: 0.02180771343410015 PSNR: 19.839500427246094 +[TRAIN] Iter: 32800 Loss: 0.018475759774446487 PSNR: 20.44672966003418 +[TRAIN] Iter: 32900 Loss: 0.017761971801519394 PSNR: 20.921918869018555 +[TRAIN] Iter: 33000 Loss: 0.019007768481969833 PSNR: 20.629304885864258 +[TRAIN] Iter: 33100 Loss: 0.019667062908411026 PSNR: 20.431747436523438 +[TRAIN] Iter: 33200 Loss: 0.021999794989824295 PSNR: 19.94985008239746 +[TRAIN] Iter: 33300 Loss: 0.019352611154317856 PSNR: 20.492849349975586 +[TRAIN] Iter: 33400 Loss: 0.01899736374616623 PSNR: 20.76041603088379 +[TRAIN] Iter: 33500 Loss: 0.02027738466858864 PSNR: 20.051664352416992 +[TRAIN] Iter: 33600 Loss: 0.018683766946196556 PSNR: 20.787939071655273 +[TRAIN] Iter: 33700 Loss: 0.017122257500886917 PSNR: 20.940675735473633 +[TRAIN] Iter: 33800 Loss: 0.019006749615073204 PSNR: 20.37685203552246 +[TRAIN] Iter: 33900 Loss: 0.015932150185108185 PSNR: 21.63031768798828 +[TRAIN] Iter: 34000 Loss: 0.020516354590654373 PSNR: 20.067909240722656 +[TRAIN] Iter: 34100 Loss: 0.02014489658176899 PSNR: 20.577789306640625 +[TRAIN] Iter: 34200 Loss: 0.01859644427895546 PSNR: 20.609590530395508 +[TRAIN] Iter: 34300 Loss: 0.016462041065096855 PSNR: 21.20381736755371 +[TRAIN] Iter: 34400 Loss: 0.0181780643761158 PSNR: 21.02515411376953 +[TRAIN] Iter: 34500 Loss: 0.015521401539444923 PSNR: 21.474609375 +[TRAIN] Iter: 34600 Loss: 0.017604172229766846 PSNR: 21.00061798095703 +[TRAIN] Iter: 34700 Loss: 0.021730270236730576 PSNR: 19.97110939025879 +[TRAIN] Iter: 34800 Loss: 0.023298081010580063 PSNR: 19.426176071166992 +[TRAIN] Iter: 34900 Loss: 0.01596398651599884 PSNR: 21.370574951171875 +[TRAIN] Iter: 35000 Loss: 0.018848229199647903 PSNR: 20.70248031616211 +[TRAIN] Iter: 35100 Loss: 0.02116609737277031 PSNR: 20.18480682373047 +[TRAIN] Iter: 35200 Loss: 0.0165362898260355 PSNR: 21.408863067626953 +[TRAIN] Iter: 35300 Loss: 0.01589117757976055 PSNR: 21.373184204101562 +[TRAIN] Iter: 35400 Loss: 0.018690403550863266 PSNR: 20.770841598510742 +[TRAIN] Iter: 35500 Loss: 0.017664045095443726 PSNR: 20.78170394897461 +[TRAIN] Iter: 35600 Loss: 0.016897838562726974 PSNR: 21.27876091003418 +[TRAIN] Iter: 35700 Loss: 0.017549801617860794 PSNR: 20.984628677368164 +[TRAIN] Iter: 35800 Loss: 
0.01856815069913864 PSNR: 20.595703125 +[TRAIN] Iter: 35900 Loss: 0.017824184149503708 PSNR: 20.70750617980957 +[TRAIN] Iter: 36000 Loss: 0.018808074295520782 PSNR: 20.606794357299805 +[TRAIN] Iter: 36100 Loss: 0.01916244998574257 PSNR: 20.528945922851562 +[TRAIN] Iter: 36200 Loss: 0.01674744486808777 PSNR: 21.049684524536133 +[TRAIN] Iter: 36300 Loss: 0.01655512861907482 PSNR: 21.401639938354492 +[TRAIN] Iter: 36400 Loss: 0.01578396186232567 PSNR: 21.285198211669922 +[TRAIN] Iter: 36500 Loss: 0.019818145781755447 PSNR: 20.11956214904785 +[TRAIN] Iter: 36600 Loss: 0.01565796136856079 PSNR: 21.58867835998535 +[TRAIN] Iter: 36700 Loss: 0.01688152551651001 PSNR: 20.956483840942383 +[TRAIN] Iter: 36800 Loss: 0.018362771719694138 PSNR: 20.66952133178711 +[TRAIN] Iter: 36900 Loss: 0.0204197745770216 PSNR: 20.497791290283203 +[TRAIN] Iter: 37000 Loss: 0.01820753887295723 PSNR: 20.605527877807617 +[TRAIN] Iter: 37100 Loss: 0.020235195755958557 PSNR: 20.110198974609375 +[TRAIN] Iter: 37200 Loss: 0.018762357532978058 PSNR: 20.777503967285156 +[TRAIN] Iter: 37300 Loss: 0.020269159227609634 PSNR: 20.1530704498291 +[TRAIN] Iter: 37400 Loss: 0.018294692039489746 PSNR: 20.598655700683594 +[TRAIN] Iter: 37500 Loss: 0.01862107217311859 PSNR: 20.88337516784668 +[TRAIN] Iter: 37600 Loss: 0.01794230379164219 PSNR: 20.878068923950195 +[TRAIN] Iter: 37700 Loss: 0.01717308908700943 PSNR: 20.987943649291992 +[TRAIN] Iter: 37800 Loss: 0.019802961498498917 PSNR: 20.381851196289062 +[TRAIN] Iter: 37900 Loss: 0.019916463643312454 PSNR: 20.250097274780273 +[TRAIN] Iter: 38000 Loss: 0.016986306756734848 PSNR: 21.019479751586914 +[TRAIN] Iter: 38100 Loss: 0.01731938309967518 PSNR: 20.92448616027832 +[TRAIN] Iter: 38200 Loss: 0.01683150790631771 PSNR: 21.11773109436035 +[TRAIN] Iter: 38300 Loss: 0.018634576350450516 PSNR: 20.4145698547360 0.0009870529174804688 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.35471773147583 +2 21.507369995117188 +3 21.791800260543823 +4 21.297770977020264 +5 20.961392164230347 +6 22.004557371139526 +7 21.011335372924805 +8 21.269644498825073 +9 21.143242359161377 +10 21.66395592689514 +11 21.452940225601196 +12 20.910688400268555 +13 21.29912757873535 +14 21.701935529708862 +15 21.189992904663086 +16 21.010352849960327 +17 21.426230430603027 +18 21.326383352279663 +19 21.43883776664734 +20 21.78035569190979 +21 21.36365556716919 +22 21.480046272277832 +23 21.173962354660034 +24 21.582932233810425 +25 21.551676273345947 +26 21.049025774002075 +27 21.53275203704834 +28 21.310991287231445 +29 21.713786125183105 +30 21.920108556747437 +31 21.12987780570984 +32 21.742748737335205 +33 21.613798141479492 +34 21.070107221603394 +35 21.360742568969727 +36 21.602428436279297 +37 21.78841471672058 +38 21.11359715461731 +39 21.321080207824707 +40 21.374006271362305 +41 21.61448097229004 +42 21.029094219207764 +43 21.3634934425354 +44 21.599627256393433 +45 21.22093105316162 +46 21.462966918945312 +47 21.708412647247314 +48 21.45725154876709 +49 21.156963348388672 +50 21.29206919670105 +51 21.26539969444275 +52 21.275917291641235 +53 21.794899702072144 +54 21.05268931388855 +55 20.871134519577026 +56 21.386253833770752 +57 21.422987461090088 +58 21.27917981147766 +59 21.398996353149414 +60 21.21705412864685 +61 21.684190034866333 +62 21.13457489013672 +63 21.275920152664185 +64 21.369810819625854 +65 21.50665783882141 +66 21.382478952407837 +67 21.528708696365356 +68 21.13896417617798 +69 21.234221935272217 +70 21.757189750671387 +71 20.968891859054565 +72 21.63609290122986 +73 
+test poses shape torch.Size([4, 3, 4])
+torch.Size([320, 640, 3]) torch.Size([320, 640])
[test-set render times, views 1–3: ≈ 21.9, 21.8, 21.2 s]
+Saved test set
[training log, TUT-out-doll-360-np: per-100-iteration [TRAIN] entries, iterations 50000–59900; loss ≈ 0.007–0.014, PSNR ≈ 22.1–25.5 dB]
+Saved checkpoints at ./logs/TUT-out-doll-360-np/060000.tar
[training log continues, iterations 60000–69200; loss ≈ 0.008–0.013, PSNR ≈ 22.3–25.4 dB]
+torch.Size([320, 640, 3]) torch.Size([320, 640])
[test-set render times, views 1–3: ≈ 25.8, 25.3, 19.3 s]
+Saved test set
[training log, TUT-hikage-doll-360-np: per-100-iteration [TRAIN] entries, iterations 50000–59900; loss ≈ 0.013–0.022, PSNR ≈ 20.2–22.4 dB]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/060000.tar
[training log continues, iterations 60000–69900; loss ≈ 0.012–0.020, PSNR ≈ 20.6–22.7 dB]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/070000.tar
[training log continues, iterations 70000–79900; loss ≈ 0.012–0.019, PSNR ≈ 20.7–23.0 dB]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/080000.tar
[training log continues, iterations 80000–88300; loss ≈ 0.012–0.019, PSNR ≈ 21.1–23.0 dB]
+torch.Size([320, 640, 3]) torch.Size([320, 640])
[render loop: per-view render times for views 1–119, ≈ 20.7–23.2 s each]
+test poses shape torch.Size([4, 3, 4])
+torch.Size([320, 640, 3]) torch.Size([320, 640])
[test-set render times, views 1–3: ≈ 22.2, 21.3, 21.8 s]
+Saved test set
[training log, TUT-out-doll-360-np: per-100-iteration [TRAIN] entries, iterations 100000–109900; loss ≈ 0.007–0.012, PSNR ≈ 23.4–26.1 dB]
+Saved checkpoints at ./logs/TUT-out-doll-360-np/110000.tar
[training log continues, iterations 110000–113800; loss ≈ 0.006–0.011, PSNR ≈ 23.5–26.9 dB]
+[TRAIN] Iter: 113900 Loss: 0.010223742574453354 PSNR:
24.066394805908203 +[TRAIN] Iter: 114000 Loss: 0.00833331048488617 PSNR: 24.71165657043457 +[TRAIN] Iter: 114100 Loss: 0.011503392830491066 PSNR: 23.36196517944336 +[TRAIN] Iter: 114200 Loss: 0.007571618538349867 PSNR: 26.161441802978516 +[TRAIN] Iter: 114300 Loss: 0.009490691125392914 PSNR: 24.148561477661133 +[TRAIN] Iter: 114400 Loss: 0.010034022852778435 PSNR: 23.9688720703125 +[TRAIN] Iter: 114500 Loss: 0.007727938238531351 PSNR: 25.06475830078125 +[TRAIN] Iter: 114600 Loss: 0.007914917543530464 PSNR: 25.20473861694336 +[TRAIN] Iter: 114700 Loss: 0.007310737855732441 PSNR: 25.88682746887207 +[TRAIN] Iter: 114800 Loss: 0.008549404330551624 PSNR: 24.646507263183594 +[TRAIN] Iter: 114900 Loss: 0.007539649028331041 PSNR: 25.25058937072754 +[TRAIN] Iter: 115000 Loss: 0.007184348534792662 PSNR: 25.577638626098633 +[TRAIN] Iter: 115100 Loss: 0.00794963538646698 PSNR: 24.90496253967285 +[TRAIN] Iter: 115200 Loss: 0.006766396574676037 PSNR: 25.61186408996582 +[TRAIN] Iter: 115300 Loss: 0.010740326717495918 PSNR: 23.48308753967285 +[TRAIN] Iter: 115400 Loss: 0.008892666548490524 PSNR: 24.400880813598633 +[TRAIN] Iter: 115500 Loss: 0.009405601769685745 PSNR: 23.945796966552734 +[TRAIN] Iter: 115600 Loss: 0.010493781417608261 PSNR: 23.67010498046875 +[TRAIN] Iter: 115700 Loss: 0.009509860537946224 PSNR: 24.15056610107422 +[TRAIN] Iter: 115800 Loss: 0.0082106813788414 PSNR: 24.691173553466797 +[TRAIN] Iter: 115900 Loss: 0.00897188950330019 PSNR: 25.00865936279297 +[TRAIN] Iter: 116000 Loss: 0.008909613825380802 PSNR: 24.34001922607422 +[TRAIN] Iter: 116100 Loss: 0.007762259803712368 PSNR: 25.62105369567871 +[TRAIN] Iter: 116200 Loss: 0.008004894480109215 PSNR: 24.895910263061523 +[TRAIN] Iter: 116300 Loss: 0.0065596941858530045 PSNR: 25.813526153564453 +[TRAIN] Iter: 116400 Loss: 0.009531650692224503 PSNR: 23.899452209472656 +[TRAIN] Iter: 116500 Loss: 0.010360967367887497 PSNR: 24.83150291442871 +[TRAIN] Iter: 116600 Loss: 0.00863688439130783 PSNR: 23.854755401611328 +[TRAIN] Iter: 116700 Loss: 0.007912460714578629 PSNR: 25.678728103637695 +[TRAIN] Iter: 116800 Loss: 0.007860897108912468 PSNR: 25.62283706665039 +[TRAIN] Iter: 116900 Loss: 0.00901087000966072 PSNR: 25.46631622314453 +[TRAIN] Iter: 117000 Loss: 0.007422211579978466 PSNR: 25.81391143798828 +[TRAIN] Iter: 117100 Loss: 0.008430778980255127 PSNR: 25.18214225769043 +[TRAIN] Iter: 117200 Loss: 0.007235208060592413 PSNR: 25.189228057861328 +[TRAIN] Iter: 117300 Loss: 0.00713426573202014 PSNR: 25.584869384765625 +[TRAIN] Iter: 117400 Loss: 0.008334340527653694 PSNR: 25.2363224029541 +[TRAIN] Iter: 117500 Loss: 0.008324678987264633 PSNR: 24.504776000976562 +[TRAIN] Iter: 117600 Loss: 0.007555517368018627 PSNR: 25.36231803894043 +[TRAIN] Iter: 117700 Loss: 0.008479215204715729 PSNR: 24.89674186706543 +[TRAIN] Iter: 117800 Loss: 0.008734532631933689 PSNR: 24.86836814880371 +[TRAIN] Iter: 117900 Loss: 0.008571096695959568 PSNR: 24.31724739074707 +[TRAIN] Iter: 118000 Loss: 0.009233933873474598 PSNR: 24.257518768310547 +[TRAIN] Iter: 118100 Loss: 0.00935225747525692 PSNR: 24.694034576416016 +[TRAIN] Iter: 118200 Loss: 0.010189417749643326 PSNR: 24.46002960205078 +[TRAIN] Iter: 118300 Loss: 0.007571054622530937 PSNR: 25.666065216064453 +[TRAIN] Iter: 118400 Loss: 0.007167712785303593 PSNR: 25.604520797729492 +[TRAIN] Iter: 118500 Loss: 0.007591720670461655 PSNR: 25.754600524902344 +[TRAIN] Iter: 118600 Loss: 0.009260445833206177 PSNR: 24.382516860961914 +[TRAIN] Iter: 118700 Loss: 0.00835329294204712 PSNR: 24.98382568359375 +[TRAIN] Iter: 118800 
Loss: 0.009022995829582214 PSNR: 24.0823917388916 +[TRAIN] Iter: 118900 Loss: 0.009192252531647682 PSNR: 24.630584716796875 +[TRAIN] It0 0.0016148090362548828 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 25.64186143875122 +2 24.83056664466858 +3 25.298475980758667 +Saved test set +[TRAIN] Iter: 100000 Loss: 0.016239013522863388 PSNR: 21.791067123413086 +[TRAIN] Iter: 100100 Loss: 0.01650899648666382 PSNR: 21.43962860107422 +[TRAIN] Iter: 100200 Loss: 0.013481942936778069 PSNR: 23.20001983642578 +[TRAIN] Iter: 100300 Loss: 0.015164593234658241 PSNR: 22.09099006652832 +[TRAIN] Iter: 100400 Loss: 0.015618987381458282 PSNR: 21.563566207885742 +[TRAIN] Iter: 100500 Loss: 0.015083182603120804 PSNR: 21.813961029052734 +[TRAIN] Iter: 100600 Loss: 0.01638488844037056 PSNR: 21.473773956298828 +[TRAIN] Iter: 100700 Loss: 0.013172321021556854 PSNR: 22.21097183227539 +[TRAIN] Iter: 100800 Loss: 0.01313474215567112 PSNR: 22.46966552734375 +[TRAIN] Iter: 100900 Loss: 0.015395285561680794 PSNR: 21.68422508239746 +[TRAIN] Iter: 101000 Loss: 0.014129454270005226 PSNR: 22.541603088378906 +[TRAIN] Iter: 101100 Loss: 0.01560559868812561 PSNR: 21.80156135559082 +[TRAIN] Iter: 101200 Loss: 0.016091015189886093 PSNR: 21.929391860961914 +[TRAIN] Iter: 101300 Loss: 0.013761545531451702 PSNR: 22.167020797729492 +[TRAIN] Iter: 101400 Loss: 0.013954279944300652 PSNR: 22.083646774291992 +[TRAIN] Iter: 101500 Loss: 0.014971249736845493 PSNR: 21.868921279907227 +[TRAIN] Iter: 101600 Loss: 0.0164719857275486 PSNR: 21.323955535888672 +[TRAIN] Iter: 101700 Loss: 0.014806925319135189 PSNR: 22.35800552368164 +[TRAIN] Iter: 101800 Loss: 0.013436360284686089 PSNR: 22.523452758789062 +[TRAIN] Iter: 101900 Loss: 0.016268568113446236 PSNR: 21.703598022460938 +[TRAIN] Iter: 102000 Loss: 0.01248190551996231 PSNR: 23.163734436035156 +[TRAIN] Iter: 102100 Loss: 0.014355437830090523 PSNR: 21.990320205688477 +[TRAIN] Iter: 102200 Loss: 0.013783503323793411 PSNR: 22.534982681274414 +[TRAIN] Iter: 102300 Loss: 0.012672185897827148 PSNR: 22.540340423583984 +[TRAIN] Iter: 102400 Loss: 0.015240417793393135 PSNR: 22.246395111083984 +[TRAIN] Iter: 102500 Loss: 0.015376610681414604 PSNR: 21.768335342407227 +[TRAIN] Iter: 102600 Loss: 0.012358127161860466 PSNR: 22.45798683166504 +[TRAIN] Iter: 102700 Loss: 0.016206154599785805 PSNR: 21.491724014282227 +[TRAIN] Iter: 102800 Loss: 0.01631619781255722 PSNR: 21.63918685913086 +[TRAIN] Iter: 102900 Loss: 0.013626404106616974 PSNR: 22.268890380859375 +[TRAIN] Iter: 103000 Loss: 0.014596313238143921 PSNR: 21.896465301513672 +[TRAIN] Iter: 103100 Loss: 0.012806147336959839 PSNR: 22.67498016357422 +[TRAIN] Iter: 103200 Loss: 0.016013767570257187 PSNR: 21.57564353942871 +[TRAIN] Iter: 103300 Loss: 0.014550860971212387 PSNR: 22.03706932067871 +[TRAIN] Iter: 103400 Loss: 0.013245703652501106 PSNR: 22.83725357055664 +[TRAIN] Iter: 103500 Loss: 0.013164595700800419 PSNR: 22.36825942993164 +[TRAIN] Iter: 103600 Loss: 0.013578477315604687 PSNR: 22.613162994384766 +[TRAIN] Iter: 103700 Loss: 0.014304735697805882 PSNR: 22.009765625 +[TRAIN] Iter: 103800 Loss: 0.011202622205018997 PSNR: 23.063461303710938 +[TRAIN] Iter: 103900 Loss: 0.015986481681466103 PSNR: 21.882205963134766 +[TRAIN] Iter: 104000 Loss: 0.013803449459373951 PSNR: 22.356725692749023 +[TRAIN] Iter: 104100 Loss: 0.013765664771199226 PSNR: 22.025177001953125 +[TRAIN] Iter: 104200 Loss: 0.01300414651632309 PSNR: 22.64569091796875 +[TRAIN] Iter: 104300 Loss: 0.012860518880188465 PSNR: 22.284406661987305 +[TRAIN] Iter: 104400 Loss: 
0.01679280400276184 PSNR: 21.595064163208008 +[TRAIN] Iter: 104500 Loss: 0.015448760241270065 PSNR: 22.155012130737305 +[TRAIN] Iter: 104600 Loss: 0.013967622071504593 PSNR: 21.993547439575195 +[TRAIN] Iter: 104700 Loss: 0.014342496171593666 PSNR: 22.179792404174805 +[TRAIN] Iter: 104800 Loss: 0.01397164911031723 PSNR: 22.065792083740234 +[TRAIN] Iter: 104900 Loss: 0.015108346939086914 PSNR: 22.07813835144043 +[TRAIN] Iter: 105000 Loss: 0.013991318643093109 PSNR: 22.342498779296875 +[TRAIN] Iter: 105100 Loss: 0.014658061787486076 PSNR: 21.972558975219727 +[TRAIN] Iter: 105200 Loss: 0.012827486731112003 PSNR: 22.559389114379883 +[TRAIN] Iter: 105300 Loss: 0.016478894278407097 PSNR: 21.450122833251953 +[TRAIN] Iter: 105400 Loss: 0.01091666892170906 PSNR: 23.323299407958984 +[TRAIN] Iter: 105500 Loss: 0.0142862219363451 PSNR: 22.226877212524414 +[TRAIN] Iter: 105600 Loss: 0.013829992152750492 PSNR: 22.55571174621582 +[TRAIN] Iter: 105700 Loss: 0.014449824579060078 PSNR: 22.158649444580078 +[TRAIN] Iter: 105800 Loss: 0.013797072693705559 PSNR: 22.59705924987793 +[TRAIN] Iter: 105900 Loss: 0.015010415576398373 PSNR: 21.988744735717773 +[TRAIN] Iter: 106000 Loss: 0.011971553787589073 PSNR: 23.479034423828125 +[TRAIN] Iter: 106100 Loss: 0.013634022325277328 PSNR: 22.15220069885254 +[TRAIN] Iter: 106200 Loss: 0.012410741299390793 PSNR: 22.67409324645996 +[TRAIN] Iter: 106300 Loss: 0.015240355394780636 PSNR: 21.638002395629883 +[TRAIN] Iter: 106400 Loss: 0.013541890308260918 PSNR: 22.018503189086914 +[TRAIN] Iter: 106500 Loss: 0.013060454279184341 PSNR: 22.3128719329834 +[TRAIN] Iter: 106600 Loss: 0.013712841086089611 PSNR: 22.133094787597656 +[TRAIN] Iter: 106700 Loss: 0.014364168979227543 PSNR: 22.126001358032227 +[TRAIN] Iter: 106800 Loss: 0.012738117016851902 PSNR: 22.939836502075195 +[TRAIN] Iter: 106900 Loss: 0.01308765634894371 PSNR: 22.48569679260254 +[TRAIN] Iter: 107000 Loss: 0.015966705977916718 PSNR: 21.683271408081055 +[TRAIN] Iter: 107100 Loss: 0.012220518663525581 PSNR: 22.94093132019043 +[TRAIN] Iter: 107200 Loss: 0.015247737057507038 PSNR: 21.8043212890625 +[TRAIN] Iter: 107300 Loss: 0.01414483692497015 PSNR: 22.366104125976562 +[TRAIN] Iter: 107400 Loss: 0.012704240158200264 PSNR: 22.583723068237305 +[TRAIN] Iter: 107500 Loss: 0.013760419562458992 PSNR: 22.27764129638672 +[TRAIN] Iter: 107600 Loss: 0.01441841572523117 PSNR: 22.24650764465332 +[TRAIN] Iter: 107700 Loss: 0.014246053993701935 PSNR: 21.97628402709961 +[TRAIN] Iter: 107800 Loss: 0.013361404649913311 PSNR: 22.68515396118164 +[TRAIN] Iter: 107900 Loss: 0.014404896646738052 PSNR: 22.26804542541504 +[TRAIN] Iter: 108000 Loss: 0.013330633752048016 PSNR: 22.37582015991211 +[TRAIN] Iter: 108100 Loss: 0.01678599789738655 PSNR: 21.410892486572266 +[TRAIN] Iter: 108200 Loss: 0.013619424775242805 PSNR: 22.091453552246094 +[TRAIN] Iter: 108300 Loss: 0.014542786404490471 PSNR: 22.28865623474121 +[TRAIN] Iter: 108400 Loss: 0.012042658403515816 PSNR: 23.220487594604492 +[TRAIN] Iter: 108500 Loss: 0.014675403013825417 PSNR: 22.021690368652344 +[TRAIN] Iter: 108600 Loss: 0.01658615842461586 PSNR: 21.187910079956055 +[TRAIN] Iter: 108700 Loss: 0.0129289161413908 PSNR: 22.581741333007812 +[TRAIN] Iter: 108800 Loss: 0.013755645602941513 PSNR: 22.199262619018555 +[TRAIN] Iter: 108900 Loss: 0.016864879056811333 PSNR: 21.069120407104492 +[TRAIN] Iter: 109000 Loss: 0.013068175874650478 PSNR: 22.376169204711914 +[TRAIN] Iter: 109100 Loss: 0.01487826369702816 PSNR: 22.084489822387695 +[TRAIN] Iter: 109200 Loss: 0.017014989629387856 PSNR: 
21.390087127685547 +[TRAIN] Iter: 109300 Loss: 0.011311205103993416 PSNR: 23.475435256958008 +[TRAIN] Iter: 109400 Loss: 0.013028505258262157 PSNR: 22.66353988647461 +[TRAIN] Iter: 109500 Loss: 0.014789672568440437 PSNR: 21.912063598632812 +[TRAIN] Iter: 109600 Loss: 0.010855337604880333 PSNR: 23.568456649780273 +[TRAIN] Iter: 109700 Loss: 0.012505317106842995 PSNR: 22.870695114135742 +[TRAIN] Iter: 109800 Loss: 0.01373559981584549 PSNR: 22.275718688964844 +[TRAIN] Iter: 109900 Loss: 0.015125042758882046 PSNR: 21.96131706237793 +Saved checkpoints at ./logs/TUT-hikage-doll-360-np/110000.tar +[TRAIN] Iter: 110000 Loss: 0.014550884254276752 PSNR: 21.870433807373047 +[TRAIN] Iter: 110100 Loss: 0.01308948453515768 PSNR: 22.72954559326172 +[TRAIN] Iter: 110200 Loss: 0.012333068996667862 PSNR: 22.89573097229004 +[TRAIN] Iter: 110300 Loss: 0.014965741895139217 PSNR: 21.990535736083984 +[TRAIN] Iter: 110400 Loss: 0.012258945032954216 PSNR: 23.129667282104492 +[TRAIN] Iter: 110500 Loss: 0.015578033402562141 PSNR: 21.838911056518555 +[TRAIN] Iter: 110600 Loss: 0.014192595146596432 PSNR: 22.324012756347656 +[TRAIN] Iter: 110700 Loss: 0.011654611676931381 PSNR: 23.161256790161133 +[TRAIN] Iter: 110800 Loss: 0.015317498706281185 PSNR: 21.582012176513672 +[TRAIN] Iter: 110900 Loss: 0.012558890506625175 PSNR: 22.934667587280273 +[TRAIN] Iter: 111000 Loss: 0.013223167508840561 PSNR: 22.30270004272461 +[TRAIN] Iter: 111100 Loss: 0.01646275259554386 PSNR: 21.469770431518555 +[TRAIN] Iter: 111200 Loss: 0.01311433408409357 PSNR: 22.789506912231445 +[TRAIN] Iter: 111300 Loss: 0.01423712633550167 PSNR: 22.29834747314453 +[TRAIN] Iter: 111400 Loss: 0.013787450268864632 PSNR: 22.086347579956055 +[TRAIN] Iter: 111500 Loss: 0.012536631897091866 PSNR: 22.901124954223633 +[TRAIN] Iter: 111600 Loss: 0.013540084473788738 PSNR: 22.49211311340332 +[TRAIN] Iter: 111700 Loss: 0.014924190007150173 PSNR: 21.74251937866211 +[TRAIN] Iter: 111800 Loss: 0.015278641134500504 PSNR: 21.71760368347168 +[TRAIN] Iter: 111900 Loss: 0.013914033770561218 PSNR: 23.054872512817383 +[TRAIN] Iter: 112000 Loss: 0.01163735706359148 PSNR: 23.076000213623047 +[TRAIN] Iter: 112100 Loss: 0.014228945598006248 PSNR: 22.114788055419922 +[TRAIN] Iter: 112200 Loss: 0.01353352889418602 PSNR: 22.3586483001709 +[TRAIN] Iter: 112300 Loss: 0.013022881001234055 PSNR: 22.615087509155273 +[TRAIN] Iter: 112400 Loss: 0.013344676233828068 PSNR: 22.512226104736328 +[TRAIN] Iter: 112500 Loss: 0.013544784858822823 PSNR: 22.306493759155273 +[TRAIN] Iter: 112600 Loss: 0.01349366270005703 PSNR: 22.724225997924805 +[TRAIN] Iter: 112700 Loss: 0.014070885255932808 PSNR: 22.011442184448242 +[TRAIN] Iter: 112800 Loss: 0.010564854368567467 PSNR: 23.881319046020508 +[TRAIN] Iter: 112900 Loss: 0.01286734826862812 PSNR: 22.81387710571289 +[TRAIN] Iter: 113000 Loss: 0.013731494545936584 PSNR: 22.625629425048828 +[TRAIN] Iter: 113100 Loss: 0.01224488765001297 PSNR: 22.851320266723633 +[TRAIN] Iter: 113200 Loss: 0.01483725942671299 PSNR: 21.854400634765625 +[TRAIN] Iter: 113300 Loss: 0.012327626347541809 PSNR: 23.299015045166016 +[TRAIN] Iter: 113400 Loss: 0.012551899068057537 PSNR: 22.988544464111328 +[TRAIN] Iter: 113500 Loss: 0.010943702422082424 PSNR: 23.152284622192383 +[TRAIN] Iter: 113600 Loss: 0.014579534530639648 PSNR: 21.945560455322266 +[TRAIN] Iter: 113700 Loss: 0.010215048678219318 PSNR: 23.885013580322266 +[TRAIN] Iter: 113800 Loss: 0.017199335619807243 PSNR: 21.14944076538086 +[TRAIN] Iter: 113900 Loss: 0.012691386044025421 PSNR: 22.93191909790039 +[TRAIN] Iter: 
114000 Loss: 0.014457152225077152 PSNR: 22.200660705566406 +[TRAIN] Iter: 114100 Loss: 0.015139058232307434 PSNR: 22.08249855041504 +[TRAIN] Iter: 114200 Loss: 0.01522185280919075 PSNR: 22.20355796813965 +[TRAIN] Iter: 114300 Loss: 0.012456868775188923 PSNR: 22.820039749145508 +[TRAIN] Iter: 114400 Loss: 0.014286508783698082 PSNR: 22.037309646606445 +[TRAIN] Iter: 114500 Loss: 0.01376521959900856 PSNR: 22.219655990600586 +[TRAIN] Iter: 114600 Loss: 0.014031222090125084 PSNR: 22.265859603881836 +[TRAIN] Iter: 114700 Loss: 0.013000846840441227 PSNR: 22.252168655395508 +[TRAIN] Iter: 114800 Loss: 0.012944616377353668 PSNR: 22.607271194458008 +[TRAIN] Iter: 114900 Loss: 0.015279385261237621 PSNR: 22.034210205078125 +[TRAIN] Iter: 115000 Loss: 0.01602988690137863 PSNR: 21.56459617614746 +[TRAIN] Iter: 115100 Loss: 0.013157953508198261 PSNR: 22.92285919189453 +[TRAIN] Iter: 115200 Loss: 0.014119705185294151 PSNR: 22.26775360107422 +[TRAIN] Iter: 115300 Loss: 0.015613223426043987 PSNR: 21.40616226196289 +[TRAIN] Iter: 115400 Loss: 0.01317959651350975 PSNR: 22.735029220581055 +[TRAIN] Iter: 115500 Loss: 0.012774745002388954 PSNR: 22.944040298461914 +[TRAIN] Iter: 115600 Loss: 0.011073160916566849 PSNR: 23.487823486328125 +[TRAIN] Iter: 115700 Loss: 0.012897061184048653 PSNR: 22.98897361755371 +[TRAIN] Iter: 115800 Loss: 0.012418663129210472 PSNR: 22.69452667236328 +[TRAIN] Iter: 115900 Loss: 0.013630139641463757 PSNR: 22.973865509033203 +[TRAIN] Iter: 116000 Loss: 0.012345293536782265 PSNR: 23.140960693359375 +[TRAIN] Iter: 116100 Loss: 0.012699941173195839 PSNR: 22.930246353149414 +[TRAIN] Iter: 116200 Loss: 0.016517646610736847 PSNR: 21.049842834472656 +[TRAIN] Iter: 116300 Loss: 0.014576863497495651 PSNR: 21.825040817260742 +[TRAIN] Iter: 116400 Loss: 0.013349896296858788 PSNR: 22.344999313354492 +[TRAIN] Iter: 116500 Loss: 0.012299793772399426 PSNR: 22.726003646850586 +[TRAIN] Iter: 116600 Loss: 0.015037786215543747 PSNR: 22.10910415649414 +[TRAIN] Iter: 116700 Loss: 0.01057220995426178 PSNR: 23.553539276123047 +[TRAIN] Iter: 116800 Loss: 0.013373205438256264 PSNR: 22.499656677246094 +[TRAIN] Iter: 116900 Loss: 0.015139547176659107 PSNR: 22.006906509399414 +[TRAIN] Iter: 117000 Loss: 0.014311244711279869 PSNR: 22.183439254760742 +[TRAIN] Iter: 117100 Loss: 0.013381420634686947 PSNR: 22.815916061401367 +[TRAIN] Iter: 117200 Loss: 0.01276139821857214 PSNR: 22.502002716064453 +[TRAIN] Iter: 117300 Loss: 0.01138537097722292 PSNR: 23.405292510986328 +[TRAIN] Iter: 117400 Loss: 0.01328425481915474 PSNR: 22.43817901611328 +[TRAIN] Iter: 117500 Loss: 0.013296924531459808 PSNR: 22.448144912719727 +[TRAIN] Iter: 117600 Loss: 0.012248133309185505 PSNR: 22.96125030517578 +[TRAIN] Iter: 117700 Loss: 0.014360151253640652 PSNR: 22.128395080566406 +[TRAIN] Iter: 117800 Loss: 0.013914041221141815 PSNR: 22.810850143432617 +[TRAIN] Iter: 117900 Loss: 0.014804964885115623 PSNR: 22.527568817138672 +[TRAIN] Iter: 118000 Loss: 0.011541343294084072 PSNR: 23.41827392578125 +[TRAIN] Iter: 118100 Loss: 0.014132935553789139 PSNR: 22.222671508789062 +[TRAIN] Iter: 118200 Loss: 0.013932518661022186 PSNR: 22.498756408691406 +[TRAIN] Iter: 118300 Loss: 0.014534655027091503 PSNR: 22.300764083862305 +[TRAIN] Iter: 118400 Loss: 0.014116534031927586 PSNR: 22.696735382080078 +[TRAIN] Iter: 118500 Loss: 0.013453916646540165 PSNR: 22.39806365966797 +[TRAIN] Iter: 118600 Loss: 0.01364214438945055 PSNR: 22.247303009033203 +[TRAIN] Iter: 118700 Loss: 0.015851356089115143 PSNR: 21.614269256591797 +[TRAIN] Iter: 118800 Loss: 
0.01224970631301403 PSNR: 23.486005783081055 +[TRAIN] Iter: 118900 Loss: 0.012834573164582253 PSNR: 22.81944465637207 +[TRAIN] Iter: 119000 Loss: 0.009691040962934494 PSNR: 23.646183013916016 +[TRAIN] Iter: 119100 Loss: 0.01596468687057495 PSNR: 21.790918350219727 +[TRAIN] Iter: 119200 Loss: 0.016416504979133606 PSNR: 21.677236557006836 +[TRAIN] Iter: 119300 Loss: 0.013618096709251404 PSNR: 22.849716186523438 +[TRAIN] Iter: 119400 Loss: 0.012397315353155136 PSNR: 23.01017951965332 +[TRAIN] Iter: 119500 Loss: 0.015051431953907013 PSNR: 21.92101287841797 +[TRAIN] Iter: 119600 Loss: 0.014394531026482582 PSNR: 22.187082290649414 +[TRAIN] Iter: 119700 Loss: 0.014319414272904396 PSNR: 22.425378799438477 +[TRAIN] Iter: 119800 Loss: 0.01567017287015915 PSNR: 21.527542114257812 +[TRAIN] Iter: 119900 Loss: 0.013411601074039936 PSNR: 22.37607192993164 +Saved checkpoints at ./logs/TUT-hikage-doll-360-np/120000.tar +[TRAIN] Iter: 120000 Loss: 0.010553963482379913 PSNR: 23.87206268310547 +[TRAIN] Iter: 120100 Loss: 0.012510508298873901 PSNR: 22.70289421081543 +[TRAIN] Iter: 120200 Loss: 0.01694667339324951 PSNR: 21.317052841186523 +[TRAIN] Iter: 120300 Loss: 0.013033099472522736 PSNR: 22.48651695251465 +[TRAIN] Iter: 120400 Loss: 0.012664125300943851 PSNR: 22.58208656311035 +[TRAIN] Iter: 120500 Loss: 0.011886095628142357 PSNR: 23.237382888793945 +[TRAIN] Iter: 120600 Loss: 0.016170237213373184 PSNR: 22.03530502319336 +[TRAIN] Iter: 120700 Loss: 0.011240207590162754 PSNR: 23.161788940429688 +[TRAIN] Iter: 120800 Loss: 0.014429881237447262 PSNR: 22.33024787902832 +[TRAIN] Iter: 120900 Loss: 0.012035459280014038 PSNR: 23.142837524414062 +[TRAIN] Iter: 121000 Loss: 0.0134100541472435 PSNR: 22.359643936157227 +[TRAIN] Iter: 121100 Loss: 0.015208126977086067 PSNR: 21.91971778869629 +[TRAIN] Iter: 121200 Loss: 0.01513969711959362 PSNR: 22.12118911743164 +[TRAIN] Iter: 121300 Loss: 0.014645310118794441 PSNR: 22.165462493896484 +[TRAIN] Iter: 121400 Loss: 0.014348609372973442 PSNR: 21.986370086669922 +[TRAIN] Iter: 121500 Loss: 0.012924496084451675 PSNR: 22.779495239257812 +[TRAIN] Iter: 121600 Loss: 0.014881360344588757 PSNR: 22.240795135498047 +[TRAIN] Iter: 121700 Loss: 0.012580929324030876 PSNR: 22.644664764404297 +[TRAIN] Iter: 121800 Loss: 0.01568591594696045 PSNR: 21.912216186523438 +[TRAIN] Iter: 121900 Loss: 0.01305381953716278 PSNR: 22.516298294067383 +[TRAIN] Iter: 122000 Loss: 0.012040328234434128 PSNR: 23.480388641357422 +[TRAIN] Iter: 122100 Loss: 0.012939533218741417 PSNR: 22.499008178710938 +[TRAIN] Iter: 122200 Loss: 0.014800192788243294 PSNR: 22.14009666442871 +[TRAIN] Iter: 122300 Loss: 0.014246370643377304 PSNR: 22.153593063354492 +[TRAIN] Iter: 122400 Loss: 0.013281483203172684 PSNR: 22.254966735839844 +[TRAIN] Iter: 122500 Loss: 0.012835768982768059 PSNR: 22.635879516601562 +[TRAIN] Iter: 122600 Loss: 0.014823578298091888 PSNR: 22.15805435180664 +[TRAIN] Iter: 122700 Loss: 0.012494564987719059 PSNR: 22.86141586303711 +[TRAIN] Iter: 122800 Loss: 0.014362892135977745 PSNR: 21.752676010131836 +[TRAIN] Iter: 122900 Loss: 0.013516075909137726 PSNR: 22.409740447998047 +[TRAIN] Iter: 123000 Loss: 0.012575820088386536 PSNR: 22.443790435791016 +[TRAIN] Iter: 123100 Loss: 0.01366693340241909 PSNR: 22.532629013061523 +[TRAIN] Iter: 123200 Loss: 0.013180766254663467 PSNR: 22.5361270904541 +[TRAIN] Iter: 123300 Loss: 0.012986778281629086 PSNR: 22.72893714904785 +[TRAIN] Iter: 123400 Loss: 0.012448751367628574 PSNR: 22.881654739379883 +[TRAIN] Iter: 123500 Loss: 0.015041320584714413 PSNR: 
21.554311752319336 +[TRAIN] Iter: 123600 Loss: 0.012477263808250427 PSNR: 22.945415496826172 +[TRAIN] Iter: 123700 Loss: 0.012643611058592796 PSNR: 23.140167236328125 +[TRAIN] Iter: 123800 Loss: 0.014626994729042053 PSNR: 22.139551162719727 +[TRAIN] Iter: 123900 Loss: 0.013681315816938877 PSNR: 22.52644920349121 +[TRAIN] Iter: 124000 Loss: 0.01447216421365738 PSNR: 21.845867156982422 +[TRAIN] Iter: 124100 Loss: 0.013757126405835152 PSNR: 22.525897979736328 +[TRAIN] Iter: 124200 Loss: 0.013936260715126991 PSNR: 22.068614959716797 +[TRAIN] Iter: 124300 Loss: 0.01164568867534399 PSNR: 23.869396209716797 +[TRAIN] Iter: 124400 Loss: 0.01358125638216734 PSNR: 22.11513900756836 +[TRAIN] Iter: 124500 Loss: 0.012187217362225056 PSNR: 23.013463973999023 +[TRAIN] Iter: 124600 Loss: 0.012076940387487411 PSNR: 22.759525299072266 +[TRAIN] Iter: 124700 Loss: 0.013229597359895706 PSNR: 22.598691940307617 +[TRAIN] Iter: 124800 Loss: 0.011841959320008755 PSNR: 23.20867156982422 +[TRAIN] Iter: 124900 Loss: 0.012897293083369732 PSNR: 22.449926376342773 +[TRAIN] Iter: 125000 Loss: 0.010810359381139278 PSNR: 23.615062713623047 +[TRAIN] Iter: 125100 Loss: 0.012252170592546463 PSNR: 23.0376033782959 +[TRAIN] Iter: 125200 Loss: 0.01132394839078188 PSNR: 23.33286476135254 +[TRAIN] Iter: 125300 Loss: 0.013290445320308208 PSNR: 22.912290573120117 +[TRAIN] Iter: 125400 Loss: 0.0141229759901762 PSNR: 22.78744888305664 +[TRAIN] Iter: 125500 Loss: 0.013708115555346012 PSNR: 22.556909561157227 +[TRAIN] Iter: 125600 Loss: 0.015152974054217339 PSNR: 21.57262420654297 +[TRAIN] Iter: 125700 Loss: 0.01531902700662613 PSNR: 21.849828720092773 +[TRAIN] Iter: 125800 Loss: 0.013657988049089909 PSNR: 22.20252227783203 +[TRAIN] Iter: 125900 Loss: 0.011834890581667423 PSNR: 23.28813362121582 +[TRAIN] Iter: 126000 Loss: 0.01321032177656889 PSNR: 22.73029136657715 +[TRAIN] Iter: 126100 Loss: 0.013298639096319675 PSNR: 22.621540069580078 +[TRAIN] Iter: 126200 Loss: 0.014915881678462029 PSNR: 21.859573364257812 +[TRAIN] Iter: 126300 Loss: 0.013676801696419716 PSNR: 22.490968704223633 +[TRAIN] Iter: 126400 Loss: 0.011419547721743584 PSNR: 23.201656341552734 +[TRAIN] Iter: 126500 Loss: 0.012902726419270039 PSNR: 22.74493980407715 +[TRAIN] Iter: 126600 Loss: 0.013627773150801659 PSNR: 22.398195266723633 +[TRAIN] Iter: 126700 Loss: 0.01405201107263565 PSNR: 22.169143676757812 +[TRAIN] Iter: 126800 Loss: 0.011721216142177582 PSNR: 22.984790802001953 +[TRAIN] Iter: 126900 Loss: 0.01260901428759098 PSNR: 22.675243377685547 +[TRAIN] Iter: 127000 Loss: 0.01617354340851307 PSNR: 21.832143783569336 +[TRAIN] Iter: 127100 Loss: 0.014116323553025723 PSNR: 22.64044761657715 +[TRAIN] Iter: 127200 Loss: 0.012423043139278889 PSNR: 23.05440330505371 +[TRAIN] Iter: 127300 Loss: 0.009855248034000397 PSNR: 24.094202041625977 +[TRAIN] Iter: 127400 Loss: 0.013093090616166592 PSNR: 22.567230224609375 +[TRAIN] Iter: 127500 Loss: 0.014825168997049332 PSNR: 22.12091064453125 +[TRAIN] Iter: 127600 Loss: 0.01485694944858551 PSNR: 21.96719741821289 +[TRAIN] Iter: 127700 Loss: 0.010871377773582935 PSNR: 23.85426902770996 +[TRAIN] Iter: 127800 Loss: 0.012830878607928753 PSNR: 22.733251571655273 +[TRAIN] Iter: 127900 Loss: 0.013307379558682442 PSNR: 22.37669563293457 +[TRAIN] Iter: 128000 Loss: 0.012933313846588135 PSNR: 22.688310623168945 +[TRAIN] Iter: 128100 Loss: 0.012806609272956848 PSNR: 22.624486923217773 +[TRAIN] Iter: 128200 Loss: 0.014209038577973843 PSNR: 21.865320205688477 +[TRAIN] Iter: 128300 Loss: 0.012921196408569813 PSNR: 22.557861328125 +[TRAIN] Iter: 
128400 Loss: 0.012488342821598053 PSNR: 23.14933204650879 +[TRAIN] Iter: 128500 Loss: 0.013636110350489616 PSNR: 22.409841537475586 +[TRAIN] Iter: 128600 Loss: 0.012288618832826614 PSNR: 22.846691131591797 +[TRAIN] Iter: 128700 Loss: 0.01235552504658699 PSNR: 23.072450637817383 +[TRAIN] Iter: 128800 Loss: 0.012623406946659088 PSNR: 23.025264739990234 +[TRAIN] Iter: 128900 Loss: 0.014636324718594551 PSNR: 22.251359939575195 +[TRAIN] Iter: 129000 Loss: 0.011665191501379013 PSNR: 23.12674903869629 +[TRAIN] Iter: 129100 Loss: 0.01129763014614582 PSNR: 23.46792221069336 +[TRAIN] Iter: 129200 Loss: 0.013197755441069603 PSNR: 22.46013069152832 +[TRAIN] Iter: 129300 Loss: 0.014029564335942268 PSNR: 22.43587303161621 +[TRAIN] Iter: 129400 Loss: 0.012392929755151272 PSNR: 23.41368865966797 +[TRAIN] Iter: 129500 Loss: 0.011499105021357536 PSNR: 22.837196350097656 +[TRAIN] Iter: 129600 Loss: 0.015129669569432735 PSNR: 21.75592613220215 +[TRAIN] Iter: 129700 Loss: 0.01242729090154171 PSNR: 23.220726013183594 +[TRAIN] Iter: 129800 Loss: 0.012374321930110455 PSNR: 22.725801467895508 +[TRAIN] Iter: 129900 Loss: 0.012599386274814606 PSNR: 22.69788932800293 +Saved checkpoints at ./logs/TUT-hikage-doll-360-np/130000.tar +[TRAIN] Iter: 130000 Loss: 0.012793504633009434 PSNR: 23.102752685546875 +[TRAIN] Iter: 130100 Loss: 0.010576395317912102 PSNR: 23.843585968017578 +[TRAIN] Iter: 130200 Loss: 0.010052647441625595 PSNR: 23.840166091918945 +[TRAIN] Iter: 130300 Loss: 0.011603821069002151 PSNR: 23.286836624145508 +[TRAIN] Iter: 130400 Loss: 0.012419331818819046 PSNR: 22.893115997314453 +[TRAIN] Iter: 130500 Loss: 0.012328271754086018 PSNR: 22.46221923828125 +[TRAIN] Iter: 130600 Loss: 0.012859603390097618 PSNR: 22.761606216430664 +[TRAIN] Iter: 130700 Loss: 0.012741091661155224 PSNR: 22.979541778564453 +[TRAIN] Iter: 130800 Loss: 0.013156630098819733 PSNR: 22.5899658203125 +[TRAIN] It0 0.0008997917175292969 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.60332989692688 +2 21.052441358566284 +3 21.555790662765503 +4 21.583808422088623 +5 21.78903079032898 +6 22.06020426750183 +7 21.6042902469635 +8 21.571099758148193 +9 21.654065132141113 +10 21.18506169319153 +11 23.303558588027954 +12 21.982328414916992 +13 22.470111846923828 +14 21.357961654663086 +15 21.505855798721313 +16 21.696942567825317 +17 23.247157096862793 +18 22.381223917007446 +19 22.1830050945282 +20 21.219528436660767 +21 21.57265305519104 +22 22.683643341064453 +23 21.721338748931885 +24 21.338486433029175 +25 22.450883626937866 +26 22.250452756881714 +27 21.45114278793335 +28 21.417190551757812 +29 22.08093810081482 +30 21.539019107818604 +31 21.689035177230835 +32 21.225717544555664 +33 21.58141779899597 +34 22.12126898765564 +35 22.941091060638428 +36 21.422794818878174 +37 21.691629648208618 +38 21.305980682373047 +39 22.05233597755432 +40 21.71112823486328 +41 21.55746293067932 +42 21.42638921737671 +43 21.740941286087036 +44 21.723225831985474 +45 22.770864963531494 +46 21.815895557403564 +47 22.09813618659973 +48 21.906811952590942 +49 22.63958191871643 +50 21.29354500770569 +51 21.84394383430481 +52 22.207345962524414 +53 21.7195942401886 +54 21.63345980644226 +55 22.252469778060913 +56 21.327462673187256 +57 21.809134483337402 +58 22.090936183929443 +59 22.356919050216675 +60 21.21338129043579 +61 22.815728425979614 +62 21.463385105133057 +63 22.113964080810547 +64 22.309048891067505 +65 21.38559865951538 +66 22.49366784095764 +67 22.274792432785034 +68 21.7287278175354 +69 20.929903984069824 +70 21.717804431915283 +71 
20.88282084465027 +72 21.43415379524231 +73 21.74709153175354 +74 21.814960718154907 +75 21.31891632080078 +76 21.896826028823853 +77 21.234994649887085 +78 21.799386024475098 +79 21.577837228775024 +80 21.60596513748169 +81 22.28009557723999 +82 21.67381525039673 +83 21.667519569396973 +84 21.25282335281372 +85 22.218552112579346 +86 21.180124759674072 +87 21.35162353515625 +88 22.375974655151367 +89 22.036630392074585 +90 22.33988642692566 +91 21.513282775878906 +92 21.64591956138611 +93 21.63622260093689 +94 21.278053522109985 +95 21.66223382949829 +96 22.13258194923401 +97 21.487547397613525 +98 22.19135284423828 +99 21.215876817703247 +100 21.562575340270996 +101 21.33296513557434 +102 22.37740206718445 +103 22.052006483078003 +104 22.398383617401123 +105 21.449021577835083 +106 21.231785774230957 +107 21.72769594192505 +108 21.62301206588745 +109 22.222519874572754 +110 22.018542766571045 +111 21.480412483215332 +112 21.595436573028564 +113 22.596633672714233 +114 20.821579933166504 +115 22.49482226371765 +116 21.839928150177002 +117 21.794407844543457 +118 21.752485036849976 +119 21.524922370910645 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[-1.1149e+00, -1.3839e+00, -1.3299e+00, -3.6512e+01], + [-1.6762e+00, -1.5232e+00, -5.7983e-01, -4.5693e+01], + [-4.3808e-01, -2.5512e-01, 4.2919e-01, -1.3487e+01], + ..., + [-6.7470e+00, -6.1888e+00, -4.1317e+00, 2.9612e+02], + [-6.0436e+00, -5.2733e+00, -3.9530e+00, 3.2327e+02], + [-6.2457e+00, -5.5808e+00, -3.5756e+00, 3.1057e+02]], + + [[ 1.3713e-01, 1.0508e+00, 2.0890e+00, -8.3980e+00], + [ 4.2150e-01, 1.3092e+00, 2.1043e+00, -1.1163e+01], + [ 4.0728e-01, 1.2203e+00, 1.9816e+00, -1.3320e+01], + ..., + [-1.1816e+01, -9.0951e+00, -1.3740e+00, 5.7571e+02], + [-1.1941e+01, -8.8779e+00, -1.0751e+00, 5.9216e+02], + [-1.2068e+01, -9.3303e+00, -1.3144e+00, 5.7416e+02]], + + [[-8.4942e-01, -9.7909e-01, -1.0048e+00, 2.3792e+01], + [-7.2704e-01, -8.9411e-01, -8.8935e-01, 3.2052e+00], + [-7.3179e-01, -8.8052e-01, -9.7637e-01, 9.5628e+00], + ..., + [-7.6459e+00, 1.8667e+00, 8.8086e+00, 5.8983e+02], + [-7.4512e+00, 2.9388e+00, 1.0440e+01, 5.6866e+02], + [-8.3923e+00, 9.8679e-01, 7.6022e+00, 6.2404e+02]], + + ..., + + [[-1.0437e+00, 2.4203e-02, 1.1289e+00, -2.7824e+01], + [-1.1894e+00, -9.3607e-02, 7.1880e-01, -2.7078e+01], + [-9.9582e-01, -5.5947e-02, 1.1217e+00, -1.7912e+01], + ..., + [-3.9707e+00, -3.2447e+00, -2.7565e-01, 2.5388e+02], + [-4.1305e+00, -3.4808e+00, -4.8512e-01, 2.3962e+02], + [-4.4999e+00, -3.2579e+00, -5.9146e-02, 2.1028e+02]], + + [[-3.3535e-01, -5.4054e-01, -5.0667e-01, 2.0063e+01], + [-1.1834e+00, -1.1847e+00, -1.0797e+00, -1.3130e+01], + [-1.0522e+00, -1.1450e+00, -9.5230e-01, -9.9144e+00], + ..., + [-9.8696e+00, -4.3471e+00, -2.9562e+00, 3.0988e+02], + [-9.8366e+00, -4.1983e+00, -2.8517e+00, 3.1428e+02], + [-9.6843e+00, -5.1132e+00, -4.9866e+00, 2.6907e+02]], + + [[-3.8607e-01, 3.0717e-01, 1.0406e+00, -1.2078e+01], + [-1.3175e+00, 3.3023e-02, 9.0173e-01, -4.2592e+01], + [-7.7273e-01, 4.8120e-01, 1.0932e+00, -4.2233e+01], + ..., + [-1.2344e+01, -9.4975e+00, -1.9783e+00, 6.3290e+02], + [-1.2560e+01, -9.6931e+00, -2.5949e+00, 6.4712e+02], + [-1.2536e+01, -9.6338e+00, -3.0082e+00, 6.3027e+02]]], + grad_fn=), 'rgb0': tensor([[0.3480, 0.4708, 0.6718], + [0.2631, 0.4267, 0.6608], + [0.3062, 0.2794, 0.2869], + ..., + [0.3924, 0.6405, 0.8519], + [0.4018, 0.3503, 0.3531], + [0.2436, 0.4096, 0.6471]], grad_fn=), 'disp0': tensor([ 25.5459, 126.1814, 167.3392, ..., 19.7613, 287.4355, 54.4572], + grad_fn=), 'acc0': 
tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.0057, 0.0022, 0.2409, ..., 0.0107, 0.2897, 0.0040])} +0 0.0010080337524414062 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.683287858963013 +2 21.56097674369812 +3 21.81191849708557 +4 21.253183841705322 +5 22.10608220100403 +6 21.19174313545227 +7 22.491743564605713 +8 21.316872358322144 +9 21.404534578323364 +10 22.219209909439087 +11 22.075949668884277 +12 21.277020931243896 +13 21.40772294998169 +14 21.91630744934082 +15 22.05702829360962 +16 22.151252508163452 +17 21.234628438949585 +18 22.709442615509033 +19 22.271596431732178 +20 21.384917736053467 +21 21.721595764160156 +22 22.321043729782104 +23 21.6845805644989 +24 21.802903175354004 +25 22.27053213119507 +26 21.383338451385498 +27 21.750473022460938 +28 22.498502254486084 +29 20.944782495498657 +30 22.137224197387695 +31 21.759329557418823 +32 21.70123553276062 +33 22.911199808120728 +34 20.99305558204651 +35 21.336434602737427 +36 22.141298532485962 +37 22.0482656955719 +38 21.6081280708313 +39 22.25236201286316 +40 21.734508991241455 +41 21.5184428691864 +42 21.683833837509155 +43 21.721880197525024 +44 21.686496257781982 +45 21.93828535079956 +46 21.989017248153687 +47 21.638880968093872 +48 22.406935453414917 +49 21.707594394683838 +50 21.637874126434326 +51 21.557309865951538 +52 22.068190574645996 +53 21.309521436691284 +54 21.748669147491455 +55 21.802051782608032 +56 21.507089853286743 +57 22.432893991470337 +58 21.73831081390381 +59 21.78493046760559 +60 21.269679069519043 +61 21.355713605880737 +62 21.216315507888794 +63 22.04921555519104 +64 21.342200756072998 +65 21.640693426132202 +66 22.186063766479492 +67 22.08753252029419 +68 22.04061245918274 +69 21.614514350891113 +70 21.732117891311646 +71 22.262734413146973 +72 21.024030208587646 +73 21.540595293045044 +74 21.051249742507935 +75 21.669559717178345 +76 22.07976722717285 +77 21.299914360046387 +78 21.844329118728638 +79 22.189691305160522 +80 21.28337597846985 +81 21.151131629943848 +82 21.81270718574524 +83 22.15750479698181 +84 21.92262363433838 +85 22.323598623275757 +86 21.552712440490723 +87 22.10421085357666 +88 22.261565685272217 +89 21.72152304649353 +90 21.715166091918945 +91 21.797614097595215 +92 21.955667972564697 +93 22.97576117515564 +94 21.333296298980713 +95 21.438143014907837 +96 21.501933574676514 +97 21.38386368751526 +98 21.284545183181763 +99 21.4639630317688 +100 21.509278535842896 +101 22.18393063545227 +102 21.190442085266113 +103 21.42796492576599 +104 21.88260793685913 +105 21.48966908454895 +106 21.218346118927002 +107 22.450780868530273 +108 22.04373526573181 +109 21.705217599868774 +110 20.903613328933716 +111 21.435649633407593 +112 22.21763777732849 +113 22.130391120910645 +114 21.62255072593689 +115 21.890068531036377 +116 22.10507035255432 +117 22.859349966049194 +118 21.836854457855225 +119 21.430744171142578 +test poses shape torch.Size([4, 3, 4]) +0 0.0008635520935058594 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 22.01328945159912 +2 21.6161687374115 +3 22.230760097503662 +Saved test set +[TRAIN] Iter: 150000 Loss: 0.008047551847994328 PSNR: 25.28976821899414 +[TRAIN] Iter: 150100 Loss: 0.008703210391104221 PSNR: 24.749479293823242 +[TRAIN] Iter: 150200 Loss: 0.008592708967626095 PSNR: 24.586172103881836 +[TRAIN] Iter: 150300 Loss: 0.0076098013669252396 PSNR: 24.584945678710938 +[TRAIN] Iter: 150400 Loss: 0.008370421826839447 PSNR: 25.003286361694336 +[TRAIN] Iter: 150500 Loss: 0.0064539131708443165 PSNR: 26.22431755065918 +[TRAIN] Iter: 
150600 Loss: 0.007865207269787788 PSNR: 25.1215763092041 +[TRAIN] Iter: 150700 Loss: 0.007983701303601265 PSNR: 25.541919708251953 +[TRAIN] Iter: 150800 Loss: 0.006017197854816914 PSNR: 27.18227195739746 +[TRAIN] Iter: 150900 Loss: 0.008683109655976295 PSNR: 25.795297622680664 +[TRAIN] Iter: 151000 Loss: 0.006836144719272852 PSNR: 25.920873641967773 +[TRAIN] Iter: 151100 Loss: 0.008070341311395168 PSNR: 24.71259880065918 +[TRAIN] Iter: 151200 Loss: 0.007716004736721516 PSNR: 26.419273376464844 +[TRAIN] Iter: 151300 Loss: 0.008248223923146725 PSNR: 25.32780647277832 +[TRAIN] Iter: 151400 Loss: 0.00812376756221056 PSNR: 25.06427001953125 +[TRAIN] Iter: 151500 Loss: 0.006565142888575792 PSNR: 25.602916717529297 +[TRAIN] Iter: 151600 Loss: 0.006572604179382324 PSNR: 26.53679656982422 +[TRAIN] Iter: 151700 Loss: 0.00683214608579874 PSNR: 25.882360458374023 +[TRAIN] Iter: 151800 Loss: 0.008301332592964172 PSNR: 24.894140243530273 +[TRAIN] Iter: 151900 Loss: 0.008597963489592075 PSNR: 24.55026626586914 +[TRAIN] Iter: 152000 Loss: 0.007371756248176098 PSNR: 25.456113815307617 +[TRAIN] Iter: 152100 Loss: 0.006893903948366642 PSNR: 25.79576301574707 +[TRAIN] Iter: 152200 Loss: 0.007162613328546286 PSNR: 26.440481185913086 +[TRAIN] Iter: 152300 Loss: 0.006736521143466234 PSNR: 26.211061477661133 +[TRAIN] Iter: 152400 Loss: 0.008323894813656807 PSNR: 25.15334129333496 +[TRAIN] Iter: 152500 Loss: 0.00632224977016449 PSNR: 27.31221580505371 +[TRAIN] Iter: 152600 Loss: 0.005718285217881203 PSNR: 26.691463470458984 +[TRAIN] Iter: 152700 Loss: 0.008072513155639172 PSNR: 25.158016204833984 +[TRAIN] Iter: 152800 Loss: 0.008938778191804886 PSNR: 24.38544273376465 +[TRAIN] Iter: 152900 Loss: 0.007517840713262558 PSNR: 25.088207244873047 +[TRAIN] Iter: 153000 Loss: 0.008255232125520706 PSNR: 24.89472198486328 +[TRAIN] Iter: 153100 Loss: 0.007146770134568214 PSNR: 25.363428115844727 +[TRAIN] Iter: 153200 Loss: 0.0053939251229166985 PSNR: 27.222637176513672 +[TRAIN] Iter: 153300 Loss: 0.008105289191007614 PSNR: 25.697134017944336 +[TRAIN] Iter: 153400 Loss: 0.007761894725263119 PSNR: 25.175838470458984 +[TRAIN] Iter: 153500 Loss: 0.0082250302657485 PSNR: 24.649843215942383 +[TRAIN] Iter: 153600 Loss: 0.008227291516959667 PSNR: 25.311767578125 +[TRAIN] Iter: 153700 Loss: 0.007585910148918629 PSNR: 24.99642562866211 +[TRAIN] Iter: 153800 Loss: 0.008290808647871017 PSNR: 25.09019660949707 +[TRAIN] Iter: 153900 Loss: 0.006674806587398052 PSNR: 26.799266815185547 +[TRAIN] Iter: 154000 Loss: 0.009245557710528374 PSNR: 24.361618041992188 +[TRAIN] Iter: 154100 Loss: 0.006713941693305969 PSNR: 26.72465705871582 +[TRAIN] Iter: 154200 Loss: 0.008526327088475227 PSNR: 24.695253372192383 +[TRAIN] Iter: 154300 Loss: 0.007694024592638016 PSNR: 25.29041290283203 +[TRAIN] Iter: 154400 Loss: 0.007766928989440203 PSNR: 25.119319915771484 +[TRAIN] Iter: 154500 Loss: 0.009196054190397263 PSNR: 24.512712478637695 +[TRAIN] Iter: 154600 Loss: 0.007461194880306721 PSNR: 25.529071807861328 +[TRAIN] Iter: 154700 Loss: 0.009367702528834343 PSNR: 24.37191390991211 +[TRAIN] Iter: 154800 Loss: 0.007222010754048824 PSNR: 25.740955352783203 +[TRAIN] Iter: 154900 Loss: 0.008316009305417538 PSNR: 24.6292781829834 +[TRAIN] Iter: 155000 Loss: 0.008179979398846626 PSNR: 24.91905975341797 +[TRAIN] Iter: 155100 Loss: 0.008927883580327034 PSNR: 24.55605697631836 +[TRAIN] Iter: 155200 Loss: 0.009673712775111198 PSNR: 23.980518341064453 +[TRAIN] Iter: 155300 Loss: 0.006700295023620129 PSNR: 26.653284072875977 +[TRAIN] Iter: 155400 Loss: 
0.007131076417863369 PSNR: 25.752357482910156 +[TRAIN] Iter: 155500 Loss: 0.008611626923084259 PSNR: 24.674779891967773 +[TRAIN] Iter: 155600 Loss: 0.007737563923001289 PSNR: 25.854398727416992 +[TRAIN] Iter: 155700 Loss: 0.006634298712015152 PSNR: 25.83875274658203 +[TRAIN] Iter: 155800 Loss: 0.008705882355570793 PSNR: 24.602455139160156 +[TRAIN] Iter: 155900 Loss: 0.008322957903146744 PSNR: 24.713159561157227 +[TRAIN] Iter: 156000 Loss: 0.006905599031597376 PSNR: 25.728050231933594 +[TRAIN] Iter: 156100 Loss: 0.00847889669239521 PSNR: 24.491201400756836 +[TRAIN] Iter: 156200 Loss: 0.007084948942065239 PSNR: 25.757448196411133 +[TRAIN] Iter: 156300 Loss: 0.0078200101852417 PSNR: 24.903417587280273 +[TRAIN] Iter: 156400 Loss: 0.008728312328457832 PSNR: 24.944292068481445 +[TRAIN] Iter: 156500 Loss: 0.006899191997945309 PSNR: 26.156309127807617 +[TRAIN] Iter: 156600 Loss: 0.00932040624320507 PSNR: 24.040756225585938 +[TRAIN] Iter: 156700 Loss: 0.007517737336456776 PSNR: 25.891143798828125 +[TRAIN] Iter: 156800 Loss: 0.008107435889542103 PSNR: 26.01988983154297 +[TRAIN] Iter: 156900 Loss: 0.007043963298201561 PSNR: 25.554521560668945 +[TRAIN] Iter: 157000 Loss: 0.008466155268251896 PSNR: 24.552120208740234 +[TRAIN] Iter: 157100 Loss: 0.00757656991481781 PSNR: 25.35777473449707 +[TRAIN] Iter: 157200 Loss: 0.008639777079224586 PSNR: 24.479520797729492 +[TRAIN] Iter: 157300 Loss: 0.007363772019743919 PSNR: 26.779359817504883 +[TRAIN] Iter: 157400 Loss: 0.005832803435623646 PSNR: 27.408767700195312 +[TRAIN] Iter: 157500 Loss: 0.007146195508539677 PSNR: 25.778167724609375 +[TRAIN] Iter: 157600 Loss: 0.009519273415207863 PSNR: 24.482112884521484 +[TRAIN] Iter: 157700 Loss: 0.008596140891313553 PSNR: 24.858081817626953 +[TRAIN] Iter: 157800 Loss: 0.009580807760357857 PSNR: 24.1680965423584 +[TRAIN] Iter: 157900 Loss: 0.008579518646001816 PSNR: 24.476747512817383 +[TRAIN] Iter: 158000 Loss: 0.0066721271723508835 PSNR: 25.926624298095703 +[TRAIN] Iter: 158100 Loss: 0.008916303515434265 PSNR: 24.532920837402344 +[TRAIN] Iter: 158200 Loss: 0.008178995922207832 PSNR: 25.282245635986328 +[TRAIN] Iter: 158300 Loss: 0.00783815048635006 PSNR: 25.001041412353516 +[TRAIN] Iter: 158400 Loss: 0.006199293304234743 PSNR: 26.354001998901367 +[TRAIN] Iter: 158500 Loss: 0.009813480079174042 PSNR: 24.777294158935547 +[TRAIN] Iter: 158600 Loss: 0.008674805983901024 PSNR: 24.688980102539062 +[TRAIN] Iter: 158700 Loss: 0.008155290968716145 PSNR: 25.36181640625 +[TRAIN] Iter: 158800 Loss: 0.006266675889492035 PSNR: 26.253488540649414 +[TRAIN] Iter: 158900 Loss: 0.008455204777419567 PSNR: 25.11202049255371 +[TRAIN] Iter: 159000 Loss: 0.006452261935919523 PSNR: 26.345643997192383 +[TRAIN] Iter: 159100 Loss: 0.0071183983236551285 PSNR: 26.100685119628906 +[TRAIN] Iter: 159200 Loss: 0.008093282580375671 PSNR: 25.108646392822266 +[TRAIN] Iter: 159300 Loss: 0.007057332433760166 PSNR: 25.371660232543945 +[TRAIN] Iter: 159400 Loss: 0.006643978878855705 PSNR: 26.297346115112305 +[TRAIN] Iter: 159500 Loss: 0.008265437558293343 PSNR: 25.657730102539062 +[TRAIN] Iter: 159600 Loss: 0.0069128116592764854 PSNR: 25.754627227783203 +[TRAIN] Iter: 159700 Loss: 0.007741907145828009 PSNR: 26.527767181396484 +[TRAIN] Iter: 159800 Loss: 0.008512534201145172 PSNR: 25.17748260498047 +[TRAIN] Iter: 159900 Loss: 0.006895366590470076 PSNR: 25.577730178833008 +Saved checkpoints at ./logs/TUT-out-doll-360-np/160000.tar +[TRAIN] Iter: 160000 Loss: 0.007533787749707699 PSNR: 25.02830696105957 +[TRAIN] Iter: 160100 Loss: 0.006934892386198044 PSNR: 
26.386245727539062 +[TRAIN] Iter: 160200 Loss: 0.008660259656608105 PSNR: 24.81008529663086 +[TRAIN] Iter: 160300 Loss: 0.007156029809266329 PSNR: 25.445377349853516 +[TRAIN] Iter: 160400 Loss: 0.009023267775774002 PSNR: 24.742786407470703 +[TRAIN] Iter: 160500 Loss: 0.008607251569628716 PSNR: 24.658111572265625 +[TRAIN] Iter: 160600 Loss: 0.009058935567736626 PSNR: 24.78843879699707 +[TRAIN] Iter: 160700 Loss: 0.0077896639704704285 PSNR: 25.121545791625977 +[TRAIN] Iter: 160800 Loss: 0.00794984307140112 PSNR: 25.51971435546875 +[TRAIN] Iter: 160900 Loss: 0.006308697164058685 PSNR: 26.524734497070312 +[TRAIN] Iter: 161000 Loss: 0.006464821752160788 PSNR: 26.808279037475586 +[TRAIN] Iter: 161100 Loss: 0.008403532207012177 PSNR: 24.43443489074707 +[TRAIN] Iter: 161200 Loss: 0.007387596182525158 PSNR: 25.041854858398438 +[TRAIN] Iter: 161300 Loss: 0.007215787656605244 PSNR: 25.838951110839844 +[TRAIN] Iter: 161400 Loss: 0.006868299096822739 PSNR: 26.527877807617188 +[TRAIN] Iter: 161500 Loss: 0.006869794335216284 PSNR: 25.654142379760742 +[TRAIN] Iter: 161600 Loss: 0.008407811634242535 PSNR: 25.077163696289062 +[TRAIN] Iter: 161700 Loss: 0.008037318475544453 PSNR: 25.3581485748291 +[TRAIN] Iter: 161800 Loss: 0.007980549708008766 PSNR: 24.87616729736328 +[TRAIN] Iter: 161900 Loss: 0.007625795900821686 PSNR: 25.391773223876953 +[TRAIN] Iter: 162000 Loss: 0.007212257944047451 PSNR: 25.621856689453125 +[TRAIN] Iter: 162100 Loss: 0.009585043415427208 PSNR: 24.55176544189453 +[TRAIN] Iter: 162200 Loss: 0.007400440517812967 PSNR: 25.713455200195312 +[TRAIN] Iter: 162300 Loss: 0.006402261555194855 PSNR: 27.043468475341797 +[TRAIN] Iter: 162400 Loss: 0.00845767930150032 PSNR: 24.44125747680664 +[TRAIN] Iter: 162500 Loss: 0.005556795746088028 PSNR: 27.363576889038086 +[TRAIN] Iter: 162600 Loss: 0.006906512193381786 PSNR: 26.792490005493164 +[TRAIN] Iter: 162700 Loss: 0.008178872987627983 PSNR: 24.56965446472168 +[TRAIN] Iter: 162800 Loss: 0.006233150605112314 PSNR: 26.506206512451172 +[TRAIN] Iter: 162900 Loss: 0.0069874669425189495 PSNR: 25.651710510253906 +[TRAIN] Iter: 163000 Loss: 0.008510570973157883 PSNR: 25.22590446472168 +[TRAIN] Iter: 163100 Loss: 0.008133157156407833 PSNR: 25.212614059448242 +[TRAIN] Iter: 163200 Loss: 0.007051468826830387 PSNR: 25.66623306274414 +[TRAIN] Iter: 163300 Loss: 0.007755836471915245 PSNR: 25.109962463378906 +[TRAIN] Iter: 163400 Loss: 0.008052903227508068 PSNR: 24.879573822021484 +[TRAIN] Iter: 163500 Loss: 0.006344029679894447 PSNR: 25.927783966064453 +[TRAIN] Iter: 163600 Loss: 0.00823148712515831 PSNR: 24.879213333129883 +[TRAIN] Iter: 163700 Loss: 0.007044638507068157 PSNR: 26.277238845825195 +[TRAIN] Iter: 163800 Loss: 0.008956012316048145 PSNR: 24.27147102355957 +[TRAIN] Iter: 163900 Loss: 0.008103355765342712 PSNR: 25.25937271118164 +[TRAIN] Iter: 164000 Loss: 0.007289830595254898 PSNR: 25.395368576049805 +[TRAIN] Iter: 164100 Loss: 0.007605054881423712 PSNR: 25.03095817565918 +[TRAIN] Iter: 164200 Loss: 0.0071415165439248085 PSNR: 25.59026336669922 +[TRAIN] Iter: 164300 Loss: 0.00754225067794323 PSNR: 26.392749786376953 +[TRAIN] Iter: 164400 Loss: 0.008555869571864605 PSNR: 25.043155670166016 +[TRAIN] Iter: 164500 Loss: 0.007581861689686775 PSNR: 25.177562713623047 +[TRAIN] Iter: 164600 Loss: 0.006263718008995056 PSNR: 26.383773803710938 +[TRAIN] Iter: 164700 Loss: 0.006527639925479889 PSNR: 26.15265464782715 +[TRAIN] Iter: 164800 Loss: 0.006961985491216183 PSNR: 25.503183364868164 +[TRAIN] Iter: 164900 Loss: 0.007039215415716171 PSNR: 26.04833221435547 
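A note on reading the logs above: two runs are interleaved here (checkpoints go to ./logs/TUT-out-doll-360-np/ and ./logs/TUT-hikage-doll-360-np/); the bare numbered lines following each "torch.Size([320, 640, 3]) torch.Size([320, 640])" header are per-frame PSNRs from render-only evaluation ("test poses shape torch.Size([4, 3, 4])" marks the 4-pose held-out split, and "Done, saving (120, 320, 640, 3) (120, 320, 640)" is apparently the 120-frame RGB + disparity dump); and the extras dict printed earlier holds the usual auxiliary render outputs of the reference nerf-pytorch code ('raw': per-sample RGB + density, 'rgb0'/'disp0'/'acc0': coarse-pass color, disparity and accumulated alpha, 'z_std': std of the resampled depths). In that reference training loop the logged Loss is the fine-network MSE plus the coarse-network MSE, while PSNR is computed from the fine MSE alone, which is why -10*log10(Loss) comes out a little below the printed PSNR. A minimal sketch of the conversion, assuming images normalized to [0, 1] and mirroring the img2mse/mse2psnr helpers in nerf-pytorch (the example MSE value below is hypothetical, chosen only to match the magnitudes seen around iter 150k):

import torch

# Mean-squared error between a rendered image and ground truth, both in [0, 1].
img2mse = lambda x, y: torch.mean((x - y) ** 2)

# PSNR in dB from MSE: psnr = -10 * log10(mse).
mse2psnr = lambda x: -10. * torch.log(x) / torch.log(torch.tensor(10.))

# Hypothetical fine-network MSE of the magnitude implied by the log:
fine_mse = torch.tensor(0.003)
print(mse2psnr(fine_mse))  # tensor(25.2288) -> ~25 dB, like the logged PSNR

With a fine MSE of ~0.003 this gives ~25.2 dB, consistent with entries such as "Loss: 0.008047... PSNR: 25.289...", where the extra ~0.005 of loss is the coarse-branch term that does not enter the PSNR.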
+[TRAIN] Iter: 165000 Loss: 0.00804497953504324 PSNR: 24.516616821289062 +[TRAIN] Iter: 165100 Loss: 0.007405908312648535 PSNR: 25.668115615844727 +[TRAIN] Iter: 165200 Loss: 0.007486067712306976 PSNR: 25.398086547851562 +[TRAIN] Iter: 165300 Loss: 0.008380362764000893 PSNR: 25.19526481628418 +[TRAIN] Iter: 165400 Loss: 0.008329672738909721 PSNR: 25.289630889892578 +[TRAIN] Iter: 165500 Loss: 0.00577343488112092 PSNR: 26.875823974609375 +[TRAIN] Iter: 165600 Loss: 0.00815743301063776 PSNR: 25.54500961303711 +[TRAIN] Iter: 165700 Loss: 0.007390043698251247 PSNR: 26.296846389770508 +[TRAIN] Iter: 165800 Loss: 0.00876564159989357 PSNR: 24.645427703857422 +[TRAIN] Iter: 165900 Loss: 0.007420743815600872 PSNR: 26.01362419128418 +[TRAIN] Iter: 166000 Loss: 0.0078754723072052 PSNR: 25.405210494995117 +[TRAIN] Iter: 166100 Loss: 0.007496807724237442 PSNR: 25.57194709777832 +[TRAIN] Iter: 166200 Loss: 0.006785253062844276 PSNR: 25.64069175720215 +[TRAIN] Iter: 166300 Loss: 0.009090980514883995 PSNR: 24.835651397705078 +[TRAIN] Iter: 166400 Loss: 0.008446183055639267 PSNR: 24.94245719909668 +[TRAIN] Iter: 166500 Loss: 0.008271782658994198 PSNR: 25.121131896972656 +[TRAIN] Iter: 166600 Loss: 0.008246483281254768 PSNR: 26.08342170715332 +[TRAIN] Iter: 166700 Loss: 0.007116498425602913 PSNR: 25.66566276550293 +[TRAIN] Iter: 166800 Loss: 0.006856439169496298 PSNR: 26.640836715698242 +[TRAIN] Iter: 166900 Loss: 0.008419057354331017 PSNR: 25.29006004333496 +[TRAIN] Iter: 167000 Loss: 0.007610579952597618 PSNR: 25.233449935913086 +[TRAIN] Iter: 167100 Loss: 0.007116937078535557 PSNR: 25.717395782470703 +[TRAIN] Iter: 167200 Loss: 0.008090551011264324 PSNR: 24.768001556396484 +[TRAIN] Iter: 167300 Loss: 0.006899694912135601 PSNR: 26.025894165039062 +[TRAIN] Iter: 167400 Loss: 0.007501872722059488 PSNR: 25.85771942138672 +[TRAIN] Iter: 167500 Loss: 0.006413055118173361 PSNR: 26.42464828491211 +[TRAIN] Iter: 167600 Loss: 0.009555654600262642 PSNR: 24.231645584106445 +[TRAIN] Iter: 167700 Loss: 0.008287796750664711 PSNR: 24.849538803100586 +[TRAIN] Iter: 167800 Loss: 0.00897810235619545 PSNR: 24.73244857788086 +[TRAIN] Iter: 167900 Loss: 0.006584736052900553 PSNR: 26.440940856933594 +[TRAIN] Iter: 168000 Loss: 0.008237771689891815 PSNR: 24.784393310546875 +[TRAIN] Iter: 168100 Loss: 0.006470018066465855 PSNR: 26.55847930908203 +[TRAIN] Iter: 168200 Loss: 0.00905587524175644 PSNR: 24.657699584960938 +[TRAIN] Iter: 168300 Loss: 0.008776000700891018 PSNR: 24.806907653808594 +[TRAIN] Iter: 168400 Loss: 0.007451866753399372 PSNR: 25.473360061645508 +[TRAIN] Iter: 168500 Loss: 0.007215914316475391 PSNR: 25.728374481201172 +[TRAIN] Iter: 168600 Loss: 0.008165941573679447 PSNR: 25.898218154907227 +[TRAIN] Iter: 168700 Loss: 0.007617960684001446 PSNR: 25.515363693237305 +[TRAIN] Iter: 168800 Loss: 0.007296448573470116 PSNR: 26.07267951965332 +[TRAIN] Iter: 168900 Loss: 0.007823999039828777 PSNR: 25.147240 0.0009698867797851562 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 25.759546756744385 +2 25.435984134674072 +3 25.308526039123535 +Saved test set +[TRAIN] Iter: 150000 Loss: 0.010047230869531631 PSNR: 23.806438446044922 +[TRAIN] Iter: 150100 Loss: 0.01144277211278677 PSNR: 23.287513732910156 +[TRAIN] Iter: 150200 Loss: 0.012007741257548332 PSNR: 23.476593017578125 +[TRAIN] Iter: 150300 Loss: 0.01219956111162901 PSNR: 23.052413940429688 +[TRAIN] Iter: 150400 Loss: 0.013977443799376488 PSNR: 22.041973114013672 +[TRAIN] Iter: 150500 Loss: 0.014709163457155228 PSNR: 22.118074417114258 +[TRAIN] Iter: 150600 Loss: 
0.011483421549201012 PSNR: 23.394838333129883
+[TRAIN] Iter: 150700 Loss: 0.009982283227145672 PSNR: 23.794795989990234
+[TRAIN] Iter: 150800 Loss: 0.010836321860551834 PSNR: 23.770545959472656
(... Iter 150900-159900, logged every 100 iterations: Loss 0.010-0.015, PSNR 21.7-24.5 ...)
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/160000.tar
+[TRAIN] Iter: 160000 Loss: 0.013429970480501652 PSNR: 22.48602867126465
(... Iter 160100-169800: Loss 0.009-0.017, PSNR 21.6-24.8 ...)
+[TRAIN] Iter: 169900 Loss: 0.013479197397828102 PSNR: 22.999940872192383
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/170000.tar
+[TRAIN] Iter: 170000 Loss: 0.011575046926736832 PSNR: 23.92524528503418
(... Iter 170100-179300: Loss 0.010-0.016, PSNR 21.7-24.2 ...)
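A note on reading these log lines: in the reference nerf-pytorch training loop the printed Loss is the sum of the fine- and coarse-network image MSEs, while PSNR is computed from the fine MSE alone, which is why the PSNR column is consistently higher than -10*log10(Loss) would suggest. A minimal sketch of that relationship, using the helper definitions from run_nerf_helpers.py (the batch tensors below are hypothetical stand-ins):

import torch

# Helpers as defined in nerf-pytorch's run_nerf_helpers.py.
img2mse = lambda x, y: torch.mean((x - y) ** 2)
mse2psnr = lambda x: -10. * torch.log(x) / torch.log(torch.Tensor([10.]))

# Hypothetical stand-ins for one batch of rendered and target pixels.
rgb_fine = torch.rand(1024, 3)     # fine-network prediction
rgb_coarse = torch.rand(1024, 3)   # coarse-network prediction (extras['rgb0'])
target = torch.rand(1024, 3)

img_loss = img2mse(rgb_fine, target)     # fine MSE
img_loss0 = img2mse(rgb_coarse, target)  # coarse MSE
loss = img_loss + img_loss0              # the value logged as "Loss"
psnr = mse2psnr(img_loss)                # the value logged as "PSNR"
print(loss.item(), psnr.item())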
+[TRAIN] Iter: 179400 Loss: 0.011124705895781517 PSNR: 23.985559463500977
(... Iter 179500-179900: Loss 0.010-0.014, PSNR 22.8-24.0 ...)
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/180000.tar
+[TRAIN] Iter: 180000 Loss: 0.014063575305044651 PSNR: 22.79294776916504
(... Iter 180100-180700: Loss 0.011-0.015, PSNR 22.1-24.2 ...)
+[TRAIN] Iter: 180800 Loss: 0.012445296160876751 PSNR: 23.1214866638
+0 0.0009691715240478516
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+1 21.64019775390625
+2 21.49402379989624
(... frames 3-119: per-frame render times of about 21-23 s each ...)
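The bare "index value" pairs above are not PSNRs: they match render_path's per-frame timing printout, which reports the frame index together with the seconds elapsed since the previous print. That is why frame 0 shows roughly a millisecond (the timer was just reset) while every later frame takes about 21-23 s to render on this machine. A sketch of that logging pattern, with the actual rendering stubbed out:

import time
import torch

def render_path_timing(render_poses):
    """Timing skeleton of nerf-pytorch's render_path; rendering is stubbed."""
    t = time.time()
    for i, c2w in enumerate(render_poses):
        print(i, time.time() - t)  # frame index, seconds since previous frame
        t = time.time()
        # The real loop renders here with the pose c2w, e.g.:
        # rgb, disp, acc, extras = render(H, W, K, c2w=c2w[:3, :4], **render_kwargs)
        time.sleep(0.01)  # stand-in for the per-frame render cost

render_path_timing(torch.eye(4).unsqueeze(0).repeat(3, 1, 1))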
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras:{'raw': tensor([[[-3.5293e-01,  5.0554e-01,  1.5713e+00, -1.5651e+01],
+         ...,
+         [-5.5134e+00, -5.5508e+00,  5.0463e+00,  4.8081e+02]],
+        ...,
+        [[-1.7605e+00, -4.5404e-01,  5.4059e-01, -1.9675e+01],
+         ...,
+         [-5.6038e+00, -5.2918e+00, -1.5403e+00,  1.6697e+02]]],
+       grad_fn=<...>), 'rgb0': tensor([[0.3064, 0.4731, 0.6962],
+        ...,
+        [0.3414, 0.5832, 0.8099]], grad_fn=<...>), 'disp0': tensor([ 52.7141,  26.0805,  18.3526,  ...,  48.9982, 251.8389,  28.4032],
+       grad_fn=<...>), 'acc0': tensor([1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],
+       grad_fn=<...>), 'z_std': tensor([0.0037, 0.0064, 0.0078,  ..., 0.0046, 0.2733, 0.0084])}
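The extras dict dumped above is the auxiliary output of render(): 'raw' holds the network's unactivated per-sample predictions (three color logits plus one density logit per sample along each ray), 'rgb0'/'disp0'/'acc0' are the coarse-pass color, disparity, and accumulated-opacity maps, and 'z_std' is the per-ray standard deviation of the resampled depths. A hedged sketch of how raw2outputs composites a 'raw' tensor into pixel colors, following the standard nerf-pytorch activations (the input tensors are random placeholders):

import torch
import torch.nn.functional as F

# Placeholder 'raw' network output: [N_rays, N_samples, 4] (RGB logits + density logit).
raw = torch.randn(2, 64, 4)
dists = torch.full((2, 64), 0.01)  # spacing between consecutive samples on each ray

rgb = torch.sigmoid(raw[..., :3])                     # per-sample colors in [0, 1]
alpha = 1. - torch.exp(-F.relu(raw[..., 3]) * dists)  # per-sample opacity
# Transmittance T_i = prod_{j<i} (1 - alpha_j), then weights = alpha_i * T_i:
T = torch.cumprod(torch.cat([torch.ones_like(alpha[..., :1]),
                             1. - alpha + 1e-10], dim=-1), dim=-1)[..., :-1]
weights = alpha * T
rgb_map = torch.sum(weights[..., None] * rgb, dim=-2)  # composited pixel colors
print(rgb_map.shape)  # torch.Size([2, 3])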
+0 0.0008170604705810547
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+1 21.914422750473022
(... frames 2-119: per-frame render times of about 21-23 s each ...)
+test poses shape torch.Size([4, 3, 4])
+0 0.0017042160034179688
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+1 22.614488124847412
+2 22.224393606185913
+3 21.69952082633972
+Saved test set
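"Done, saving (120, 320, 640, 3) (120, 320, 640)" reports the stacked RGB and disparity arrays returned by render_path, which the training loop then writes out as videos; "Saved test set" is the analogous per-image dump of the held-out poses. A sketch of the video-writing step, assuming nerf-pytorch's to8b helper and imageio with its ffmpeg plugin (paths and arrays below are hypothetical):

import numpy as np
import imageio

to8b = lambda x: (255 * np.clip(x, 0, 1)).astype(np.uint8)  # from run_nerf_helpers.py

# Hypothetical stand-ins for render_path's outputs: 120 frames at 320x640.
rgbs = np.random.rand(120, 320, 640, 3).astype(np.float32)
disps = np.random.rand(120, 320, 640).astype(np.float32)
print('Done, saving', rgbs.shape, disps.shape)

moviebase = './logs/demo_spiral_'  # hypothetical output prefix
imageio.mimwrite(moviebase + 'rgb.mp4', to8b(rgbs), fps=30, quality=8)
imageio.mimwrite(moviebase + 'disp.mp4', to8b(disps / np.max(disps)), fps=30, quality=8)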
+[TRAIN] Iter: 200000 Loss: 0.007672448642551899 PSNR: 25.38548469543457
+[TRAIN] Iter: 200100 Loss: 0.008062398992478848 PSNR: 25.574724197387695
(... Iter 200200-209900: Loss 0.005-0.010, PSNR 24.2-28.1 ...)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/210000.tar
+[TRAIN] Iter: 210000 Loss: 0.0063523524440824986 PSNR: 26.27381134033203
(... Iter 210100-211100: Loss 0.005-0.008, PSNR 24.8-27.7 ...)
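The "Saved checkpoints at .../210000.tar" lines every 10,000 iterations come from the periodic torch.save in the training loop; despite the .tar extension the file is an ordinary PyTorch state-dict bundle. A sketch of that step, with small stand-in modules in place of the real coarse and fine NeRF MLPs:

import os
import torch
import torch.nn as nn

# Stand-ins for the coarse and fine NeRF MLPs and their shared optimizer.
network_fn = nn.Linear(63, 4)
network_fine = nn.Linear(63, 4)
optimizer = torch.optim.Adam(list(network_fn.parameters())
                             + list(network_fine.parameters()), lr=5e-4)
global_step = 210000

basedir, expname = './logs', 'demo-run'  # hypothetical paths
os.makedirs(os.path.join(basedir, expname), exist_ok=True)
path = os.path.join(basedir, expname, '{:06d}.tar'.format(global_step))
torch.save({
    'global_step': global_step,
    'network_fn_state_dict': network_fn.state_dict(),
    'network_fine_state_dict': network_fine.state_dict(),
    'optimizer_state_dict': optimizer.state_dict(),
}, path)
print('Saved checkpoints at', path)

Reloading is the inverse: torch.load(path) followed by load_state_dict on each module and on the optimizer.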
211200 Loss: 0.008271142840385437 PSNR: 24.96951675415039 +[TRAIN] Iter: 211300 Loss: 0.006852986291050911 PSNR: 25.915390014648438 +[TRAIN] Iter: 211400 Loss: 0.006748228333890438 PSNR: 25.65091896057129 +[TRAIN] Iter: 211500 Loss: 0.005891479551792145 PSNR: 27.24815559387207 +[TRAIN] Iter: 211600 Loss: 0.005386392120271921 PSNR: 28.020523071289062 +[TRAIN] Iter: 211700 Loss: 0.005652310326695442 PSNR: 27.60879135131836 +[TRAIN] Iter: 211800 Loss: 0.006500471383333206 PSNR: 25.887853622436523 +[TRAIN] Iter: 211900 Loss: 0.007223334163427353 PSNR: 26.858867645263672 +[TRAIN] Iter: 212000 Loss: 0.009062589146196842 PSNR: 24.284217834472656 +[TRAIN] Iter: 212100 Loss: 0.006072781048715115 PSNR: 26.3310546875 +[TRAIN] Iter: 212200 Loss: 0.007572618313133717 PSNR: 25.146364212036133 +[TRAIN] Iter: 212300 Loss: 0.005088877864181995 PSNR: 28.000585556030273 +[TRAIN] Iter: 212400 Loss: 0.005645778030157089 PSNR: 27.015092849731445 +[TRAIN] Iter: 212500 Loss: 0.006027357652783394 PSNR: 26.851318359375 +[TRAIN] Iter: 212600 Loss: 0.005733368918299675 PSNR: 27.39737892150879 +[TRAIN] Iter: 212700 Loss: 0.006398891098797321 PSNR: 26.48072624206543 +[TRAIN] Iter: 212800 Loss: 0.005297554656863213 PSNR: 27.337202072143555 +[TRAIN] Iter: 212900 Loss: 0.007177240215241909 PSNR: 25.827537536621094 +[TRAIN] Iter: 213000 Loss: 0.007591364439576864 PSNR: 25.268253326416016 +[TRAIN] Iter: 213100 Loss: 0.008192035369575024 PSNR: 25.07817840576172 +[TRAIN] Iter: 213200 Loss: 0.008611404336988926 PSNR: 24.853736877441406 +[TRAIN] Iter: 213300 Loss: 0.006345079280436039 PSNR: 25.740795135498047 +[TRAIN] Iter: 213400 Loss: 0.007698764093220234 PSNR: 25.546876907348633 +[TRAIN] Iter: 213500 Loss: 0.0059598591178655624 PSNR: 26.31223487854004 +[TRAIN] Iter: 213600 Loss: 0.007485636509954929 PSNR: 25.296710968017578 +[TRAIN] Iter: 213700 Loss: 0.0068112872540950775 PSNR: 25.917552947998047 +[TRAIN] Iter: 213800 Loss: 0.008400422520935535 PSNR: 25.001855850219727 +[TRAIN] Iter: 213900 Loss: 0.006548523902893066 PSNR: 26.29150390625 +[TRAIN] Iter: 214000 Loss: 0.007207863964140415 PSNR: 26.77933692932129 +[TRAIN] Iter: 214100 Loss: 0.004929698072373867 PSNR: 27.420225143432617 +[TRAIN] Iter: 214200 Loss: 0.006344181951135397 PSNR: 25.897504806518555 +[TRAIN] Iter: 214300 Loss: 0.008007274940609932 PSNR: 25.081222534179688 +[TRAIN] Iter: 214400 Loss: 0.0060307034291327 PSNR: 27.251157760620117 +[TRAIN] Iter: 214500 Loss: 0.006802435964345932 PSNR: 26.210060119628906 +[TRAIN] Iter: 214600 Loss: 0.007457088213413954 PSNR: 26.6827335357666 +[TRAIN] Iter: 214700 Loss: 0.008672703057527542 PSNR: 24.803112030029297 +[TRAIN] Iter: 214800 Loss: 0.007715651765465736 PSNR: 25.89398956298828 +[TRAIN] Iter: 214900 Loss: 0.007188194431364536 PSNR: 25.432626724243164 +[TRAIN] Iter: 215000 Loss: 0.005772084929049015 PSNR: 27.06083106994629 +[TRAIN] Iter: 215100 Loss: 0.0063414620235562325 PSNR: 26.131488800048828 +[TRAIN] Iter: 215200 Loss: 0.006643709726631641 PSNR: 27.185434341430664 +[TRAIN] Iter: 215300 Loss: 0.006649842485785484 PSNR: 26.323837280273438 +[TRAIN] Iter: 215400 Loss: 0.00759157445281744 PSNR: 25.095632553100586 +[TRAIN] Iter: 215500 Loss: 0.005529586225748062 PSNR: 27.311279296875 +[TRAIN] Iter: 215600 Loss: 0.006964885629713535 PSNR: 25.74543571472168 +[TRAIN] Iter: 215700 Loss: 0.006601016968488693 PSNR: 25.738269805908203 +[TRAIN] Iter: 215800 Loss: 0.005484483670443296 PSNR: 27.288928985595703 +[TRAIN] Iter: 215900 Loss: 0.007116913795471191 PSNR: 25.363012313842773 +[TRAIN] Iter: 216000 Loss: 
0.008657511323690414 PSNR: 25.092140197753906 +[TRAIN] Iter: 216100 Loss: 0.0070925564505159855 PSNR: 27.069000244140625 +[TRAIN] Iter: 216200 Loss: 0.007397081237286329 PSNR: 25.342472076416016 +[TRAIN] Iter: 216300 Loss: 0.007156355306506157 PSNR: 26.339317321777344 +[TRAIN] Iter: 216400 Loss: 0.006835983134806156 PSNR: 25.749126434326172 +[TRAIN] Iter: 216500 Loss: 0.006878557614982128 PSNR: 26.01812171936035 +[TRAIN] Iter: 216600 Loss: 0.007575402036309242 PSNR: 25.300525665283203 +[TRAIN] Iter: 216700 Loss: 0.004679055884480476 PSNR: 28.054346084594727 +[TRAIN] Iter: 216800 Loss: 0.008092518895864487 PSNR: 24.789241790771484 +[TRAIN] Iter: 216900 Loss: 0.007423049304634333 PSNR: 25.76034927368164 +[TRAIN] Iter: 217000 Loss: 0.00828271359205246 PSNR: 25.12887191772461 +[TRAIN] Iter: 217100 Loss: 0.007926326245069504 PSNR: 25.323898315429688 +[TRAIN] Iter: 217200 Loss: 0.006558114662766457 PSNR: 26.659048080444336 +[TRAIN] Iter: 217300 Loss: 0.005992147605866194 PSNR: 27.01650047302246 +[TRAIN] Iter: 217400 Loss: 0.00802147202193737 PSNR: 26.7733154296875 +[TRAIN] Iter: 217500 Loss: 0.005236051045358181 PSNR: 28.054000854492188 +[TRAIN] Iter: 217600 Loss: 0.006223499774932861 PSNR: 26.572399139404297 +[TRAIN] Iter: 217700 Loss: 0.005058418959379196 PSNR: 27.91792869567871 +[TRAIN] Iter: 217800 Loss: 0.0068209972232580185 PSNR: 27.219545364379883 +[TRAIN] Iter: 217900 Loss: 0.00569032970815897 PSNR: 27.39690589904785 +[TRAIN] Iter: 218000 Loss: 0.008043544366955757 PSNR: 25.37090301513672 +[TRAIN] Iter: 218100 Loss: 0.004625869449228048 PSNR: 27.608049392700195 +[TRAIN] Iter: 218200 Loss: 0.00602662144228816 PSNR: 26.796283721923828 +[TRAIN] Iter: 218300 Loss: 0.007309878244996071 PSNR: 26.008514404296875 +[TRAIN] Iter: 218400 Loss: 0.005974425468593836 PSNR: 27.022306442260742 +[TRAIN] Iter: 218500 Loss: 0.009089849889278412 PSNR: 24.343961715698242 +[TRAIN] Iter: 218600 Loss: 0.008145689964294434 PSNR: 24.902748107910156 +[TRAIN] Iter: 218700 Loss: 0.007321141194552183 PSNR: 25.563692092895508 +[TRAIN] Iter: 218800 Loss: 0.00832405686378479 PSNR: 24.79281234741211 +[TRAIN] Iter: 218900 Loss: 0.006404283456504345 PSNR: 25.864675521850586 +[TRAIN] Iter: 219000 Loss: 0.009457627311348915 PSNR: 24.86829948425293 +[TRAIN] Iter: 219100 Loss: 0.007560987025499344 PSNR: 25.51299476623535 +[TRAIN] Iter: 219200 Loss: 0.007185416761785746 PSNR: 25.199323654174805 +[TRAIN] Iter: 219300 Loss: 0.00887625478208065 PSNR: 24.85683822631836 +[TRAIN] Iter: 219400 Loss: 0.007320130709558725 PSNR: 26.51339340209961 +[TRAIN] Iter: 219500 Loss: 0.0077313925139606 PSNR: 25.342655181884766 +[TRAIN] Iter: 219600 Loss: 0.006688229739665985 PSNR: 26.62521743774414 +[TRAIN] Iter: 219700 Loss: 0.007356071379035711 PSNR: 25.76251220703125 +[TRAIN] Iter: 219800 Loss: 0.00826721079647541 PSNR: 24.78339385986328 +[TRAIN] Iter: 219900 Loss: 0.006650619208812714 PSNR: 26.913047790527344 +Saved checkpoints at ./logs/TUT-out-doll-360-np/220000.tar +[TRAIN] Iter: 220000 Loss: 0.00711607513949275 PSNR: 25.869951248168945 +[TRAIN] Iter: 220100 Loss: 0.007119892165064812 PSNR: 25.8273868560791 +[TRAIN] Iter: 220200 Loss: 0.0071025993674993515 PSNR: 26.080493927001953 +[TRAIN] Iter: 220300 Loss: 0.008637664839625359 PSNR: 25.030920028686523 +[TRAIN] Iter: 220400 Loss: 0.007416835520416498 PSNR: 24.932161331176758 +[TRAIN] Iter: 220500 Loss: 0.0074048638343811035 PSNR: 26.025959014892578 +[TRAIN] Iter: 220600 Loss: 0.007732806261628866 PSNR: 25.660945892333984 +[TRAIN] Iter: 220700 Loss: 0.005660935305058956 PSNR: 
26.485511779785156 +[TRAIN] Iter: 220800 Loss: 0.007308909669518471 PSNR: 25.077655792236328 +[TRAIN] Iter: 220900 Loss: 0.008662763983011246 PSNR: 24.828163146972656 +[TRAIN] Iter: 221000 Loss: 0.006353018339723349 PSNR: 26.11109733581543 +[TRAIN] Iter: 221100 Loss: 0.006495597772300243 PSNR: 26.413610458374023 +[TRAIN] Iter: 221200 Loss: 0.006053390447050333 PSNR: 27.159042358398438 +[TRAIN] Iter: 221300 Loss: 0.00838877446949482 PSNR: 25.176137924194336 +[TRAIN] Iter: 221400 Loss: 0.006168356630951166 PSNR: 27.40167808532715 +[TRAIN] Iter: 221500 Loss: 0.007771668955683708 PSNR: 25.347158432006836 +[TRAIN] Iter: 221600 Loss: 0.0055125863291323185 PSNR: 27.813932418823242 +[TRAIN] Iter: 221700 Loss: 0.007295882795006037 PSNR: 25.37636375427246 +[TRAIN] Iter: 221800 Loss: 0.006026776507496834 PSNR: 28.047163009643555 +[TRAIN] Iter: 221900 Loss: 0.0057884929701685905 PSNR: 27.355602264404297 +[TRAIN] Iter: 222000 Loss: 0.006920502986758947 PSNR: 25.522871017456055 +[TRAIN] Iter: 222100 Loss: 0.005843213759362698 PSNR: 27.74968719482422 +[TRAIN] Iter: 222200 Loss: 0.0077117872424423695 PSNR: 25.563146591186523 +[TRAIN] Iter: 222300 Loss: 0.007116418797522783 PSNR: 25.58179473876953 +[TRAIN] Iter: 222400 Loss: 0.00685730529949069 PSNR: 27.40671730041504 +[TRAIN] Iter: 222500 Loss: 0.007219312246888876 PSNR: 25.731420516967773 +[TRAIN] Iter: 222600 Loss: 0.007346978411078453 PSNR: 25.50815773010254 +[TRAIN] Iter: 222700 Loss: 0.006391685456037521 PSNR: 27.007598876953125 +[TRAIN] Iter: 222800 Loss: 0.0076764426194131374 PSNR: 25.57854652404785 +[TRAIN] Iter: 222900 Loss: 0.006060904823243618 PSNR: 26.789596557617188 +[TRAIN] Iter: 223000 Loss: 0.005595204886049032 PSNR: 27.14314079284668 +[TRAIN] Iter: 223100 Loss: 0.006860528606921434 PSNR: 26.1950740814209 +[TRAIN] Iter: 223200 Loss: 0.0062682437710464 PSNR: 26.00434684753418 +[TRAIN] Iter: 223300 Loss: 0.006489459425210953 PSNR: 26.218616485595703 +[TRAIN] Iter: 223400 Loss: 0.006233268417418003 PSNR: 26.63361167907715 +[TRAIN] Iter: 223500 Loss: 0.006318789906799793 PSNR: 26.579389572143555 +[TRAIN] Iter: 223600 Loss: 0.005633320659399033 PSNR: 27.247243881225586 +[TRAIN] Iter: 223700 Loss: 0.007798305246978998 PSNR: 25.32262420654297 +[TRAIN] Iter: 223800 Loss: 0.007056457456201315 PSNR: 26.35094451904297 +[TRAIN] Iter: 223900 Loss: 0.0067143188789486885 PSNR: 25.650630950927734 +[TRAIN] Iter: 224000 Loss: 0.0067777507938444614 PSNR: 25.571046829223633 +[TRAIN] Iter: 224100 Loss: 0.006315260194242001 PSNR: 26.520259857177734 +[TRAIN] Iter: 224200 Loss: 0.006565289106220007 PSNR: 26.14959716796875 +[TRAIN] Iter: 224300 Loss: 0.0071596987545490265 PSNR: 25.90923500061035 +[TRAIN] Iter: 224400 Loss: 0.006877712905406952 PSNR: 25.92220115661621 +[TRAIN] Iter: 224500 Loss: 0.007944324985146523 PSNR: 25.15297508239746 +[TRAIN] Iter: 224600 Loss: 0.007465984672307968 PSNR: 25.803239822387695 +[TRAIN] Iter: 224700 Loss: 0.006927142851054668 PSNR: 25.60260581970215 +[TRAIN] Iter: 224800 Loss: 0.007080857641994953 PSNR: 26.16392707824707 +[TRAIN] Iter: 224900 Loss: 0.0064620072953403 PSNR: 27.393701553344727 +[TRAIN] Iter: 225000 Loss: 0.007630538661032915 PSNR: 25.795188903808594 +[TRAIN] Iter: 225100 Loss: 0.0063793109729886055 PSNR: 26.139631271362305 +[TRAIN] Iter: 225200 Loss: 0.006413150578737259 PSNR: 26.824094772338867 +[TRAIN] Iter: 225300 Loss: 0.006899374537169933 PSNR: 25.908893585205078 +[TRAIN] Iter: 225400 Loss: 0.006539166904985905 PSNR: 26.59596824645996 +[TRAIN] Iter: 225500 Loss: 0.005236656405031681 PSNR: 27.946725845336914 
+[TRAIN] Iter: 225600 Loss: 0.006750797387212515 PSNR: 25.801525115966797 +[TRAIN] Iter: 225700 Loss: 0.0065498994663357735 PSNR: 26.141998291015625 +[TRAIN] Iter: 225800 Loss: 0.006074836477637291 PSNR: 26.347278594970703 +[TRAIN] Iter: 225900 Loss: 0.006035949569195509 PSNR: 26.745922088623047 +[TRAIN] Iter: 226000 Loss: 0.006031534634530544 PSNR: 26.52562141418457 +[TRAIN] Iter: 226100 Loss: 0.006726764142513275 PSNR: 26.295198440551758 +[TRAIN] Iter: 226200 Loss: 0.005703446920961142 PSNR: 27.26738166809082 +[TRAIN] Iter: 226300 Loss: 0.005916245747357607 PSNR: 26.58249855041504 +[TRAIN] Iter: 226400 Loss: 0.006875473540276289 PSNR: 26.364540100097656 +[TRAIN] Iter: 226500 Loss: 0.007564116269350052 PSNR: 26.210159301757812 +[TRAIN] Iter: 226600 Loss: 0.006922640837728977 PSNR: 25.910261154174805 +[TRAIN] Iter: 226700 Loss: 0.005869520828127861 PSNR: 28.140533447265625 +[TRAIN] Iter: 226800 Loss: 0.0060307797975838184 PSNR: 26.233592987060547 +[TRAIN] Iter: 226900 Loss: 0.006227059289813042 PSNR: 26.309186935424805 +[TRAIN] Iter: 227000 Loss: 0.007534722797572613 PSNR: 25.193729400634766 +[TRAIN] Iter: 227100 Loss: 0.0073253135196864605 PSNR: 25.806396484375 +[TRAIN] Iter: 227200 Loss: 0.0067072282545268536 PSNR: 26.844362258911133 +[TRAIN] Iter: 227300 Loss: 0.006776167079806328 PSNR: 26.501249313354492 +[TRAIN] Iter: 227400 Loss: 0.008793162181973457 PSNR: 25.166460037231445 +[TRAIN] Iter: 227500 Loss: 0.008741067722439766 PSNR: 24.512990951538086 +[TRAIN] Iter: 227600 Loss: 0.008060880936682224 PSNR: 25.26776885986328 +[TRAIN] Iter: 227700 Loss: 0.006394119933247566 PSNR: 27.335784912109375 +[TRAIN] Iter: 227800 Loss: 0.006197981536388397 PSNR: 27.325397491455078 +[TRAIN] Iter: 227900 Loss: 0.0052190376445651054 PSNR: 27.817829132080078 +[TRAIN] Iter: 228000 Loss: 0.007410981226712465 PSNR: 25.5797176361084 +[TRAIN] Iter: 228100 Loss: 0.005258915480226278 PSNR: 26.438222885131836 +[TRAIN] Iter: 228200 Loss: 0.00737814512103796 PSNR: 25.38319969177246 +[TRAIN] Iter: 228300 Loss: 0.0057086944580078125 PSNR: 27.353776931762695 +[TRAIN] Iter: 228400 Loss: 0.008147710002958775 PSNR: 25.298328399658203 +[TRAIN] Iter: 228500 Loss: 0.0074581606313586235 PSNR: 25.490543365478516 +[TRAIN] Iter: 228600 Loss: 0.007590130902826786 PSNR: 25.37886619567871 +[TRAIN] Iter: 228700 Loss: 0.006010489538311958 PSNR: 27.988704681396484 +[TRAIN] Iter: 228800 Loss: 0.006327751558274031 PSNR: 27.35138702392578 +[TRAIN] Iter: 228900 Loss: 0.007255755364894867 PSNR: 25.8253173828125 +[TRAIN] Iter: 229000 Loss: 0.00714754406362772 PSNR: 26.222951889038086 +[TRAIN] Iter: 229100 Loss: 0.005091454368084669 PSNR: 28.070581436157227 +[TRAIN] Iter: 229200 Loss: 0.006029243115335703 PSNR: 27.15597152709961 +[TRAIN] Iter: 229300 Loss: 0.005863063968718052 PSNR: 27.46563148498535 +[TRAIN] Iter: 229400 Loss: 0.006160159595310688 PSNR: 27.426048278808594 +[TRAIN] Iter: 229500 Loss: 0.007244407199323177 PSNR: 25.996828079223633 +[TRAIN] Iter: 229600 Loss: 0.005823149345815182 PSNR: 26.896499633789062 +[TRAIN] Iter: 229700 Loss: 0.006984950043261051 PSNR: 26.742290496826172 +[TRAIN] Iter: 229800 Loss: 0.00734296627342701 PSNR: 25.58428192138672 +[TRAIN] Iter: 229900 Loss: 0.00656626932322979 PSNR: 26.198339462280273 +Saved checkpoints at ./logs/TUT-out-doll-360-np/230000.tar +[TRAIN] Iter: 230000 Loss: 0.0062727066688239574 PSNR: 26.714603424072266 +[TRAIN] Iter: 230100 Loss: 0.006917053833603859 PSNR: 25.908700942993164 +[TRAIN] Iter: 230200 Loss: 0.006814778782427311 PSNR: 26.08629035949707 +[TRAIN] Iter: 230300 Loss: 
0.007574192713946104 PSNR: 25.96303939819336 +[TRAIN] Iter: 230400 Loss: 0.00698697566986084 PSNR: 25.845670700073242 +[TRAIN] Iter: 230500 Loss: 0.007437925320118666 PSNR: 25.43788719177246 +[TRAIN] Iter: 230600 Loss: 0.006774448323994875 PSNR: 26.035051345825195 +[TRAIN] Iter: 230700 Loss: 0.005490129813551903 PSNR: 27.930988311767578 +[TRAIN] Iter: 230800 Loss: 0.007276665885001421 PSNR: 26.357133865356445 +[TRAIN] Iter: 230900 Loss: 0.006124126724898815 PSNR: 26.203306198120117 +[TRAIN] Iter: 231000 Loss: 0.006799791939556599 PSNR: 25.689573287963867 +[TRAIN] Iter: 231100 Loss: 0.00605155760422349 PSNR: 26.718345642089844 +[TRAIN] Iter: 231200 Loss: 0.0063389139249920845 PSNR: 26.867206573486328 +[TRAIN] Iter: 231300 Loss: 0.007175502832978964 PSNR: 25.97211456298828 +[TRAIN] Iter: 231400 Loss: 0.007725404109805822 PSNR: 25.14743995666504 +[TRAIN] Iter: 231500 Loss: 0.0075816381722688675 PSNR: 25.432231903076172 +[TRAIN] Iter: 231600 Loss: 0.00643831305205822 PSNR: 26.04814338684082 +[TRAIN] Iter: 231700 Loss: 0.00841977447271347 PSNR: 25.272720336914062 +[TRAIN] Iter: 231800 Loss: 0.007586573716253042 PSNR: 25.035442352294922 +[TRAIN] Iter: 231900 Loss: 0.005521049257367849 PSNR: 27.706329345703125 +[TRAIN] Iter: 232000 Loss: 0.007172201760113239 PSNR: 25.77639389038086 +[TRAIN] Iter: 232100 Loss: 0.0065066758543252945 PSNR: 26.503286361694336 +[TRAIN] Iter: 232200 Loss: 0.005556934047490358 PSNR: 28.034000396728516 +[TRAIN] Iter: 232300 Loss: 0.007894075475633144 PSNR: 25.48929786682129 +[TRAIN] Iter: 232400 Loss: 0.00717297475785017 PSNR: 27.4110050201416 +[TRAIN] Iter: 232500 Loss: 0.006268943194299936 PSNR: 26.343589782714844 +[TRAIN] Iter: 232600 Loss: 0.005170994438230991 PSNR: 27.57004165649414 +[TRAIN] Iter: 232700 Loss: 0.007233519107103348 PSNR: 25.599637985229492 +[TRAIN] Iter: 232800 Loss: 0.007221458479762077 PSNR: 26.208890914916992 +[TRAIN] Iter: 232900 Loss: 0.006311293691396713 PSNR: 26.634796142578125 +[TRAIN] Iter: 233000 Loss: 0.006842522881925106 PSNR: 25.970787048339844 +[TRAIN] Iter: 233100 Loss: 0.005519729107618332 PSNR: 27.735673904418945 +[TRAIN] Iter: 233200 Loss: 0.006524240132421255 PSNR: 26.109888076782227 +[TRAIN] Iter: 233300 Loss: 0.0062607331201434135 PSNR: 26.13518524169922 +[TRAIN] Iter: 233400 Loss: 0.005271285772323608 PSNR: 27.72954750061035 +[TRAIN] Iter: 233500 Loss: 0.007954953238368034 PSNR: 25.32694435119629 +[TRAIN] Iter: 233600 Loss: 0.008915469981729984 PSNR: 24.970205307006836 +[TRAIN] Iter: 233700 Loss: 0.007826857268810272 PSNR: 25.461143493652344 +[TRAIN] Iter: 233800 Loss: 0.007051501423120499 PSNR: 25.944438934326172 +[TRAIN] Iter: 233900 Loss: 0.005633991677314043 PSNR: 27.453453063964844 +[TRAIN] Iter: 234000 Loss: 0.008485285565257072 PSNR: 25.590227127075195 +[TRAIN] Iter: 234100 Loss: 0.005827723070979118 PSNR: 26.884435653686523 +[TRAIN] Iter: 234200 Loss: 0.007478846237063408 PSNR: 25.557872772216797 +[TRAIN] Iter: 234300 Loss: 0.009254631586372852 PSNR: 25.145553588867188 +[TRAIN] Iter: 234400 Loss: 0.00638314476236701 PSNR: 26.146026611328125 +[TRAIN] Iter: 234500 Loss: 0.007853562012314796 PSNR: 25.642499923706055 +[TRAIN] Iter: 234600 Loss: 0.005232213065028191 PSNR: 27.82707977294922 +[TRAIN] Iter: 234700 Loss: 0.007943104952573776 PSNR: 25.592191696166992 +[TRAIN] Iter: 234800 Loss: 0.005940316244959831 PSNR: 27.063180923461914 +[TRAIN] Iter: 234900 Loss: 0.007062534801661968 PSNR: 25.868043899536133 +[TRAIN] Iter: 235000 Loss: 0.007871663197875023 PSNR: 25.8741455078125 +[TRAIN] Iter: 235100 Loss: 0.009860052727162838 
PSNR: 24.574533462524414 +[TRAIN] Iter: 235200 Loss: 0.008303781040012836 PSNR: 25.178442001342773 +[TRAIN] Iter: 235300 Loss: 0.006905191577970982 PSNR: 26.078765869140625 +[TRAIN] Iter: 235400 Loss: 0.007098647300153971 PSNR: 25.710803985595703 +[TRAIN] Iter: 235500 Loss: 0.006375496741384268 PSNR: 26.513851165771484 +[TRAIN] Iter: 235600 Loss: 0.006869372446089983 PSNR: 25.966638565063477 +[TRAIN] Iter: 235700 Loss: 0.007481126114726067 PSNR: 26.5849666595459 +[TRAIN] Iter: 235800 Loss: 0.008122255094349384 PSNR: 25.33561134338379 +[TRAIN] Iter: 235900 Loss: 0.0077881962060928345 PSNR: 25.504131317138672 +[TRAIN] Iter: 236000 Loss: 0.006659759674221277 PSNR: 26.039066314697266 +[TRAIN] Iter: 236100 Loss: 0.008473068475723267 PSNR: 24.600435256958008 +[TRAIN] Iter: 236200 Loss: 0.007220659404993057 PSNR: 25.517284393310547 +[TRAIN] Iter: 236300 Loss: 0.006586067378520966 PSNR: 26.61919403076172 +[TRAIN] Iter: 236400 Loss: 0.006348161958158016 PSNR: 27.32822036743164 +[TRAIN] Iter: 236500 Loss: 0.00644703209400177 PSNR: 26.86597442626953 +[TRAIN] Iter: 236600 Loss: 0.006945950910449028 PSNR: 25.43892478942871 +[TRAIN] Iter: 236700 Loss: 0.006342149805277586 PSNR: 26.498605728149414 +[TRAIN] Iter: 236800 Loss: 0.006853876169770956 PSNR: 26.21550178527832 +[TRAIN] Iter: 236900 Loss: 0.006140773184597492 PSNR: 27.613636016845703 +[TRAIN] Iter: 237000 Loss: 0.007015560287982225 PSNR: 25.501445770263672 +[TRAIN] Iter: 237100 Loss: 0.007260714657604694 PSNR: 25.94218635559082 +[TRAIN] Iter: 237200 Loss: 0.006804775446653366 PSNR: 26.589487075805664 +[TRAIN] Iter: 237300 Loss: 0.006602715700864792 PSNR: 26.597654342651367 +[TRAIN] Iter: 237400 Loss: 0.006682492792606354 PSNR: 26.718095779418945 +[TRAIN] Iter: 237500 Loss: 0.005954934284090996 PSNR: 27.700870513916016 +[TRAIN] Iter: 237600 Loss: 0.007620681077241898 PSNR: 25.085508346557617 +[TRAIN] Iter: 237700 Loss: 0.008100533857941628 PSNR: 25.414966583251953 +[TRAIN] Iter: 237800 Loss: 0.008702903985977173 PSNR: 25.39858627319336 +[TRAIN] Iter: 237900 Loss: 0.0074757058173418045 PSNR: 25.70045280456543 +[TRAIN] Iter: 238000 Loss: 0.005477003753185272 PSNR: 27.32915496826172 +[TRAIN] Iter: 238100 Loss: 0.00669993506744504 PSNR: 25.826833724975586 +[TRAIN] Iter: 238200 Loss: 0.005960067734122276 PSNR: 26.53240394592285 +[TRAIN] Iter: 238300 Loss: 0.007083269767463207 PSNR: 26.027585983276367 +[TRAIN] Iter: 238400 Loss: 0.007238016463816166 PSNR: 26.644901275634766 +[TRAIN] Iter: 238500 Loss: 0.007713442202657461 PSNR: 24.916906356811523 +[TRAIN] Iter: 238600 Loss: 0.0068533094599843025 PSNR: 26.213727951049805 +[TRAIN] Iter: 238700 Loss: 0.006469430401921272 PSNR: 26.1953182220459 +[TRAIN] Iter: 238800 Loss: 0.0063938419334590435 PSNR: 26.672685623168945 +[TRAIN] Iter: 238900 Loss: 0.007632457185536623 PSNR: 25.327518463134766 +[TRAIN] Iter: 239000 Loss: 0.006990768015384674 PSNR: 26.080589294433594 +[TRAIN] Iter: 239100 Loss: 0.004652099218219519 PSNR: 28.42447853088379 +[TRAIN] Iter: 239200 Loss: 0.0053205350413918495 PSNR: 27.55744743347168 +[TRAIN] Iter: 239300 Loss: 0.006619559135288 PSNR: 27.393022537231445 +[TRAIN] Iter: 239400 Loss: 0.00694270571693778 PSNR: 25.924718856811523 +[TRAIN] Iter: 239500 Loss: 0.006769191008061171 PSNR: 26.174821853637695 +[TRAIN] Iter: 239600 Loss: 0.0063651977106928825 PSNR: 27.79035758972168 +[TRAIN] Iter: 239700 Loss: 0.006471166852861643 PSNR: 25.67740821838379 +[TRAIN] Iter: 239800 Loss: 0.007202253211289644 PSNR: 25.571243286132812 +[TRAIN] Iter: 239900 Loss: 0.007987454533576965 PSNR: 
25.393415451049805
+Saved checkpoints at ./logs/TUT-out-doll-360-np/240000.tar
+[TRAIN] Iter: 240000–249400 (logged every 100 iters): Loss ≈ 0.0052–0.0091, PSNR ≈ 24.8–28.1
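A note on how the two printed columns relate: in the reference nerf-pytorch trainer the Loss is the sum of the coarse- and fine-network MSE terms, while PSNR is computed from the fine MSE alone. That is why, for example, iteration 240000's Loss of 0.00635 prints alongside a PSNR of 27.17 rather than -10·log10(0.00635) ≈ 22.0. A minimal sketch assuming that convention; the two MSE values are hypothetical:

import torch

# PSNR from MSE; mirrors the standard mse2psnr helper.
mse2psnr = lambda mse: -10.0 * torch.log10(mse)

img_loss = torch.tensor(0.0041)   # hypothetical fine-network MSE
img_loss0 = torch.tensor(0.0022)  # hypothetical coarse-network MSE

loss = img_loss + img_loss0       # printed as "Loss" (coarse + fine)
psnr = mse2psnr(img_loss)         # printed as "PSNR" (fine MSE only)
print(f"[TRAIN] Loss: {loss.item()} PSNR: {psnr.item()}")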
+[TRAIN] Iter: 249500–249900: Loss ≈ 0.0057–0.0074, PSNR ≈ 25.5–27.3
+Saved checkpoints at ./logs/TUT-out-doll-360-np/250000.tar
+[render pass over 120 poses: rgb torch.Size([320, 640, 3]), disp torch.Size([320, 640]), ~21–23 s per frame]
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+[extras dump: 'raw' (per-sample RGB + density tensor), 'rgb0', 'disp0', 'acc0' (all 1.0), 'z_std']
+[second render pass over 120 poses, ~21–24 s per frame]
+test poses shape torch.Size([4, 3, 4])
+[test-set render: 3 frames at torch.Size([320, 640, 3]), ~22 s per frame]
+Saved test set
+[TRAIN] Iter: 250000–252500: Loss ≈ 0.0051–0.0087, PSNR ≈ 24.8–28.4
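The extras dump above lists the auxiliary outputs the renderer returns alongside the rgb and disp maps. The key names come straight from the dump; the shape and meaning annotations below are assumptions based on the conventions of the reference nerf-pytorch implementation (note that acc0 prints as all ones here, i.e. every ray accumulates full opacity):

import torch

# Hypothetical sizes for illustration: 4096 rays, 128 samples per ray.
N_rays, N_samples = 4096, 128
extras = {
    'raw':   torch.zeros(N_rays, N_samples, 4),  # per-sample network output: 3 RGB logits + 1 density
    'rgb0':  torch.zeros(N_rays, 3),             # color from the coarse pass
    'disp0': torch.zeros(N_rays),                # inverse depth (disparity) from the coarse pass
    'acc0':  torch.ones(N_rays),                 # accumulated opacity along each ray
    'z_std': torch.zeros(N_rays),                # std of the fine-pass sample depths along each ray
}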
+[TRAIN] Iter: 252600–259900: Loss ≈ 0.0049–0.0090, PSNR ≈ 24.6–29.1
+Saved checkpoints at ./logs/TUT-out-doll-360-np/260000.tar
+[TRAIN] Iter: 260000–269900: Loss ≈ 0.0047–0.0089, PSNR ≈ 24.6–28.6
+Saved checkpoints at ./logs/TUT-out-doll-360-np/270000.tar
+[TRAIN] Iter: 270000–279900: Loss ≈ 0.0045–0.0089, PSNR ≈ 24.4–28.3
+Saved checkpoints at ./logs/TUT-out-doll-360-np/280000.tar
+[TRAIN] Iter: 280000–289900: Loss ≈ 0.0043–0.0088, PSNR ≈ 24.3–28.8
+Saved checkpoints at ./logs/TUT-out-doll-360-np/290000.tar
+[TRAIN] Iter: 290000–295500: Loss ≈ 0.0043–0.0085, PSNR ≈ 25.1–29.1
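Each "Saved checkpoints at ..." line corresponds to serializing both MLPs plus the optimizer every 10,000 iterations. A sketch of that step, assuming the dictionary layout used by the reference nerf-pytorch trainer; the nn.Linear modules are stand-ins for the real coarse and fine NeRF networks:

import os
import torch
import torch.nn as nn

# Stand-ins for the coarse and fine NeRF MLPs.
model, model_fine = nn.Linear(3, 4), nn.Linear(3, 4)
optimizer = torch.optim.Adam(
    list(model.parameters()) + list(model_fine.parameters()), lr=5e-4)

global_step = 290000
path = os.path.join('./logs/TUT-out-doll-360-np', '{:06d}.tar'.format(global_step))
os.makedirs(os.path.dirname(path), exist_ok=True)
torch.save({
    'global_step': global_step,
    'network_fn_state_dict': model.state_dict(),         # coarse network
    'network_fine_state_dict': model_fine.state_dict(),  # fine network
    'optimizer_state_dict': optimizer.state_dict(),
}, path)
print('Saved checkpoints at', path)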
+[TRAIN] Iter: 295600–299900: Loss ≈ 0.0049–0.0078, PSNR ≈ 25.2–28.6
+Saved checkpoints at ./logs/TUT-out-doll-360-np/300000.tar
+[render pass over 120 poses: rgb torch.Size([320, 640, 3]), disp torch.Size([320, 640]), ~21–23 s per frame]
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+[extras dump: 'raw', 'rgb0', 'disp0', 'acc0' (all 1.0), 'z_std', as in the 250000-iteration dump]
+[second render pass over 120 poses, ~21–23 s per frame]
+test poses shape torch.Size([4, 3, 4])
+[test-set render: 3 frames at torch.Size([320, 640, 3]), ~22 s per frame]
+Saved test set
+[TRAIN] Iter: 300000–303500: Loss ≈ 0.0049–0.0081, PSNR ≈ 25.3–28.3
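The numbered blocks above ("0 0.0008…", "1 22.15…", …, "Done, saving (120, 320, 640, 3) (120, 320, 640)") are a render loop over the 120 spiral poses, printing each frame's index and the wall-clock time spent on the previous frame (about 22 s per 320×640 frame here) before stacking the results. A sketch of that loop's shape; render_pose_fn is a hypothetical stand-in for the actual renderer:

import time
import numpy as np

def render_path_sketch(render_poses, render_pose_fn, H=320, W=640):
    rgbs, disps = [], []
    t = time.time()
    for i, pose in enumerate(render_poses):
        print(i, time.time() - t)              # frame index + seconds spent on the previous frame
        t = time.time()
        rgb, disp = render_pose_fn(pose, H, W) # hypothetical renderer returning torch tensors
        if i == 0:
            print(rgb.shape, disp.shape)       # torch.Size([320, 640, 3]) torch.Size([320, 640])
        rgbs.append(rgb.cpu().numpy())
        disps.append(disp.cpu().numpy())
    rgbs, disps = np.stack(rgbs), np.stack(disps)
    print('Done, saving', rgbs.shape, disps.shape)
    return rgbs, disps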
+[TRAIN] Iter: 303600-346600, logged every 100 iters: Loss ~0.0043-0.0092, PSNR ~24.5-29.1 (individual lines elided)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/{310000,320000,330000,340000}.tar (one every 10000 iterations)
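The `.tar` files noted above are plain `torch.save` archives written every 10000 iterations. A sketch of inspecting one, with key names taken from the upstream nerf-pytorch save call (assumed unchanged here):

```python
import torch

# Load one of the checkpoints listed above (key names per upstream nerf-pytorch).
ckpt = torch.load('./logs/TUT-out-doll-360-np/340000.tar', map_location='cpu')

print(ckpt['global_step'])                   # iteration counter, e.g. 340000
coarse_sd = ckpt['network_fn_state_dict']    # coarse MLP weights
fine_sd   = ckpt['network_fine_state_dict']  # fine MLP weights
opt_sd    = ckpt['optimizer_state_dict']     # Adam state, needed to resume training
```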
+[TRAIN] Iter: 346700-349900: Loss ~0.0046-0.0078, PSNR ~25.2-28.5 (individual lines elided)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/350000.tar
+0 0.0008418560028076172
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+frames 1-119: ~20.8-23.5 s each (individual lines elided)
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras:{'raw': tensor([...], grad_fn=<...>), 'rgb0': tensor([...], grad_fn=<...>), 'disp0': tensor([...], grad_fn=<...>), 'acc0': tensor([1., 1., ..., 1.], grad_fn=<...>), 'z_std': tensor([...])}  (tensor values elided)
+0 0.0009722709655761719
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+frames 1-119: ~20.8-23.3 s each (individual lines elided)
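The numbered pairs in the render passes above ("1 21.54...", "2 21.76...") come from a loop that prints the frame index followed by the seconds spent on the previous frame, which is why frame 0 shows ~0.001 s. A self-contained sketch of that timing pattern (`render_one` is a hypothetical stand-in for the per-pose render call):

```python
import time

def timed_render_loop(poses, render_one):
    # Mirrors the print pattern seen in the log: the first line is ~0 s
    # (nothing rendered yet); each later line is the previous frame's cost.
    t = time.time()
    for i, pose in enumerate(poses):
        print(i, time.time() - t)
        t = time.time()
        render_one(pose)

timed_render_loop(range(3), lambda p: time.sleep(0.01))
```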
torch.Size([4, 3, 4])
+0 0.0013484954833984375
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+frames 1-3: ~22.1-22.2 s each (individual lines elided)
+Saved test set
+[TRAIN] Iter: 350000-383100, logged every 100 iters: Loss ~0.0043-0.0090, PSNR ~24.7-29.3 (individual lines elided)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/{360000,370000,380000}.tar (one every 10000 iterations)
+[TRAIN] Iter: 383200 Loss: 
0.005812351126223803 PSNR: 27.1591854095459 +[TRAIN] Iter: 383300 Loss: 0.005283616483211517 PSNR: 26.6453914642334 +[TRAIN] Iter: 383400 Loss: 0.0059871794655919075 PSNR: 26.808446884155273 +[TRAIN] Iter: 383500 Loss: 0.007245508022606373 PSNR: 26.057573318481445 +[TRAIN] Iter: 383600 Loss: 0.006964366417378187 PSNR: 25.60310935974121 +[TRAIN] Iter: 383700 Loss: 0.0046310764737427235 PSNR: 28.374338150024414 +[TRAIN] Iter: 383800 Loss: 0.006681258324533701 PSNR: 26.158702850341797 +[TRAIN] Iter: 383900 Loss: 0.0052810353226959705 PSNR: 27.661298751831055 +[TRAIN] Iter: 384000 Loss: 0.005330435466021299 PSNR: 27.75932502746582 +[TRAIN] Iter: 384100 Loss: 0.007361655123531818 PSNR: 26.161550521850586 +[TRAIN] Iter: 384200 Loss: 0.004926228895783424 PSNR: 28.058082580566406 +[TRAIN] Iter: 384300 Loss: 0.007576876785606146 PSNR: 26.441198348999023 +[TRAIN] Iter: 384400 Loss: 0.005007497034966946 PSNR: 28.393386840820312 +[TRAIN] Iter: 384500 Loss: 0.008333610370755196 PSNR: 25.28685760498047 +[TRAIN] Iter: 384600 Loss: 0.006137452088296413 PSNR: 27.00969123840332 +[TRAIN] Iter: 384700 Loss: 0.008108002133667469 PSNR: 25.910152435302734 +[TRAIN] Iter: 384800 Loss: 0.005881570279598236 PSNR: 27.115463256835938 +[TRAIN] Iter: 384900 Loss: 0.0068061575293540955 PSNR: 26.2572078704834 +[TRAIN] Iter: 385000 Loss: 0.006066109985113144 PSNR: 26.687986373901367 +[TRAIN] Iter: 385100 Loss: 0.006765288766473532 PSNR: 25.509933471679688 +[TRAIN] Iter: 385200 Loss: 0.005434258375316858 PSNR: 27.044200897216797 +[TRAIN] Iter: 385300 Loss: 0.00802193209528923 PSNR: 25.058990478515625 +[TRAIN] Iter: 385400 Loss: 0.0068851662799716 PSNR: 26.290620803833008 +[TRAIN] Iter: 385500 Loss: 0.005580229219049215 PSNR: 26.972919464111328 +[TRAIN] Iter: 385600 Loss: 0.006285783369094133 PSNR: 26.169950485229492 +[TRAIN] Iter: 385700 Loss: 0.006654953584074974 PSNR: 26.29826545715332 +[TRAIN] Iter: 385800 Loss: 0.006220032460987568 PSNR: 27.213394165039062 +[TRAIN] Iter: 385900 Loss: 0.007042953744530678 PSNR: 26.175294876098633 +[TRAIN] Iter: 386000 Loss: 0.006867040880024433 PSNR: 25.98257064819336 +[TRAIN] Iter: 386100 Loss: 0.006594580132514238 PSNR: 25.855712890625 +[TRAIN] Iter: 386200 Loss: 0.006803595926612616 PSNR: 25.878541946411133 +[TRAIN] Iter: 386300 Loss: 0.005323481746017933 PSNR: 28.56102752685547 +[TRAIN] Iter: 386400 Loss: 0.005819095764309168 PSNR: 26.79416847229004 +[TRAIN] Iter: 386500 Loss: 0.006715857889503241 PSNR: 26.485685348510742 +[TRAIN] Iter: 386600 Loss: 0.006479417905211449 PSNR: 26.406686782836914 +[TRAIN] Iter: 386700 Loss: 0.003925233148038387 PSNR: 28.734233856201172 +[TRAIN] Iter: 386800 Loss: 0.008191900327801704 PSNR: 25.069538116455078 +[TRAIN] Iter: 386900 Loss: 0.005653022788465023 PSNR: 26.807998657226562 +[TRAIN] Iter: 387000 Loss: 0.007632805034518242 PSNR: 25.377197265625 +[TRAIN] Iter: 387100 Loss: 0.006725395563989878 PSNR: 26.045373916625977 +[TRAIN] Iter: 387200 Loss: 0.00615449994802475 PSNR: 26.57206153869629 +[TRAIN] Iter: 387300 Loss: 0.00716704037040472 PSNR: 25.89373779296875 +[TRAIN] Iter: 387400 Loss: 0.006502851378172636 PSNR: 26.70245361328125 +[TRAIN] Iter: 387500 Loss: 0.006084647960960865 PSNR: 26.570375442504883 +[TRAIN] Iter: 387600 Loss: 0.006108657456934452 PSNR: 27.52623176574707 +[TRAIN] Iter: 387700 Loss: 0.007388794794678688 PSNR: 26.868043899536133 +[TRAIN] Iter: 387800 Loss: 0.006508332677185535 PSNR: 26.884286880493164 +[TRAIN] Iter: 387900 Loss: 0.006505646277219057 PSNR: 26.799306869506836 +[TRAIN] Iter: 388000 Loss: 0.00684577040374279 PSNR: 
26.614879608154297 +[TRAIN] Iter: 388100 Loss: 0.007026863284409046 PSNR: 26.54170799255371 +[TRAIN] Iter: 388200 Loss: 0.006619283463805914 PSNR: 26.789512634277344 +[TRAIN] Iter: 388300 Loss: 0.006345284637063742 PSNR: 27.442075729370117 +[TRAIN] Iter: 388400 Loss: 0.005453894380480051 PSNR: 26.615081787109375 +[TRAIN] Iter: 388500 Loss: 0.005306982435286045 PSNR: 27.043033599853516 +[TRAIN] Iter: 388600 Loss: 0.007178964093327522 PSNR: 26.002887725830078 +[TRAIN] Iter: 388700 Loss: 0.005216220393776894 PSNR: 28.174381256103516 +[TRAIN] Iter: 388800 Loss: 0.008591953665018082 PSNR: 25.250808715820312 +[TRAIN] Iter: 388900 Loss: 0.005811090115457773 PSNR: 27.349994659423828 +[TRAIN] Iter: 389000 Loss: 0.006005843169987202 PSNR: 27.872854232788086 +[TRAIN] Iter: 389100 Loss: 0.007249284069985151 PSNR: 26.03082847595215 +[TRAIN] Iter: 389200 Loss: 0.005766768474131823 PSNR: 26.667217254638672 +[TRAIN] Iter: 389300 Loss: 0.006491784006357193 PSNR: 26.34331512451172 +[TRAIN] Iter: 389400 Loss: 0.006091227289289236 PSNR: 27.620716094970703 +[TRAIN] Iter: 389500 Loss: 0.007341732271015644 PSNR: 25.338268280029297 +[TRAIN] Iter: 389600 Loss: 0.007035384885966778 PSNR: 26.155675888061523 +[TRAIN] Iter: 389700 Loss: 0.005946383811533451 PSNR: 26.23455238342285 +[TRAIN] Iter: 389800 Loss: 0.006759725511074066 PSNR: 26.510263442993164 +[TRAIN] Iter: 389900 Loss: 0.007766400929540396 PSNR: 25.971019744873047 +Saved checkpoints at ./logs/TUT-out-doll-360-np/390000.tar +[TRAIN] Iter: 390000 Loss: 0.006777180824428797 PSNR: 25.995197296142578 +[TRAIN] Iter: 390100 Loss: 0.007509604096412659 PSNR: 25.546194076538086 +[TRAIN] Iter: 390200 Loss: 0.004842414520680904 PSNR: 27.721410751342773 +[TRAIN] Iter: 390300 Loss: 0.006363291293382645 PSNR: 26.370378494262695 +[TRAIN] Iter: 390400 Loss: 0.007026888430118561 PSNR: 26.259933471679688 +[TRAIN] Iter: 390500 Loss: 0.007078072987496853 PSNR: 26.341325759887695 +[TRAIN] Iter: 390600 Loss: 0.0066376300528645515 PSNR: 26.220888137817383 +[TRAIN] Iter: 390700 Loss: 0.007244559936225414 PSNR: 25.50371742248535 +[TRAIN] Iter: 390800 Loss: 0.007533811032772064 PSNR: 26.291015625 +[TRAIN] Iter: 390900 Loss: 0.006002960726618767 PSNR: 27.73290252685547 +[TRAIN] Iter: 391000 Loss: 0.006607758812606335 PSNR: 26.277179718017578 +[TRAIN] Iter: 391100 Loss: 0.007014997769147158 PSNR: 25.769332885742188 +[TRAIN] Iter: 391200 Loss: 0.0058697545900940895 PSNR: 26.55293083190918 +[TRAIN] Iter: 391300 Loss: 0.006579246371984482 PSNR: 26.10542869567871 +[TRAIN] Iter: 391400 Loss: 0.006032176781445742 PSNR: 26.91922950744629 +[TRAIN] Iter: 391500 Loss: 0.007409193553030491 PSNR: 25.617385864257812 +[TRAIN] Iter: 391600 Loss: 0.006005064584314823 PSNR: 26.357648849487305 +[TRAIN] Iter: 391700 Loss: 0.005499660968780518 PSNR: 28.331287384033203 +[TRAIN] Iter: 391800 Loss: 0.007081942167133093 PSNR: 25.69770050048828 +[TRAIN] Iter: 391900 Loss: 0.005704805254936218 PSNR: 27.914440155029297 +[TRAIN] Iter: 392000 Loss: 0.005284407176077366 PSNR: 27.331951141357422 +[TRAIN] Iter: 392100 Loss: 0.007458121981471777 PSNR: 25.993043899536133 +[TRAIN] Iter: 392200 Loss: 0.007033021189272404 PSNR: 25.825225830078125 +[TRAIN] Iter: 392300 Loss: 0.004704640246927738 PSNR: 29.28472137451172 +[TRAIN] Iter: 392400 Loss: 0.006105188280344009 PSNR: 27.141361236572266 +[TRAIN] Iter: 392500 Loss: 0.005486178211867809 PSNR: 27.23505401611328 +[TRAIN] Iter: 392600 Loss: 0.007392033468931913 PSNR: 26.039033889770508 +[TRAIN] Iter: 392700 Loss: 0.005293058231472969 PSNR: 28.436811447143555 +[TRAIN] 
Iter: 392800 Loss: 0.006260375492274761 PSNR: 26.52419662475586 +[TRAIN] Iter: 392900 Loss: 0.007942460477352142 PSNR: 26.647069931030273 +[TRAIN] Iter: 393000 Loss: 0.006970718968659639 PSNR: 25.769559860229492 +[TRAIN] Iter: 393100 Loss: 0.00743438582867384 PSNR: 26.13018226623535 +[TRAIN] Iter: 393200 Loss: 0.005811960436403751 PSNR: 27.12540626525879 +[TRAIN] Iter: 393300 Loss: 0.00533923227339983 PSNR: 27.572872161865234 +[TRAIN] Iter: 393400 Loss: 0.0053544798865914345 PSNR: 27.869640350341797 +[TRAIN] Iter: 393500 Loss: 0.0066923415288329124 PSNR: 25.982255935668945 +[TRAIN] Iter: 393600 Loss: 0.006894215010106564 PSNR: 26.236358642578125 +[TRAIN] Iter: 393700 Loss: 0.007230438757687807 PSNR: 25.908987045288086 +[TRAIN] Iter: 393800 Loss: 0.0087808296084404 PSNR: 24.989665985107422 +[TRAIN] Iter: 393900 Loss: 0.006966015789657831 PSNR: 26.511287689208984 +[TRAIN] Iter: 394000 Loss: 0.006409716326743364 PSNR: 26.640247344970703 +[TRAIN] Iter: 394100 Loss: 0.006293692626059055 PSNR: 25.844003677368164 +[TRAIN] Iter: 394200 Loss: 0.007942911237478256 PSNR: 25.639881134033203 +[TRAIN] Iter: 394300 Loss: 0.006730359513312578 PSNR: 26.450735092163086 +[TRAIN] Iter: 394400 Loss: 0.0058420514687895775 PSNR: 26.571237564086914 +[TRAIN] Iter: 394500 Loss: 0.00694902241230011 PSNR: 26.022361755371094 +[TRAIN] Iter: 394600 Loss: 0.00615578331053257 PSNR: 27.839221954345703 +[TRAIN] Iter: 394700 Loss: 0.004621918313205242 PSNR: 28.52386474609375 +[TRAIN] Iter: 394800 Loss: 0.006463398691266775 PSNR: 26.397666931152344 +[TRAIN] Iter: 394900 Loss: 0.006779906339943409 PSNR: 26.37522315979004 +[TRAIN] Iter: 395000 Loss: 0.006615522783249617 PSNR: 26.899240493774414 +[TRAIN] Iter: 395100 Loss: 0.005749646108597517 PSNR: 27.716726303100586 +[TRAIN] Iter: 395200 Loss: 0.005824699532240629 PSNR: 27.015911102294922 +[TRAIN] Iter: 395300 Loss: 0.006513234227895737 PSNR: 25.917570114135742 +[TRAIN] Iter: 395400 Loss: 0.005543924868106842 PSNR: 27.997173309326172 +[TRAIN] Iter: 395500 Loss: 0.0048063816502690315 PSNR: 28.988679885864258 +[TRAIN] Iter: 395600 Loss: 0.006717191077768803 PSNR: 26.390933990478516 +[TRAIN] Iter: 395700 Loss: 0.007199008949100971 PSNR: 25.607681274414062 +[TRAIN] Iter: 395800 Loss: 0.005948490463197231 PSNR: 27.50408363342285 +[TRAIN] Iter: 395900 Loss: 0.006960990373045206 PSNR: 26.28960609436035 +[TRAIN] Iter: 396000 Loss: 0.006141112186014652 PSNR: 27.496042251586914 +[TRAIN] Iter: 396100 Loss: 0.005717269144952297 PSNR: 28.444320678710938 +[TRAIN] Iter: 396200 Loss: 0.0070638409815728664 PSNR: 25.388639450073242 +[TRAIN] Iter: 396300 Loss: 0.006706633605062962 PSNR: 26.249330520629883 +[TRAIN] Iter: 396400 Loss: 0.00623459555208683 PSNR: 26.545089721679688 +[TRAIN] Iter: 396500 Loss: 0.006586904637515545 PSNR: 27.441518783569336 +[TRAIN] Iter: 396600 Loss: 0.006589412689208984 PSNR: 26.496532440185547 +[TRAIN] Iter: 396700 Loss: 0.006176907569169998 PSNR: 26.751062393188477 +[TRAIN] Iter: 396800 Loss: 0.006596588995307684 PSNR: 26.177845001220703 +[TRAIN] Iter: 396900 Loss: 0.004471232183277607 PSNR: 28.227901458740234 +[TRAIN] Iter: 397000 Loss: 0.004603176843374968 PSNR: 28.541658401489258 +[TRAIN] Iter: 397100 Loss: 0.00643746554851532 PSNR: 26.884496688842773 +[TRAIN] Iter: 397200 Loss: 0.005281726364046335 PSNR: 28.414058685302734 +[TRAIN] Iter: 397300 Loss: 0.005994182080030441 PSNR: 26.932331085205078 +[TRAIN] Iter: 397400 Loss: 0.00631998386234045 PSNR: 26.921268463134766 +[TRAIN] Iter: 397500 Loss: 0.005199776496738195 PSNR: 27.69692611694336 +[TRAIN] Iter: 397600 
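The Loss and PSNR columns follow the usual image-quality convention PSNR = -10 * log10(MSE), but inverting a logged PSNR does not reproduce the logged Loss (e.g. PSNR 27.003 implies MSE ≈ 0.0020, while the Loss printed alongside it is ≈ 0.0066). A plausible reading, assumed here rather than stated anywhere in this diff, is that PSNR is computed from the fine network's MSE alone while Loss also adds the coarse branch. A minimal sketch of the conversion under that assumption:

    import numpy as np

    def mse2psnr(mse):
        # Standard conversion; assumes pixel values normalized to [0, 1].
        return -10.0 * np.log10(mse)

    def psnr2mse(psnr):
        return 10.0 ** (-psnr / 10.0)

    # Logged pair: Loss 0.006621, PSNR 27.003. Inverting the PSNR:
    print(psnr2mse(27.003))  # ~0.0020 -> plausibly the fine-network MSE,
                             # leaving ~0.0046 for the coarse branch.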
[Training log, condensed] Iterations 397600–399900 stay in the same Loss/PSNR band; checkpoint saved at ./logs/TUT-out-doll-360-np/400000.tar. Two 120-frame render passes follow (each frame torch.Size([320, 640, 3]) RGB plus torch.Size([320, 640]) disparity, ~21–23.5 s per frame); the first ends with "Done, saving (120, 320, 640, 3) (120, 320, 640)" and a dump of the extras dict: 'raw' (per-sample network outputs, the fourth channel presumably the pre-activation density), 'rgb0'/'disp0'/'acc0' (the coarse network's per-ray color, disparity, and accumulated opacity; acc0 is all ones here), and 'z_std' (per-ray standard deviation of the depth samples, ~0.003–0.016). A 4-pose test set (test poses shape torch.Size([4, 3, 4])) is then rendered at ~21–22.4 s per frame and saved ("Saved test set"), after which training resumes (iterations 400000–400700 logged in the same range).
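At this volume the raw log is easier to judge as a curve. A small parsing sketch (the log path is hypothetical, standing in for wherever this stdout was captured; the regex matches the [TRAIN] lines above, and pylab is the same plotting import the notebook uses):

    import re
    import pylab as plt

    LOG_PATH = "train.log"  # hypothetical: wherever this output was redirected

    pat = re.compile(r"\[TRAIN\] Iter: (\d+) Loss: ([0-9.eE+-]+) PSNR: ([0-9.eE+-]+)")
    iters, psnrs = [], []
    with open(LOG_PATH) as f:
        for line in f:
            m = pat.search(line)
            if m:
                iters.append(int(m.group(1)))
                psnrs.append(float(m.group(3)))

    plt.plot(iters, psnrs)
    plt.xlabel("iteration")
    plt.ylabel("PSNR (dB)")
    plt.show()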
[Training log, condensed] Iterations 400800–448500: Loss ~0.0041–0.0087 (PSNR ~24.9–29.6 dB), still plateaued. Checkpoints saved at ./logs/TUT-out-doll-360-np/410000.tar, 420000.tar, 430000.tar, and 440000.tar.
0.006428132764995098 PSNR: 27.465045928955078 +[TRAIN] Iter: 448700 Loss: 0.006876501254737377 PSNR: 25.893800735473633 +[TRAIN] Iter: 448800 Loss: 0.007293909788131714 PSNR: 25.874156951904297 +[TRAIN] Iter: 448900 Loss: 0.008046849630773067 PSNR: 25.225841522216797 +[TRAIN] Iter: 449000 Loss: 0.006070259027183056 PSNR: 26.538606643676758 +[TRAIN] Iter: 449100 Loss: 0.005967867560684681 PSNR: 27.896770477294922 +[TRAIN] Iter: 449200 Loss: 0.006262600421905518 PSNR: 26.243803024291992 +[TRAIN] Iter: 449300 Loss: 0.006017095874994993 PSNR: 25.961055755615234 +[TRAIN] Iter: 449400 Loss: 0.006369390524923801 PSNR: 26.828136444091797 +[TRAIN] Iter: 449500 Loss: 0.007432000711560249 PSNR: 26.27715492248535 +[TRAIN] Iter: 449600 Loss: 0.005078794434666634 PSNR: 27.99315643310547 +[TRAIN] Iter: 449700 Loss: 0.006317096296697855 PSNR: 26.074966430664062 +[TRAIN] Iter: 449800 Loss: 0.006281524430960417 PSNR: 26.355318069458008 +[TRAIN] Iter: 449900 Loss: 0.007899344898760319 PSNR: 25.574443817138672 +Saved checkpoints at ./logs/TUT-out-doll-360-np/450000.tar +0 0.0008988380432128906 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 22.37176012992859 +2 21.45247459411621 +3 21.738860607147217 +4 21.654542207717896 +5 21.952928066253662 +6 21.439132928848267 +7 21.63950276374817 +8 22.045542001724243 +9 22.37422800064087 +10 22.926990509033203 +11 20.680607080459595 +12 22.60486626625061 +13 21.58322501182556 +14 22.107168197631836 +15 21.59531021118164 +16 21.85197687149048 +17 22.37137508392334 +18 22.07019805908203 +19 21.884016752243042 +20 21.529837369918823 +21 21.928462743759155 +22 22.12061858177185 +23 21.731374263763428 +24 22.32021474838257 +25 21.911763429641724 +26 21.459977388381958 +27 21.50849151611328 +28 22.11487102508545 +29 22.815476179122925 +30 22.519654035568237 +31 22.05145764350891 +32 22.438276529312134 +33 22.940885543823242 +34 21.052613735198975 +35 21.862737894058228 +36 21.76944661140442 +37 22.18893575668335 +38 21.748345851898193 +39 21.50650143623352 +40 21.766520023345947 +41 21.523761987686157 +42 22.16712522506714 +43 22.073644876480103 +44 22.219042778015137 +45 22.120258569717407 +46 22.348721027374268 +47 21.336310863494873 +48 22.178964376449585 +49 21.69736385345459 +50 22.606614351272583 +51 21.680375814437866 +52 22.570601224899292 +53 15.365451574325562 +54 22.54827642440796 +55 21.561756134033203 +56 22.26758122444153 +57 21.93674921989441 +58 22.25747013092041 +59 22.212377786636353 +60 21.673185110092163 +61 22.189329624176025 +62 22.455769062042236 +63 22.374211311340332 +64 22.452932596206665 +65 22.384440183639526 +66 21.949373483657837 +67 22.045226097106934 +68 21.774882078170776 +69 22.00096082687378 +70 23.35288906097412 +71 21.53064250946045 +72 22.18225598335266 +73 22.493274450302124 +74 22.29503870010376 +75 22.11644148826599 +76 21.51506781578064 +77 22.24858045578003 +78 21.981037855148315 +79 22.378278017044067 +80 22.37604546546936 +81 21.23484492301941 +82 21.94436287879944 +83 21.749353170394897 +84 21.950650930404663 +85 22.66227412223816 +86 21.665873050689697 +87 22.09606695175171 +88 21.75750470161438 +89 22.261775255203247 +90 22.387782096862793 +91 21.92481565475464 +92 21.822988986968994 +93 21.95465087890625 +94 22.56427502632141 +95 21.728132963180542 +96 22.712284564971924 +97 22.467068672180176 +98 21.777796983718872 +99 22.17017388343811 +100 21.72162961959839 +101 22.1685574054718 +102 22.078506231307983 +103 23.19217300415039 +104 22.61530303955078 +105 21.421584606170654 +106 21.774962186813354 +107 22.26664161682129 
+108 21.58426570892334 +109 21.685109615325928 +110 21.9225811958313 +111 21.896251678466797 +112 22.194804430007935 +113 21.47890043258667 +114 21.749338150024414 +115 22.85623288154602 +116 21.776443004608154 +117 21.65223741531372 +118 22.01959776878357 +119 22.155330419540405 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[ 7.3335e-01, 1.4152e+00, 1.7458e+00, -6.6622e+01], + [-1.6684e+00, -9.2544e-01, 4.5446e-01, -4.9428e+01], + [-1.4652e+00, -7.9140e-01, 5.0747e-01, -4.1949e+01], + ..., + [-7.4735e+00, -5.1522e+00, -1.6358e+00, 4.8184e+02], + [-7.3601e+00, -5.0644e+00, -1.9059e+00, 4.8548e+02], + [-7.7501e+00, -5.2228e+00, -2.2776e+00, 4.9266e+02]], + + [[-3.2813e+00, -2.6956e+00, -2.0945e+00, -5.0296e+01], + [-3.2760e+00, -2.6578e+00, -1.9628e+00, -4.3899e+01], + [-1.6232e+00, -1.2103e+00, -7.1424e-01, -3.9799e+01], + ..., + [-1.4943e+01, -3.4996e+00, -3.3694e-01, 7.6625e+02], + [-1.2362e+01, -1.0819e-02, 4.4870e+00, 7.8689e+02], + [-1.0907e+01, 1.5496e+00, 5.7005e+00, 7.7491e+02]], + + [[-6.9328e+00, -6.4624e+00, -6.0811e+00, -2.3664e+01], + [-6.5190e+00, -5.9319e+00, -5.4495e+00, -4.4483e+01], + [-4.9076e+00, -4.5872e+00, -3.8328e+00, -1.4679e+01], + ..., + [-2.0549e+01, -7.4292e+00, -8.4743e-01, 7.1546e+02], + [-1.7460e+01, -4.0927e+00, 2.1800e+00, 7.0374e+02], + [-1.5159e+01, -2.0952e+00, 3.9293e+00, 6.9853e+02]], + + ..., + + [[-6.0978e-01, -7.1238e-01, -7.5393e-01, -4.0527e+01], + [ 1.7590e+00, 1.8547e+00, 2.9464e+00, -4.3409e+01], + [ 1.2229e+00, 1.2800e+00, 2.2793e+00, -4.2195e+01], + ..., + [-4.2110e+00, 6.4948e-01, 4.6134e+00, 4.0794e+02], + [-4.8007e+00, -4.1260e-02, 4.0416e+00, 4.0963e+02], + [-5.1012e+00, 8.3791e-02, 4.2515e+00, 4.4677e+02]], + + [[-4.1258e+00, -3.3596e+00, -2.8577e+00, -6.6754e+01], + [-7.4621e-01, -8.4357e-01, -1.0224e+00, -2.3153e+01], + [-1.1381e+00, -1.1938e+00, -1.3482e+00, -2.3366e+01], + ..., + [-1.4030e+01, -9.1798e+00, -1.2952e+01, 7.8382e+02], + [-1.6802e+01, -1.0042e+01, -1.2852e+01, 9.2635e+02], + [-1.6723e+01, -1.0066e+01, -1.2843e+01, 9.2007e+02]], + + [[ 4.5564e+00, 5.1127e+00, 5.3854e+00, -5.7762e+01], + [-2.7724e-01, 2.5735e-02, 7.2228e-01, -3.6902e+00], + [-4.7926e-02, 2.7447e-01, 1.0146e+00, -2.9514e+00], + ..., + [-9.4132e+00, -6.4693e+00, -7.1438e+00, 5.1382e+02], + [-9.3177e+00, -6.5924e+00, -7.3451e+00, 5.9668e+02], + [-9.4201e+00, -6.0634e+00, -5.6150e+00, 5.9008e+02]]], + grad_fn=), 'rgb0': tensor([[0.4433, 0.6003, 0.7992], + [0.2152, 0.1998, 0.2153], + [0.0735, 0.0850, 0.1029], + ..., + [0.3540, 0.3080, 0.2837], + [0.1749, 0.1827, 0.2235], + [0.3189, 0.4440, 0.5941]], grad_fn=), 'disp0': tensor([47.9884, 69.8956, 37.1627, ..., 44.2341, 49.7204, 45.5388], + grad_fn=), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.0032, 0.0044, 0.0048, ..., 0.0054, 0.0946, 0.0040])} +0 0.0007560253143310547 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.504576444625854 +2 23.09836220741272 +3 21.769373893737793 +4 22.287108659744263 +5 21.572120428085327 +6 21.84682059288025 +7 21.969692945480347 +8 21.5997531414032 +9 22.216269493103027 +10 21.671254634857178 +11 20.93572211265564 +12 21.780341863632202 +13 22.02622413635254 +14 21.893131256103516 +15 21.137234210968018 +16 21.906694650650024 +17 21.8553786277771 +18 22.160958528518677 +19 21.885891914367676 +20 21.334173440933228 +21 21.884479522705078 +22 23.122644901275635 +23 21.88218355178833 +24 21.84969925880432 +25 21.921157360076904 +26 22.060325145721436 +27 22.132002592086792 +28 21.689831972122192 +29 22.24827003479004 
+30 21.243387699127197 +31 22.039671182632446 +32 22.019796133041382 +33 21.811038494110107 +34 21.433974027633667 +35 21.586784601211548 +36 22.05408024787903 +37 21.681639194488525 +38 22.594347953796387 +39 21.62985897064209 +40 22.729377269744873 +41 22.764358282089233 +42 21.921992540359497 +43 21.58543610572815 +44 21.874486207962036 +45 21.73530340194702 +46 22.100286960601807 +47 21.965385913848877 +48 22.046120166778564 +49 22.390902519226074 +50 22.115832567214966 +51 21.850858449935913 +52 21.734641075134277 +53 21.322685956954956 +54 22.469927310943604 +55 21.920687437057495 +56 22.063884735107422 +57 22.1842360496521 +58 21.527824878692627 +59 21.400846242904663 +60 22.312084436416626 +61 21.719625234603882 +62 22.784637451171875 +63 22.09843683242798 +64 21.842154502868652 +65 22.60177206993103 +66 22.24126124382019 +67 22.30583167076111 +68 21.489046573638916 +69 22.277120113372803 +70 23.00067448616028 +71 22.00833511352539 +72 21.504787921905518 +73 22.248852252960205 +74 22.81650447845459 +75 21.52904224395752 +76 22.926836729049683 +77 21.716399669647217 +78 22.330222845077515 +79 21.510096549987793 +80 22.58855152130127 +81 21.494585037231445 +82 21.779438018798828 +83 22.207791328430176 +84 22.14209270477295 +85 22.19742751121521 +86 22.06249237060547 +87 21.7105553150177 +88 22.385673761367798 +89 21.352991819381714 +90 22.948384761810303 +91 21.673688650131226 +92 21.7391836643219 +93 22.349748611450195 +94 21.16989278793335 +95 23.317340850830078 +96 21.455604314804077 +97 22.736828327178955 +98 22.809645891189575 +99 22.093692779541016 +100 21.700273513793945 +101 22.417089462280273 +102 21.35531210899353 +103 22.743801832199097 +104 21.908172607421875 +105 22.184723377227783 +106 22.969475746154785 +107 22.62721538543701 +108 21.993871688842773 +109 21.638399839401245 +110 22.104410648345947 +111 21.654565811157227 +112 22.21596598625183 +113 22.418636798858643 +114 22.027003526687622 +115 21.407897233963013 +116 22.38401460647583 +117 22.101207733154297 +118 21.287771701812744 +119 22.304469347000122 +test poses shape torch.Size([4, 3, 4]) +0 0.0010943412780761719 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.223957538604736 +2 23.479616403579712 +3 22.345219373703003 +Saved test set +[TRAIN] Iter: 450000 Loss: 0.006546244025230408 PSNR: 26.545249938964844 +[TRAIN] Iter: 450100 Loss: 0.006910353899002075 PSNR: 26.02283477783203 +[TRAIN] Iter: 450200 Loss: 0.005458145402371883 PSNR: 27.077999114990234 +[TRAIN] Iter: 450300 Loss: 0.005464745685458183 PSNR: 27.27784538269043 +[TRAIN] Iter: 450400 Loss: 0.005525689572095871 PSNR: 26.821199417114258 +[TRAIN] Iter: 450500 Loss: 0.006044191308319569 PSNR: 26.561342239379883 +[TRAIN] Iter: 450600 Loss: 0.00742629449814558 PSNR: 26.211170196533203 +[TRAIN] Iter: 450700 Loss: 0.0069634695537388325 PSNR: 26.070003509521484 +[TRAIN] Iter: 450800 Loss: 0.0050446102395653725 PSNR: 27.652509689331055 +[TRAIN] Iter: 450900 Loss: 0.0062941210344433784 PSNR: 26.64177894592285 +[TRAIN] Iter: 451000 Loss: 0.006148043554276228 PSNR: 26.714981079101562 +[TRAIN] Iter: 451100 Loss: 0.006577173247933388 PSNR: 26.07971954345703 +[TRAIN] Iter: 451200 Loss: 0.006579493172466755 PSNR: 27.538591384887695 +[TRAIN] Iter: 451300 Loss: 0.0062087541446089745 PSNR: 26.918506622314453 +[TRAIN] Iter: 451400 Loss: 0.005122182425111532 PSNR: 26.905040740966797 +[TRAIN] Iter: 451500 Loss: 0.0063462345860898495 PSNR: 26.657987594604492 +[TRAIN] Iter: 451600 Loss: 0.006873462349176407 PSNR: 25.658906936645508 +[TRAIN] Iter: 451700 Loss: 
0.005592339672148228 PSNR: 26.825387954711914 +[TRAIN] Iter: 451800 Loss: 0.006523824296891689 PSNR: 27.220272064208984 +[TRAIN] Iter: 451900 Loss: 0.006092808675020933 PSNR: 27.100473403930664 +[TRAIN] Iter: 452000 Loss: 0.006353065371513367 PSNR: 26.540496826171875 +[TRAIN] Iter: 452100 Loss: 0.006412318907678127 PSNR: 26.417560577392578 +[TRAIN] Iter: 452200 Loss: 0.0066391220316290855 PSNR: 26.814966201782227 +[TRAIN] Iter: 452300 Loss: 0.006202502641826868 PSNR: 26.49570083618164 +[TRAIN] Iter: 452400 Loss: 0.00630910275503993 PSNR: 27.423797607421875 +[TRAIN] Iter: 452500 Loss: 0.007126795593649149 PSNR: 25.82276153564453 +[TRAIN] Iter: 452600 Loss: 0.0069393012672662735 PSNR: 25.88910484313965 +[TRAIN] Iter: 452700 Loss: 0.006038933992385864 PSNR: 26.875635147094727 +[TRAIN] Iter: 452800 Loss: 0.006039804313331842 PSNR: 26.525463104248047 +[TRAIN] Iter: 452900 Loss: 0.005840339232236147 PSNR: 27.915307998657227 +[TRAIN] Iter: 453000 Loss: 0.005822299979627132 PSNR: 26.901601791381836 +[TRAIN] Iter: 453100 Loss: 0.005549043882638216 PSNR: 26.977863311767578 +[TRAIN] Iter: 453200 Loss: 0.007396260742098093 PSNR: 25.43627166748047 +[TRAIN] Iter: 453300 Loss: 0.005577610805630684 PSNR: 27.22260856628418 +[TRAIN] Iter: 453400 Loss: 0.006631679832935333 PSNR: 26.325977325439453 +[TRAIN] Iter: 453500 Loss: 0.0070542236790061 PSNR: 26.67415428161621 +[TRAIN] Iter: 453600 Loss: 0.006785247009247541 PSNR: 26.5533447265625 +[TRAIN] Iter: 453700 Loss: 0.005899590440094471 PSNR: 27.833499908447266 +[TRAIN] Iter: 453800 Loss: 0.005258374381810427 PSNR: 28.240903854370117 +[TRAIN] Iter: 453900 Loss: 0.007481706328690052 PSNR: 25.901588439941406 +[TRAIN] Iter: 454000 Loss: 0.0069357710890471935 PSNR: 26.328588485717773 +[TRAIN] Iter: 454100 Loss: 0.006599733605980873 PSNR: 26.57817268371582 +[TRAIN] Iter: 454200 Loss: 0.006427195388823748 PSNR: 26.383302688598633 +[TRAIN] Iter: 454300 Loss: 0.006577062886208296 PSNR: 26.189550399780273 +[TRAIN] Iter: 454400 Loss: 0.006183073855936527 PSNR: 26.126571655273438 +[TRAIN] Iter: 454500 Loss: 0.006386192981153727 PSNR: 26.231884002685547 +[TRAIN] Iter: 454600 Loss: 0.0059474557638168335 PSNR: 26.752044677734375 +[TRAIN] Iter: 454700 Loss: 0.007126595824956894 PSNR: 25.971994400024414 +[TRAIN] Iter: 454800 Loss: 0.0071331895887851715 PSNR: 26.057279586791992 +[TRAIN] Iter: 454900 Loss: 0.006511987186968327 PSNR: 26.53203582763672 +[TRAIN] Iter: 455000 Loss: 0.005826836451888084 PSNR: 26.367109298706055 +[TRAIN] Iter: 455100 Loss: 0.0051052868366241455 PSNR: 27.512325286865234 +[TRAIN] Iter: 455200 Loss: 0.005002971738576889 PSNR: 28.350629806518555 +[TRAIN] Iter: 455300 Loss: 0.0055094012059271336 PSNR: 27.636734008789062 +[TRAIN] Iter: 455400 Loss: 0.0054291305132210255 PSNR: 26.84112548828125 +[TRAIN] Iter: 455500 Loss: 0.00539030646905303 PSNR: 27.52962875366211 +[TRAIN] Iter: 455600 Loss: 0.00631191860884428 PSNR: 26.882349014282227 +[TRAIN] Iter: 455700 Loss: 0.0068922885693609715 PSNR: 26.69818687438965 +[TRAIN] Iter: 455800 Loss: 0.006631901022046804 PSNR: 26.228466033935547 +[TRAIN] Iter: 455900 Loss: 0.005639706272631884 PSNR: 28.06916618347168 +[TRAIN] Iter: 456000 Loss: 0.005548254121094942 PSNR: 27.93373680114746 +[TRAIN] Iter: 456100 Loss: 0.0052345688454806805 PSNR: 27.837419509887695 +[TRAIN] Iter: 456200 Loss: 0.0060505555011332035 PSNR: 28.123146057128906 +[TRAIN] Iter: 456300 Loss: 0.005573312286287546 PSNR: 27.948028564453125 +[TRAIN] Iter: 456400 Loss: 0.005771153140813112 PSNR: 28.161352157592773 +[TRAIN] Iter: 456500 Loss: 
0.0055561070330441 PSNR: 26.603425979614258 +[TRAIN] Iter: 456600 Loss: 0.0051386067643761635 PSNR: 28.00400161743164 +[TRAIN] Iter: 456700 Loss: 0.007926227524876595 PSNR: 25.369258880615234 +[TRAIN] Iter: 456800 Loss: 0.005903839599341154 PSNR: 26.700319290161133 +[TRAIN] Iter: 456900 Loss: 0.0064095486886799335 PSNR: 25.948043823242188 +[TRAIN] Iter: 457000 Loss: 0.0065039158798754215 PSNR: 26.35453224182129 +[TRAIN] Iter: 457100 Loss: 0.005900513380765915 PSNR: 27.216896057128906 +[TRAIN] Iter: 457200 Loss: 0.007401793729513884 PSNR: 25.96294403076172 +[TRAIN] Iter: 457300 Loss: 0.007002923637628555 PSNR: 26.019210815429688 +[TRAIN] Iter: 457400 Loss: 0.0061285169795155525 PSNR: 26.970272064208984 +[TRAIN] Iter: 457500 Loss: 0.00547921983525157 PSNR: 27.74492645263672 +[TRAIN] Iter: 457600 Loss: 0.006253216881304979 PSNR: 26.575204849243164 +[TRAIN] Iter: 457700 Loss: 0.006078439764678478 PSNR: 27.35338592529297 +[TRAIN] Iter: 457800 Loss: 0.006542698480188847 PSNR: 26.625385284423828 +[TRAIN] Iter: 457900 Loss: 0.0050389207899570465 PSNR: 28.00982666015625 +[TRAIN] Iter: 458000 Loss: 0.005967113189399242 PSNR: 26.910423278808594 +[TRAIN] Iter: 458100 Loss: 0.0044918181374669075 PSNR: 28.87148094177246 +[TRAIN] Iter: 458200 Loss: 0.006102010607719421 PSNR: 26.648679733276367 +[TRAIN] Iter: 458300 Loss: 0.006471926812082529 PSNR: 27.32007598876953 +[TRAIN] Iter: 458400 Loss: 0.006177837960422039 PSNR: 26.269441604614258 +[TRAIN] Iter: 458500 Loss: 0.005293100140988827 PSNR: 28.241334915161133 +[TRAIN] Iter: 458600 Loss: 0.006786661688238382 PSNR: 25.936664581298828 +[TRAIN] Iter: 458700 Loss: 0.006839354522526264 PSNR: 25.877399444580078 +[TRAIN] Iter: 458800 Loss: 0.006271339021623135 PSNR: 27.01365852355957 +[TRAIN] Iter: 458900 Loss: 0.007132234051823616 PSNR: 26.704910278320312 +[TRAIN] Iter: 459000 Loss: 0.006563677452504635 PSNR: 26.375288009643555 +[TRAIN] Iter: 459100 Loss: 0.0063450513407588005 PSNR: 26.641115188598633 +[TRAIN] Iter: 459200 Loss: 0.007119867019355297 PSNR: 25.67433738708496 +[TRAIN] Iter: 459300 Loss: 0.006464598700404167 PSNR: 26.335865020751953 +[TRAIN] Iter: 459400 Loss: 0.005894692614674568 PSNR: 26.63962745666504 +[TRAIN] Iter: 459500 Loss: 0.005622854456305504 PSNR: 27.490276336669922 +[TRAIN] Iter: 459600 Loss: 0.007671298459172249 PSNR: 25.696130752563477 +[TRAIN] Iter: 459700 Loss: 0.006758318282663822 PSNR: 26.19016456604004 +[TRAIN] Iter: 459800 Loss: 0.005643131211400032 PSNR: 27.424213409423828 +[TRAIN] Iter: 459900 Loss: 0.006299090106040239 PSNR: 26.12018585205078 +Saved checkpoints at ./logs/TUT-out-doll-360-np/460000.tar +[TRAIN] Iter: 460000 Loss: 0.006152838934212923 PSNR: 26.4891357421875 +[TRAIN] Iter: 460100 Loss: 0.005417080596089363 PSNR: 28.471086502075195 +[TRAIN] Iter: 460200 Loss: 0.005288134329020977 PSNR: 27.31719207763672 +[TRAIN] Iter: 460300 Loss: 0.006630783900618553 PSNR: 26.426259994506836 +[TRAIN] Iter: 460400 Loss: 0.005363998003304005 PSNR: 28.869165420532227 +[TRAIN] Iter: 460500 Loss: 0.0064924792386591434 PSNR: 26.69903564453125 +[TRAIN] Iter: 460600 Loss: 0.006651999428868294 PSNR: 26.241975784301758 +[TRAIN] Iter: 460700 Loss: 0.006566461641341448 PSNR: 26.570459365844727 +[TRAIN] Iter: 460800 Loss: 0.0066868700087070465 PSNR: 26.232635498046875 +[TRAIN] Iter: 460900 Loss: 0.007467721588909626 PSNR: 26.194929122924805 +[TRAIN] Iter: 461000 Loss: 0.006182059179991484 PSNR: 26.882957458496094 +[TRAIN] Iter: 461100 Loss: 0.0058266399428248405 PSNR: 26.933765411376953 +[TRAIN] Iter: 461200 Loss: 0.0061041126027703285 
PSNR: 26.59160804748535 +[TRAIN] Iter: 461300 Loss: 0.006382772698998451 PSNR: 26.67780303955078 +[TRAIN] Iter: 461400 Loss: 0.007478045765310526 PSNR: 25.446311950683594 +[TRAIN] Iter: 461500 Loss: 0.005142971873283386 PSNR: 27.8227481842041 +[TRAIN] Iter: 461600 Loss: 0.005616329610347748 PSNR: 26.75439453125 +[TRAIN] Iter: 461700 Loss: 0.007326668128371239 PSNR: 26.183979034423828 +[TRAIN] Iter: 461800 Loss: 0.0051254406571388245 PSNR: 27.83450698852539 +[TRAIN] Iter: 461900 Loss: 0.00792415626347065 PSNR: 25.74513053894043 +[TRAIN] Iter: 462000 Loss: 0.006038638763129711 PSNR: 26.251949310302734 +[TRAIN] Iter: 462100 Loss: 0.006799844093620777 PSNR: 25.661487579345703 +[TRAIN] Iter: 462200 Loss: 0.004621344618499279 PSNR: 28.983457565307617 +[TRAIN] Iter: 462300 Loss: 0.00787604134529829 PSNR: 26.068836212158203 +[TRAIN] Iter: 462400 Loss: 0.00698943343013525 PSNR: 25.680749893188477 +[TRAIN] Iter: 462500 Loss: 0.005814769770950079 PSNR: 26.685102462768555 +[TRAIN] Iter: 462600 Loss: 0.004774793982505798 PSNR: 28.06965446472168 +[TRAIN] Iter: 462700 Loss: 0.005376417655497789 PSNR: 28.1485652923584 +[TRAIN] Iter: 462800 Loss: 0.006519889459013939 PSNR: 25.919727325439453 +[TRAIN] Iter: 462900 Loss: 0.004810628946870565 PSNR: 27.548185348510742 +[TRAIN] Iter: 463000 Loss: 0.005962551571428776 PSNR: 26.292034149169922 +[TRAIN] Iter: 463100 Loss: 0.006696935743093491 PSNR: 26.48366355895996 +[TRAIN] Iter: 463200 Loss: 0.006678615231066942 PSNR: 26.124298095703125 +[TRAIN] Iter: 463300 Loss: 0.005861182231456041 PSNR: 27.400846481323242 +[TRAIN] Iter: 463400 Loss: 0.0055753132328391075 PSNR: 27.836711883544922 +[TRAIN] Iter: 463500 Loss: 0.004996117204427719 PSNR: 27.212879180908203 +[TRAIN] Iter: 463600 Loss: 0.00497001688927412 PSNR: 28.735942840576172 +[TRAIN] Iter: 463700 Loss: 0.005107585806399584 PSNR: 28.830238342285156 +[TRAIN] Iter: 463800 Loss: 0.005232313182204962 PSNR: 27.531274795532227 +[TRAIN] Iter: 463900 Loss: 0.0057236202992498875 PSNR: 27.043081283569336 +[TRAIN] Iter: 464000 Loss: 0.006165175698697567 PSNR: 27.178386688232422 +[TRAIN] Iter: 464100 Loss: 0.006115565076470375 PSNR: 26.80168342590332 +[TRAIN] Iter: 464200 Loss: 0.004712181631475687 PSNR: 28.2247371673584 +[TRAIN] Iter: 464300 Loss: 0.005100601352751255 PSNR: 28.185983657836914 +[TRAIN] Iter: 464400 Loss: 0.006203526630997658 PSNR: 27.97962760925293 +[TRAIN] Iter: 464500 Loss: 0.005602079909294844 PSNR: 27.782384872436523 +[TRAIN] Iter: 464600 Loss: 0.006952670402824879 PSNR: 26.080049514770508 +[TRAIN] Iter: 464700 Loss: 0.007125050760805607 PSNR: 26.12706756591797 +[TRAIN] Iter: 464800 Loss: 0.007643247954547405 PSNR: 26.11043357849121 +[TRAIN] Iter: 464900 Loss: 0.006739145144820213 PSNR: 26.143943786621094 +[TRAIN] Iter: 465000 Loss: 0.0060631101951003075 PSNR: 26.690452575683594 +[TRAIN] Iter: 465100 Loss: 0.005753469653427601 PSNR: 26.544517517089844 +[TRAIN] Iter: 465200 Loss: 0.0068892305716872215 PSNR: 26.01915168762207 +[TRAIN] Iter: 465300 Loss: 0.006038542836904526 PSNR: 26.63004493713379 +[TRAIN] Iter: 465400 Loss: 0.0072435555048286915 PSNR: 25.50259017944336 +[TRAIN] Iter: 465500 Loss: 0.0060150702483952045 PSNR: 27.028873443603516 +[TRAIN] Iter: 465600 Loss: 0.006519086193293333 PSNR: 26.501953125 +[TRAIN] Iter: 465700 Loss: 0.004595624748617411 PSNR: 28.788999557495117 +[TRAIN] Iter: 465800 Loss: 0.006063176319003105 PSNR: 26.713088989257812 +[TRAIN] Iter: 465900 Loss: 0.007437911815941334 PSNR: 26.71169090270996 +[TRAIN] Iter: 466000 Loss: 0.006896444596350193 PSNR: 25.838472366333008 
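Note on the two columns above: assuming the standard nerf-pytorch convention, the printed Loss is the sum of the fine- and coarse-network MSEs for the batch, while the printed PSNR is derived from the fine MSE alone, so PSNR != -10*log10(Loss). A minimal sketch, with hypothetical MSE values chosen to match the magnitudes logged around iteration 440000:

    import torch

    # mse2psnr as conventionally defined in nerf-pytorch's helpers
    mse2psnr = lambda x: -10. * torch.log(x) / torch.log(torch.Tensor([10.]))

    img_loss  = torch.tensor(0.00128)   # hypothetical fine-network MSE
    img_loss0 = torch.tensor(0.00322)   # hypothetical coarse-network MSE
    loss = img_loss + img_loss0         # printed as "Loss"  (~0.0045)
    psnr = mse2psnr(img_loss)           # printed as "PSNR"  (~28.9 dB)

This is also why iterations with nearly identical Loss can report noticeably different PSNR: the split between the fine and coarse terms varies from batch to batch.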
+[TRAIN] Iter: 466100 Loss: 0.004590448923408985 PSNR: 28.8604793548584 +[TRAIN] Iter: 466200 Loss: 0.004910677671432495 PSNR: 28.933332443237305 +[TRAIN] Iter: 466300 Loss: 0.0059969243593513966 PSNR: 28.074434280395508 +[TRAIN] Iter: 466400 Loss: 0.005987255368381739 PSNR: 26.695369720458984 +[TRAIN] Iter: 466500 Loss: 0.005785634741187096 PSNR: 27.140016555786133 +[TRAIN] Iter: 466600 Loss: 0.00672619603574276 PSNR: 26.496042251586914 +[TRAIN] Iter: 466700 Loss: 0.007104608230292797 PSNR: 26.175567626953125 +[TRAIN] Iter: 466800 Loss: 0.006296672858297825 PSNR: 26.65157127380371 +[TRAIN] Iter: 466900 Loss: 0.005315888207405806 PSNR: 27.40097999572754 +[TRAIN] Iter: 467000 Loss: 0.00633473414927721 PSNR: 26.254554748535156 +[TRAIN] Iter: 467100 Loss: 0.006374427117407322 PSNR: 26.97797966003418 +[TRAIN] Iter: 467200 Loss: 0.005462477914988995 PSNR: 27.937150955200195 +[TRAIN] Iter: 467300 Loss: 0.006056453101336956 PSNR: 27.07465171813965 +[TRAIN] Iter: 467400 Loss: 0.0068014939315617085 PSNR: 26.066049575805664 +[TRAIN] Iter: 467500 Loss: 0.0063000209629535675 PSNR: 26.429933547973633 +[TRAIN] Iter: 467600 Loss: 0.00521426135674119 PSNR: 27.253129959106445 +[TRAIN] Iter: 467700 Loss: 0.006435343995690346 PSNR: 26.251657485961914 +[TRAIN] Iter: 467800 Loss: 0.007020183373242617 PSNR: 25.911470413208008 +[TRAIN] Iter: 467900 Loss: 0.00687782745808363 PSNR: 26.516582489013672 +[TRAIN] Iter: 468000 Loss: 0.007060249801725149 PSNR: 26.105432510375977 +[TRAIN] Iter: 468100 Loss: 0.006508982740342617 PSNR: 26.28554916381836 +[TRAIN] Iter: 468200 Loss: 0.007020492572337389 PSNR: 26.591026306152344 +[TRAIN] Iter: 468300 Loss: 0.005200246348977089 PSNR: 27.901172637939453 +[TRAIN] Iter: 468400 Loss: 0.006017319392412901 PSNR: 26.887361526489258 +[TRAIN] Iter: 468500 Loss: 0.006797291338443756 PSNR: 26.0904541015625 +[TRAIN] Iter: 468600 Loss: 0.006117082200944424 PSNR: 27.314401626586914 +[TRAIN] Iter: 468700 Loss: 0.0072367251850664616 PSNR: 26.152587890625 +[TRAIN] Iter: 468800 Loss: 0.005789787508547306 PSNR: 27.577430725097656 +[TRAIN] Iter: 468900 Loss: 0.004210965242236853 PSNR: 28.874406814575195 +[TRAIN] Iter: 469000 Loss: 0.006083163432776928 PSNR: 26.52146339416504 +[TRAIN] Iter: 469100 Loss: 0.006836075335741043 PSNR: 26.6431941986084 +[TRAIN] Iter: 469200 Loss: 0.005648482125252485 PSNR: 27.176469802856445 +[TRAIN] Iter: 469300 Loss: 0.006030447315424681 PSNR: 27.362964630126953 +[TRAIN] Iter: 469400 Loss: 0.006235387641936541 PSNR: 26.4892635345459 +[TRAIN] Iter: 469500 Loss: 0.00465137604624033 PSNR: 28.299556732177734 +[TRAIN] Iter: 469600 Loss: 0.005842756014317274 PSNR: 26.551151275634766 +[TRAIN] Iter: 469700 Loss: 0.004794830456376076 PSNR: 27.99456214904785 +[TRAIN] Iter: 469800 Loss: 0.005958007648587227 PSNR: 27.22809600830078 +[TRAIN] Iter: 469900 Loss: 0.006784530356526375 PSNR: 26.282934188842773 +Saved checkpoints at ./logs/TUT-out-doll-360-np/470000.tar +[TRAIN] Iter: 470000 Loss: 0.005939685273915529 PSNR: 26.916006088256836 +[TRAIN] Iter: 470100 Loss: 0.00612584687769413 PSNR: 27.087343215942383 +[TRAIN] Iter: 470200 Loss: 0.004356032703071833 PSNR: 28.691831588745117 +[TRAIN] Iter: 470300 Loss: 0.007459558546543121 PSNR: 25.944583892822266 +[TRAIN] Iter: 470400 Loss: 0.006840636022388935 PSNR: 25.86861228942871 +[TRAIN] Iter: 470500 Loss: 0.007571300491690636 PSNR: 25.496471405029297 +[TRAIN] Iter: 470600 Loss: 0.004366409964859486 PSNR: 28.945526123046875 +[TRAIN] Iter: 470700 Loss: 0.005443224683403969 PSNR: 27.4481258392334 +[TRAIN] Iter: 470800 Loss: 
0.004795422777533531 PSNR: 28.49788475036621 +[TRAIN] Iter: 470900 Loss: 0.00597268296405673 PSNR: 26.351858139038086 +[TRAIN] Iter: 471000 Loss: 0.005912385880947113 PSNR: 27.563915252685547 +[TRAIN] Iter: 471100 Loss: 0.007002887781709433 PSNR: 25.471031188964844 +[TRAIN] Iter: 471200 Loss: 0.005972284357994795 PSNR: 27.31079864501953 +[TRAIN] Iter: 471300 Loss: 0.0066909184679389 PSNR: 26.204723358154297 +[TRAIN] Iter: 471400 Loss: 0.007712177466601133 PSNR: 25.498186111450195 +[TRAIN] Iter: 471500 Loss: 0.007810087408870459 PSNR: 25.31661605834961 +[TRAIN] Iter: 471600 Loss: 0.006656032521277666 PSNR: 26.513193130493164 +[TRAIN] Iter: 471700 Loss: 0.0064725009724497795 PSNR: 26.720123291015625 +[TRAIN] Iter: 471800 Loss: 0.005737992934882641 PSNR: 28.802446365356445 +[TRAIN] Iter: 471900 Loss: 0.0064588249661028385 PSNR: 26.164947509765625 +[TRAIN] Iter: 472000 Loss: 0.006233280524611473 PSNR: 26.62429428100586 +[TRAIN] Iter: 472100 Loss: 0.006736546754837036 PSNR: 26.26708984375 +[TRAIN] Iter: 472200 Loss: 0.0060391416773200035 PSNR: 26.386011123657227 +[TRAIN] Iter: 472300 Loss: 0.006405319087207317 PSNR: 26.302213668823242 +[TRAIN] Iter: 472400 Loss: 0.006822178140282631 PSNR: 26.03803253173828 +[TRAIN] Iter: 472500 Loss: 0.005674665328115225 PSNR: 27.028913497924805 +[TRAIN] Iter: 472600 Loss: 0.006237020716071129 PSNR: 26.147674560546875 +[TRAIN] Iter: 472700 Loss: 0.0052357143722474575 PSNR: 27.647497177124023 +[TRAIN] Iter: 472800 Loss: 0.006703970953822136 PSNR: 25.870792388916016 +[TRAIN] Iter: 472900 Loss: 0.006682782433927059 PSNR: 25.616817474365234 +[TRAIN] Iter: 473000 Loss: 0.006162252742797136 PSNR: 27.055831909179688 +[TRAIN] Iter: 473100 Loss: 0.006179599557071924 PSNR: 26.607471466064453 +[TRAIN] Iter: 473200 Loss: 0.006607051007449627 PSNR: 27.114116668701172 +[TRAIN] Iter: 473300 Loss: 0.005632077343761921 PSNR: 27.4943904876709 +[TRAIN] Iter: 473400 Loss: 0.00662176962941885 PSNR: 26.384634017944336 +[TRAIN] Iter: 473500 Loss: 0.004924623761326075 PSNR: 28.177249908447266 +[TRAIN] Iter: 473600 Loss: 0.005833854898810387 PSNR: 26.981809616088867 +[TRAIN] Iter: 473700 Loss: 0.006230921950191259 PSNR: 26.96063995361328 +[TRAIN] Iter: 473800 Loss: 0.005453487858176231 PSNR: 27.440204620361328 +[TRAIN] Iter: 473900 Loss: 0.007029119413346052 PSNR: 25.924442291259766 +[TRAIN] Iter: 474000 Loss: 0.007126970682293177 PSNR: 26.055282592773438 +[TRAIN] Iter: 474100 Loss: 0.005061992444097996 PSNR: 27.30451011657715 +[TRAIN] Iter: 474200 Loss: 0.005710680037736893 PSNR: 28.139440536499023 +[TRAIN] Iter: 474300 Loss: 0.005421946756541729 PSNR: 27.747854232788086 +[TRAIN] Iter: 474400 Loss: 0.0068148961290717125 PSNR: 26.069360733032227 +[TRAIN] Iter: 474500 Loss: 0.006440171971917152 PSNR: 26.18670654296875 +[TRAIN] Iter: 474600 Loss: 0.0067177265882492065 PSNR: 26.53240394592285 +[TRAIN] Iter: 474700 Loss: 0.006221383810043335 PSNR: 27.137741088867188 +[TRAIN] Iter: 474800 Loss: 0.007113371975719929 PSNR: 26.396934509277344 +[TRAIN] Iter: 474900 Loss: 0.005381901282817125 PSNR: 27.2122745513916 +[TRAIN] Iter: 475000 Loss: 0.006050523370504379 PSNR: 26.331567764282227 +[TRAIN] Iter: 475100 Loss: 0.007488739676773548 PSNR: 25.970781326293945 +[TRAIN] Iter: 475200 Loss: 0.005233126226812601 PSNR: 27.857099533081055 +[TRAIN] Iter: 475300 Loss: 0.005833256058394909 PSNR: 28.41266441345215 +[TRAIN] Iter: 475400 Loss: 0.006139525678008795 PSNR: 26.42368507385254 +[TRAIN] Iter: 475500 Loss: 0.007956058718264103 PSNR: 25.745223999023438 +[TRAIN] Iter: 475600 Loss: 
0.005442928522825241 PSNR: 28.008346557617188 +[TRAIN] Iter: 475700 Loss: 0.006419955752789974 PSNR: 26.44999122619629 +[TRAIN] Iter: 475800 Loss: 0.007193666882812977 PSNR: 25.65566062927246 +[TRAIN] Iter: 475900 Loss: 0.007039311807602644 PSNR: 25.94913101196289 +[TRAIN] Iter: 476000 Loss: 0.006744371727108955 PSNR: 26.34755516052246 +[TRAIN] Iter: 476100 Loss: 0.00488563347607851 PSNR: 27.347728729248047 +[TRAIN] Iter: 476200 Loss: 0.006712278351187706 PSNR: 26.302677154541016 +[TRAIN] Iter: 476300 Loss: 0.0073897927068173885 PSNR: 26.578454971313477 +[TRAIN] Iter: 476400 Loss: 0.007393859326839447 PSNR: 25.44341278076172 +[TRAIN] Iter: 476500 Loss: 0.007758177816867828 PSNR: 26.076087951660156 +[TRAIN] Iter: 476600 Loss: 0.006962033919990063 PSNR: 26.412513732910156 +[TRAIN] Iter: 476700 Loss: 0.006922231055796146 PSNR: 26.1887149810791 +[TRAIN] Iter: 476800 Loss: 0.006505309604108334 PSNR: 26.338579177856445 +[TRAIN] Iter: 476900 Loss: 0.006351180374622345 PSNR: 27.050642013549805 +[TRAIN] Iter: 477000 Loss: 0.006303014233708382 PSNR: 27.060853958129883 +[TRAIN] Iter: 477100 Loss: 0.007852795533835888 PSNR: 25.260711669921875 +[TRAIN] Iter: 477200 Loss: 0.006166137754917145 PSNR: 26.628694534301758 +[TRAIN] Iter: 477300 Loss: 0.007264494895935059 PSNR: 26.16205596923828 +[TRAIN] Iter: 477400 Loss: 0.005540085025131702 PSNR: 26.827253341674805 +[TRAIN] Iter: 477500 Loss: 0.006375510711222887 PSNR: 26.336278915405273 +[TRAIN] Iter: 477600 Loss: 0.00721086747944355 PSNR: 25.984210968017578 +[TRAIN] Iter: 477700 Loss: 0.005735327024012804 PSNR: 28.15434455871582 +[TRAIN] Iter: 477800 Loss: 0.006929095834493637 PSNR: 25.99279022216797 +[TRAIN] Iter: 477900 Loss: 0.006193715147674084 PSNR: 26.522192001342773 +[TRAIN] Iter: 478000 Loss: 0.005483544897288084 PSNR: 28.69664192199707 +[TRAIN] Iter: 478100 Loss: 0.007242014165967703 PSNR: 25.617490768432617 +[TRAIN] Iter: 478200 Loss: 0.006327328272163868 PSNR: 26.453994750976562 +[TRAIN] Iter: 478300 Loss: 0.006609172094613314 PSNR: 26.5966854095459 +[TRAIN] Iter: 478400 Loss: 0.006514459848403931 PSNR: 26.359384536743164 +[TRAIN] Iter: 478500 Loss: 0.006607412360608578 PSNR: 26.10866928100586 +[TRAIN] Iter: 478600 Loss: 0.007262773346155882 PSNR: 25.955415725708008 +[TRAIN] Iter: 478700 Loss: 0.00789845734834671 PSNR: 25.655773162841797 +[TRAIN] Iter: 478800 Loss: 0.005861658602952957 PSNR: 26.73150634765625 +[TRAIN] Iter: 478900 Loss: 0.006057970691472292 PSNR: 26.504179000854492 +[TRAIN] Iter: 479000 Loss: 0.00639695581048727 PSNR: 26.379058837890625 +[TRAIN] Iter: 479100 Loss: 0.004250090103596449 PSNR: 29.195810317993164 +[TRAIN] Iter: 479200 Loss: 0.006234422326087952 PSNR: 26.469953536987305 +[TRAIN] Iter: 479300 Loss: 0.006745906546711922 PSNR: 26.220781326293945 +[TRAIN] Iter: 479400 Loss: 0.0059285350143909454 PSNR: 26.739253997802734 +[TRAIN] Iter: 479500 Loss: 0.0054685454815626144 PSNR: 27.129365921020508 +[TRAIN] Iter: 479600 Loss: 0.006464952602982521 PSNR: 26.51226043701172 +[TRAIN] Iter: 479700 Loss: 0.005299621261656284 PSNR: 28.070573806762695 +[TRAIN] Iter: 479800 Loss: 0.006366334855556488 PSNR: 27.305442810058594 +[TRAIN] Iter: 479900 Loss: 0.006811907514929771 PSNR: 27.08878517150879 +Saved checkpoints at ./logs/TUT-out-doll-360-np/480000.tar +[TRAIN] Iter: 480000 Loss: 0.005999251734465361 PSNR: 27.197525024414062 +[TRAIN] Iter: 480100 Loss: 0.006485590245574713 PSNR: 26.482580184936523 +[TRAIN] Iter: 480200 Loss: 0.00494447723031044 PSNR: 27.68781089782715 +[TRAIN] Iter: 480300 Loss: 0.007439745590090752 PSNR: 
25.672504425048828 +[TRAIN] Iter: 480400 Loss: 0.007389081176370382 PSNR: 26.429645538330078 +[TRAIN] Iter: 480500 Loss: 0.005247275810688734 PSNR: 28.234224319458008 +[TRAIN] Iter: 480600 Loss: 0.006524032447487116 PSNR: 27.043886184692383 +[TRAIN] Iter: 480700 Loss: 0.006687796674668789 PSNR: 26.08539581298828 +[TRAIN] Iter: 480800 Loss: 0.006988388951867819 PSNR: 25.748497009277344 +[TRAIN] Iter: 480900 Loss: 0.006781900767236948 PSNR: 27.242036819458008 +[TRAIN] Iter: 481000 Loss: 0.006085475441068411 PSNR: 27.49491310119629 +[TRAIN] Iter: 481100 Loss: 0.005743037443608046 PSNR: 26.740999221801758 +[TRAIN] Iter: 481200 Loss: 0.005458476487547159 PSNR: 27.1852970123291 +[TRAIN] Iter: 481300 Loss: 0.004270837642252445 PSNR: 28.63140869140625 +[TRAIN] Iter: 481400 Loss: 0.006019355729222298 PSNR: 27.72768211364746 +[TRAIN] Iter: 481500 Loss: 0.005525847431272268 PSNR: 28.31656265258789 +[TRAIN] Iter: 481600 Loss: 0.005743149667978287 PSNR: 27.190719604492188 +[TRAIN] Iter: 481700 Loss: 0.006119470112025738 PSNR: 26.82689094543457 +[TRAIN] Iter: 481800 Loss: 0.006735331378877163 PSNR: 26.171337127685547 +[TRAIN] Iter: 481900 Loss: 0.0071800178848207 PSNR: 26.489578247070312 +[TRAIN] Iter: 482000 Loss: 0.007012555375695229 PSNR: 25.833045959472656 +[TRAIN] Iter: 482100 Loss: 0.006213234271854162 PSNR: 27.441810607910156 +[TRAIN] Iter: 482200 Loss: 0.005714861676096916 PSNR: 27.16595458984375 +[TRAIN] Iter: 482300 Loss: 0.006469322368502617 PSNR: 26.95476531982422 +[TRAIN] Iter: 482400 Loss: 0.007426248863339424 PSNR: 26.1425838470459 +[TRAIN] Iter: 482500 Loss: 0.00579297449439764 PSNR: 28.08895492553711 +[TRAIN] Iter: 482600 Loss: 0.0065569463185966015 PSNR: 25.920076370239258 +[TRAIN] Iter: 482700 Loss: 0.005287435371428728 PSNR: 27.53302574157715 +[TRAIN] Iter: 482800 Loss: 0.00616042036563158 PSNR: 27.804166793823242 +[TRAIN] Iter: 482900 Loss: 0.004779738839715719 PSNR: 28.940866470336914 +[TRAIN] Iter: 483000 Loss: 0.0062741851434111595 PSNR: 26.136119842529297 +[TRAIN] Iter: 483100 Loss: 0.006724828854203224 PSNR: 26.447507858276367 +[TRAIN] Iter: 483200 Loss: 0.005175924859941006 PSNR: 28.899097442626953 +[TRAIN] Iter: 483300 Loss: 0.006587176118046045 PSNR: 27.02072525024414 +[TRAIN] Iter: 483400 Loss: 0.006118671502918005 PSNR: 28.423519134521484 +[TRAIN] Iter: 483500 Loss: 0.00618753070011735 PSNR: 27.71682357788086 +[TRAIN] Iter: 483600 Loss: 0.006899637170135975 PSNR: 26.21944236755371 +[TRAIN] Iter: 483700 Loss: 0.006440741941332817 PSNR: 27.11531639099121 +[TRAIN] Iter: 483800 Loss: 0.006904477719217539 PSNR: 26.60393524169922 +[TRAIN] Iter: 483900 Loss: 0.005223352462053299 PSNR: 27.7397403717041 +[TRAIN] Iter: 484000 Loss: 0.005360984243452549 PSNR: 27.423372268676758 +[TRAIN] Iter: 484100 Loss: 0.00619962252676487 PSNR: 26.82084083557129 +[TRAIN] Iter: 484200 Loss: 0.0064257425256073475 PSNR: 26.2039852142334 +[TRAIN] Iter: 484300 Loss: 0.006666851229965687 PSNR: 26.54948616027832 +[TRAIN] Iter: 484400 Loss: 0.007705261930823326 PSNR: 25.31477165222168 +[TRAIN] Iter: 484500 Loss: 0.005874675698578358 PSNR: 27.142261505126953 +[TRAIN] Iter: 484600 Loss: 0.007073866203427315 PSNR: 26.672016143798828 +[TRAIN] Iter: 484700 Loss: 0.005617584567517042 PSNR: 27.68207359313965 +[TRAIN] Iter: 484800 Loss: 0.006231221836060286 PSNR: 26.482206344604492 +[TRAIN] Iter: 484900 Loss: 0.007020300719887018 PSNR: 26.036231994628906 +[TRAIN] Iter: 485000 Loss: 0.006551899015903473 PSNR: 26.65728187561035 +[TRAIN] Iter: 485100 Loss: 0.005213769618421793 PSNR: 26.824813842773438 +[TRAIN] Iter: 
485200 Loss: 0.006273502483963966 PSNR: 26.49694061279297 +[TRAIN] Iter: 485300 Loss: 0.007278320845216513 PSNR: 25.669082641601562 +[TRAIN] Iter: 485400 Loss: 0.005920596420764923 PSNR: 26.944175720214844 +[TRAIN] Iter: 485500 Loss: 0.00552550982683897 PSNR: 26.76341438293457 +[TRAIN] Iter: 485600 Loss: 0.006326887756586075 PSNR: 27.033714294433594 +[TRAIN] Iter: 485700 Loss: 0.0045948210172355175 PSNR: 28.222705841064453 +[TRAIN] Iter: 485800 Loss: 0.0074643325060606 PSNR: 26.295862197875977 +[TRAIN] Iter: 485900 Loss: 0.006093018688261509 PSNR: 26.562490463256836 +[TRAIN] Iter: 486000 Loss: 0.00660718372091651 PSNR: 27.773283004760742 +[TRAIN] Iter: 486100 Loss: 0.007354288827627897 PSNR: 25.934059143066406 +[TRAIN] Iter: 486200 Loss: 0.004429265856742859 PSNR: 29.241050720214844 +[TRAIN] Iter: 486300 Loss: 0.007159288041293621 PSNR: 26.42209243774414 +[TRAIN] Iter: 486400 Loss: 0.0057037994265556335 PSNR: 27.209501266479492 +[TRAIN] Iter: 486500 Loss: 0.006925481837242842 PSNR: 25.792312622070312 +[TRAIN] Iter: 486600 Loss: 0.006704818457365036 PSNR: 25.72608184814453 +[TRAIN] Iter: 486700 Loss: 0.005779806990176439 PSNR: 27.619312286376953 +[TRAIN] Iter: 486800 Loss: 0.0073576439172029495 PSNR: 26.205392837524414 +[TRAIN] Iter: 486900 Loss: 0.00675900187343359 PSNR: 26.35142707824707 +[TRAIN] Iter: 487000 Loss: 0.006754055619239807 PSNR: 26.57666778564453 +[TRAIN] Iter: 487100 Loss: 0.006586065050214529 PSNR: 26.793872833251953 +[TRAIN] Iter: 487200 Loss: 0.005976228509098291 PSNR: 26.560123443603516 +[TRAIN] Iter: 487300 Loss: 0.007604752667248249 PSNR: 25.217134475708008 +[TRAIN] Iter: 487400 Loss: 0.006717660464346409 PSNR: 26.615741729736328 +[TRAIN] Iter: 487500 Loss: 0.006970387417823076 PSNR: 26.315250396728516 +[TRAIN] Iter: 487600 Loss: 0.006847890093922615 PSNR: 26.44622230529785 +[TRAIN] Iter: 487700 Loss: 0.005958865862339735 PSNR: 27.33734703063965 +[TRAIN] Iter: 487800 Loss: 0.006718948017805815 PSNR: 26.470256805419922 +[TRAIN] Iter: 487900 Loss: 0.00660821795463562 PSNR: 26.16390037536621 +[TRAIN] Iter: 488000 Loss: 0.006894397083669901 PSNR: 25.93087387084961 +[TRAIN] Iter: 488100 Loss: 0.006732874549925327 PSNR: 26.27797508239746 +[TRAIN] Iter: 488200 Loss: 0.0062457965686917305 PSNR: 26.4458065032959 +[TRAIN] Iter: 488300 Loss: 0.005409682169556618 PSNR: 27.19704818725586 +[TRAIN] Iter: 488400 Loss: 0.006575143896043301 PSNR: 26.576276779174805 +[TRAIN] Iter: 488500 Loss: 0.005008521024137735 PSNR: 28.880098342895508 +[TRAIN] Iter: 488600 Loss: 0.007479489780962467 PSNR: 26.12542152404785 +[TRAIN] Iter: 488700 Loss: 0.0073730237782001495 PSNR: 26.03522300720215 +[TRAIN] Iter: 488800 Loss: 0.005872714798897505 PSNR: 27.15614128112793 +[TRAIN] Iter: 488900 Loss: 0.006932140327990055 PSNR: 26.24205207824707 +[TRAIN] Iter: 489000 Loss: 0.00673750089481473 PSNR: 26.367998123168945 +[TRAIN] Iter: 489100 Loss: 0.005583691410720348 PSNR: 28.023584365844727 +[TRAIN] Iter: 489200 Loss: 0.007346035446971655 PSNR: 26.038349151611328 +[TRAIN] Iter: 489300 Loss: 0.007281598635017872 PSNR: 25.86423110961914 +[TRAIN] Iter: 489400 Loss: 0.006193432025611401 PSNR: 27.36391258239746 +[TRAIN] Iter: 489500 Loss: 0.005944938864558935 PSNR: 26.74937629699707 +[TRAIN] Iter: 489600 Loss: 0.006125772371888161 PSNR: 26.99604034423828 +[TRAIN] Iter: 489700 Loss: 0.005613869987428188 PSNR: 27.37786102294922 +[TRAIN] Iter: 489800 Loss: 0.004862354137003422 PSNR: 28.23711585998535 +[TRAIN] Iter: 489900 Loss: 0.005709603428840637 PSNR: 27.671770095825195 +Saved checkpoints at 
./logs/TUT-out-doll-360-np/490000.tar +[TRAIN] Iter: 490000 Loss: 0.007050144020467997 PSNR: 26.256258010864258 +[TRAIN] Iter: 490100 Loss: 0.007571827620267868 PSNR: 26.232458114624023 +[TRAIN] Iter: 490200 Loss: 0.005842161830514669 PSNR: 27.812950134277344 +[TRAIN] Iter: 490300 Loss: 0.005697745364159346 PSNR: 27.288877487182617 +[TRAIN] Iter: 490400 Loss: 0.007060166448354721 PSNR: 25.821435928344727 +[TRAIN] Iter: 490500 Loss: 0.0047852154821157455 PSNR: 28.347082138061523 +[TRAIN] Iter: 490600 Loss: 0.0059858644381165504 PSNR: 26.587596893310547 +[TRAIN] Iter: 490700 Loss: 0.007150780875235796 PSNR: 26.0679931640625 +[TRAIN] Iter: 490800 Loss: 0.006474783644080162 PSNR: 26.426393508911133 +[TRAIN] Iter: 490900 Loss: 0.004663653206080198 PSNR: 28.025747299194336 +[TRAIN] Iter: 491000 Loss: 0.005913246423006058 PSNR: 26.932737350463867 +[TRAIN] Iter: 491100 Loss: 0.0055659860372543335 PSNR: 27.30903434753418 +[TRAIN] Iter: 491200 Loss: 0.004636807832866907 PSNR: 27.67950439453125 +[TRAIN] Iter: 491300 Loss: 0.005636129528284073 PSNR: 27.01567268371582 +[TRAIN] Iter: 491400 Loss: 0.007995706051588058 PSNR: 25.5919189453125 +[TRAIN] Iter: 491500 Loss: 0.0064344219863414764 PSNR: 26.48703384399414 +[TRAIN] Iter: 491600 Loss: 0.00638230936601758 PSNR: 26.543624877929688 +[TRAIN] Iter: 491700 Loss: 0.006582616828382015 PSNR: 26.549407958984375 +[TRAIN] Iter: 491800 Loss: 0.0069979103282094 PSNR: 25.791095733642578 +[TRAIN] Iter: 491900 Loss: 0.006846762262284756 PSNR: 26.290504455566406 +[TRAIN] Iter: 492000 Loss: 0.0074529219418764114 PSNR: 25.473121643066406 +[TRAIN] Iter: 492100 Loss: 0.006604489870369434 PSNR: 27.279754638671875 +[TRAIN] Iter: 492200 Loss: 0.005700940266251564 PSNR: 27.68692970275879 +[TRAIN] Iter: 492300 Loss: 0.006250371225178242 PSNR: 26.770780563354492 +[TRAIN] Iter: 492400 Loss: 0.005779549945145845 PSNR: 27.422527313232422 +[TRAIN] Iter: 492500 Loss: 0.005602575838565826 PSNR: 27.83440589904785 +[TRAIN] Iter: 492600 Loss: 0.005881109274923801 PSNR: 27.182270050048828 +[TRAIN] Iter: 492700 Loss: 0.006135098170489073 PSNR: 27.536592483520508 +[TRAIN] Iter: 492800 Loss: 0.004790547303855419 PSNR: 28.6484317779541 +[TRAIN] Iter: 492900 Loss: 0.006300590466707945 PSNR: 25.888774871826172 +[TRAIN] Iter: 493000 Loss: 0.006516822148114443 PSNR: 26.615230560302734 +[TRAIN] Iter: 493100 Loss: 0.006058672443032265 PSNR: 27.110599517822266 +[TRAIN] Iter: 493200 Loss: 0.005730128847062588 PSNR: 27.682233810424805 +[TRAIN] Iter: 493300 Loss: 0.00760311633348465 PSNR: 25.522249221801758 +[TRAIN] Iter: 493400 Loss: 0.004777866415679455 PSNR: 28.240644454956055 +[TRAIN] Iter: 493500 Loss: 0.00597589323297143 PSNR: 27.667030334472656 +[TRAIN] Iter: 493600 Loss: 0.005941659677773714 PSNR: 26.791704177856445 +[TRAIN] Iter: 493700 Loss: 0.007414798252284527 PSNR: 25.72909164428711 +[TRAIN] Iter: 493800 Loss: 0.0065756370313465595 PSNR: 26.734050750732422 +[TRAIN] Iter: 493900 Loss: 0.007171900477260351 PSNR: 25.6993408203125 +[TRAIN] Iter: 494000 Loss: 0.007020237855613232 PSNR: 26.009328842163086 +[TRAIN] Iter: 494100 Loss: 0.0063827186822891235 PSNR: 26.892581939697266 +[TRAIN] Iter: 494200 Loss: 0.005884738173335791 PSNR: 26.66048240661621 +[TRAIN] Iter: 494300 Loss: 0.006390073336660862 PSNR: 26.298721313476562 +[TRAIN] Iter: 494400 Loss: 0.005092763341963291 PSNR: 27.648221969604492 +[TRAIN] Iter: 494500 Loss: 0.005503139924257994 PSNR: 27.335325241088867 +[TRAIN] Iter: 494600 Loss: 0.00647537037730217 PSNR: 26.340377807617188 +[TRAIN] Iter: 494700 Loss: 0.005876048002392054 
PSNR: 26.820266723632812 +[TRAIN] Iter: 494800 Loss: 0.005715264473110437 PSNR: 28.0810546875 +[TRAIN] Iter: 494900 Loss: 0.00684796366840601 PSNR: 26.56979751586914 +[TRAIN] Iter: 495000 Loss: 0.007622472010552883 PSNR: 25.777679443359375 +[TRAIN] Iter: 495100 Loss: 0.005370225291699171 PSNR: 27.934642791748047 +[TRAIN] Iter: 495200 Loss: 0.00484473817050457 PSNR: 28.64299201965332 +[TRAIN] Iter: 495300 Loss: 0.006233879365026951 PSNR: 26.492843627929688 +[TRAIN] Iter: 495400 Loss: 0.006115409545600414 PSNR: 26.625776290893555 +[TRAIN] Iter: 495500 Loss: 0.008076101541519165 PSNR: 25.31505012512207 +[TRAIN] Iter: 495600 Loss: 0.006756071001291275 PSNR: 26.43683433532715 +[TRAIN] Iter: 495700 Loss: 0.005376819055527449 PSNR: 27.247297286987305 +[TRAIN] Iter: 495800 Loss: 0.004738560877740383 PSNR: 28.052486419677734 +[TRAIN] Iter: 495900 Loss: 0.007114632520824671 PSNR: 26.18967056274414 +[TRAIN] Iter: 496000 Loss: 0.005934993736445904 PSNR: 27.448837280273438 +[TRAIN] Iter: 496100 Loss: 0.0064938487485051155 PSNR: 26.620275497436523 +[TRAIN] Iter: 496200 Loss: 0.004997129086405039 PSNR: 27.47862434387207 +[TRAIN] Iter: 496300 Loss: 0.006539081688970327 PSNR: 26.718320846557617 +[TRAIN] Iter: 496400 Loss: 0.007517630234360695 PSNR: 25.472322463989258 +[TRAIN] Iter: 496500 Loss: 0.006293724291026592 PSNR: 26.21462059020996 +[TRAIN] Iter: 496600 Loss: 0.004830579273402691 PSNR: 27.88196563720703 +[TRAIN] Iter: 496700 Loss: 0.006588824093341827 PSNR: 26.511619567871094 +[TRAIN] Iter: 496800 Loss: 0.0046925595961511135 PSNR: 28.545150756835938 +[TRAIN] Iter: 496900 Loss: 0.006165255792438984 PSNR: 26.364761352539062 +[TRAIN] Iter: 497000 Loss: 0.005886164028197527 PSNR: 27.447002410888672 +[TRAIN] Iter: 497100 Loss: 0.005986835807561874 PSNR: 27.914287567138672 +[TRAIN] Iter: 497200 Loss: 0.005305469036102295 PSNR: 27.409982681274414 +[TRAIN] Iter: 497300 Loss: 0.0067319124937057495 PSNR: 26.620874404907227 +[TRAIN] Iter: 497400 Loss: 0.0065779248252511024 PSNR: 27.095422744750977 +[TRAIN] Iter: 497500 Loss: 0.004986326210200787 PSNR: 28.185649871826172 +[TRAIN] Iter: 497600 Loss: 0.006334109231829643 PSNR: 26.20097541809082 +[TRAIN] Iter: 497700 Loss: 0.005428314674645662 PSNR: 27.309398651123047 +[TRAIN] Iter: 497800 Loss: 0.00669598113745451 PSNR: 26.181583404541016 +[TRAIN] Iter: 497900 Loss: 0.005829336121678352 PSNR: 27.714420318603516 +[TRAIN] Iter: 498000 Loss: 0.006508919410407543 PSNR: 26.13218116760254 +[TRAIN] Iter: 498100 Loss: 0.006745981052517891 PSNR: 26.776554107666016 +[TRAIN] Iter: 498200 Loss: 0.0054058837704360485 PSNR: 28.30610466003418 +[TRAIN] Iter: 498300 Loss: 0.0062177712097764015 PSNR: 26.537559509277344 +[TRAIN] Iter: 498400 Loss: 0.005658636335283518 PSNR: 27.90471839904785 +[TRAIN] Iter: 498500 Loss: 0.006392733659595251 PSNR: 26.519804000854492 +[TRAIN] Iter: 498600 Loss: 0.004774460103362799 PSNR: 28.68532943725586 +[TRAIN] Iter: 498700 Loss: 0.007989034056663513 PSNR: 25.67511558532715 +[TRAIN] Iter: 498800 Loss: 0.005516208708286285 PSNR: 27.798425674438477 +[TRAIN] Iter: 498900 Loss: 0.006733258254826069 PSNR: 26.534711837768555 +[TRAIN] Iter: 499000 Loss: 0.005274491384625435 PSNR: 27.515474319458008 +[TRAIN] Iter: 499100 Loss: 0.0061456128023564816 PSNR: 26.82978057861328 +[TRAIN] Iter: 499200 Loss: 0.0065623396076262 PSNR: 26.92643928527832 +[TRAIN] Iter: 499300 Loss: 0.006558482069522142 PSNR: 25.809112548828125 +[TRAIN] Iter: 499400 Loss: 0.007296664174646139 PSNR: 26.336957931518555 +[TRAIN] Iter: 499500 Loss: 0.006018815562129021 PSNR: 
27.893489837646484 +[TRAIN] Iter: 499600 Loss: 0.006511654704809189 PSNR: 26.838409423828125 +[TRAIN] Iter: 499700 Loss: 0.0057184770703315735 PSNR: 28.601011276245117 +[TRAIN] Iter: 499800 Loss: 0.006001724395900965 PSNR: 26.9909725189209 +[TRAIN] Iter: 499900 Loss: 0.006567761301994324 PSNR: 26.06740379333496 +Saved checkpoints at ./logs/TUT-out-doll-360-np/500000.tar +0 0.0008001327514648438 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.819551467895508 +2 22.016244173049927 +3 21.92249035835266 +4 21.310906410217285 +5 22.359988689422607 +6 22.662058353424072 +7 21.63681721687317 +8 22.74301290512085 +9 21.645284414291382 +10 22.531360387802124 +11 22.434773445129395 +12 22.079880237579346 +13 22.606736660003662 +14 22.369558095932007 +15 21.251485109329224 +16 21.62516188621521 +17 22.621909141540527 +18 22.09930157661438 +19 21.490631341934204 +20 21.738023281097412 +21 21.65930986404419 +22 21.702991485595703 +23 21.58882999420166 +24 22.441401958465576 +25 22.473184823989868 +26 21.206380367279053 +27 22.081746101379395 +28 22.085391759872437 +29 21.62801170349121 +30 22.55709719657898 +31 21.40914034843445 +32 21.68020796775818 +33 23.405975103378296 +34 21.719813346862793 +35 21.899859189987183 +36 21.96587896347046 +37 21.829794883728027 +38 21.762049198150635 +39 22.101189136505127 +40 21.556281328201294 +41 21.782358169555664 +42 22.75700879096985 +43 22.57408094406128 +44 21.464851140975952 +45 21.914506673812866 +46 23.086254119873047 +47 22.046687841415405 +48 21.64545965194702 +49 21.725335359573364 +50 22.027263402938843 +51 21.8004150390625 +52 21.318535566329956 +53 22.276190757751465 +54 21.499187469482422 +55 21.96587586402893 +56 22.055090188980103 +57 21.970913887023926 +58 22.015114545822144 +59 22.13447666168213 +60 21.568583250045776 +61 22.178250074386597 +62 22.20279097557068 +63 21.714657306671143 +64 21.746381759643555 +65 21.570562601089478 +66 21.659332752227783 +67 22.515503644943237 +68 22.701332807540894 +69 21.890538454055786 +70 22.23626947402954 +71 22.38973593711853 +72 22.267279386520386 +73 21.585484981536865 +74 22.006551027297974 +75 21.74043893814087 +76 22.64398431777954 +77 22.282149076461792 +78 21.74308490753174 +79 22.112848043441772 +80 21.796873092651367 +81 22.01941967010498 +82 21.99528741836548 +83 22.667627811431885 +84 21.690687894821167 +85 21.74548864364624 +86 21.98736882209778 +87 22.469066619873047 +88 23.114168882369995 +89 22.70131206512451 +90 21.271646976470947 +91 22.016716241836548 +92 23.45694851875305 +93 22.67888045310974 +94 21.633132457733154 +95 21.694287061691284 +96 21.431949615478516 +97 22.80787205696106 +98 21.884535312652588 +99 22.23259949684143 +100 22.51903748512268 +101 21.83573627471924 +102 21.754345417022705 +103 22.216278791427612 +104 22.588769912719727 +105 22.331716537475586 +106 21.90135884284973 +107 21.878140449523926 +108 21.91837167739868 +109 21.736298322677612 +110 21.233038902282715 +111 23.486703872680664 +112 21.998494863510132 +113 22.23510718345642 +114 21.895651817321777 +115 22.12818479537964 +116 21.699389457702637 +117 21.930174350738525 +118 21.900211095809937 +119 21.494287252426147 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[-9.7024e-01, -1.0544e+00, -1.2033e+00, -3.9188e+01], + [-2.1419e+00, -2.1813e+00, -1.9678e+00, -1.5197e+01], + [-2.1734e+00, -2.1046e+00, -2.1387e+00, -7.1933e+00], + ..., + [-2.8605e+01, -1.9461e+01, -1.4077e+01, 9.3714e+02], + [-2.9540e+01, -1.9646e+01, -1.4015e+01, 9.7912e+02], + [-2.9848e+01, -1.9614e+01, -1.4167e+01, 
9.8643e+02]], + + [[-9.9629e-01, -8.6679e-01, -7.0047e-01, -5.5573e+01], + [ 1.2233e+00, 1.2299e+00, 5.9984e-01, -7.6300e+01], + [-1.7841e+00, -1.7499e+00, -1.7176e+00, -7.7973e+01], + ..., + [-1.3814e+00, -1.4158e+00, -2.1665e+00, 4.1783e+01], + [-2.8718e+00, -2.8181e+00, -2.9556e+00, 6.7526e+01], + [-2.7143e+00, -2.2161e+00, -2.3568e+00, 1.1095e+02]], + + [[-1.5016e+00, -9.3659e-01, -2.3569e+00, -6.9176e+01], + [-8.4529e-01, -9.4950e-01, -1.7742e+00, -7.2245e+01], + [-9.8476e-01, -1.0945e+00, -1.8083e+00, -6.7016e+01], + ..., + [-1.2490e+01, -6.5885e+00, -7.4835e+00, 6.8942e+02], + [-1.0852e+01, -6.4451e+00, -8.6882e+00, 6.5817e+02], + [-1.1456e+01, -6.8477e+00, -8.9034e+00, 7.0243e+02]], + + ..., + + [[-1.4249e+00, -5.0609e-01, -1.5435e+00, -8.8207e+01], + [-1.1086e+00, -8.9594e-01, 3.6334e-02, -1.6243e+01], + [-1.0383e+00, -5.4374e-01, 3.0425e-01, -1.0581e+01], + ..., + [-2.3925e+01, -7.3093e+00, -4.9600e-01, 1.2801e+03], + [-2.3933e+01, -6.1641e+00, 3.0909e+00, 1.3112e+03], + [-2.1889e+01, -4.1171e+00, 4.3625e+00, 1.3055e+03]], + + [[-1.7631e+00, -6.2273e-01, 5.9784e-01, -4.1671e+01], + [-5.7767e-01, 2.6303e-01, 1.1662e+00, 4.8412e-01], + [-5.8108e-01, 2.4868e-01, 1.1516e+00, 9.3501e-01], + ..., + [-2.2628e+00, -1.6705e+00, 1.3472e+00, 2.0182e+02], + [-2.2122e+00, -1.6643e+00, 1.2718e+00, 1.8007e+02], + [-2.2290e+00, -1.5878e+00, 1.2830e+00, 1.9291e+02]], + + [[ 1.6299e+00, 2.6665e+00, 3.9083e+00, -2.7776e+01], + [ 1.7438e+00, 2.9427e+00, 3.3694e+00, -3.0706e+01], + [ 1.4413e+00, 2.5248e+00, 2.8709e+00, -2.9495e+01], + ..., + [-1.5319e+00, 2.8085e+00, 3.1467e+00, 1.0524e+03], + [-1.3918e+00, 2.7942e+00, 2.9439e+00, 1.0954e+03], + [-2.2502e+00, 2.2785e+00, 2.7970e+00, 1.0887e+03]]], + grad_fn=), 'rgb0': tensor([[0.2667, 0.2442, 0.2305], + [0.4301, 0.4651, 0.1689], + [0.2862, 0.2555, 0.2639], + ..., + [0.0561, 0.0695, 0.0903], + [0.3532, 0.5636, 0.7820], + [0.8729, 0.9555, 0.9884]], grad_fn=), 'disp0': tensor([ 53.6604, 9.3973, 59.1134, ..., 43.6489, 131.3781, 73.0954], + grad_fn=), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.0040, 0.0147, 0.0285, ..., 0.0051, 0.0046, 0.0045])} +0 0.0007042884826660156 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 22.509849071502686 +2 22.310954809188843 +3 22.118295907974243 +4 21.838192224502563 +5 21.96587586402893 +6 21.90956449508667 +7 23.10849356651306 +8 22.563543796539307 +9 23.39840841293335 +10 22.409268856048584 +11 21.602410078048706 +12 22.892157077789307 +13 21.744879007339478 +14 22.202178478240967 +15 22.61875891685486 +16 21.733701467514038 +17 21.32873797416687 +18 22.738697290420532 +19 22.275479078292847 +20 21.933537244796753 +21 22.052286863327026 +22 22.147352695465088 +23 22.294249534606934 +24 21.68527388572693 +25 22.09452533721924 +26 22.100404262542725 +27 21.345598936080933 +28 22.33551049232483 +29 22.009321212768555 +30 21.41327738761902 +31 22.74330973625183 +32 20.897159576416016 +33 22.15603542327881 +34 21.328471422195435 +35 22.532998085021973 +36 22.218055725097656 +37 21.579329252243042 +38 21.97023034095764 +39 21.39460802078247 +40 21.808024406433105 +41 22.130407571792603 +42 22.45107626914978 +43 21.15795135498047 +44 21.490346431732178 +45 21.541877269744873 +46 22.725461959838867 +47 21.79417634010315 +48 21.838619709014893 +49 21.654317140579224 +50 21.721898078918457 +51 21.85709500312805 +52 21.626016855239868 +53 22.21009850502014 +54 21.537241220474243 +55 22.48450493812561 +56 21.829848527908325 +57 22.479297399520874 +58 20.95536160469055 +59 21.853789806365967 
+60 21.790557622909546 +61 22.21801733970642 +62 21.380199670791626 +63 21.771448850631714 +64 21.909815073013306 +65 21.210970640182495 +66 21.807682514190674 +67 21.580728769302368 +68 21.87993025779724 +69 21.981782913208008 +70 21.7809157371521 +71 21.998749017715454 +72 22.129047393798828 +73 21.729541063308716 +74 21.822530269622803 +75 21.36661386489868 +76 21.395394802093506 +77 21.48321557044983 +78 21.65160632133484 +79 22.73666787147522 +80 21.291409015655518 +81 21.305924892425537 +82 22.46678400039673 +83 21.71197772026062 +84 21.74187421798706 +85 21.990843057632446 +86 22.329749822616577 +87 20.849995136260986 +88 21.715593338012695 +89 21.604503870010376 +90 22.016814708709717 +91 21.924524545669556 +92 21.58213710784912 +93 22.591516733169556 +94 21.178761959075928 +95 21.74002504348755 +96 21.438976764678955 +97 21.432748317718506 +98 21.760019063949585 +99 22.10089898109436 +100 22.654083728790283 +101 21.289257764816284 +102 22.152443885803223 +103 22.771197080612183 +104 21.55667495727539 +105 21.67030119895935 +106 21.62791419029236 +107 21.96145224571228 +108 22.33451771736145 +109 22.012261152267456 +110 22.40938973426819 +111 22.2674503326416 +112 21.810181140899658 +113 22.453364610671997 +114 21.89696502685547 +115 23.287062644958496 +116 22.073509693145752 +117 21.834714651107788 +118 22.370386600494385 +119 21.53245735168457 +test poses shape torch.Size([4, 3, 4]) +0 0.001630544662475586 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 22.053492784500122 +2 23.063627004623413 +3 21.306122303009033 +Saved test set +[TRAIN] Iter: 500000 Loss: 0.005331831052899361 PSNR: 28.771968841552734 +[TRAIN] Iter: 500100 Loss: 0.007349651772528887 PSNR: 25.932289123535156 +[TRAIN] Iter: 500200 Loss: 0.007310229819267988 PSNR: 26.16008758544922 +[TRAIN] Iter: 500300 Loss: 0.00604682881385088 PSNR: 26.917736053466797 +[TRAIN] Iter: 500400 Loss: 0.00735620129853487 PSNR: 25.91344451904297 +[TRAIN] Iter: 500500 Loss: 0.006924852728843689 PSNR: 25.842561721801758 +[TRAIN] Iter: 500600 Loss: 0.006573145277798176 PSNR: 26.003023147583008 +[TRAIN] Iter: 500700 Loss: 0.0057478067465126514 PSNR: 27.304758071899414 +[TRAIN] Iter: 500800 Loss: 0.006794859189540148 PSNR: 26.441307067871094 +[TRAIN] Iter: 500900 Loss: 0.006290128454566002 PSNR: 26.865049362182617 +[TRAIN] Iter: 501000 Loss: 0.00617385096848011 PSNR: 26.495729446411133 +[TRAIN] Iter: 501100 Loss: 0.007182458881288767 PSNR: 26.04268455505371 +[TRAIN] Iter: 501200 Loss: 0.00706774927675724 PSNR: 25.4720516204834 +[TRAIN] Iter: 501300 Loss: 0.004939014092087746 PSNR: 28.657451629638672 +[TRAIN] Iter: 501400 Loss: 0.007565136067569256 PSNR: 25.536476135253906 +[TRAIN] Iter: 501500 Loss: 0.005498194135725498 PSNR: 28.05465316772461 +[TRAIN] Iter: 501600 Loss: 0.006678114179521799 PSNR: 26.293466567993164 +[TRAIN] Iter: 501700 Loss: 0.0061238231137394905 PSNR: 26.671875 +[TRAIN] Iter: 501800 Loss: 0.004511580802500248 PSNR: 28.63298797607422 +[TRAIN] Iter: 501900 Loss: 0.0068024760112166405 PSNR: 25.887279510498047 +[TRAIN] Iter: 502000 Loss: 0.005131854675710201 PSNR: 28.563459396362305 +[TRAIN] Iter: 502100 Loss: 0.007123881485313177 PSNR: 25.686565399169922 +[TRAIN] Iter: 502200 Loss: 0.007101154420524836 PSNR: 26.186767578125 +[TRAIN] Iter: 502300 Loss: 0.007018284872174263 PSNR: 26.036516189575195 +[TRAIN] Iter: 502400 Loss: 0.006029021460562944 PSNR: 27.130464553833008 +[TRAIN] Iter: 502500 Loss: 0.006201496347784996 PSNR: 26.99779510498047 +[TRAIN] Iter: 502600 Loss: 0.006661837454885244 PSNR: 26.575796127319336 
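The per-frame blocks above ("0 0.0008...", "1 22.37...", ..., "Done, saving (120, 320, 640, 3) (120, 320, 640)") come from the periodic render pass: each integer is a frame index, each float is the wall-clock seconds spent rendering that 320x640 frame, and the 120 finished frames are stacked into an RGB array of shape (120, 320, 640, 3) and a disparity array of shape (120, 320, 640). The extras dict printed alongside holds the renderer's auxiliary outputs: 'raw' is the unactivated per-sample network output ([R, G, B, sigma]), 'rgb0'/'disp0'/'acc0' are the coarse-pass colour, disparity, and accumulated opacity, and 'z_std' is the per-ray standard deviation of the resampled depths. A minimal sketch of such a timing loop, modeled loosely on nerf-pytorch's render_path (render_fn is a hypothetical stand-in for the actual renderer):

    import time
    import numpy as np

    def render_frames(render_poses, render_fn):
        """Render one frame per pose, printing the elapsed time per frame."""
        rgbs, disps = [], []
        t = time.time()
        for i, pose in enumerate(render_poses):
            print(i, time.time() - t)        # "0 0.0008...", "1 22.37...", ...
            t = time.time()
            rgb, disp = render_fn(pose)      # torch tensors: (320, 640, 3), (320, 640)
            if i == 0:
                print(rgb.shape, disp.shape)
            rgbs.append(rgb.detach().cpu().numpy())
            disps.append(disp.detach().cpu().numpy())
        rgbs, disps = np.stack(rgbs), np.stack(disps)
        print('Done, saving', rgbs.shape, disps.shape)
        return rgbs, disps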
+[TRAIN] Iter: 502700 Loss: 0.007322567980736494 PSNR: 25.586532592773438 +[TRAIN] Iter: 502800 Loss: 0.005444027483463287 PSNR: 28.017080307006836 +[TRAIN] Iter: 502900 Loss: 0.005716565530747175 PSNR: 27.47431182861328 +[TRAIN] Iter: 503000 Loss: 0.007150846999138594 PSNR: 25.862289428710938 +[TRAIN] Iter: 503100 Loss: 0.0051041459664702415 PSNR: 27.57402801513672 +[TRAIN] Iter: 503200 Loss: 0.0059090834110975266 PSNR: 26.79439353942871 +[TRAIN] Iter: 503300 Loss: 0.006537753157317638 PSNR: 26.620079040527344 +[TRAIN] Iter: 503400 Loss: 0.006795315071940422 PSNR: 26.418315887451172 +[TRAIN] Iter: 503500 Loss: 0.00597774563357234 PSNR: 26.529848098754883 +[TRAIN] Iter: 503600 Loss: 0.005091373808681965 PSNR: 28.097949981689453 +[TRAIN] Iter: 503700 Loss: 0.0072099328972399235 PSNR: 25.664108276367188 +[TRAIN] Iter: 503800 Loss: 0.00658711651340127 PSNR: 26.508007049560547 +[TRAIN] Iter: 503900 Loss: 0.005526222754269838 PSNR: 27.204030990600586 +[TRAIN] Iter: 504000 Loss: 0.0049508255906403065 PSNR: 28.52042579650879 +[TRAIN] Iter: 504100 Loss: 0.005854860879480839 PSNR: 26.05337905883789 +[TRAIN] Iter: 504200 Loss: 0.006716047879308462 PSNR: 26.47115707397461 +[TRAIN] Iter: 504300 Loss: 0.0044503528624773026 PSNR: 28.38850975036621 +[TRAIN] Iter: 504400 Loss: 0.004925934597849846 PSNR: 28.741016387939453 +[TRAIN] Iter: 504500 Loss: 0.007226109504699707 PSNR: 25.830644607543945 +[TRAIN] Iter: 504600 Loss: 0.0054221078753471375 PSNR: 27.654178619384766 +[TRAIN] Iter: 504700 Loss: 0.007876136340200901 PSNR: 25.627132415771484 +[TRAIN] Iter: 504800 Loss: 0.006104718428105116 PSNR: 27.097280502319336 +[TRAIN] Iter: 504900 Loss: 0.005439263768494129 PSNR: 27.611343383789062 +[TRAIN] Iter: 505000 Loss: 0.005410068668425083 PSNR: 27.480857849121094 +[TRAIN] Iter: 505100 Loss: 0.007195915561169386 PSNR: 26.036880493164062 +[TRAIN] Iter: 505200 Loss: 0.006165328901261091 PSNR: 27.362056732177734 +[TRAIN] Iter: 505300 Loss: 0.006308178883045912 PSNR: 26.46310043334961 +[TRAIN] Iter: 505400 Loss: 0.00592179736122489 PSNR: 27.215621948242188 +[TRAIN] Iter: 505500 Loss: 0.007176869083195925 PSNR: 25.90171241760254 +[TRAIN] Iter: 505600 Loss: 0.006428980268537998 PSNR: 26.40884780883789 +[TRAIN] Iter: 505700 Loss: 0.007233790121972561 PSNR: 25.602008819580078 +[TRAIN] Iter: 505800 Loss: 0.006100408732891083 PSNR: 26.31214714050293 +[TRAIN] Iter: 505900 Loss: 0.005596051458269358 PSNR: 27.518293380737305 +[TRAIN] Iter: 506000 Loss: 0.007570881396532059 PSNR: 25.649261474609375 +[TRAIN] Iter: 506100 Loss: 0.004740248899906874 PSNR: 28.628299713134766 +[TRAIN] Iter: 506200 Loss: 0.006920462939888239 PSNR: 25.974252700805664 +[TRAIN] Iter: 506300 Loss: 0.00863193441182375 PSNR: 25.349306106567383 +[TRAIN] Iter: 506400 Loss: 0.0058750188909471035 PSNR: 26.68578338623047 +[TRAIN] Iter: 506500 Loss: 0.00582108274102211 PSNR: 26.80735969543457 +[TRAIN] Iter: 506600 Loss: 0.006955370772629976 PSNR: 26.25444221496582 +[TRAIN] Iter: 506700 Loss: 0.005268647335469723 PSNR: 27.28980827331543 +[TRAIN] Iter: 506800 Loss: 0.006474709138274193 PSNR: 26.32240104675293 +[TRAIN] Iter: 506900 Loss: 0.006420131307095289 PSNR: 26.219562530517578 +[TRAIN] Iter: 507000 Loss: 0.00579815823584795 PSNR: 27.49361228942871 +[TRAIN] Iter: 507100 Loss: 0.004630757961422205 PSNR: 28.409757614135742 +[TRAIN] Iter: 507200 Loss: 0.006746904458850622 PSNR: 25.840293884277344 +[TRAIN] Iter: 507300 Loss: 0.005199936218559742 PSNR: 27.180713653564453 +[TRAIN] Iter: 507400 Loss: 0.006484678015112877 PSNR: 26.887609481811523 +[TRAIN] Iter: 
507500 Loss: 0.00522993691265583 PSNR: 27.213132858276367 +[TRAIN] Iter: 507600 Loss: 0.004794355481863022 PSNR: 29.043638229370117 +[TRAIN] Iter: 507700 Loss: 0.007487887050956488 PSNR: 25.864152908325195 +[TRAIN] Iter: 507800 Loss: 0.004924537148326635 PSNR: 27.727657318115234 +[TRAIN] Iter: 507900 Loss: 0.006052684038877487 PSNR: 27.976713180541992 +[TRAIN] Iter: 508000 Loss: 0.005241054110229015 PSNR: 28.09442710876465 +[TRAIN] Iter: 508100 Loss: 0.006105005741119385 PSNR: 26.87759017944336 +[TRAIN] Iter: 508200 Loss: 0.006452503614127636 PSNR: 26.370935440063477 +[TRAIN] Iter: 508300 Loss: 0.007172245532274246 PSNR: 26.451705932617188 +[TRAIN] Iter: 508400 Loss: 0.008066010661423206 PSNR: 25.004480361938477 +[TRAIN] Iter: 508500 Loss: 0.00624065799638629 PSNR: 27.131237030029297 +[TRAIN] Iter: 508600 Loss: 0.006499012466520071 PSNR: 27.476266860961914 +[TRAIN] Iter: 508700 Loss: 0.007434048689901829 PSNR: 25.537689208984375 +[TRAIN] Iter: 508800 Loss: 0.007808885537087917 PSNR: 26.448932647705078 +[TRAIN] Iter: 508900 Loss: 0.0065035452134907246 PSNR: 26.58513069152832 +[TRAIN] Iter: 509000 Loss: 0.0056077223271131516 PSNR: 26.881805419921875 +[TRAIN] Iter: 509100 Loss: 0.006646132096648216 PSNR: 26.2499942779541 +[TRAIN] Iter: 509200 Loss: 0.005293024703860283 PSNR: 28.812042236328125 +[TRAIN] Iter: 509300 Loss: 0.005665637087076902 PSNR: 27.656673431396484 +[TRAIN] Iter: 509400 Loss: 0.006153848022222519 PSNR: 26.793384552001953 +[TRAIN] Iter: 509500 Loss: 0.005902455188333988 PSNR: 27.035114288330078 +[TRAIN] Iter: 509600 Loss: 0.005071304738521576 PSNR: 28.053617477416992 +[TRAIN] Iter: 509700 Loss: 0.0058913566172122955 PSNR: 26.901830673217773 +[TRAIN] Iter: 509800 Loss: 0.006379810161888599 PSNR: 27.10404396057129 +[TRAIN] Iter: 509900 Loss: 0.006805024575442076 PSNR: 26.446014404296875 +Saved checkpoints at ./logs/TUT-out-doll-360-np/510000.tar +[TRAIN] Iter: 510000 Loss: 0.006098870653659105 PSNR: 26.640132904052734 +[TRAIN] Iter: 510100 Loss: 0.0050303698517382145 PSNR: 28.186832427978516 +[TRAIN] Iter: 510200 Loss: 0.006939682178199291 PSNR: 26.07698631286621 +[TRAIN] Iter: 510300 Loss: 0.007094766478985548 PSNR: 26.48176383972168 +[TRAIN] Iter: 510400 Loss: 0.006111535243690014 PSNR: 26.961698532104492 +[TRAIN] Iter: 510500 Loss: 0.004758971743285656 PSNR: 28.55143928527832 +[TRAIN] Iter: 510600 Loss: 0.004467777907848358 PSNR: 28.99515724182129 +[TRAIN] Iter: 510700 Loss: 0.006437498144805431 PSNR: 26.47810935974121 +[TRAIN] Iter: 510800 Loss: 0.00786469504237175 PSNR: 25.18352699279785 +[TRAIN] Iter: 510900 Loss: 0.007550553884357214 PSNR: 25.8798885345459 +[TRAIN] Iter: 511000 Loss: 0.005983191076666117 PSNR: 26.622385025024414 +[TRAIN] Iter: 511100 Loss: 0.0052470676600933075 PSNR: 27.458980560302734 +[TRAIN] Iter: 511200 Loss: 0.006244052201509476 PSNR: 27.467660903930664 +[TRAIN] Iter: 511300 Loss: 0.0075200628489255905 PSNR: 26.583087921142578 +[TRAIN] Iter: 511400 Loss: 0.005456935148686171 PSNR: 27.587657928466797 +[TRAIN] Iter: 511500 Loss: 0.006806527264416218 PSNR: 25.832866668701172 +[TRAIN] Iter: 511600 Loss: 0.007596035022288561 PSNR: 25.588096618652344 +[TRAIN] Iter: 511700 Loss: 0.005718772299587727 PSNR: 27.138961791992188 +[TRAIN] Iter: 511800 Loss: 0.004984665662050247 PSNR: 28.10225486755371 +[TRAIN] Iter: 511900 Loss: 0.00668111490085721 PSNR: 26.2445011138916 +[TRAIN] Iter: 512000 Loss: 0.007947677746415138 PSNR: 25.07574462890625 +[TRAIN] Iter: 512100 Loss: 0.0056587946601212025 PSNR: 27.254638671875 +[TRAIN] Iter: 512200 Loss: 0.005453960970044136 
PSNR: 29.02867889404297 +[TRAIN] Iter: 512300 Loss: 0.006710038986057043 PSNR: 26.290630340576172 +[TRAIN] Iter: 512400 Loss: 0.005080520641058683 PSNR: 28.221410751342773 +[TRAIN] Iter: 512500 Loss: 0.006733898539096117 PSNR: 26.181718826293945 +[TRAIN] Iter: 512600 Loss: 0.006032932549715042 PSNR: 28.553133010864258 +[TRAIN] Iter: 512700 Loss: 0.006127181928604841 PSNR: 28.022855758666992 +[TRAIN] Iter: 512800 Loss: 0.004959603771567345 PSNR: 28.13892936706543 +[TRAIN] Iter: 512900 Loss: 0.0053703966550529 PSNR: 28.1333065032959 +[TRAIN] Iter: 513000 Loss: 0.007519867271184921 PSNR: 25.789581298828125 +[TRAIN] Iter: 513100 Loss: 0.0063672661781311035 PSNR: 26.785036087036133 +[TRAIN] Iter: 513200 Loss: 0.006026878021657467 PSNR: 26.967573165893555 +[TRAIN] Iter: 513300 Loss: 0.0066690947860479355 PSNR: 26.120323181152344 +[TRAIN] Iter: 513400 Loss: 0.005922991782426834 PSNR: 27.010345458984375 +[TRAIN] Iter: 513500 Loss: 0.00705200107768178 PSNR: 26.156030654907227 +[TRAIN] Iter: 513600 Loss: 0.005872645415365696 PSNR: 26.72998809814453 +[TRAIN] Iter: 513700 Loss: 0.00633019907400012 PSNR: 26.13365936279297 +[TRAIN] Iter: 513800 Loss: 0.006430261768400669 PSNR: 26.185880661010742 +[TRAIN] Iter: 513900 Loss: 0.006185943726450205 PSNR: 26.721094131469727 +[TRAIN] Iter: 514000 Loss: 0.006317618303000927 PSNR: 26.60173988342285 +[TRAIN] Iter: 514100 Loss: 0.007747670169919729 PSNR: 25.456768035888672 +[TRAIN] Iter: 514200 Loss: 0.006709589157253504 PSNR: 26.757600784301758 +[TRAIN] Iter: 514300 Loss: 0.004826586693525314 PSNR: 27.93539047241211 +[TRAIN] Iter: 514400 Loss: 0.00449363561347127 PSNR: 28.68708610534668 +[TRAIN] Iter: 514500 Loss: 0.005442118272185326 PSNR: 27.921524047851562 +[TRAIN] Iter: 514600 Loss: 0.0061715091578662395 PSNR: 27.15110969543457 +[TRAIN] Iter: 514700 Loss: 0.00566183403134346 PSNR: 28.206565856933594 +[TRAIN] Iter: 514800 Loss: 0.00599999213591218 PSNR: 26.38036346435547 +[TRAIN] Iter: 514900 Loss: 0.005854335613548756 PSNR: 26.566375732421875 +[TRAIN] Iter: 515000 Loss: 0.006127545610070229 PSNR: 26.464536666870117 +[TRAIN] Iter: 515100 Loss: 0.004422116093337536 PSNR: 28.836467742919922 +[TRAIN] Iter: 515200 Loss: 0.0057518258690834045 PSNR: 26.70830535888672 +[TRAIN] Iter: 515300 Loss: 0.006558651104569435 PSNR: 26.704381942749023 +[TRAIN] Iter: 515400 Loss: 0.006077886093407869 PSNR: 26.64203453063965 +[TRAIN] Iter: 515500 Loss: 0.007172195240855217 PSNR: 26.10027503967285 +[TRAIN] Iter: 515600 Loss: 0.006862662732601166 PSNR: 26.122575759887695 +[TRAIN] Iter: 515700 Loss: 0.006658196449279785 PSNR: 26.097991943359375 +[TRAIN] Iter: 515800 Loss: 0.006468635983765125 PSNR: 26.802207946777344 +[TRAIN] Iter: 515900 Loss: 0.006456172093749046 PSNR: 26.60026741027832 +[TRAIN] Iter: 516000 Loss: 0.007053115405142307 PSNR: 26.879308700561523 +[TRAIN] Iter: 516100 Loss: 0.005443896632641554 PSNR: 27.20479393005371 +[TRAIN] Iter: 516200 Loss: 0.0061911726370453835 PSNR: 26.412107467651367 +[TRAIN] Iter: 516300 Loss: 0.00606963038444519 PSNR: 26.746034622192383 +[TRAIN] Iter: 516400 Loss: 0.005858633201569319 PSNR: 26.582504272460938 +[TRAIN] Iter: 516500 Loss: 0.006206216290593147 PSNR: 27.15045166015625 +[TRAIN] Iter: 516600 Loss: 0.007126567419618368 PSNR: 26.27280044555664 +[TRAIN] Iter: 516700 Loss: 0.007727389223873615 PSNR: 26.13162612915039 +[TRAIN] Iter: 516800 Loss: 0.007047063671052456 PSNR: 26.667789459228516 +[TRAIN] Iter: 516900 Loss: 0.005248555913567543 PSNR: 27.41973304748535 +[TRAIN] Iter: 517000 Loss: 0.007164645008742809 PSNR: 25.888334274291992 
+[TRAIN] Iter: 517100 Loss: 0.006554764695465565 PSNR: 26.524616241455078 +[TRAIN] Iter: 517200 Loss: 0.006570105440914631 PSNR: 27.01905632019043 +[TRAIN] Iter: 517300 Loss: 0.006360076367855072 PSNR: 26.260339736938477 +[TRAIN] Iter: 517400 Loss: 0.007144059985876083 PSNR: 25.642044067382812 +[TRAIN] Iter: 517500 Loss: 0.005251912400126457 PSNR: 28.226943969726562 +[TRAIN] Iter: 517600 Loss: 0.006354169454425573 PSNR: 26.80713653564453 +[TRAIN] Iter: 517700 Loss: 0.0072609614580869675 PSNR: 25.93838882446289 +[TRAIN] Iter: 517800 Loss: 0.006601179949939251 PSNR: 26.539562225341797 +[TRAIN] Iter: 517900 Loss: 0.005975177977234125 PSNR: 27.19687271118164 +[TRAIN] Iter: 518000 Loss: 0.006791525520384312 PSNR: 25.64288902282715 +[TRAIN] Iter: 518100 Loss: 0.005870634224265814 PSNR: 27.038541793823242 +[TRAIN] Iter: 518200 Loss: 0.007459866814315319 PSNR: 25.659927368164062 +[TRAIN] Iter: 518300 Loss: 0.004780799150466919 PSNR: 28.564537048339844 +[TRAIN] Iter: 518400 Loss: 0.00581348966807127 PSNR: 26.865732192993164 +[TRAIN] Iter: 518500 Loss: 0.007177193183451891 PSNR: 26.845029830932617 +[TRAIN] Iter: 518600 Loss: 0.005450562573969364 PSNR: 27.617488861083984 +[TRAIN] Iter: 518700 Loss: 0.006245577707886696 PSNR: 27.57181739807129 +[TRAIN] Iter: 518800 Loss: 0.005840097554028034 PSNR: 27.160892486572266 +[TRAIN] Iter: 518900 Loss: 0.005905260797590017 PSNR: 28.105873107910156 +[TRAIN] Iter: 519000 Loss: 0.006855254527181387 PSNR: 26.112131118774414 +[TRAIN] Iter: 519100 Loss: 0.008262505754828453 PSNR: 25.403711318969727 +[TRAIN] Iter: 519200 Loss: 0.0064042797312140465 PSNR: 26.597681045532227 +[TRAIN] Iter: 519300 Loss: 0.004898311570286751 PSNR: 27.067724227905273 +[TRAIN] Iter: 519400 Loss: 0.00851858127862215 PSNR: 25.281774520874023 +[TRAIN] Iter: 519500 Loss: 0.006242404691874981 PSNR: 27.718826293945312 +[TRAIN] Iter: 519600 Loss: 0.006944708991795778 PSNR: 26.45268440246582 +[TRAIN] Iter: 519700 Loss: 0.007074843160808086 PSNR: 25.906286239624023 +[TRAIN] Iter: 519800 Loss: 0.00696460111066699 PSNR: 26.353599548339844 +[TRAIN] Iter: 519900 Loss: 0.005598548799753189 PSNR: 27.012794494628906 +Saved checkpoints at ./logs/TUT-out-doll-360-np/520000.tar +[TRAIN] Iter: 520000 Loss: 0.005785109009593725 PSNR: 27.119882583618164 +[TRAIN] Iter: 520100 Loss: 0.005896294489502907 PSNR: 26.90254020690918 +[TRAIN] Iter: 520200 Loss: 0.006028808653354645 PSNR: 28.051780700683594 +[TRAIN] Iter: 520300 Loss: 0.007237776182591915 PSNR: 26.080305099487305 +[TRAIN] Iter: 520400 Loss: 0.005663561634719372 PSNR: 26.544109344482422 +[TRAIN] Iter: 520500 Loss: 0.006470757536590099 PSNR: 26.465133666992188 +[TRAIN] Iter: 520600 Loss: 0.007391857914626598 PSNR: 25.517160415649414 +[TRAIN] Iter: 520700 Loss: 0.0067255087196826935 PSNR: 26.07328987121582 +[TRAIN] Iter: 520800 Loss: 0.004874889738857746 PSNR: 27.834074020385742 +[TRAIN] Iter: 520900 Loss: 0.006489370949566364 PSNR: 26.73430824279785 +[TRAIN] Iter: 521000 Loss: 0.006704973056912422 PSNR: 25.975671768188477 +[TRAIN] Iter: 521100 Loss: 0.00660446472465992 PSNR: 26.25482177734375 +[TRAIN] Iter: 521200 Loss: 0.005975693929940462 PSNR: 27.046266555786133 +[TRAIN] Iter: 521300 Loss: 0.005201368592679501 PSNR: 28.52165412902832 +[TRAIN] Iter: 521400 Loss: 0.005274483468383551 PSNR: 28.467653274536133 +[TRAIN] Iter: 521500 Loss: 0.006008335389196873 PSNR: 27.5891170501709 +[TRAIN] Iter: 521600 Loss: 0.005480504594743252 PSNR: 27.640113830566406 +[TRAIN] Iter: 521700 Loss: 0.007193622644990683 PSNR: 26.340421676635742 +[TRAIN] Iter: 521800 Loss: 
0.008293134160339832 PSNR: 25.02519416809082 +[TRAIN] Iter: 521900 Loss: 0.005497043486684561 PSNR: 27.77372932434082 +[TRAIN] Iter: 522000 Loss: 0.004937892314046621 PSNR: 28.574195861816406 +[TRAIN] Iter: 522100 Loss: 0.006063922308385372 PSNR: 27.237028121948242 +[TRAIN] Iter: 522200 Loss: 0.007185945753008127 PSNR: 25.697925567626953 +[TRAIN] Iter: 522300 Loss: 0.005128597840666771 PSNR: 27.966312408447266 +[TRAIN] Iter: 522400 Loss: 0.006267473101615906 PSNR: 26.24510955810547 +[TRAIN] Iter: 522500 Loss: 0.004223882686346769 PSNR: 29.1272029876709 +[TRAIN] Iter: 522600 Loss: 0.007197936996817589 PSNR: 26.030845642089844 +[TRAIN] Iter: 522700 Loss: 0.004648725502192974 PSNR: 28.715057373046875 +[TRAIN] Iter: 522800 Loss: 0.005321749020367861 PSNR: 28.159778594970703 +[TRAIN] Iter: 522900 Loss: 0.0056616682559251785 PSNR: 26.989248275756836 +[TRAIN] Iter: 523000 Loss: 0.006150456145405769 PSNR: 26.46457862854004 +[TRAIN] Iter: 523100 Loss: 0.0063958726823329926 PSNR: 26.446462631225586 +[TRAIN] Iter: 523200 Loss: 0.005596081726253033 PSNR: 27.05185890197754 +[TRAIN] Iter: 523300 Loss: 0.004595950711518526 PSNR: 28.46002960205078 +[TRAIN] Iter: 523400 Loss: 0.006114386487752199 PSNR: 27.084239959716797 +[TRAIN] Iter: 523500 Loss: 0.005514277145266533 PSNR: 27.378631591796875 +[TRAIN] Iter: 523600 Loss: 0.0070586176589131355 PSNR: 25.81749725341797 +[TRAIN] Iter: 523700 Loss: 0.004586586728692055 PSNR: 28.913000106811523 +[TRAIN] Iter: 523800 Loss: 0.0050477138720452785 PSNR: 28.75039291381836 +[TRAIN] Iter: 523900 Loss: 0.005691696424037218 PSNR: 28.03840446472168 +[TRAIN] Iter: 524000 Loss: 0.0054351151920855045 PSNR: 27.19166374206543 +[TRAIN] Iter: 524100 Loss: 0.005252477712929249 PSNR: 27.16429328918457 +[TRAIN] Iter: 524200 Loss: 0.006929404567927122 PSNR: 26.428634643554688 +[TRAIN] Iter: 524300 Loss: 0.006531698629260063 PSNR: 26.483091354370117 +[TRAIN] Iter: 524400 Loss: 0.006345891393721104 PSNR: 26.615310668945312 +[TRAIN] Iter: 524500 Loss: 0.005336245521903038 PSNR: 27.654258728027344 +[TRAIN] Iter: 524600 Loss: 0.004944865591824055 PSNR: 27.43256378173828 +[TRAIN] Iter: 524700 Loss: 0.00636629480868578 PSNR: 26.9106388092041 +[TRAIN] Iter: 524800 Loss: 0.005410992074757814 PSNR: 27.39545249938965 +[TRAIN] Iter: 524900 Loss: 0.00625956105068326 PSNR: 27.490562438964844 +[TRAIN] Iter: 525000 Loss: 0.007361955009400845 PSNR: 26.15761375427246 +[TRAIN] Iter: 525100 Loss: 0.007234969176352024 PSNR: 25.462587356567383 +[TRAIN] Iter: 525200 Loss: 0.006440743803977966 PSNR: 26.658058166503906 +[TRAIN] Iter: 525300 Loss: 0.0074466741643846035 PSNR: 26.019140243530273 +[TRAIN] Iter: 525400 Loss: 0.0067764767445623875 PSNR: 26.700002670288086 +[TRAIN] Iter: 525500 Loss: 0.006909788120537996 PSNR: 26.160831451416016 +[TRAIN] Iter: 525600 Loss: 0.006363212130963802 PSNR: 26.544822692871094 +[TRAIN] Iter: 525700 Loss: 0.007627819664776325 PSNR: 25.945825576782227 +[TRAIN] Iter: 525800 Loss: 0.007100355811417103 PSNR: 25.965959548950195 +[TRAIN] Iter: 525900 Loss: 0.007544857449829578 PSNR: 25.95758628845215 +[TRAIN] Iter: 526000 Loss: 0.006183440797030926 PSNR: 26.355224609375 +[TRAIN] Iter: 526100 Loss: 0.005754618905484676 PSNR: 28.27433204650879 +[TRAIN] Iter: 526200 Loss: 0.006010306999087334 PSNR: 27.25404930114746 +[TRAIN] Iter: 526300 Loss: 0.006394839379936457 PSNR: 26.58399200439453 +[TRAIN] Iter: 526400 Loss: 0.0062422920018434525 PSNR: 26.844579696655273 +[TRAIN] Iter: 526500 Loss: 0.0050361501052975655 PSNR: 27.121858596801758 +[TRAIN] Iter: 526600 Loss: 0.005035323090851307 
PSNR: 27.99123191833496 +[TRAIN] Iter: 526700 Loss: 0.007198974955826998 PSNR: 26.10605239868164 +[TRAIN] Iter: 526800 Loss: 0.006478980183601379 PSNR: 26.15968894958496 +[TRAIN] Iter: 526900 Loss: 0.006709203589707613 PSNR: 27.305681228637695 +[TRAIN] Iter: 527000 Loss: 0.007452019490301609 PSNR: 25.75033950805664 +[TRAIN] Iter: 527100 Loss: 0.005920374300330877 PSNR: 27.076078414916992 +[TRAIN] Iter: 527200 Loss: 0.00624070456251502 PSNR: 26.356246948242188 +[TRAIN] Iter: 527300 Loss: 0.008150698617100716 PSNR: 25.066566467285156 +[TRAIN] Iter: 527400 Loss: 0.00558896642178297 PSNR: 27.51600456237793 +[TRAIN] Iter: 527500 Loss: 0.006026911549270153 PSNR: 27.358165740966797 +[TRAIN] Iter: 527600 Loss: 0.0053900498896837234 PSNR: 27.101177215576172 +[TRAIN] Iter: 527700 Loss: 0.006379421800374985 PSNR: 26.18389892578125 +[TRAIN] Iter: 527800 Loss: 0.006907295435667038 PSNR: 25.880979537963867 +[TRAIN] Iter: 527900 Loss: 0.006145469844341278 PSNR: 27.078310012817383 +[TRAIN] Iter: 528000 Loss: 0.006369341630488634 PSNR: 26.420068740844727 +[TRAIN] Iter: 528100 Loss: 0.007601463235914707 PSNR: 25.39365005493164 +[TRAIN] Iter: 528200 Loss: 0.005567468702793121 PSNR: 27.128520965576172 +[TRAIN] Iter: 528300 Loss: 0.005956913344562054 PSNR: 27.3785343170166 +[TRAIN] Iter: 528400 Loss: 0.006379833444952965 PSNR: 25.920066833496094 +[TRAIN] Iter: 528500 Loss: 0.006003354676067829 PSNR: 26.90642738342285 +[TRAIN] Iter: 528600 Loss: 0.007312551140785217 PSNR: 26.115341186523438 +[TRAIN] Iter: 528700 Loss: 0.007418899796903133 PSNR: 26.050701141357422 +[TRAIN] Iter: 528800 Loss: 0.00422246428206563 PSNR: 29.111705780029297 +[TRAIN] Iter: 528900 Loss: 0.004446267616003752 PSNR: 28.125381469726562 +[TRAIN] Iter: 529000 Loss: 0.007339579053223133 PSNR: 26.172348022460938 +[TRAIN] Iter: 529100 Loss: 0.006804043892771006 PSNR: 27.87178611755371 +[TRAIN] Iter: 529200 Loss: 0.005595982540398836 PSNR: 27.215341567993164 +[TRAIN] Iter: 529300 Loss: 0.005142761394381523 PSNR: 28.403879165649414 +[TRAIN] Iter: 529400 Loss: 0.006685065571218729 PSNR: 26.585636138916016 +[TRAIN] Iter: 529500 Loss: 0.006865219213068485 PSNR: 26.84319305419922 +[TRAIN] Iter: 529600 Loss: 0.006833258550614119 PSNR: 26.002336502075195 +[TRAIN] Iter: 529700 Loss: 0.006735844537615776 PSNR: 25.81517791748047 +[TRAIN] Iter: 529800 Loss: 0.007320149336010218 PSNR: 26.121767044067383 +[TRAIN] Iter: 529900 Loss: 0.007314834278076887 PSNR: 26.45621109008789 +Saved checkpoints at ./logs/TUT-out-doll-360-np/530000.tar +[TRAIN] Iter: 530000 Loss: 0.007167094387114048 PSNR: 26.6855411529541 +[TRAIN] Iter: 530100 Loss: 0.0067236581817269325 PSNR: 26.20302391052246 +[TRAIN] Iter: 530200 Loss: 0.005772470496594906 PSNR: 27.47408103942871 +[TRAIN] Iter: 530300 Loss: 0.007005191408097744 PSNR: 25.985057830810547 +[TRAIN] Iter: 530400 Loss: 0.005526918917894363 PSNR: 27.6938533782959 +[TRAIN] Iter: 530500 Loss: 0.00618148734793067 PSNR: 27.20423126220703 +[TRAIN] Iter: 530600 Loss: 0.007047571241855621 PSNR: 26.33643341064453 +[TRAIN] Iter: 530700 Loss: 0.006330979522317648 PSNR: 27.3416690826416 +[TRAIN] Iter: 530800 Loss: 0.006426697131246328 PSNR: 26.412981033325195 +[TRAIN] Iter: 530900 Loss: 0.007362430915236473 PSNR: 26.323368072509766 +[TRAIN] Iter: 531000 Loss: 0.004395684227347374 PSNR: 28.84476089477539 +[TRAIN] Iter: 531100 Loss: 0.004694593604654074 PSNR: 28.280956268310547 +[TRAIN] Iter: 531200 Loss: 0.005616287235170603 PSNR: 27.730323791503906 +[TRAIN] Iter: 531300 Loss: 0.007144590839743614 PSNR: 26.094310760498047 +[TRAIN] Iter: 
531400 Loss: 0.006503812037408352 PSNR: 25.94075584411621 +[TRAIN] Iter: 531500 Loss: 0.005171275697648525 PSNR: 28.13258171081543 +[TRAIN] Iter: 531600 Loss: 0.00566354114562273 PSNR: 28.150991439819336 +[TRAIN] Iter: 531700 Loss: 0.006062498316168785 PSNR: 26.505216598510742 +[TRAIN] Iter: 531800 Loss: 0.00674443319439888 PSNR: 26.356538772583008 +[TRAIN] Iter: 531900 Loss: 0.005866683088243008 PSNR: 26.50741958618164 +[TRAIN] Iter: 532000 Loss: 0.006765883881598711 PSNR: 25.780786514282227 +[TRAIN] Iter: 532100 Loss: 0.005779728293418884 PSNR: 27.320838928222656 +[TRAIN] Iter: 532200 Loss: 0.005562134087085724 PSNR: 28.08062171936035 +[TRAIN] Iter: 532300 Loss: 0.006226084195077419 PSNR: 26.608272552490234 +[TRAIN] Iter: 532400 Loss: 0.006191314198076725 PSNR: 25.97158432006836 +[TRAIN] Iter: 532500 Loss: 0.006431546062231064 PSNR: 26.941162109375 +[TRAIN] Iter: 532600 Loss: 0.005453211255371571 PSNR: 27.798261642456055 +[TRAIN] Iter: 532700 Loss: 0.007828563451766968 PSNR: 25.642385482788086 +[TRAIN] Iter: 532800 Loss: 0.006017147563397884 PSNR: 26.738492965698242 +[TRAIN] Iter: 532900 Loss: 0.0065550128929317 PSNR: 25.77699089050293 +[TRAIN] Iter: 533000 Loss: 0.007070605643093586 PSNR: 26.005971908569336 +[TRAIN] Iter: 533100 Loss: 0.006161295343190432 PSNR: 27.169157028198242 +[TRAIN] Iter: 533200 Loss: 0.006483262870460749 PSNR: 26.263818740844727 +[TRAIN] Iter: 533300 Loss: 0.00935299787670374 PSNR: 25.02471160888672 +[TRAIN] Iter: 533400 Loss: 0.006244783755391836 PSNR: 26.693862915039062 +[TRAIN] Iter: 533500 Loss: 0.006647209636867046 PSNR: 26.49430274963379 +[TRAIN] Iter: 533600 Loss: 0.004889274016022682 PSNR: 28.213153839111328 +[TRAIN] Iter: 533700 Loss: 0.005983877927064896 PSNR: 27.03041648864746 +[TRAIN] Iter: 533800 Loss: 0.005799503065645695 PSNR: 27.132478713989258 +[TRAIN] Iter: 533900 Loss: 0.0061047496274113655 PSNR: 27.62213897705078 +[TRAIN] Iter: 534000 Loss: 0.005167718045413494 PSNR: 27.915752410888672 +[TRAIN] Iter: 534100 Loss: 0.006464571226388216 PSNR: 26.28249168395996 +[TRAIN] Iter: 534200 Loss: 0.005991643760353327 PSNR: 26.79392433166504 +[TRAIN] Iter: 534300 Loss: 0.006602844689041376 PSNR: 26.295225143432617 +[TRAIN] Iter: 534400 Loss: 0.005137238185852766 PSNR: 28.080183029174805 +[TRAIN] Iter: 534500 Loss: 0.006103686057031155 PSNR: 26.812551498413086 +[TRAIN] Iter: 534600 Loss: 0.007337607443332672 PSNR: 26.3283634185791 +[TRAIN] Iter: 534700 Loss: 0.0051277936436235905 PSNR: 28.231550216674805 +[TRAIN] Iter: 534800 Loss: 0.005788605660200119 PSNR: 27.67542266845703 +[TRAIN] Iter: 534900 Loss: 0.006395851261913776 PSNR: 26.79973602294922 +[TRAIN] Iter: 535000 Loss: 0.006557472050189972 PSNR: 26.249347686767578 +[TRAIN] Iter: 535100 Loss: 0.006414088420569897 PSNR: 26.611604690551758 +[TRAIN] Iter: 535200 Loss: 0.007053615991026163 PSNR: 26.573413848876953 +[TRAIN] Iter: 535300 Loss: 0.00668766163289547 PSNR: 26.43118667602539 +[TRAIN] Iter: 535400 Loss: 0.007118224166333675 PSNR: 26.315786361694336 +[TRAIN] Iter: 535500 Loss: 0.004573501646518707 PSNR: 29.35251808166504 +[TRAIN] Iter: 535600 Loss: 0.005409062374383211 PSNR: 27.073951721191406 +[TRAIN] Iter: 535700 Loss: 0.0066533563658595085 PSNR: 26.041322708129883 +[TRAIN] Iter: 535800 Loss: 0.006742868572473526 PSNR: 27.12836265563965 +[TRAIN] Iter: 535900 Loss: 0.0071118795312941074 PSNR: 26.606752395629883 +[TRAIN] Iter: 536000 Loss: 0.005970976315438747 PSNR: 26.50104331970215 +[TRAIN] Iter: 536100 Loss: 0.006157222669571638 PSNR: 26.812637329101562 +[TRAIN] Iter: 536200 Loss: 
0.0068892682902514935 PSNR: 27.655393600463867 +[TRAIN] Iter: 536300 Loss: 0.005480309948325157 PSNR: 27.006311416625977 +[TRAIN] Iter: 536400 Loss: 0.006477816961705685 PSNR: 25.870849609375 +[TRAIN] Iter: 536500 Loss: 0.005993299651890993 PSNR: 26.917295455932617 +[TRAIN] Iter: 536600 Loss: 0.006854542531073093 PSNR: 25.7913761138916 +[TRAIN] Iter: 536700 Loss: 0.006386732216924429 PSNR: 26.62554359436035 +[TRAIN] Iter: 536800 Loss: 0.008678478188812733 PSNR: 25.384061813354492 +[TRAIN] Iter: 536900 Loss: 0.006229330785572529 PSNR: 26.98395347595215 +[TRAIN] Iter: 537000 Loss: 0.005925642792135477 PSNR: 27.344526290893555 +[TRAIN] Iter: 537100 Loss: 0.006847801618278027 PSNR: 27.038747787475586 +[TRAIN] Iter: 537200 Loss: 0.007392445579171181 PSNR: 25.58942222595215 +[TRAIN] Iter: 537300 Loss: 0.005732701160013676 PSNR: 26.930004119873047 +[TRAIN] Iter: 537400 Loss: 0.0065227351151406765 PSNR: 26.892852783203125 +[TRAIN] Iter: 537500 Loss: 0.006671123206615448 PSNR: 26.435216903686523 +[TRAIN] Iter: 537600 Loss: 0.005008789710700512 PSNR: 27.897674560546875 +[TRAIN] Iter: 537700 Loss: 0.00668638851493597 PSNR: 26.225168228149414 +[TRAIN] Iter: 537800 Loss: 0.006323219742625952 PSNR: 26.509845733642578 +[TRAIN] Iter: 537900 Loss: 0.006997063290327787 PSNR: 26.41387367248535 +[TRAIN] Iter: 538000 Loss: 0.007651930674910545 PSNR: 25.629642486572266 +[TRAIN] Iter: 538100 Loss: 0.005985450930893421 PSNR: 26.352415084838867 +[TRAIN] Iter: 538200 Loss: 0.005036460235714912 PSNR: 27.797666549682617 +[TRAIN] Iter: 538300 Loss: 0.004972045309841633 PSNR: 28.477907180786133 +[TRAIN] Iter: 538400 Loss: 0.006282208487391472 PSNR: 28.35314178466797 +[TRAIN] Iter: 538500 Loss: 0.0063023800030350685 PSNR: 26.89912986755371 +[TRAIN] Iter: 538600 Loss: 0.006292546633630991 PSNR: 26.602815628051758 +[TRAIN] Iter: 538700 Loss: 0.005345714744180441 PSNR: 26.945234298706055 +[TRAIN] Iter: 538800 Loss: 0.006505648605525494 PSNR: 26.268341064453125 +[TRAIN] Iter: 538900 Loss: 0.006949266418814659 PSNR: 26.170562744140625 +[TRAIN] Iter: 539000 Loss: 0.006585447117686272 PSNR: 26.7016544342041 +[TRAIN] Iter: 539100 Loss: 0.0048714568838477135 PSNR: 28.46376609802246 +[TRAIN] Iter: 539200 Loss: 0.006659623235464096 PSNR: 26.320154190063477 +[TRAIN] Iter: 539300 Loss: 0.006644602864980698 PSNR: 26.595552444458008 +[TRAIN] Iter: 539400 Loss: 0.00800800696015358 PSNR: 25.927078247070312 +[TRAIN] Iter: 539500 Loss: 0.005211278796195984 PSNR: 27.7191104888916 +[TRAIN] Iter: 539600 Loss: 0.006187548395246267 PSNR: 26.551855087280273 +[TRAIN] Iter: 539700 Loss: 0.005173162557184696 PSNR: 29.033506393432617 +[TRAIN] Iter: 539800 Loss: 0.0051222750917077065 PSNR: 27.712421417236328 +[TRAIN] Iter: 539900 Loss: 0.005322678945958614 PSNR: 27.2493839263916 +Saved checkpoints at ./logs/TUT-out-doll-360-np/540000.tar +[TRAIN] Iter: 540000 Loss: 0.0055932337418198586 PSNR: 27.488672256469727 +[TRAIN] Iter: 540100 Loss: 0.006616382859647274 PSNR: 26.056589126586914 +[TRAIN] Iter: 540200 Loss: 0.007419471628963947 PSNR: 26.09330177307129 +[TRAIN] Iter: 540300 Loss: 0.005267618224024773 PSNR: 27.515722274780273 +[TRAIN] Iter: 540400 Loss: 0.004968734923750162 PSNR: 28.361255645751953 +[TRAIN] Iter: 540500 Loss: 0.005853723734617233 PSNR: 26.995948791503906 +[TRAIN] Iter: 540600 Loss: 0.005591642577201128 PSNR: 27.803659439086914 +[TRAIN] Iter: 540700 Loss: 0.005778172984719276 PSNR: 26.8128604888916 +[TRAIN] Iter: 540800 Loss: 0.006082638166844845 PSNR: 26.994905471801758 +[TRAIN] Iter: 540900 Loss: 0.005813241004943848 PSNR: 
26.7984561920166 +[TRAIN] Iter: 541000 Loss: 0.004952154587954283 PSNR: 28.526153564453125 +[TRAIN] Iter: 541100 Loss: 0.0065744491294026375 PSNR: 25.980640411376953 +[TRAIN] Iter: 541200 Loss: 0.005325085949152708 PSNR: 28.748245239257812 +[TRAIN] Iter: 541300 Loss: 0.005665660835802555 PSNR: 27.096994400024414 +[TRAIN] Iter: 541400 Loss: 0.006874057464301586 PSNR: 27.00383758544922 +[TRAIN] Iter: 541500 Loss: 0.008278688415884972 PSNR: 26.146038055419922 +[TRAIN] Iter: 541600 Loss: 0.006552667357027531 PSNR: 26.176280975341797 +[TRAIN] Iter: 541700 Loss: 0.005403584334999323 PSNR: 27.24738883972168 +[TRAIN] Iter: 541800 Loss: 0.006530290935188532 PSNR: 25.96196937561035 +[TRAIN] Iter: 541900 Loss: 0.0068677691742777824 PSNR: 26.42061996459961 +[TRAIN] Iter: 542000 Loss: 0.005047116428613663 PSNR: 28.77532958984375 +[TRAIN] Iter: 542100 Loss: 0.007220357656478882 PSNR: 25.793954849243164 +[TRAIN] Iter: 542200 Loss: 0.004789226222783327 PSNR: 28.339630126953125 +[TRAIN] Iter: 542300 Loss: 0.005857248790562153 PSNR: 26.91507911682129 +[TRAIN] Iter: 542400 Loss: 0.005601624958217144 PSNR: 26.99349021911621 +[TRAIN] Iter: 542500 Loss: 0.00607244111597538 PSNR: 26.53459358215332 +[TRAIN] Iter: 542600 Loss: 0.004883608780801296 PSNR: 28.705703735351562 +[TRAIN] Iter: 542700 Loss: 0.005582203157246113 PSNR: 27.641857147216797 +[TRAIN] Iter: 542800 Loss: 0.006139501929283142 PSNR: 26.620635986328125 +[TRAIN] Iter: 542900 Loss: 0.00428790831938386 PSNR: 28.201597213745117 +[TRAIN] Iter: 543000 Loss: 0.005662502720952034 PSNR: 26.88916015625 +[TRAIN] Iter: 543100 Loss: 0.006972835399210453 PSNR: 25.913806915283203 +[TRAIN] Iter: 543200 Loss: 0.005570015870034695 PSNR: 27.093215942382812 +[TRAIN] Iter: 543300 Loss: 0.005677073262631893 PSNR: 28.334552764892578 +[TRAIN] Iter: 543400 Loss: 0.006383330561220646 PSNR: 27.313472747802734 +[TRAIN] Iter: 543500 Loss: 0.005434335675090551 PSNR: 27.641775131225586 +[TRAIN] Iter: 543600 Loss: 0.007299579214304686 PSNR: 26.399213790893555 +[TRAIN] Iter: 543700 Loss: 0.005097184330224991 PSNR: 27.193655014038086 +[TRAIN] Iter: 543800 Loss: 0.005156500265002251 PSNR: 28.600181579589844 +[TRAIN] Iter: 543900 Loss: 0.007995110005140305 PSNR: 25.655345916748047 +[TRAIN] Iter: 544000 Loss: 0.004894739016890526 PSNR: 28.47830581665039 +[TRAIN] Iter: 544100 Loss: 0.0062485672533512115 PSNR: 26.452220916748047 +[TRAIN] Iter: 544200 Loss: 0.005234100855886936 PSNR: 27.915002822875977 +[TRAIN] Iter: 544300 Loss: 0.006166360341012478 PSNR: 25.909059524536133 +[TRAIN] Iter: 544400 Loss: 0.005696462467312813 PSNR: 27.209705352783203 +[TRAIN] Iter: 544500 Loss: 0.007572690024971962 PSNR: 25.984554290771484 +[TRAIN] Iter: 544600 Loss: 0.006513920612633228 PSNR: 26.4596004486084 +[TRAIN] Iter: 544700 Loss: 0.006333048455417156 PSNR: 27.46625518798828 +[TRAIN] Iter: 544800 Loss: 0.004829554818570614 PSNR: 29.18705940246582 +[TRAIN] Iter: 544900 Loss: 0.006798483897000551 PSNR: 26.04043960571289 +[TRAIN] Iter: 545000 Loss: 0.007288855966180563 PSNR: 25.986398696899414 +[TRAIN] Iter: 545100 Loss: 0.007615671958774328 PSNR: 25.702726364135742 +[TRAIN] Iter: 545200 Loss: 0.00830540806055069 PSNR: 25.254432678222656 +[TRAIN] Iter: 545300 Loss: 0.005183514207601547 PSNR: 28.187345504760742 +[TRAIN] Iter: 545400 Loss: 0.006986726075410843 PSNR: 26.16122817993164 +[TRAIN] Iter: 545500 Loss: 0.0068671731278300285 PSNR: 25.986520767211914 +[TRAIN] Iter: 545600 Loss: 0.006473489571362734 PSNR: 26.862876892089844 +[TRAIN] Iter: 545700 Loss: 0.006933215074241161 PSNR: 26.089242935180664 
+[TRAIN] Iter: 545800 Loss: 0.006927268113940954 PSNR: 25.803361892700195 +[TRAIN] Iter: 545900 Loss: 0.0050347610376775265 PSNR: 28.23935317993164 +[TRAIN] Iter: 546000 Loss: 0.006987134460359812 PSNR: 26.782981872558594 +[TRAIN] Iter: 546100 Loss: 0.006027298513799906 PSNR: 27.672447204589844 +[TRAIN] Iter: 546200 Loss: 0.005590349435806274 PSNR: 27.616987228393555 +[TRAIN] Iter: 546300 Loss: 0.006583811715245247 PSNR: 26.01739501953125 +[TRAIN] Iter: 546400 Loss: 0.007669991813600063 PSNR: 25.98174285888672 +[TRAIN] Iter: 546500 Loss: 0.006703444756567478 PSNR: 26.269100189208984 +[TRAIN] Iter: 546600 Loss: 0.007856867276132107 PSNR: 25.56606101989746 +[TRAIN] Iter: 546700 Loss: 0.006723815109580755 PSNR: 26.3332462310791 +[TRAIN] Iter: 546800 Loss: 0.006974721793085337 PSNR: 26.367462158203125 +[TRAIN] Iter: 546900 Loss: 0.005816125310957432 PSNR: 27.267385482788086 +[TRAIN] Iter: 547000 Loss: 0.005505978595465422 PSNR: 27.620071411132812 +[TRAIN] Iter: 547100 Loss: 0.0065349312499165535 PSNR: 26.998308181762695 +[TRAIN] Iter: 547200 Loss: 0.0062442743219435215 PSNR: 25.91617774963379 +[TRAIN] Iter: 547300 Loss: 0.00657310476526618 PSNR: 27.216522216796875 +[TRAIN] Iter: 547400 Loss: 0.00589949544519186 PSNR: 27.188926696777344 +[TRAIN] Iter: 547500 Loss: 0.004952284507453442 PSNR: 28.026002883911133 +[TRAIN] Iter: 547600 Loss: 0.005765803158283234 PSNR: 27.608985900878906 +[TRAIN] Iter: 547700 Loss: 0.0060173883102834225 PSNR: 27.20353889465332 +[TRAIN] Iter: 547800 Loss: 0.004755021072924137 PSNR: 28.658552169799805 +[TRAIN] Iter: 547900 Loss: 0.005024755839258432 PSNR: 28.424001693725586 +[TRAIN] Iter: 548000 Loss: 0.008518039248883724 PSNR: 25.266054153442383 +[TRAIN] Iter: 548100 Loss: 0.0069032348692417145 PSNR: 26.33135414123535 +[TRAIN] Iter: 548200 Loss: 0.005767547059804201 PSNR: 26.265451431274414 +[TRAIN] Iter: 548300 Loss: 0.006344345398247242 PSNR: 27.78243064880371 +[TRAIN] Iter: 548400 Loss: 0.006003358401358128 PSNR: 27.16402816772461 +[TRAIN] Iter: 548500 Loss: 0.004713682923465967 PSNR: 27.47389793395996 +[TRAIN] Iter: 548600 Loss: 0.005655854940414429 PSNR: 28.02000617980957 +[TRAIN] Iter: 548700 Loss: 0.007095838896930218 PSNR: 26.075159072875977 +[TRAIN] Iter: 548800 Loss: 0.005311471410095692 PSNR: 27.231346130371094 +[TRAIN] Iter: 548900 Loss: 0.006252781953662634 PSNR: 26.585582733154297 +[TRAIN] Iter: 549000 Loss: 0.0050566066056489944 PSNR: 28.216880798339844 +[TRAIN] Iter: 549100 Loss: 0.005861732643097639 PSNR: 26.961654663085938 +[TRAIN] Iter: 549200 Loss: 0.006647860631346703 PSNR: 26.845481872558594 +[TRAIN] Iter: 549300 Loss: 0.0066742682829499245 PSNR: 26.59241485595703 +[TRAIN] Iter: 549400 Loss: 0.006414385512471199 PSNR: 26.14297103881836 +[TRAIN] Iter: 549500 Loss: 0.006742771714925766 PSNR: 26.192190170288086 +[TRAIN] Iter: 549600 Loss: 0.0064110541716217995 PSNR: 27.416770935058594 +[TRAIN] Iter: 549700 Loss: 0.005986236501485109 PSNR: 28.384735107421875 +[TRAIN] Iter: 549800 Loss: 0.006963456980884075 PSNR: 26.323312759399414 +[TRAIN] Iter: 549900 Loss: 0.006134688854217529 PSNR: 26.87140464782715 +Saved checkpoints at ./logs/TUT-out-doll-360-np/550000.tar +0 0.0008676052093505859 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 20.930405616760254 +2 21.666110515594482 +3 21.39902687072754 +4 22.524611949920654 +5 22.609028816223145 +6 22.627617835998535 +7 22.764472007751465 +8 20.921340465545654 +9 22.013177633285522 +10 21.64651918411255 +11 22.24430274963379 +12 21.798436880111694 +13 21.38562321662903 +14 21.73882246017456 +15 
21.532840728759766 +16 21.712897062301636 +17 21.320629119873047 +18 21.838144540786743 +19 22.032968759536743 +20 22.56275248527527 +21 22.851810693740845 +22 21.39835238456726 +23 22.243170022964478 +24 21.65018939971924 +25 21.71523666381836 +26 22.224066019058228 +27 22.000845670700073 +28 21.781373023986816 +29 23.528383493423462 +30 21.211466073989868 +31 21.566348791122437 +32 21.744004011154175 +33 21.305311918258667 +34 22.211286306381226 +35 21.783023595809937 +36 21.099302291870117 +37 21.615877628326416 +38 21.441345691680908 +39 22.32198166847229 +40 22.647964239120483 +41 20.873902797698975 +42 21.55509066581726 +43 22.069010734558105 +44 21.75598978996277 +45 21.81851053237915 +46 21.75240683555603 +47 21.563900232315063 +48 22.03947114944458 +49 22.09463095664978 +50 21.008573532104492 +51 23.114078521728516 +52 22.39621639251709 +53 21.674057006835938 +54 21.708349227905273 +55 21.68101692199707 +56 22.617061614990234 +57 21.787964582443237 +58 21.947898626327515 +59 22.050442934036255 +60 22.71200132369995 +61 22.15450882911682 +62 22.578176498413086 +63 20.85839295387268 +64 21.704411268234253 +65 22.39290690422058 +66 21.428627490997314 +67 21.657575845718384 +68 22.22533416748047 +69 21.45826554298401 +70 21.542154550552368 +71 21.354562044143677 +72 21.93702793121338 +73 22.081053495407104 +74 21.957719326019287 +75 21.85908079147339 +76 21.30912947654724 +77 21.701813459396362 +78 22.323372840881348 +79 21.478073358535767 +80 22.269225358963013 +81 23.573880910873413 +82 21.9113986492157 +83 21.156620025634766 +84 22.253481149673462 +85 21.648306131362915 +86 21.773221969604492 +87 22.053616285324097 +88 21.045396089553833 +89 22.306265115737915 +90 22.23193049430847 +91 21.916266918182373 +92 21.69517493247986 +93 22.583638668060303 +94 21.710373163223267 +95 21.48074960708618 +96 22.157219886779785 +97 21.90946125984192 +98 22.36548900604248 +99 22.07108163833618 +100 21.532982349395752 +101 22.333183765411377 +102 21.272046327590942 +103 21.37655282020569 +104 21.7034113407135 +105 21.826180934906006 +106 21.69113540649414 +107 21.554924964904785 +108 21.306302547454834 +109 22.12160348892212 +110 22.01620650291443 +111 21.718669652938843 +112 21.549336433410645 +113 21.7004816532135 +114 21.914990425109863 +115 21.54139018058777 +116 21.4698805809021 +117 21.507490396499634 +118 21.377751111984253 +119 21.792502880096436 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[ 1.2977e+00, 1.1664e+00, 1.1306e+00, -3.4296e+01], + [-6.9481e-01, -7.6533e-01, -9.8003e-01, -5.4719e+01], + [-1.5266e-02, -2.8450e-01, -6.0858e-01, -1.7474e+01], + ..., + [-3.9012e+00, -2.2209e+00, -5.0726e+00, 8.4896e+02], + [-5.8865e+00, -4.5197e+00, -6.3660e+00, 8.1155e+02], + [-8.3154e+00, -5.8091e+00, -8.3404e+00, 7.6047e+02]], + + [[-4.4811e-01, -1.2832e-01, 2.6456e-01, -3.1196e+01], + [-1.1862e+00, -4.7695e-01, 5.9236e-01, -1.0052e+01], + [-1.2095e+00, -4.9444e-01, 5.9050e-01, -9.7554e+00], + ..., + [-9.6892e+00, -6.6217e+00, -6.9447e+00, 6.1553e+02], + [-9.4363e+00, -6.4636e+00, -6.5071e+00, 6.2572e+02], + [-9.8884e+00, -6.8496e+00, -7.1756e+00, 6.3706e+02]], + + [[-1.4612e-01, -7.3980e-02, -3.2360e-01, -4.4001e+01], + [-6.2016e-01, -3.6338e-01, 9.7135e-02, -6.5277e+01], + [ 1.3148e+00, 1.1072e+00, 1.0553e+00, -2.0754e+01], + ..., + [-6.5812e+00, -3.9815e+00, -2.4442e+00, 3.1583e+02], + [-7.2972e+00, -5.2899e+00, -3.3717e+00, 4.0044e+02], + [-7.4022e+00, -4.7651e+00, -3.5843e+00, 3.9697e+02]], + + ..., + + [[-1.2607e+00, -1.4815e+00, -1.6752e+00, -2.8088e+01], + 
[-1.8034e+00, -1.6871e+00, -1.5796e+00, -3.7025e+01], + [-1.6898e+00, -1.4106e+00, -1.1039e+00, -3.1152e+01], + ..., + [-2.2782e+01, -1.5076e+01, -1.5681e+01, 4.7310e+02], + [-2.2154e+01, -1.5000e+01, -1.4913e+01, 4.9669e+02], + [-2.5060e+01, -1.7395e+01, -1.8162e+01, 4.8459e+02]], + + [[ 7.3979e-01, 7.8992e-01, 9.0942e-01, -2.5256e+01], + [ 6.0695e-01, 1.1660e+00, 2.0607e+00, -5.8063e+01], + [-3.6986e-01, 6.0024e-01, 2.1468e+00, -3.3281e+01], + ..., + [ 3.5377e+00, 5.9095e+00, 8.1926e+00, 3.2275e+02], + [ 2.3373e+00, 4.7736e+00, 6.9182e+00, 3.0989e+02], + [ 3.6922e+00, 6.4013e+00, 9.5724e+00, 3.0851e+02]], + + [[-2.2068e+00, -1.0376e+00, 4.4762e-01, -3.5445e+01], + [-1.2163e+00, -3.5400e-01, 5.9353e-01, 8.8099e+00], + [-1.1930e+00, -3.3416e-01, 6.1387e-01, 8.3522e+00], + ..., + [-8.4360e+00, -8.0514e+00, 1.0320e-02, 4.1669e+02], + [-8.8063e+00, -8.5072e+00, -2.9831e-01, 4.2064e+02], + [-8.5621e+00, -8.0990e+00, -2.3642e-01, 4.2462e+02]]], + grad_fn=), 'rgb0': tensor([[0.3772, 0.3605, 0.3797], + [0.2892, 0.4396, 0.6826], + [0.1984, 0.1949, 0.2297], + ..., + [0.4560, 0.4906, 0.5745], + [0.4600, 0.4443, 0.4649], + [0.2383, 0.4135, 0.6506]], grad_fn=), 'disp0': tensor([ 35.3774, 97.7074, 20.0179, ..., 113.4468, 19.2944, 42.9096], + grad_fn=), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.0051, 0.0047, 0.0056, ..., 0.0046, 0.0061, 0.0035])} +0 0.0007781982421875 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 22.900084495544434 +2 22.040159225463867 +3 21.13410234451294 +4 21.858579397201538 +5 21.813025951385498 +6 21.756977558135986 +7 22.07144546508789 +8 21.688812732696533 +9 21.22470450401306 +10 21.811416625976562 +11 22.391375064849854 +12 21.303110599517822 +13 22.075752019882202 +14 22.019895792007446 +15 21.387722730636597 +16 21.976541996002197 +17 21.59329843521118 +18 21.582263231277466 +19 22.208436727523804 +20 21.65848422050476 +21 21.75463628768921 +22 21.909929275512695 +23 21.72421383857727 +24 22.23436951637268 +25 23.022521495819092 +26 21.853797435760498 +27 21.839298248291016 +28 22.280352115631104 +29 22.292006969451904 +30 21.395668268203735 +31 21.496432304382324 +32 21.98318386077881 +33 21.402556657791138 +34 22.134448766708374 +35 21.072128295898438 +36 22.467142581939697 +37 21.36501455307007 +38 21.671842575073242 +39 21.136635541915894 +40 21.80847430229187 +41 21.588520288467407 +42 22.157503366470337 +43 22.03196406364441 +44 22.17594814300537 +45 22.275651693344116 +46 21.674516439437866 +47 21.560530185699463 +48 21.129910230636597 +49 22.89915418624878 +50 21.564201593399048 +51 21.170586109161377 +52 22.217478036880493 +53 21.488369464874268 +54 21.24702000617981 +55 22.41868257522583 +56 22.156178951263428 +57 21.6646671295166 +58 21.72728991508484 +59 21.898398876190186 +60 22.620176076889038 +61 22.726428031921387 +62 22.66702389717102 +63 21.179077863693237 +64 22.6362144947052 +65 22.181681632995605 +66 21.624000787734985 +67 21.759299278259277 +68 21.765052318572998 +69 22.36878514289856 +70 21.42717432975769 +71 21.76825475692749 +72 22.018397092819214 +73 21.73257040977478 +74 21.60110306739807 +75 22.42619299888611 +76 21.600175380706787 +77 21.874656677246094 +78 22.34152913093567 +79 21.581176042556763 +80 21.644501209259033 +81 22.061414003372192 +82 21.578544855117798 +83 22.6180419921875 +84 22.251545906066895 +85 21.79358148574829 +86 21.816158533096313 +87 21.354191541671753 +88 21.351178646087646 +89 22.050039052963257 +90 21.966017246246338 +91 21.624111890792847 +92 22.378916025161743 +93 
22.364320516586304 +94 21.86920189857483 +95 21.85255241394043 +96 21.60054850578308 +97 21.85763168334961 +98 21.919217824935913 +99 21.939600706100464 +100 22.25835394859314 +101 22.154224395751953 +102 21.853235006332397 +103 21.710264921188354 +104 21.26652216911316 +105 22.55121159553528 +106 21.75229787826538 +107 21.523536682128906 +108 21.80846381187439 +109 22.080125331878662 +110 21.947901964187622 +111 22.24077606201172 +112 21.29448676109314 +113 22.32422661781311 +114 21.546730041503906 +115 21.713033437728882 +116 21.814121961593628 +117 22.633712768554688 +118 22.006876468658447 +119 21.82833981513977 +test poses shape torch.Size([4, 3, 4]) +0 0.0016756057739257812 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 20.98716950416565 +2 22.098033905029297 +3 21.776559114456177 +Saved test set +[TRAIN] Iter: 550000 Loss: 0.006078954320400953 PSNR: 26.75234603881836 +[TRAIN] Iter: 550100 Loss: 0.006484600715339184 PSNR: 26.37424087524414 +[TRAIN] Iter: 550200 Loss: 0.006506041623651981 PSNR: 26.53688621520996 +[TRAIN] Iter: 550300 Loss: 0.006901219487190247 PSNR: 26.748085021972656 +[TRAIN] Iter: 550400 Loss: 0.007038365118205547 PSNR: 25.800451278686523 +[TRAIN] Iter: 550500 Loss: 0.004484144970774651 PSNR: 28.787870407104492 +[TRAIN] Iter: 550600 Loss: 0.007044710218906403 PSNR: 26.15854263305664 +[TRAIN] Iter: 550700 Loss: 0.005246993154287338 PSNR: 27.779254913330078 +[TRAIN] Iter: 550800 Loss: 0.0058835879899561405 PSNR: 26.897449493408203 +[TRAIN] Iter: 550900 Loss: 0.007344505749642849 PSNR: 25.84319305419922 +[TRAIN] Iter: 551000 Loss: 0.0054337577894330025 PSNR: 27.436649322509766 +[TRAIN] Iter: 551100 Loss: 0.0048193903639912605 PSNR: 27.827980041503906 +[TRAIN] Iter: 551200 Loss: 0.006607702001929283 PSNR: 26.314472198486328 +[TRAIN] Iter: 551300 Loss: 0.004889696836471558 PSNR: 27.502729415893555 +[TRAIN] Iter: 551400 Loss: 0.006452253088355064 PSNR: 26.358110427856445 +[TRAIN] Iter: 551500 Loss: 0.005796135403215885 PSNR: 27.55668830871582 +[TRAIN] Iter: 551600 Loss: 0.005552429705858231 PSNR: 27.287273406982422 +[TRAIN] Iter: 551700 Loss: 0.005427686031907797 PSNR: 27.507036209106445 +[TRAIN] Iter: 551800 Loss: 0.0053169745951890945 PSNR: 28.321392059326172 +[TRAIN] Iter: 551900 Loss: 0.0069566406309604645 PSNR: 26.24243927001953 +[TRAIN] Iter: 552000 Loss: 0.005703598260879517 PSNR: 26.911457061767578 +[TRAIN] Iter: 552100 Loss: 0.005348142236471176 PSNR: 26.9743709564209 +[TRAIN] Iter: 552200 Loss: 0.005612274166196585 PSNR: 27.5520076751709 +[TRAIN] Iter: 552300 Loss: 0.007515250239521265 PSNR: 25.84402084350586 +[TRAIN] Iter: 552400 Loss: 0.006300314329564571 PSNR: 26.60784912109375 +[TRAIN] Iter: 552500 Loss: 0.007405941374599934 PSNR: 25.939912796020508 +[TRAIN] Iter: 552600 Loss: 0.008364143781363964 PSNR: 25.107807159423828 +[TRAIN] Iter: 552700 Loss: 0.005287745036184788 PSNR: 28.494670867919922 +[TRAIN] Iter: 552800 Loss: 0.006477567832916975 PSNR: 26.417808532714844 +[TRAIN] Iter: 552900 Loss: 0.006118890829384327 PSNR: 28.285825729370117 +[TRAIN] Iter: 553000 Loss: 0.0064425356686115265 PSNR: 26.66878890991211 +[TRAIN] Iter: 553100 Loss: 0.0066618360579013824 PSNR: 26.44635009765625 +[TRAIN] Iter: 553200 Loss: 0.006577409338206053 PSNR: 26.991518020629883 +[TRAIN] Iter: 553300 Loss: 0.005081562325358391 PSNR: 28.128320693969727 +[TRAIN] Iter: 553400 Loss: 0.006894244812428951 PSNR: 26.63498878479004 +[TRAIN] Iter: 553500 Loss: 0.006620368454605341 PSNR: 26.13265037536621 +[TRAIN] Iter: 553600 Loss: 0.006338315084576607 PSNR: 26.159801483154297 
+[TRAIN] Iter: 553700 Loss: 0.0052889687940478325 PSNR: 27.42813491821289 +[TRAIN] Iter: 553800 Loss: 0.006821382790803909 PSNR: 25.747262954711914 +[TRAIN] Iter: 553900 Loss: 0.006851951591670513 PSNR: 25.901416778564453 +[TRAIN] Iter: 554000 Loss: 0.005167629104107618 PSNR: 27.007966995239258 +[TRAIN] Iter: 554100 Loss: 0.007508022710680962 PSNR: 26.104551315307617 +[TRAIN] Iter: 554200 Loss: 0.00613999692723155 PSNR: 26.7608585357666 +[TRAIN] Iter: 554300 Loss: 0.005397287663072348 PSNR: 28.120256423950195 +[TRAIN] Iter: 554400 Loss: 0.006627282127737999 PSNR: 27.00017547607422 +[TRAIN] Iter: 554500 Loss: 0.0076459674164652824 PSNR: 25.587379455566406 +[TRAIN] Iter: 554600 Loss: 0.0061814626678824425 PSNR: 26.09526824951172 +[TRAIN] Iter: 554700 Loss: 0.007795047014951706 PSNR: 25.560792922973633 +[TRAIN] Iter: 554800 Loss: 0.0069847721606493 PSNR: 26.90540885925293 +[TRAIN] Iter: 554900 Loss: 0.006630029063671827 PSNR: 26.301668167114258 +[TRAIN] Iter: 555000 Loss: 0.006421857513487339 PSNR: 26.110750198364258 +[TRAIN] Iter: 555100 Loss: 0.0064971912652254105 PSNR: 26.61269760131836 +[TRAIN] Iter: 555200 Loss: 0.006003392860293388 PSNR: 26.296762466430664 +[TRAIN] Iter: 555300 Loss: 0.006065405439585447 PSNR: 26.623628616333008 +[TRAIN] Iter: 555400 Loss: 0.006706518121063709 PSNR: 26.522380828857422 +[TRAIN] Iter: 555500 Loss: 0.00728309340775013 PSNR: 26.00000762939453 +[TRAIN] Iter: 555600 Loss: 0.006698556710034609 PSNR: 25.849397659301758 +[TRAIN] Iter: 555700 Loss: 0.006153000518679619 PSNR: 27.727771759033203 +[TRAIN] Iter: 555800 Loss: 0.004326228052377701 PSNR: 28.530826568603516 +[TRAIN] Iter: 555900 Loss: 0.007164320442825556 PSNR: 26.04768943786621 +[TRAIN] Iter: 556000 Loss: 0.0065957047045230865 PSNR: 26.624141693115234 +[TRAIN] Iter: 556100 Loss: 0.004931201227009296 PSNR: 27.669002532958984 +[TRAIN] Iter: 556200 Loss: 0.007210221141576767 PSNR: 25.861547470092773 +[TRAIN] Iter: 556300 Loss: 0.007987793534994125 PSNR: 26.089275360107422 +[TRAIN] Iter: 556400 Loss: 0.00598550820723176 PSNR: 26.302173614501953 +[TRAIN] Iter: 556500 Loss: 0.006443963386118412 PSNR: 26.64261817932129 +[TRAIN] Iter: 556600 Loss: 0.007205922156572342 PSNR: 26.14388084411621 +[TRAIN] Iter: 556700 Loss: 0.007578518241643906 PSNR: 25.979923248291016 +[TRAIN] Iter: 556800 Loss: 0.005553467199206352 PSNR: 27.01215171813965 +[TRAIN] Iter: 556900 Loss: 0.005887857172638178 PSNR: 27.50455665588379 +[TRAIN] Iter: 557000 Loss: 0.006465570069849491 PSNR: 26.444992065429688 +[TRAIN] Iter: 557100 Loss: 0.007024702616035938 PSNR: 26.24810791015625 +[TRAIN] Iter: 557200 Loss: 0.005517763085663319 PSNR: 27.77936363220215 +[TRAIN] Iter: 557300 Loss: 0.006831349804997444 PSNR: 26.340133666992188 +[TRAIN] Iter: 557400 Loss: 0.007375758606940508 PSNR: 25.949634552001953 +[TRAIN] Iter: 557500 Loss: 0.005592603236436844 PSNR: 27.08588218688965 +[TRAIN] Iter: 557600 Loss: 0.006484377197921276 PSNR: 26.763151168823242 +[TRAIN] Iter: 557700 Loss: 0.0064996774308383465 PSNR: 26.207672119140625 +[TRAIN] Iter: 557800 Loss: 0.0072839390486478806 PSNR: 25.74338150024414 +[TRAIN] Iter: 557900 Loss: 0.004936483222991228 PSNR: 28.15955352783203 +[TRAIN] Iter: 558000 Loss: 0.005804234649986029 PSNR: 26.699636459350586 +[TRAIN] Iter: 558100 Loss: 0.004780968651175499 PSNR: 28.471694946289062 +[TRAIN] Iter: 558200 Loss: 0.007122840732336044 PSNR: 26.90944480895996 +[TRAIN] Iter: 558300 Loss: 0.0070102927275002 PSNR: 26.2154541015625 +[TRAIN] Iter: 558400 Loss: 0.0066276732832193375 PSNR: 26.157268524169922 +[TRAIN] Iter: 558500 
Loss: 0.006214432884007692 PSNR: 26.422142028808594 +[TRAIN] Iter: 558600 Loss: 0.005165342707186937 PSNR: 27.645301818847656 +[TRAIN] Iter: 558700 Loss: 0.0053930822759866714 PSNR: 27.677324295043945 +[TRAIN] Iter: 558800 Loss: 0.006373798009008169 PSNR: 26.330259323120117 +[TRAIN] Iter: 558900 Loss: 0.0063913920894265175 PSNR: 26.585315704345703 +[TRAIN] Iter: 559000 Loss: 0.004848486743867397 PSNR: 27.51691246032715 +[TRAIN] Iter: 559100 Loss: 0.007254060823470354 PSNR: 25.92098045349121 +[TRAIN] Iter: 559200 Loss: 0.005849418230354786 PSNR: 27.24697494506836 +[TRAIN] Iter: 559300 Loss: 0.006369438488036394 PSNR: 26.53114128112793 +[TRAIN] Iter: 559400 Loss: 0.006435828283429146 PSNR: 26.34107780456543 +[TRAIN] Iter: 559500 Loss: 0.007549325469881296 PSNR: 25.567039489746094 +[TRAIN] Iter: 559600 Loss: 0.006079540587961674 PSNR: 26.119630813598633 +[TRAIN] Iter: 559700 Loss: 0.005105541553348303 PSNR: 28.32032585144043 +[TRAIN] Iter: 559800 Loss: 0.006363993044942617 PSNR: 26.1738224029541 +[TRAIN] Iter: 559900 Loss: 0.008150536566972733 PSNR: 25.459274291992188 +Saved checkpoints at ./logs/TUT-out-doll-360-np/560000.tar +[TRAIN] Iter: 560000 Loss: 0.0050529069267213345 PSNR: 27.750152587890625 +[TRAIN] Iter: 560100 Loss: 0.006483984179794788 PSNR: 26.22511100769043 +[TRAIN] Iter: 560200 Loss: 0.007454768754541874 PSNR: 26.19702911376953 +[TRAIN] Iter: 560300 Loss: 0.006791865453124046 PSNR: 25.753463745117188 +[TRAIN] Iter: 560400 Loss: 0.0056183901615440845 PSNR: 27.136398315429688 +[TRAIN] Iter: 560500 Loss: 0.006372151896357536 PSNR: 26.5522403717041 +[TRAIN] Iter: 560600 Loss: 0.006253768689930439 PSNR: 28.185556411743164 +[TRAIN] Iter: 560700 Loss: 0.005427893716841936 PSNR: 28.1519775390625 +[TRAIN] Iter: 560800 Loss: 0.00692228227853775 PSNR: 26.439130783081055 +[TRAIN] Iter: 560900 Loss: 0.007097909227013588 PSNR: 26.08700180053711 +[TRAIN] Iter: 561000 Loss: 0.006706524640321732 PSNR: 27.280759811401367 +[TRAIN] Iter: 561100 Loss: 0.006780373398214579 PSNR: 26.744871139526367 +[TRAIN] Iter: 561200 Loss: 0.005350316409021616 PSNR: 27.835952758789062 +[TRAIN] Iter: 561300 Loss: 0.0057071796618402 PSNR: 26.708280563354492 +[TRAIN] Iter: 561400 Loss: 0.007562515325844288 PSNR: 25.80938148498535 +[TRAIN] Iter: 561500 Loss: 0.006708987522870302 PSNR: 25.878250122070312 +[TRAIN] Iter: 561600 Loss: 0.006259836722165346 PSNR: 26.087888717651367 +[TRAIN] Iter: 561700 Loss: 0.00658523291349411 PSNR: 26.374841690063477 +[TRAIN] Iter: 561800 Loss: 0.006149426568299532 PSNR: 26.60901641845703 +[TRAIN] Iter: 561900 Loss: 0.006186900194734335 PSNR: 26.57405662536621 +[TRAIN] Iter: 562000 Loss: 0.005196449812501669 PSNR: 27.574047088623047 +[TRAIN] Iter: 562100 Loss: 0.004694144707173109 PSNR: 27.52758026123047 +[TRAIN] Iter: 562200 Loss: 0.006803237833082676 PSNR: 26.263771057128906 +[TRAIN] Iter: 562300 Loss: 0.006070211995393038 PSNR: 26.98209571838379 +[TRAIN] Iter: 562400 Loss: 0.007167350500822067 PSNR: 25.614093780517578 +[TRAIN] Iter: 562500 Loss: 0.006057254038751125 PSNR: 26.644920349121094 +[TRAIN] Iter: 562600 Loss: 0.005879315547645092 PSNR: 27.249614715576172 +[TRAIN] Iter: 562700 Loss: 0.0065557886846363544 PSNR: 26.458740234375 +[TRAIN] Iter: 562800 Loss: 0.005773089826107025 PSNR: 27.467588424682617 +[TRAIN] Iter: 562900 Loss: 0.006783651188015938 PSNR: 26.258214950561523 +[TRAIN] Iter: 563000 Loss: 0.005911711603403091 PSNR: 26.588237762451172 +[TRAIN] Iter: 563100 Loss: 0.007235088385641575 PSNR: 25.763479232788086 +[TRAIN] Iter: 563200 Loss: 0.006630239076912403 PSNR: 
26.60918617248535 +[TRAIN] Iter: 563300 Loss: 0.006891057826578617 PSNR: 25.991430282592773 +[TRAIN] Iter: 563400 Loss: 0.006207283120602369 PSNR: 26.71117401123047 +[TRAIN] Iter: 563500 Loss: 0.005409085191786289 PSNR: 26.970687866210938 +[TRAIN] Iter: 563600 Loss: 0.005243021994829178 PSNR: 27.781553268432617 +[TRAIN] Iter: 563700 Loss: 0.006053052376955748 PSNR: 27.165773391723633 +[TRAIN] Iter: 563800 Loss: 0.006845388561487198 PSNR: 26.43269920349121 +[TRAIN] Iter: 563900 Loss: 0.007382632698863745 PSNR: 25.970876693725586 +[TRAIN] Iter: 564000 Loss: 0.006791344378143549 PSNR: 26.1713924407959 +[TRAIN] Iter: 564100 Loss: 0.00663642305880785 PSNR: 26.384838104248047 +[TRAIN] Iter: 564200 Loss: 0.006929970346391201 PSNR: 25.83700942993164 +[TRAIN] Iter: 564300 Loss: 0.0047062477096915245 PSNR: 27.862409591674805 +[TRAIN] Iter: 564400 Loss: 0.007770867086946964 PSNR: 24.93225860595703 +[TRAIN] Iter: 564500 Loss: 0.007261144928634167 PSNR: 25.862096786499023 +[TRAIN] Iter: 564600 Loss: 0.004766382742673159 PSNR: 28.694438934326172 +[TRAIN] Iter: 564700 Loss: 0.007424286566674709 PSNR: 25.488176345825195 +[TRAIN] Iter: 564800 Loss: 0.006195124238729477 PSNR: 26.73243522644043 +[TRAIN] Iter: 564900 Loss: 0.005064818076789379 PSNR: 27.585731506347656 +[TRAIN] Iter: 565000 Loss: 0.005411065183579922 PSNR: 27.602153778076172 +[TRAIN] Iter: 565100 Loss: 0.005553498864173889 PSNR: 28.000436782836914 +[TRAIN] Iter: 565200 Loss: 0.005576043855398893 PSNR: 27.6968936920166 +[TRAIN] Iter: 565300 Loss: 0.0063621411100029945 PSNR: 26.386947631835938 +[TRAIN] Iter: 565400 Loss: 0.007260351907461882 PSNR: 26.044361114501953 +[TRAIN] Iter: 565500 Loss: 0.005477686878293753 PSNR: 28.313562393188477 +[TRAIN] Iter: 565600 Loss: 0.006654669065028429 PSNR: 26.188919067382812 +[TRAIN] Iter: 565700 Loss: 0.005571024492383003 PSNR: 27.64913558959961 +[TRAIN] Iter: 565800 Loss: 0.005533728748559952 PSNR: 27.668315887451172 +[TRAIN] Iter: 565900 Loss: 0.007286375388503075 PSNR: 25.572139739990234 +[TRAIN] Iter: 566000 Loss: 0.006365481298416853 PSNR: 26.703065872192383 +[TRAIN] Iter: 566100 Loss: 0.005505869165062904 PSNR: 27.45760726928711 +[TRAIN] Iter: 566200 Loss: 0.004600105341523886 PSNR: 28.125106811523438 +[TRAIN] Iter: 566300 Loss: 0.006060808897018433 PSNR: 27.1087646484375 +[TRAIN] Iter: 566400 Loss: 0.00658140005543828 PSNR: 26.90768051147461 +[TRAIN] Iter: 566500 Loss: 0.005755668040364981 PSNR: 26.586238861083984 +[TRAIN] Iter: 566600 Loss: 0.0067171999253332615 PSNR: 26.13488006591797 +[TRAIN] Iter: 566700 Loss: 0.00575354415923357 PSNR: 26.658710479736328 +[TRAIN] Iter: 566800 Loss: 0.006685027852654457 PSNR: 26.681020736694336 +[TRAIN] Iter: 566900 Loss: 0.007109814323484898 PSNR: 26.3785457611084 +[TRAIN] Iter: 567000 Loss: 0.007865028455853462 PSNR: 25.89497947692871 +[TRAIN] Iter: 567100 Loss: 0.005867416504770517 PSNR: 26.82843589782715 +[TRAIN] Iter: 567200 Loss: 0.007475363090634346 PSNR: 25.895729064941406 +[TRAIN] Iter: 567300 Loss: 0.005227597430348396 PSNR: 28.890413284301758 +[TRAIN] Iter: 567400 Loss: 0.006645663175731897 PSNR: 26.15975570678711 +[TRAIN] Iter: 567500 Loss: 0.006346928887069225 PSNR: 26.524688720703125 +[TRAIN] Iter: 567600 Loss: 0.0043253228068351746 PSNR: 28.70730209350586 +[TRAIN] Iter: 567700 Loss: 0.005002378486096859 PSNR: 27.995159149169922 +[TRAIN] Iter: 567800 Loss: 0.004741298034787178 PSNR: 28.336200714111328 +[TRAIN] Iter: 567900 Loss: 0.005278429016470909 PSNR: 28.32889175415039 +[TRAIN] Iter: 568000 Loss: 0.00574110820889473 PSNR: 27.184640884399414 
+[TRAIN] Iter: 568100 Loss: 0.006263738498091698 PSNR: 26.57633399963379 +[TRAIN] Iter: 568200 Loss: 0.00651090033352375 PSNR: 26.639020919799805 +[TRAIN] Iter: 568300 Loss: 0.00767007889226079 PSNR: 26.106002807617188 +[TRAIN] Iter: 568400 Loss: 0.006969237234443426 PSNR: 25.9254150390625 +[TRAIN] Iter: 568500 Loss: 0.006489795632660389 PSNR: 26.60199546813965 +[TRAIN] Iter: 568600 Loss: 0.006533393636345863 PSNR: 26.68527603149414 +[TRAIN] Iter: 568700 Loss: 0.005715095438063145 PSNR: 27.37055206298828 +[TRAIN] Iter: 568800 Loss: 0.005144739057868719 PSNR: 28.7962703704834 +[TRAIN] Iter: 568900 Loss: 0.007636303082108498 PSNR: 25.608617782592773 +[TRAIN] Iter: 569000 Loss: 0.006960654631257057 PSNR: 26.591564178466797 +[TRAIN] Iter: 569100 Loss: 0.005979273933917284 PSNR: 26.500497817993164 +[TRAIN] Iter: 569200 Loss: 0.005563385784626007 PSNR: 26.93684959411621 +[TRAIN] Iter: 569300 Loss: 0.0061956364661455154 PSNR: 26.60478401184082 +[TRAIN] Iter: 569400 Loss: 0.0072534047067165375 PSNR: 25.805240631103516 +[TRAIN] Iter: 569500 Loss: 0.006036047358065844 PSNR: 26.020450592041016 +[TRAIN] Iter: 569600 Loss: 0.006695928517729044 PSNR: 26.064640045166016 +[TRAIN] Iter: 569700 Loss: 0.005919898394495249 PSNR: 27.315149307250977 +[TRAIN] Iter: 569800 Loss: 0.008022012189030647 PSNR: 25.0835018157959 +[TRAIN] Iter: 569900 Loss: 0.00495142862200737 PSNR: 28.173826217651367 +Saved checkpoints at ./logs/TUT-out-doll-360-np/570000.tar +[TRAIN] Iter: 570000 Loss: 0.005084384232759476 PSNR: 27.723466873168945 +[TRAIN] Iter: 570100 Loss: 0.007383297197520733 PSNR: 25.62348747253418 +[TRAIN] Iter: 570200 Loss: 0.0064974501729011536 PSNR: 27.999650955200195 +[TRAIN] Iter: 570300 Loss: 0.006533259525895119 PSNR: 27.738555908203125 +[TRAIN] Iter: 570400 Loss: 0.005154338199645281 PSNR: 27.394287109375 +[TRAIN] Iter: 570500 Loss: 0.006647459696978331 PSNR: 25.86707878112793 +[TRAIN] Iter: 570600 Loss: 0.006478528492152691 PSNR: 26.015695571899414 +[TRAIN] Iter: 570700 Loss: 0.006122674327343702 PSNR: 27.336078643798828 +[TRAIN] Iter: 570800 Loss: 0.007426667958498001 PSNR: 25.360240936279297 +[TRAIN] Iter: 570900 Loss: 0.006299527361989021 PSNR: 26.597299575805664 +[TRAIN] Iter: 571000 Loss: 0.007653180509805679 PSNR: 25.486909866333008 +[TRAIN] Iter: 571100 Loss: 0.0065987505950033665 PSNR: 26.668909072875977 +[TRAIN] Iter: 571200 Loss: 0.008118843659758568 PSNR: 25.33957290649414 +[TRAIN] Iter: 571300 Loss: 0.005426621995866299 PSNR: 28.43264389038086 +[TRAIN] Iter: 571400 Loss: 0.00608791084960103 PSNR: 26.992795944213867 +[TRAIN] Iter: 571500 Loss: 0.006753177847713232 PSNR: 26.002140045166016 +[TRAIN] Iter: 571600 Loss: 0.007561600301414728 PSNR: 26.25398826599121 +[TRAIN] Iter: 571700 Loss: 0.006040770560503006 PSNR: 26.58249855041504 +[TRAIN] Iter: 571800 Loss: 0.005860663019120693 PSNR: 26.877182006835938 +[TRAIN] Iter: 571900 Loss: 0.00711334403604269 PSNR: 26.264047622680664 +[TRAIN] Iter: 572000 Loss: 0.006591416895389557 PSNR: 26.607454299926758 +[TRAIN] Iter: 572100 Loss: 0.006571713835000992 PSNR: 26.059816360473633 +[TRAIN] Iter: 572200 Loss: 0.005389973055571318 PSNR: 27.64180564880371 +[TRAIN] Iter: 572300 Loss: 0.005152735393494368 PSNR: 28.36186981201172 +[TRAIN] Iter: 572400 Loss: 0.005512668751180172 PSNR: 28.1926212310791 +[TRAIN] Iter: 572500 Loss: 0.006373112089931965 PSNR: 26.259260177612305 +[TRAIN] Iter: 572600 Loss: 0.008044976741075516 PSNR: 25.690589904785156 +[TRAIN] Iter: 572700 Loss: 0.006689342204481363 PSNR: 26.13405990600586 +[TRAIN] Iter: 572800 Loss: 
0.0058366162702441216 PSNR: 26.803573608398438 +[TRAIN] Iter: 572900 Loss: 0.006626259069889784 PSNR: 26.406604766845703 +[TRAIN] Iter: 573000 Loss: 0.005766634363681078 PSNR: 27.23506736755371 +[TRAIN] Iter: 573100 Loss: 0.004858922213315964 PSNR: 28.040462493896484 +[TRAIN] Iter: 573200 Loss: 0.007087903097271919 PSNR: 25.823490142822266 +[TRAIN] Iter: 573300 Loss: 0.006684774998575449 PSNR: 26.76430320739746 +[TRAIN] Iter: 573400 Loss: 0.00734205637127161 PSNR: 25.571184158325195 +[TRAIN] Iter: 573500 Loss: 0.006637849844992161 PSNR: 25.865650177001953 +[TRAIN] Iter: 573600 Loss: 0.006576338782906532 PSNR: 26.177528381347656 +[TRAIN] Iter: 573700 Loss: 0.006829564459621906 PSNR: 26.638168334960938 +[TRAIN] Iter: 573800 Loss: 0.006712151691317558 PSNR: 25.789020538330078 +[TRAIN] Iter: 573900 Loss: 0.0054525635205209255 PSNR: 27.157569885253906 +[TRAIN] Iter: 574000 Loss: 0.006620875559747219 PSNR: 26.397537231445312 +[TRAIN] Iter: 574100 Loss: 0.005616738926619291 PSNR: 28.274110794067383 +[TRAIN] Iter: 574200 Loss: 0.0063391756266355515 PSNR: 26.051631927490234 +[TRAIN] Iter: 574300 Loss: 0.006814450956881046 PSNR: 27.068077087402344 +[TRAIN] Iter: 574400 Loss: 0.007736276835203171 PSNR: 25.964862823486328 +[TRAIN] Iter: 574500 Loss: 0.00477616349235177 PSNR: 28.813833236694336 +[TRAIN] Iter: 574600 Loss: 0.005138400476425886 PSNR: 27.49173355102539 +[TRAIN] Iter: 574700 Loss: 0.006410328205674887 PSNR: 26.522890090942383 +[TRAIN] Iter: 574800 Loss: 0.004709980450570583 PSNR: 28.811309814453125 +[TRAIN] Iter: 574900 Loss: 0.005352392792701721 PSNR: 27.17730712890625 +[TRAIN] Iter: 575000 Loss: 0.006689653731882572 PSNR: 26.762432098388672 +[TRAIN] Iter: 575100 Loss: 0.004874881356954575 PSNR: 27.80033302307129 +[TRAIN] Iter: 575200 Loss: 0.006543045863509178 PSNR: 26.684450149536133 +[TRAIN] Iter: 575300 Loss: 0.0066502876579761505 PSNR: 26.106151580810547 +[TRAIN] Iter: 575400 Loss: 0.007109471131116152 PSNR: 26.32117462158203 +[TRAIN] Iter: 575500 Loss: 0.005199595354497433 PSNR: 27.851396560668945 +[TRAIN] Iter: 575600 Loss: 0.0055495621636509895 PSNR: 27.516130447387695 +[TRAIN] Iter: 575700 Loss: 0.006339360494166613 PSNR: 26.27163314819336 +[TRAIN] Iter: 575800 Loss: 0.005366834811866283 PSNR: 28.319129943847656 +[TRAIN] Iter: 575900 Loss: 0.006203021388500929 PSNR: 27.062957763671875 +[TRAIN] Iter: 576000 Loss: 0.005868302658200264 PSNR: 26.895122528076172 +[TRAIN] Iter: 576100 Loss: 0.005076008848845959 PSNR: 28.08045768737793 +[TRAIN] Iter: 576200 Loss: 0.0053660073317587376 PSNR: 27.774864196777344 +[TRAIN] Iter: 576300 Loss: 0.006298476830124855 PSNR: 26.42621612548828 +[TRAIN] Iter: 576400 Loss: 0.006001130677759647 PSNR: 26.660980224609375 +[TRAIN] Iter: 576500 Loss: 0.005266580265015364 PSNR: 27.01927375793457 +[TRAIN] Iter: 576600 Loss: 0.0062648579478263855 PSNR: 26.59991455078125 +[TRAIN] Iter: 576700 Loss: 0.00542623782530427 PSNR: 28.421995162963867 +[TRAIN] Iter: 576800 Loss: 0.006133373361080885 PSNR: 26.780214309692383 +[TRAIN] Iter: 576900 Loss: 0.006184154190123081 PSNR: 26.257261276245117 +[TRAIN] Iter: 577000 Loss: 0.007055017165839672 PSNR: 26.012367248535156 +[TRAIN] Iter: 577100 Loss: 0.006075507495552301 PSNR: 26.701473236083984 +[TRAIN] Iter: 577200 Loss: 0.0069886427372694016 PSNR: 25.881162643432617 +[TRAIN] Iter: 577300 Loss: 0.0064462278969585896 PSNR: 26.313566207885742 +[TRAIN] Iter: 577400 Loss: 0.005744416266679764 PSNR: 26.881200790405273 +[TRAIN] Iter: 577500 Loss: 0.0048406412824988365 PSNR: 28.263778686523438 +[TRAIN] Iter: 577600 Loss: 
0.006388497538864613 PSNR: 26.59205436706543 +[TRAIN] Iter: 577700 Loss: 0.006108676549047232 PSNR: 26.866497039794922 +[TRAIN] Iter: 577800 Loss: 0.005244379863142967 PSNR: 28.134262084960938 +[TRAIN] Iter: 577900 Loss: 0.006750585976988077 PSNR: 26.45232391357422 +[TRAIN] Iter: 578000 Loss: 0.005423033609986305 PSNR: 27.274070739746094 +[TRAIN] Iter: 578100 Loss: 0.007102882023900747 PSNR: 25.82716941833496 +[TRAIN] Iter: 578200 Loss: 0.0058389161713421345 PSNR: 27.141260147094727 +[TRAIN] Iter: 578300 Loss: 0.006047262344509363 PSNR: 26.378337860107422 +[TRAIN] Iter: 578400 Loss: 0.006842529401183128 PSNR: 26.18636131286621 +[TRAIN] Iter: 578500 Loss: 0.004555631894618273 PSNR: 28.824613571166992 +[TRAIN] Iter: 578600 Loss: 0.005533806048333645 PSNR: 26.940372467041016 +[TRAIN] Iter: 578700 Loss: 0.005943399388343096 PSNR: 27.841732025146484 +[TRAIN] Iter: 578800 Loss: 0.005234148353338242 PSNR: 27.837356567382812 +[TRAIN] Iter: 578900 Loss: 0.005920573603361845 PSNR: 27.482765197753906 +[TRAIN] Iter: 579000 Loss: 0.005301712546497583 PSNR: 28.11223793029785 +[TRAIN] Iter: 579100 Loss: 0.00657013151794672 PSNR: 26.168235778808594 +[TRAIN] Iter: 579200 Loss: 0.0060902610421180725 PSNR: 26.639141082763672 +[TRAIN] Iter: 579300 Loss: 0.005636543966829777 PSNR: 26.922557830810547 +[TRAIN] Iter: 579400 Loss: 0.006050770170986652 PSNR: 26.434131622314453 +[TRAIN] Iter: 579500 Loss: 0.005728019401431084 PSNR: 26.724422454833984 +[TRAIN] Iter: 579600 Loss: 0.005306406877934933 PSNR: 27.835355758666992 +[TRAIN] Iter: 579700 Loss: 0.006176528986543417 PSNR: 26.422344207763672 +[TRAIN] Iter: 579800 Loss: 0.008405608125030994 PSNR: 24.971832275390625 +[TRAIN] Iter: 579900 Loss: 0.0071732839569449425 PSNR: 26.073068618774414 +Saved checkpoints at ./logs/TUT-out-doll-360-np/580000.tar +[TRAIN] Iter: 580000 Loss: 0.0051427497528493404 PSNR: 27.551280975341797 +[TRAIN] Iter: 580100 Loss: 0.005963210016489029 PSNR: 27.34003448486328 +[TRAIN] Iter: 580200 Loss: 0.005274305120110512 PSNR: 28.697725296020508 +[TRAIN] Iter: 580300 Loss: 0.0065859230235219 PSNR: 26.211618423461914 +[TRAIN] Iter: 580400 Loss: 0.006822244264185429 PSNR: 27.108999252319336 +[TRAIN] Iter: 580500 Loss: 0.005652832332998514 PSNR: 27.368915557861328 +[TRAIN] Iter: 580600 Loss: 0.007809218484908342 PSNR: 25.34098243713379 +[TRAIN] Iter: 580700 Loss: 0.00595815759152174 PSNR: 26.95439338684082 +[TRAIN] Iter: 580800 Loss: 0.005871611647307873 PSNR: 26.692148208618164 +[TRAIN] Iter: 580900 Loss: 0.005962112918496132 PSNR: 27.219240188598633 +[TRAIN] Iter: 581000 Loss: 0.006421055179089308 PSNR: 26.692798614501953 +[TRAIN] Iter: 581100 Loss: 0.004921200219541788 PSNR: 27.785972595214844 +[TRAIN] Iter: 581200 Loss: 0.006131077650934458 PSNR: 26.67182731628418 +[TRAIN] Iter: 581300 Loss: 0.006574593018740416 PSNR: 26.968822479248047 +[TRAIN] Iter: 581400 Loss: 0.0067679984495043755 PSNR: 25.796615600585938 +[TRAIN] Iter: 581500 Loss: 0.00631901016458869 PSNR: 26.50188636779785 +[TRAIN] Iter: 581600 Loss: 0.0076750582084059715 PSNR: 25.697147369384766 +[TRAIN] Iter: 581700 Loss: 0.00599785428494215 PSNR: 27.799863815307617 +[TRAIN] Iter: 581800 Loss: 0.005209206137806177 PSNR: 27.60120391845703 +[TRAIN] Iter: 581900 Loss: 0.007228245493024588 PSNR: 25.787799835205078 +[TRAIN] Iter: 582000 Loss: 0.006685941945761442 PSNR: 26.296489715576172 +[TRAIN] Iter: 582100 Loss: 0.004553955513983965 PSNR: 28.903663635253906 +[TRAIN] Iter: 582200 Loss: 0.008140680380165577 PSNR: 25.24898910522461 +[TRAIN] Iter: 582300 Loss: 0.004794060252606869 PSNR: 
27.993614196777344 +[TRAIN] Iter: 582400 Loss: 0.008021552115678787 PSNR: 25.17953109741211 +[TRAIN] Iter: 582500 Loss: 0.004712269175797701 PSNR: 28.75303077697754 +[TRAIN] Iter: 582600 Loss: 0.006823425181210041 PSNR: 26.38628387451172 +[TRAIN] Iter: 582700 Loss: 0.005198880564421415 PSNR: 27.23146629333496 +[TRAIN] Iter: 582800 Loss: 0.006530667655169964 PSNR: 26.08380889892578 +[TRAIN] Iter: 582900 Loss: 0.005749681033194065 PSNR: 26.639400482177734 +[TRAIN] Iter: 583000 Loss: 0.005968624725937843 PSNR: 27.206829071044922 +[TRAIN] Iter: 583100 Loss: 0.00650367047637701 PSNR: 26.355411529541016 +[TRAIN] Iter: 583200 Loss: 0.006160994991660118 PSNR: 27.150869369506836 +[TRAIN] Iter: 583300 Loss: 0.006410220172256231 PSNR: 26.609722137451172 +[TRAIN] Iter: 583400 Loss: 0.007011753506958485 PSNR: 26.06310272216797 +[TRAIN] Iter: 583500 Loss: 0.005629176273941994 PSNR: 27.11774253845215 +[TRAIN] Iter: 583600 Loss: 0.008201491087675095 PSNR: 25.416959762573242 +[TRAIN] Iter: 583700 Loss: 0.0064888447523117065 PSNR: 27.179039001464844 +[TRAIN] Iter: 583800 Loss: 0.0071469638496637344 PSNR: 26.168127059936523 +[TRAIN] Iter: 583900 Loss: 0.005494418554008007 PSNR: 27.926359176635742 +[TRAIN] Iter: 584000 Loss: 0.007276811171323061 PSNR: 25.867923736572266 +[TRAIN] Iter: 584100 Loss: 0.007466696202754974 PSNR: 25.81136703491211 +[TRAIN] Iter: 584200 Loss: 0.007122337818145752 PSNR: 25.595558166503906 +[TRAIN] Iter: 584300 Loss: 0.005160779692232609 PSNR: 28.777339935302734 +[TRAIN] Iter: 584400 Loss: 0.00686220359057188 PSNR: 25.76555824279785 +[TRAIN] Iter: 584500 Loss: 0.005609531421214342 PSNR: 27.424755096435547 +[TRAIN] Iter: 584600 Loss: 0.007611539214849472 PSNR: 26.200071334838867 +[TRAIN] Iter: 584700 Loss: 0.0061590103432536125 PSNR: 26.419153213500977 +[TRAIN] Iter: 584800 Loss: 0.00600323174148798 PSNR: 27.10370635986328 +[TRAIN] Iter: 584900 Loss: 0.007721599657088518 PSNR: 25.35056495666504 +[TRAIN] Iter: 585000 Loss: 0.006323681212961674 PSNR: 26.950347900390625 +[TRAIN] Iter: 585100 Loss: 0.006792531814426184 PSNR: 27.68136978149414 +[TRAIN] Iter: 585200 Loss: 0.006769854109734297 PSNR: 26.604894638061523 +[TRAIN] Iter: 585300 Loss: 0.00545679684728384 PSNR: 28.801233291625977 +[TRAIN] Iter: 585400 Loss: 0.006944907829165459 PSNR: 26.445072174072266 +[TRAIN] Iter: 585500 Loss: 0.005471258424222469 PSNR: 27.346460342407227 +[TRAIN] Iter: 585600 Loss: 0.006016735918819904 PSNR: 26.581584930419922 +[TRAIN] Iter: 585700 Loss: 0.005581073462963104 PSNR: 28.578479766845703 +[TRAIN] Iter: 585800 Loss: 0.007000437937676907 PSNR: 26.237083435058594 +[TRAIN] Iter: 585900 Loss: 0.005955846980214119 PSNR: 26.65488052368164 +[TRAIN] Iter: 586000 Loss: 0.0059953490272164345 PSNR: 27.224979400634766 +[TRAIN] Iter: 586100 Loss: 0.006751263979822397 PSNR: 26.95649528503418 +[TRAIN] Iter: 586200 Loss: 0.005842951592057943 PSNR: 27.25493049621582 +[TRAIN] Iter: 586300 Loss: 0.0062607210129499435 PSNR: 26.91860008239746 +[TRAIN] Iter: 586400 Loss: 0.0068821716122329235 PSNR: 26.65892791748047 +[TRAIN] Iter: 586500 Loss: 0.006529596634209156 PSNR: 26.48708724975586 +[TRAIN] Iter: 586600 Loss: 0.005843484308570623 PSNR: 27.902915954589844 +[TRAIN] Iter: 586700 Loss: 0.005426340736448765 PSNR: 27.24568748474121 +[TRAIN] Iter: 586800 Loss: 0.007055331952869892 PSNR: 25.969409942626953 +[TRAIN] Iter: 586900 Loss: 0.005895435810089111 PSNR: 26.92935562133789 +[TRAIN] Iter: 587000 Loss: 0.00712662935256958 PSNR: 25.89639663696289 +[TRAIN] Iter: 587100 Loss: 0.007562458515167236 PSNR: 26.160463333129883 
+[TRAIN] Iter: 587200 Loss: 0.006637216545641422 PSNR: 26.54932403564453 +[TRAIN] Iter: 587300 Loss: 0.006892929319292307 PSNR: 25.91108512878418 +[TRAIN] Iter: 587400 Loss: 0.005645030178129673 PSNR: 27.713441848754883 +[TRAIN] Iter: 587500 Loss: 0.007509544957429171 PSNR: 26.04876136779785 +[TRAIN] Iter: 587600 Loss: 0.006652586627751589 PSNR: 26.814029693603516 +[TRAIN] Iter: 587700 Loss: 0.005984234623610973 PSNR: 26.43353843688965 +[TRAIN] Iter: 587800 Loss: 0.004512017592787743 PSNR: 28.731718063354492 +[TRAIN] Iter: 587900 Loss: 0.00491302739828825 PSNR: 29.292774200439453 +[TRAIN] Iter: 588000 Loss: 0.006167570129036903 PSNR: 26.983009338378906 +[TRAIN] Iter: 588100 Loss: 0.005318556912243366 PSNR: 27.58376693725586 +[TRAIN] Iter: 588200 Loss: 0.005325988866388798 PSNR: 26.99981689453125 +[TRAIN] Iter: 588300 Loss: 0.00481090322136879 PSNR: 28.84311866760254 +[TRAIN] Iter: 588400 Loss: 0.006151705980300903 PSNR: 26.54317283630371 +[TRAIN] Iter: 588500 Loss: 0.006859960965812206 PSNR: 26.077529907226562 +[TRAIN] Iter: 588600 Loss: 0.007103615440428257 PSNR: 25.902040481567383 +[TRAIN] Iter: 588700 Loss: 0.005564609542489052 PSNR: 26.889488220214844 +[TRAIN] Iter: 588800 Loss: 0.006660535465925932 PSNR: 26.49281883239746 +[TRAIN] Iter: 588900 Loss: 0.0050931028090417385 PSNR: 29.028018951416016 +[TRAIN] Iter: 589000 Loss: 0.007036774419248104 PSNR: 25.761335372924805 +[TRAIN] Iter: 589100 Loss: 0.006281662732362747 PSNR: 27.12822914123535 +[TRAIN] Iter: 589200 Loss: 0.006542276591062546 PSNR: 26.393787384033203 +[TRAIN] Iter: 589300 Loss: 0.005033508874475956 PSNR: 28.100627899169922 +[TRAIN] Iter: 589400 Loss: 0.0063749318942427635 PSNR: 26.524702072143555 +[TRAIN] Iter: 589500 Loss: 0.00589144229888916 PSNR: 27.299423217773438 +[TRAIN] Iter: 589600 Loss: 0.0057842624373734 PSNR: 27.343412399291992 +[TRAIN] Iter: 589700 Loss: 0.005328727886080742 PSNR: 27.542055130004883 +[TRAIN] Iter: 589800 Loss: 0.006438166834414005 PSNR: 26.530183792114258 +[TRAIN] Iter: 589900 Loss: 0.006507852580398321 PSNR: 26.284954071044922 +Saved checkpoints at ./logs/TUT-out-doll-360-np/590000.tar +[TRAIN] Iter: 590000 Loss: 0.005579912103712559 PSNR: 27.146392822265625 +[TRAIN] Iter: 590100 Loss: 0.005882451310753822 PSNR: 26.97047996520996 +[TRAIN] Iter: 590200 Loss: 0.007844161242246628 PSNR: 25.25111961364746 +[TRAIN] Iter: 590300 Loss: 0.006219876930117607 PSNR: 26.538593292236328 +[TRAIN] Iter: 590400 Loss: 0.006292598322033882 PSNR: 26.37483024597168 +[TRAIN] Iter: 590500 Loss: 0.005042253527790308 PSNR: 28.241992950439453 +[TRAIN] Iter: 590600 Loss: 0.006714177317917347 PSNR: 25.872121810913086 +[TRAIN] Iter: 590700 Loss: 0.004483707249164581 PSNR: 28.442420959472656 +[TRAIN] Iter: 590800 Loss: 0.005990005098283291 PSNR: 26.197364807128906 +[TRAIN] Iter: 590900 Loss: 0.005371308885514736 PSNR: 27.54210090637207 +[TRAIN] Iter: 591000 Loss: 0.006080447696149349 PSNR: 26.010780334472656 +[TRAIN] Iter: 591100 Loss: 0.005210867151618004 PSNR: 26.89548683166504 +[TRAIN] Iter: 591200 Loss: 0.005224348045885563 PSNR: 28.912382125854492 +[TRAIN] Iter: 591300 Loss: 0.006314212456345558 PSNR: 26.78360366821289 +[TRAIN] Iter: 591400 Loss: 0.005764476023614407 PSNR: 26.90618133544922 +[TRAIN] Iter: 591500 Loss: 0.006781162228435278 PSNR: 26.78443145751953 +[TRAIN] Iter: 591600 Loss: 0.005963569972664118 PSNR: 27.291213989257812 +[TRAIN] Iter: 591700 Loss: 0.006287435535341501 PSNR: 26.70586585998535 +[TRAIN] Iter: 591800 Loss: 0.005638123024255037 PSNR: 26.51521873474121 +[TRAIN] Iter: 591900 Loss: 
0.005163899157196283 PSNR: 27.892248153686523 +[TRAIN] Iter: 592000 Loss: 0.006558060646057129 PSNR: 26.76213836669922 +[TRAIN] Iter: 592100 Loss: 0.0052018905989825726 PSNR: 28.373777389526367 +[TRAIN] Iter: 592200 Loss: 0.005928932223469019 PSNR: 26.870807647705078 +[TRAIN] Iter: 592300 Loss: 0.0048464080318808556 PSNR: 28.26723861694336 +[TRAIN] Iter: 592400 Loss: 0.006148729473352432 PSNR: 26.806673049926758 +[TRAIN] Iter: 592500 Loss: 0.0055763693526387215 PSNR: 27.20970916748047 +[TRAIN] Iter: 592600 Loss: 0.004043225198984146 PSNR: 28.569761276245117 +[TRAIN] Iter: 592700 Loss: 0.0066105094738304615 PSNR: 26.384239196777344 +[TRAIN] Iter: 592800 Loss: 0.005911448039114475 PSNR: 26.59621238708496 +[TRAIN] Iter: 592900 Loss: 0.006454284302890301 PSNR: 26.39198875427246 +[TRAIN] Iter: 593000 Loss: 0.006857286673039198 PSNR: 26.630542755126953 +[TRAIN] Iter: 593100 Loss: 0.004274584352970123 PSNR: 28.58263397216797 +[TRAIN] Iter: 593200 Loss: 0.005150133743882179 PSNR: 27.183340072631836 +[TRAIN] Iter: 593300 Loss: 0.006585075985640287 PSNR: 26.545989990234375 +[TRAIN] Iter: 593400 Loss: 0.0055651236325502396 PSNR: 27.619537353515625 +[TRAIN] Iter: 593500 Loss: 0.006064613349735737 PSNR: 26.699193954467773 +[TRAIN] Iter: 593600 Loss: 0.00541955791413784 PSNR: 26.78269386291504 +[TRAIN] Iter: 593700 Loss: 0.005078259389847517 PSNR: 27.396955490112305 +[TRAIN] Iter: 593800 Loss: 0.007040063850581646 PSNR: 26.366641998291016 +[TRAIN] Iter: 593900 Loss: 0.00562688522040844 PSNR: 28.019575119018555 +[TRAIN] Iter: 594000 Loss: 0.006670940201729536 PSNR: 26.10077667236328 +[TRAIN] Iter: 594100 Loss: 0.007308711297810078 PSNR: 25.523284912109375 +[TRAIN] Iter: 594200 Loss: 0.006882356479763985 PSNR: 26.141630172729492 +[TRAIN] Iter: 594300 Loss: 0.007912776432931423 PSNR: 25.759265899658203 +[TRAIN] Iter: 594400 Loss: 0.006205893121659756 PSNR: 26.634822845458984 +[TRAIN] Iter: 594500 Loss: 0.0073820073157548904 PSNR: 25.826169967651367 +[TRAIN] Iter: 594600 Loss: 0.006375210825353861 PSNR: 27.030271530151367 +[TRAIN] Iter: 594700 Loss: 0.004731180611997843 PSNR: 28.471290588378906 +[TRAIN] Iter: 594800 Loss: 0.006447926629334688 PSNR: 26.727872848510742 +[TRAIN] Iter: 594900 Loss: 0.007009530905634165 PSNR: 26.119531631469727 +[TRAIN] Iter: 595000 Loss: 0.00673321820795536 PSNR: 26.214147567749023 +[TRAIN] Iter: 595100 Loss: 0.0059822192415595055 PSNR: 26.8956241607666 +[TRAIN] Iter: 595200 Loss: 0.007167992647737265 PSNR: 26.034622192382812 +[TRAIN] Iter: 595300 Loss: 0.0058293333277106285 PSNR: 26.920425415039062 +[TRAIN] Iter: 595400 Loss: 0.005795283243060112 PSNR: 27.28317642211914 +[TRAIN] Iter: 595500 Loss: 0.005830448120832443 PSNR: 26.849945068359375 +[TRAIN] Iter: 595600 Loss: 0.005805606953799725 PSNR: 27.407711029052734 +[TRAIN] Iter: 595700 Loss: 0.006332879886031151 PSNR: 26.523452758789062 +[TRAIN] Iter: 595800 Loss: 0.005058548878878355 PSNR: 28.0853328704834 +[TRAIN] Iter: 595900 Loss: 0.005427786149084568 PSNR: 26.818267822265625 +[TRAIN] Iter: 596000 Loss: 0.006663457956165075 PSNR: 26.57048797607422 +[TRAIN] Iter: 596100 Loss: 0.004040495026856661 PSNR: 28.94099998474121 +[TRAIN] Iter: 596200 Loss: 0.006426175124943256 PSNR: 26.60300636291504 +[TRAIN] Iter: 596300 Loss: 0.004797791130840778 PSNR: 28.476648330688477 +[TRAIN] Iter: 596400 Loss: 0.006304802373051643 PSNR: 26.309322357177734 +[TRAIN] Iter: 596500 Loss: 0.006085570901632309 PSNR: 27.13890838623047 +[TRAIN] Iter: 596600 Loss: 0.006534465588629246 PSNR: 26.650089263916016 +[TRAIN] Iter: 596700 Loss: 
0.0058955843560397625 PSNR: 26.370412826538086 +[TRAIN] Iter: 596800 Loss: 0.004902742337435484 PSNR: 27.978479385375977 +[TRAIN] Iter: 596900 Loss: 0.006027554161846638 PSNR: 27.14645004272461 +[TRAIN] Iter: 597000 Loss: 0.006303262896835804 PSNR: 26.32587242126465 +[TRAIN] Iter: 597100 Loss: 0.006793968379497528 PSNR: 26.361309051513672 +[TRAIN] Iter: 597200 Loss: 0.005688236560672522 PSNR: 27.022289276123047 +[TRAIN] Iter: 597300 Loss: 0.006042300723493099 PSNR: 26.57012367248535 +[TRAIN] Iter: 597400 Loss: 0.006134411785751581 PSNR: 26.312664031982422 +[TRAIN] Iter: 597500 Loss: 0.005469674710184336 PSNR: 27.474853515625 +[TRAIN] Iter: 597600 Loss: 0.007191305048763752 PSNR: 26.29909896850586 +[TRAIN] Iter: 597700 Loss: 0.0070516187697649 PSNR: 26.08584976196289 +[TRAIN] Iter: 597800 Loss: 0.00544081162661314 PSNR: 27.610132217407227 +[TRAIN] Iter: 597900 Loss: 0.006958846002817154 PSNR: 26.350418090820312 +[TRAIN] Iter: 598000 Loss: 0.004920372273772955 PSNR: 28.089908599853516 +[TRAIN] Iter: 598100 Loss: 0.00783543661236763 PSNR: 25.53303337097168 +[TRAIN] Iter: 598200 Loss: 0.006229077465832233 PSNR: 26.289119720458984 +[TRAIN] Iter: 598300 Loss: 0.005839984398335218 PSNR: 27.102094650268555 +[TRAIN] Iter: 598400 Loss: 0.006314285099506378 PSNR: 26.492568969726562 +[TRAIN] Iter: 598500 Loss: 0.005422416143119335 PSNR: 28.0262508392334 +[TRAIN] Iter: 598600 Loss: 0.007718421518802643 PSNR: 25.421459197998047 +[TRAIN] Iter: 598700 Loss: 0.007040812633931637 PSNR: 26.11077880859375 +[TRAIN] Iter: 598800 Loss: 0.004690886940807104 PSNR: 27.971261978149414 +[TRAIN] Iter: 598900 Loss: 0.005411998368799686 PSNR: 27.617124557495117 +[TRAIN] Iter: 599000 Loss: 0.005207378882914782 PSNR: 28.396242141723633 +[TRAIN] Iter: 599100 Loss: 0.00478612445294857 PSNR: 28.89507484436035 +[TRAIN] Iter: 599200 Loss: 0.0061827367171645164 PSNR: 27.202451705932617 +[TRAIN] Iter: 599300 Loss: 0.006942293606698513 PSNR: 26.12258529663086 +[TRAIN] Iter: 599400 Loss: 0.004942046944051981 PSNR: 28.70334815979004 +[TRAIN] Iter: 599500 Loss: 0.0067642563953995705 PSNR: 26.797706604003906 +[TRAIN] Iter: 599600 Loss: 0.006407920736819506 PSNR: 26.96058464050293 +[TRAIN] Iter: 599700 Loss: 0.005960389040410519 PSNR: 26.9996395111084 +[TRAIN] Iter: 599800 Loss: 0.0054641906172037125 PSNR: 27.740781784057617 +[TRAIN] Iter: 599900 Loss: 0.005093017593026161 PSNR: 28.011009216308594 +Saved checkpoints at ./logs/TUT-out-doll-360-np/600000.tar +0 0.000843048095703125 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.190810441970825 +2 22.16739773750305 +3 21.986591339111328 +4 21.818490743637085 +5 23.087742805480957 +6 22.56204390525818 +7 21.604097604751587 +8 22.229706287384033 +9 22.85798478126526 +10 21.414393663406372 +11 22.49926209449768 +12 21.133636713027954 +13 23.112271070480347 +14 22.027122259140015 +15 21.668545246124268 +16 21.555901288986206 +17 21.83822226524353 +18 22.139958143234253 +19 22.64946150779724 +20 21.47791814804077 +21 23.365360498428345 +22 21.829646348953247 +23 21.43800139427185 +24 21.577043056488037 +25 22.054036855697632 +26 22.07601499557495 +27 22.11315393447876 +28 21.974616289138794 +29 21.456217527389526 +30 21.891615390777588 +31 22.367427825927734 +32 21.619587182998657 +33 22.766695261001587 +34 22.03184676170349 +35 21.5697021484375 +36 21.627952814102173 +37 22.847103595733643 +38 21.496602773666382 +39 22.138520002365112 +40 21.74109959602356 +41 22.08530592918396 +42 21.236231088638306 +43 22.00830578804016 +44 22.551448106765747 +45 22.32005023956299 +46 
21.261945009231567 +47 22.295877933502197 +48 21.654953241348267 +49 21.2874493598938 +50 21.8914155960083 +51 21.61005663871765 +52 21.498645544052124 +53 21.891656398773193 +54 21.524036645889282 +55 21.683080434799194 +56 23.147139072418213 +57 21.7136709690094 +58 22.739205837249756 +59 21.441620349884033 +60 22.55275058746338 +61 21.369200706481934 +62 22.077237844467163 +63 21.24058485031128 +64 22.535961151123047 +65 22.560088396072388 +66 21.555113315582275 +67 20.931610822677612 +68 21.69780731201172 +69 21.482954740524292 +70 22.345443725585938 +71 21.66418433189392 +72 22.095906734466553 +73 21.931050539016724 +74 22.191905736923218 +75 21.572067975997925 +76 21.96149253845215 +77 22.148341417312622 +78 21.996787548065186 +79 21.606131315231323 +80 20.940980911254883 +81 21.992501497268677 +82 21.88560652732849 +83 21.988053798675537 +84 21.539113521575928 +85 21.770275831222534 +86 21.557108402252197 +87 22.19680643081665 +88 22.341753244400024 +89 21.592974424362183 +90 21.744591236114502 +91 22.33245038986206 +92 22.469456434249878 +93 21.261730194091797 +94 21.84614634513855 +95 21.296366453170776 +96 21.729806900024414 +97 21.83950185775757 +98 21.369729042053223 +99 22.32718539237976 +100 21.697354078292847 +101 21.46474575996399 +102 21.629387617111206 +103 21.73470115661621 +104 21.378163814544678 +105 23.12581777572632 +106 21.62528371810913 +107 22.248615264892578 +108 22.24228072166443 +109 21.82244086265564 +110 22.480000257492065 +111 21.650792121887207 +112 21.617074966430664 +113 21.537498235702515 +114 22.269774675369263 +115 22.100444555282593 +116 21.244693279266357 +117 21.40260124206543 +118 21.646541833877563 +119 22.07081151008606 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[ 1.9485e-01, -7.7490e-02, -1.7888e-01, -2.8511e+01], + [ 1.3576e+00, 1.4966e+00, 1.6119e+00, -2.9701e+01], + [ 1.1545e+00, 1.5776e+00, 1.9135e+00, -3.8603e+01], + ..., + [-9.6683e+00, -7.2684e+00, -7.0190e+00, 7.0367e+02], + [-7.1333e+00, -5.6058e+00, -5.3499e+00, 6.1236e+02], + [-1.0932e+01, -8.6739e+00, -7.4056e+00, 6.6249e+02]], + + [[-6.5745e-01, 1.0698e-01, 7.0545e-01, -2.4161e+01], + [-6.9586e-01, 1.2816e-01, 1.2912e+00, -1.2267e+01], + [-9.9036e-01, -2.3869e-01, 6.7289e-01, 2.7516e+00], + ..., + [-1.7011e+01, -1.6739e+01, -3.2788e+00, 4.6221e+02], + [-1.6558e+01, -1.5935e+01, -2.7106e+00, 4.6935e+02], + [-1.6971e+01, -1.6410e+01, -3.0092e+00, 4.7863e+02]], + + [[ 1.3811e+00, 1.0706e+00, 1.1122e+00, -5.0289e+01], + [ 2.0944e+00, 2.0546e+00, 2.2012e+00, -4.0461e+01], + [ 5.7680e-01, 1.9601e-01, 3.4623e-01, -2.2045e+01], + ..., + [-5.5156e+00, -3.9420e+00, -4.8130e+00, 2.0828e+02], + [-5.9633e+00, -4.0833e+00, -4.7403e+00, 1.9279e+02], + [-4.4747e+00, -3.4117e+00, -3.8765e+00, 1.2998e+02]], + + ..., + + [[-1.0694e+00, -1.2814e+00, -1.6360e+00, -5.8555e+01], + [ 1.9605e-02, 1.3595e-01, 7.7810e-01, -3.0688e+01], + [ 8.0265e-01, 7.1097e-01, 1.2440e+00, -2.7282e+01], + ..., + [-1.2097e+01, 8.2876e-01, 4.9118e+00, 8.5246e+02], + [-1.2012e+01, 4.5412e-01, 4.0066e+00, 9.1048e+02], + [-1.3275e+01, -8.1395e-01, 2.0669e+00, 9.0315e+02]], + + [[-1.2379e+00, 1.4446e+00, 1.1954e+00, -1.0010e+02], + [-2.6406e+00, -1.1584e+00, 7.0850e-01, -4.0033e+01], + [-8.7160e-01, -1.8291e-01, 7.3187e-01, 1.9545e-02], + ..., + [-9.0884e+00, -8.5802e+00, 6.6788e-01, 5.0257e+02], + [-9.8294e+00, -9.5340e+00, -6.5363e-01, 5.1546e+02], + [-8.9064e+00, -8.3028e+00, 6.8251e-01, 5.0404e+02]], + + [[-3.8232e-01, -2.1800e-01, -1.4881e-01, -3.4450e+01], + [-1.6323e+00, -1.7729e+00, 
-1.8222e+00, 3.7007e+01], + [-1.6180e+00, -1.7628e+00, -1.8189e+00, 3.6739e+01], + ..., + [-2.5840e+01, -1.6642e+01, -1.0918e+01, 9.2512e+02], + [-2.6269e+01, -1.6604e+01, -1.0609e+01, 9.4567e+02], + [-2.8081e+01, -1.9194e+01, -1.3411e+01, 9.4760e+02]]], + grad_fn=), 'rgb0': tensor([[0.3403, 0.3229, 0.3230], + [0.2821, 0.4469, 0.6702], + [0.1942, 0.2109, 0.2312], + ..., + [0.2573, 0.2403, 0.2405], + [0.2756, 0.4398, 0.6711], + [0.1128, 0.1165, 0.1181]], grad_fn=), 'disp0': tensor([ 74.8006, 27.7083, 39.2011, ..., 110.4377, 31.3996, 43.7025], + grad_fn=), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.0053, 0.0053, 0.0058, ..., 0.0029, 0.0054, 0.0039])} +0 0.0009698867797851562 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.404106855392456 +2 22.93153738975525 +3 21.392470598220825 +4 22.58634638786316 +5 22.209704399108887 +6 21.600030183792114 +7 22.695754051208496 +8 21.778557300567627 +9 21.587573051452637 +10 23.277633666992188 +11 20.887317657470703 +12 22.06663703918457 +13 21.768043994903564 +14 21.97257089614868 +15 21.66436791419983 +16 21.06643557548523 +17 22.277106285095215 +18 22.13385009765625 +19 21.914966344833374 +20 21.546752452850342 +21 21.920439958572388 +22 21.31616520881653 +23 21.78629159927368 +24 21.67831325531006 +25 21.267667293548584 +26 21.8885817527771 +27 20.965996026992798 +28 21.40582537651062 +29 22.53704261779785 +30 21.454506874084473 +31 21.964014768600464 +32 21.421916246414185 +33 21.241711854934692 +34 21.05527639389038 +35 22.035046815872192 +36 22.4669132232666 +37 21.215883493423462 +38 22.62841248512268 +39 21.497026920318604 +40 21.66469955444336 +41 22.089293241500854 +42 22.25095510482788 +43 21.419300079345703 +44 21.80164122581482 +45 21.659353733062744 +46 21.4493350982666 +47 22.79597020149231 +48 21.137930631637573 +49 22.72718119621277 +50 21.514382362365723 +51 22.686500787734985 +52 21.331472158432007 +53 21.789294481277466 +54 21.78094244003296 +55 21.60040020942688 +56 21.71382474899292 +57 21.45397400856018 +58 21.881425857543945 +59 21.69907259941101 +60 21.164990186691284 +61 21.930657386779785 +62 21.349454641342163 +63 21.422584056854248 +64 22.65769863128662 +65 22.209927797317505 +66 21.542285203933716 +67 21.77865481376648 +68 21.7915678024292 +69 22.03542685508728 +70 22.015605449676514 +71 21.950339555740356 +72 21.93309235572815 +73 22.63683819770813 +74 22.948163270950317 +75 21.48415446281433 +76 21.96328115463257 +77 22.147725582122803 +78 21.566055297851562 +79 22.48612356185913 +80 21.455869913101196 +81 22.2503502368927 +82 21.803076028823853 +83 22.143057346343994 +84 22.210959434509277 +85 21.108384370803833 +86 21.82653284072876 +87 21.905527353286743 +88 21.672908544540405 +89 21.33758854866028 +90 21.800859451293945 +91 21.63034152984619 +92 21.89839744567871 +93 22.54658269882202 +94 21.892637968063354 +95 21.296513319015503 +96 22.0828754901886 +97 21.36579942703247 +98 22.90842890739441 +99 21.627317667007446 +100 21.508609771728516 +101 21.583409786224365 +102 22.335259199142456 +103 22.15752100944519 +104 21.026216983795166 +105 22.56914234161377 +106 21.677668809890747 +107 21.73214077949524 +108 22.39958953857422 +109 22.68412709236145 +110 21.444841861724854 +111 21.280762910842896 +112 22.126444816589355 +113 22.14435362815857 +114 22.3713858127594 +115 21.6236093044281 +116 22.493335962295532 +117 22.390392065048218 +118 23.305150032043457 +119 21.130308628082275 +test poses shape torch.Size([4, 3, 4]) +0 0.0010683536529541016 +torch.Size([320, 640, 3]) 
torch.Size([320, 640]) +1 21.558390378952026 +2 22.151456356048584 +3 21.566200256347656 +Saved test set +[TRAIN] Iter: 600000 Loss: 0.006115424446761608 PSNR: 26.782209396362305 +[TRAIN] Iter: 600100 Loss: 0.005334081128239632 PSNR: 28.971466064453125 +[TRAIN] Iter: 600200 Loss: 0.0050140563398599625 PSNR: 28.33224105834961 +[TRAIN] Iter: 600300 Loss: 0.0049459850415587425 PSNR: 28.154321670532227 +[TRAIN] Iter: 600400 Loss: 0.006894703954458237 PSNR: 26.288433074951172 +[TRAIN] Iter: 600500 Loss: 0.007255164906382561 PSNR: 26.092227935791016 +[TRAIN] Iter: 600600 Loss: 0.004848178010433912 PSNR: 27.859670639038086 +[TRAIN] Iter: 600700 Loss: 0.008139532059431076 PSNR: 26.2381649017334 +[TRAIN] Iter: 600800 Loss: 0.006558138411492109 PSNR: 26.239261627197266 +[TRAIN] Iter: 600900 Loss: 0.006263453047722578 PSNR: 26.661306381225586 +[TRAIN] Iter: 601000 Loss: 0.006023373454809189 PSNR: 26.92936134338379 +[TRAIN] Iter: 601100 Loss: 0.004830234218388796 PSNR: 29.191856384277344 +[TRAIN] Iter: 601200 Loss: 0.006417791824787855 PSNR: 27.415849685668945 +[TRAIN] Iter: 601300 Loss: 0.006657502613961697 PSNR: 26.463722229003906 +[TRAIN] Iter: 601400 Loss: 0.00647992268204689 PSNR: 26.402786254882812 +[TRAIN] Iter: 601500 Loss: 0.005364247132092714 PSNR: 27.228137969970703 +[TRAIN] Iter: 601600 Loss: 0.006587171461433172 PSNR: 26.445112228393555 +[TRAIN] Iter: 601700 Loss: 0.007331092841923237 PSNR: 25.9010066986084 +[TRAIN] Iter: 601800 Loss: 0.0089704729616642 PSNR: 25.529747009277344 +[TRAIN] Iter: 601900 Loss: 0.005579207558184862 PSNR: 27.119213104248047 +[TRAIN] Iter: 602000 Loss: 0.005442395806312561 PSNR: 28.200109481811523 +[TRAIN] Iter: 602100 Loss: 0.0059059420600533485 PSNR: 27.586009979248047 +[TRAIN] Iter: 602200 Loss: 0.0074379174038767815 PSNR: 26.095108032226562 +[TRAIN] Iter: 602300 Loss: 0.005005926359444857 PSNR: 28.23225975036621 +[TRAIN] Iter: 602400 Loss: 0.007229062728583813 PSNR: 26.116561889648438 +[TRAIN] Iter: 602500 Loss: 0.006192871369421482 PSNR: 28.134416580200195 +[TRAIN] Iter: 602600 Loss: 0.005660806782543659 PSNR: 27.356613159179688 +[TRAIN] Iter: 602700 Loss: 0.004766901955008507 PSNR: 27.62496566772461 +[TRAIN] Iter: 602800 Loss: 0.006473344285041094 PSNR: 26.547151565551758 +[TRAIN] Iter: 602900 Loss: 0.00570372398942709 PSNR: 26.853225708007812 +[TRAIN] Iter: 603000 Loss: 0.006316029466688633 PSNR: 27.183990478515625 +[TRAIN] Iter: 603100 Loss: 0.004830941557884216 PSNR: 27.41482925415039 +[TRAIN] Iter: 603200 Loss: 0.0067452602088451385 PSNR: 26.208189010620117 +[TRAIN] Iter: 603300 Loss: 0.00579421129077673 PSNR: 26.990020751953125 +[TRAIN] Iter: 603400 Loss: 0.00732879713177681 PSNR: 26.877925872802734 +[TRAIN] Iter: 603500 Loss: 0.006895611062645912 PSNR: 25.89113426208496 +[TRAIN] Iter: 603600 Loss: 0.004164029844105244 PSNR: 28.982667922973633 +[TRAIN] Iter: 603700 Loss: 0.006452176719903946 PSNR: 26.484703063964844 +[TRAIN] Iter: 603800 Loss: 0.006629071664065123 PSNR: 26.740013122558594 +[TRAIN] Iter: 603900 Loss: 0.007171078585088253 PSNR: 25.73206901550293 +[TRAIN] Iter: 604000 Loss: 0.005575439892709255 PSNR: 26.986400604248047 +[TRAIN] Iter: 604100 Loss: 0.005392284132540226 PSNR: 27.053726196289062 +[TRAIN] Iter: 604200 Loss: 0.006433712784200907 PSNR: 27.255504608154297 +[TRAIN] Iter: 604300 Loss: 0.006772374268621206 PSNR: 26.616077423095703 +[TRAIN] Iter: 604400 Loss: 0.006923924200236797 PSNR: 26.231287002563477 +[TRAIN] Iter: 604500 Loss: 0.005580300465226173 PSNR: 26.984636306762695 +[TRAIN] Iter: 604600 Loss: 0.006604838650673628 PSNR: 
26.33135414123535 +[TRAIN] Iter: 604700 Loss: 0.005676931701600552 PSNR: 26.811630249023438 +[TRAIN] Iter: 604800 Loss: 0.005960935261100531 PSNR: 26.921823501586914 +[TRAIN] Iter: 604900 Loss: 0.006974922958761454 PSNR: 26.19762420654297 +[TRAIN] Iter: 605000 Loss: 0.006946414243429899 PSNR: 25.824766159057617 +[TRAIN] Iter: 605100 Loss: 0.008262340910732746 PSNR: 25.633983612060547 +[TRAIN] Iter: 605200 Loss: 0.005901429336518049 PSNR: 26.962360382080078 +[TRAIN] Iter: 605300 Loss: 0.005454357713460922 PSNR: 27.4720458984375 +[TRAIN] Iter: 605400 Loss: 0.005918656475841999 PSNR: 28.570253372192383 +[TRAIN] Iter: 605500 Loss: 0.006128012202680111 PSNR: 26.81374740600586 +[TRAIN] Iter: 605600 Loss: 0.006435096263885498 PSNR: 26.279077529907227 +[TRAIN] Iter: 605700 Loss: 0.006826143711805344 PSNR: 26.05797576904297 +[TRAIN] Iter: 605800 Loss: 0.0071341246366500854 PSNR: 26.517011642456055 +[TRAIN] Iter: 605900 Loss: 0.005291352979838848 PSNR: 26.976749420166016 +[TRAIN] Iter: 606000 Loss: 0.005157740321010351 PSNR: 27.257692337036133 +[TRAIN] Iter: 606100 Loss: 0.007026974111795425 PSNR: 25.618942260742188 +[TRAIN] Iter: 606200 Loss: 0.00591838825494051 PSNR: 27.240713119506836 +[TRAIN] Iter: 606300 Loss: 0.007045045029371977 PSNR: 26.43517303466797 +[TRAIN] Iter: 606400 Loss: 0.006617898587137461 PSNR: 26.183982849121094 +[TRAIN] Iter: 606500 Loss: 0.006329709198325872 PSNR: 27.598012924194336 +[TRAIN] Iter: 606600 Loss: 0.006128798238933086 PSNR: 26.400772094726562 +[TRAIN] Iter: 606700 Loss: 0.00679225567728281 PSNR: 26.117826461791992 +[TRAIN] Iter: 606800 Loss: 0.0071680666878819466 PSNR: 25.775299072265625 +[TRAIN] Iter: 606900 Loss: 0.006140705198049545 PSNR: 27.49673080444336 +[TRAIN] Iter: 607000 Loss: 0.005996020045131445 PSNR: 27.096227645874023 +[TRAIN] Iter: 607100 Loss: 0.004674581810832024 PSNR: 28.578344345092773 +[TRAIN] Iter: 607200 Loss: 0.005005382001399994 PSNR: 27.99315643310547 +[TRAIN] Iter: 607300 Loss: 0.006701810285449028 PSNR: 26.578096389770508 +[TRAIN] Iter: 607400 Loss: 0.0047487523406744 PSNR: 28.74275779724121 +[TRAIN] Iter: 607500 Loss: 0.005156703293323517 PSNR: 28.425840377807617 +[TRAIN] Iter: 607600 Loss: 0.005464344285428524 PSNR: 27.405250549316406 +[TRAIN] Iter: 607700 Loss: 0.006409984547644854 PSNR: 27.412328720092773 +[TRAIN] Iter: 607800 Loss: 0.005486857611685991 PSNR: 28.100982666015625 +[TRAIN] Iter: 607900 Loss: 0.006081072613596916 PSNR: 26.901371002197266 +[TRAIN] Iter: 608000 Loss: 0.006402329541742802 PSNR: 26.2618408203125 +[TRAIN] Iter: 608100 Loss: 0.006158588919788599 PSNR: 27.372268676757812 +[TRAIN] Iter: 608200 Loss: 0.004513087682425976 PSNR: 28.224369049072266 +[TRAIN] Iter: 608300 Loss: 0.005248371511697769 PSNR: 26.63927459716797 +[TRAIN] Iter: 608400 Loss: 0.0053507001139223576 PSNR: 27.32404899597168 +[TRAIN] Iter: 608500 Loss: 0.005993472412228584 PSNR: 26.721540451049805 +[TRAIN] Iter: 608600 Loss: 0.005484016612172127 PSNR: 27.92770004272461 +[TRAIN] Iter: 608700 Loss: 0.006911425851285458 PSNR: 26.312780380249023 +[TRAIN] Iter: 608800 Loss: 0.004869296681135893 PSNR: 28.068967819213867 +[TRAIN] Iter: 608900 Loss: 0.005199353210628033 PSNR: 28.445354461669922 +[TRAIN] Iter: 609000 Loss: 0.006969374604523182 PSNR: 25.894208908081055 +[TRAIN] Iter: 609100 Loss: 0.0065627205185592175 PSNR: 26.160724639892578 +[TRAIN] Iter: 609200 Loss: 0.005946766585111618 PSNR: 27.526607513427734 +[TRAIN] Iter: 609300 Loss: 0.0059623816050589085 PSNR: 26.9747257232666 +[TRAIN] Iter: 609400 Loss: 0.005558250471949577 PSNR: 27.351118087768555 
+[TRAIN] Iter: 609500 Loss: 0.0065915510058403015 PSNR: 26.174949645996094 +[TRAIN] Iter: 609600 Loss: 0.005461007356643677 PSNR: 27.181549072265625 +[TRAIN] Iter: 609700 Loss: 0.006699860095977783 PSNR: 25.963855743408203 +[TRAIN] Iter: 609800 Loss: 0.004955632612109184 PSNR: 28.573373794555664 +[TRAIN] Iter: 609900 Loss: 0.006475457921624184 PSNR: 26.4881534576416 +Saved checkpoints at ./logs/TUT-out-doll-360-np/610000.tar +[TRAIN] Iter: 610000 Loss: 0.006062413565814495 PSNR: 26.686763763427734 +[TRAIN] Iter: 610100 Loss: 0.005649601109325886 PSNR: 27.50680160522461 +[TRAIN] Iter: 610200 Loss: 0.005196763668209314 PSNR: 27.85661506652832 +[TRAIN] Iter: 610300 Loss: 0.005296861752867699 PSNR: 27.17862892150879 +[TRAIN] Iter: 610400 Loss: 0.005377581343054771 PSNR: 27.85529327392578 +[TRAIN] Iter: 610500 Loss: 0.007952496409416199 PSNR: 25.402793884277344 +[TRAIN] Iter: 610600 Loss: 0.005951644852757454 PSNR: 28.184553146362305 +[TRAIN] Iter: 610700 Loss: 0.005644648335874081 PSNR: 26.79494857788086 +[TRAIN] Iter: 610800 Loss: 0.005913755856454372 PSNR: 27.63695526123047 +[TRAIN] Iter: 610900 Loss: 0.00503441272303462 PSNR: 28.351093292236328 +[TRAIN] Iter: 611000 Loss: 0.0067612044513225555 PSNR: 26.645790100097656 +[TRAIN] Iter: 611100 Loss: 0.006994474679231644 PSNR: 26.969074249267578 +[TRAIN] Iter: 611200 Loss: 0.006076253950595856 PSNR: 27.84469985961914 +[TRAIN] Iter: 611300 Loss: 0.005999898537993431 PSNR: 27.244985580444336 +[TRAIN] Iter: 611400 Loss: 0.005018332973122597 PSNR: 28.25621795654297 +[TRAIN] Iter: 611500 Loss: 0.004788742866367102 PSNR: 29.33956527709961 +[TRAIN] Iter: 611600 Loss: 0.0065880464389920235 PSNR: 25.879592895507812 +[TRAIN] Iter: 611700 Loss: 0.004860962741076946 PSNR: 27.99472427368164 +[TRAIN] Iter: 611800 Loss: 0.005328000523149967 PSNR: 28.217269897460938 +[TRAIN] Iter: 611900 Loss: 0.00596045795828104 PSNR: 26.381319046020508 +[TRAIN] Iter: 612000 Loss: 0.005046411417424679 PSNR: 28.096237182617188 +[TRAIN] Iter: 612100 Loss: 0.00627135531976819 PSNR: 26.546716690063477 +[TRAIN] Iter: 612200 Loss: 0.005453038960695267 PSNR: 27.522016525268555 +[TRAIN] Iter: 612300 Loss: 0.005758167710155249 PSNR: 27.147375106811523 +[TRAIN] Iter: 612400 Loss: 0.005077429115772247 PSNR: 27.933208465576172 +[TRAIN] Iter: 612500 Loss: 0.005410605110228062 PSNR: 27.928682327270508 +[TRAIN] Iter: 612600 Loss: 0.005957896821200848 PSNR: 26.660093307495117 +[TRAIN] Iter: 612700 Loss: 0.006952657364308834 PSNR: 26.2635440826416 +[TRAIN] Iter: 612800 Loss: 0.005826713517308235 PSNR: 27.75574493408203 +[TRAIN] Iter: 612900 Loss: 0.004390771500766277 PSNR: 28.09771728515625 +[TRAIN] Iter: 613000 Loss: 0.005121740512549877 PSNR: 27.92120933532715 +[TRAIN] Iter: 613100 Loss: 0.005528229288756847 PSNR: 27.967899322509766 +[TRAIN] Iter: 613200 Loss: 0.007146197836846113 PSNR: 25.730371475219727 +[TRAIN] Iter: 613300 Loss: 0.007609731517732143 PSNR: 25.49109649658203 +[TRAIN] Iter: 613400 Loss: 0.006786758545786142 PSNR: 26.480867385864258 +[TRAIN] Iter: 613500 Loss: 0.00648707477375865 PSNR: 26.16778564453125 +[TRAIN] Iter: 613600 Loss: 0.005423779133707285 PSNR: 28.050378799438477 +[TRAIN] Iter: 613700 Loss: 0.005434505641460419 PSNR: 27.82745361328125 +[TRAIN] Iter: 613800 Loss: 0.007914847694337368 PSNR: 26.122377395629883 +[TRAIN] Iter: 613900 Loss: 0.005825908854603767 PSNR: 27.117755889892578 +[TRAIN] Iter: 614000 Loss: 0.005464133340865374 PSNR: 28.24156951904297 +[TRAIN] Iter: 614100 Loss: 0.006484423764050007 PSNR: 26.028026580810547 +[TRAIN] Iter: 614200 Loss: 
0.004589559510350227 PSNR: 28.799745559692383 +[TRAIN] Iter: 614300 Loss: 0.006318812258541584 PSNR: 26.393590927124023 +[TRAIN] Iter: 614400 Loss: 0.006784882862120867 PSNR: 27.201122283935547 +[TRAIN] Iter: 614500 Loss: 0.006178086623549461 PSNR: 26.503437042236328 +[TRAIN] Iter: 614600 Loss: 0.006564187351614237 PSNR: 26.264101028442383 +[TRAIN] Iter: 614700 Loss: 0.007481158711016178 PSNR: 26.231767654418945 +[TRAIN] Iter: 614800 Loss: 0.005689689889550209 PSNR: 27.394561767578125 +[TRAIN] Iter: 614900 Loss: 0.0067957863211631775 PSNR: 26.395660400390625 +[TRAIN] Iter: 615000 Loss: 0.00591178797185421 PSNR: 26.462797164916992 +[TRAIN] Iter: 615100 Loss: 0.006467936560511589 PSNR: 26.422168731689453 +[TRAIN] Iter: 615200 Loss: 0.0072927894070744514 PSNR: 25.855371475219727 +[TRAIN] Iter: 615300 Loss: 0.004468382336199284 PSNR: 28.933185577392578 +[TRAIN] Iter: 615400 Loss: 0.004892378579825163 PSNR: 28.040456771850586 +[TRAIN] Iter: 615500 Loss: 0.00620944844558835 PSNR: 27.049972534179688 +[TRAIN] Iter: 615600 Loss: 0.006487809121608734 PSNR: 26.311813354492188 +[TRAIN] Iter: 615700 Loss: 0.006722765043377876 PSNR: 26.433889389038086 +[TRAIN] Iter: 615800 Loss: 0.006772060412913561 PSNR: 26.290685653686523 +[TRAIN] Iter: 615900 Loss: 0.0077711306512355804 PSNR: 25.695438385009766 +[TRAIN] Iter: 616000 Loss: 0.007677004672586918 PSNR: 25.67464828491211 +[TRAIN] Iter: 616100 Loss: 0.006262785755097866 PSNR: 27.532655715942383 +[TRAIN] Iter: 616200 Loss: 0.005580748897045851 PSNR: 27.296464920043945 +[TRAIN] Iter: 616300 Loss: 0.006075425073504448 PSNR: 26.738351821899414 +[TRAIN] Iter: 616400 Loss: 0.006657640915364027 PSNR: 26.670482635498047 +[TRAIN] Iter: 616500 Loss: 0.005598250776529312 PSNR: 26.9195499420166 +[TRAIN] Iter: 616600 Loss: 0.005394740030169487 PSNR: 28.10062026977539 +[TRAIN] Iter: 616700 Loss: 0.005740907974541187 PSNR: 27.89984130859375 +[TRAIN] Iter: 616800 Loss: 0.00695806834846735 PSNR: 26.134632110595703 +[TRAIN] Iter: 616900 Loss: 0.0051122382283210754 PSNR: 28.612640380859375 +[TRAIN] Iter: 617000 Loss: 0.006765217520296574 PSNR: 26.972257614135742 +[TRAIN] Iter: 617100 Loss: 0.006936247460544109 PSNR: 25.990291595458984 +[TRAIN] Iter: 617200 Loss: 0.006503568962216377 PSNR: 26.03631019592285 +[TRAIN] Iter: 617300 Loss: 0.006161772646009922 PSNR: 26.562307357788086 +[TRAIN] Iter: 617400 Loss: 0.005213398486375809 PSNR: 29.184667587280273 +[TRAIN] Iter: 617500 Loss: 0.0050442335195839405 PSNR: 28.3824462890625 +[TRAIN] Iter: 617600 Loss: 0.006638592109084129 PSNR: 26.550718307495117 +[TRAIN] Iter: 617700 Loss: 0.006815245840698481 PSNR: 26.511646270751953 +[TRAIN] Iter: 617800 Loss: 0.005389110185205936 PSNR: 28.02851104736328 +[TRAIN] Iter: 617900 Loss: 0.006822369992733002 PSNR: 25.87177848815918 +[TRAIN] Iter: 618000 Loss: 0.00617966428399086 PSNR: 26.508159637451172 +[TRAIN] Iter: 618100 Loss: 0.005323419813066721 PSNR: 28.762853622436523 +[TRAIN] Iter: 618200 Loss: 0.005207314155995846 PSNR: 27.339067459106445 +[TRAIN] Iter: 618300 Loss: 0.006097818724811077 PSNR: 27.054473876953125 +[TRAIN] Iter: 618400 Loss: 0.0053147440776228905 PSNR: 27.748531341552734 +[TRAIN] Iter: 618500 Loss: 0.006066713947802782 PSNR: 26.598289489746094 +[TRAIN] Iter: 618600 Loss: 0.006964575033634901 PSNR: 26.409643173217773 +[TRAIN] Iter: 618700 Loss: 0.007157234940677881 PSNR: 26.281787872314453 +[TRAIN] Iter: 618800 Loss: 0.005459993612021208 PSNR: 27.328123092651367 +[TRAIN] Iter: 618900 Loss: 0.006546619348227978 PSNR: 26.312881469726562 +[TRAIN] Iter: 619000 Loss: 
0.007283494807779789 PSNR: 26.142030715942383 +[TRAIN] Iter: 619100 Loss: 0.0072723980993032455 PSNR: 25.42457389831543 +[TRAIN] Iter: 619200 Loss: 0.005849783308804035 PSNR: 26.98012351989746 +[TRAIN] Iter: 619300 Loss: 0.006394700147211552 PSNR: 27.052963256835938 +[TRAIN] Iter: 619400 Loss: 0.006490026600658894 PSNR: 27.020822525024414 +[TRAIN] Iter: 619500 Loss: 0.006381046026945114 PSNR: 26.935670852661133 +[TRAIN] Iter: 619600 Loss: 0.006754024885594845 PSNR: 26.056011199951172 +[TRAIN] Iter: 619700 Loss: 0.006759387440979481 PSNR: 26.03606414794922 +[TRAIN] Iter: 619800 Loss: 0.005012975540012121 PSNR: 27.40413475036621 +[TRAIN] Iter: 619900 Loss: 0.006080022547394037 PSNR: 27.044639587402344 +Saved checkpoints at ./logs/TUT-out-doll-360-np/620000.tar +[TRAIN] Iter: 620000 Loss: 0.005983691196888685 PSNR: 26.62366485595703 +[TRAIN] Iter: 620100 Loss: 0.00526365777477622 PSNR: 27.577537536621094 +[TRAIN] Iter: 620200 Loss: 0.00548472860828042 PSNR: 28.02762222290039 +[TRAIN] Iter: 620300 Loss: 0.006286969408392906 PSNR: 26.892574310302734 +[TRAIN] Iter: 620400 Loss: 0.006387949921190739 PSNR: 27.0185489654541 +[TRAIN] Iter: 620500 Loss: 0.005911553278565407 PSNR: 26.80093765258789 +[TRAIN] Iter: 620600 Loss: 0.006876101717352867 PSNR: 25.954618453979492 +[TRAIN] Iter: 620700 Loss: 0.0055371676571667194 PSNR: 27.53021240234375 +[TRAIN] Iter: 620800 Loss: 0.004391858354210854 PSNR: 29.24274444580078 +[TRAIN] Iter: 620900 Loss: 0.006409928202629089 PSNR: 26.042089462280273 +[TRAIN] Iter: 621000 Loss: 0.007235682103782892 PSNR: 26.403371810913086 +[TRAIN] Iter: 621100 Loss: 0.0063277315348386765 PSNR: 26.528467178344727 +[TRAIN] Iter: 621200 Loss: 0.007411688566207886 PSNR: 26.410490036010742 +[TRAIN] Iter: 621300 Loss: 0.006522403098642826 PSNR: 26.39874839782715 +[TRAIN] Iter: 621400 Loss: 0.006149263121187687 PSNR: 26.03734016418457 +[TRAIN] Iter: 621500 Loss: 0.005923828110098839 PSNR: 27.242284774780273 +[TRAIN] Iter: 621600 Loss: 0.005302610341459513 PSNR: 27.737995147705078 +[TRAIN] Iter: 621700 Loss: 0.0058761355467140675 PSNR: 27.03368377685547 +[TRAIN] Iter: 621800 Loss: 0.00696286978200078 PSNR: 25.789825439453125 +[TRAIN] Iter: 621900 Loss: 0.007088018581271172 PSNR: 25.844823837280273 +[TRAIN] Iter: 622000 Loss: 0.006454604212194681 PSNR: 26.52865982055664 +[TRAIN] Iter: 622100 Loss: 0.0065075247548520565 PSNR: 26.50719451904297 +[TRAIN] Iter: 622200 Loss: 0.006198928225785494 PSNR: 26.508224487304688 +[TRAIN] Iter: 622300 Loss: 0.005776493344455957 PSNR: 27.912067413330078 +[TRAIN] Iter: 622400 Loss: 0.005839499179273844 PSNR: 27.424898147583008 +[TRAIN] Iter: 622500 Loss: 0.006112158298492432 PSNR: 26.471065521240234 +[TRAIN] Iter: 622600 Loss: 0.006908715702593327 PSNR: 25.9287109375 +[TRAIN] Iter: 622700 Loss: 0.006859518121927977 PSNR: 25.685836791992188 +[TRAIN] Iter: 622800 Loss: 0.0069279177114367485 PSNR: 26.324909210205078 +[TRAIN] Iter: 622900 Loss: 0.005003801081329584 PSNR: 28.71086311340332 +[TRAIN] Iter: 623000 Loss: 0.00591506902128458 PSNR: 27.389158248901367 +[TRAIN] Iter: 623100 Loss: 0.006256727036088705 PSNR: 26.365726470947266 +[TRAIN] Iter: 623200 Loss: 0.006158932577818632 PSNR: 26.813636779785156 +[TRAIN] Iter: 623300 Loss: 0.007608548738062382 PSNR: 25.6722469329834 +[TRAIN] Iter: 623400 Loss: 0.004843026399612427 PSNR: 28.3924617767334 +[TRAIN] Iter: 623500 Loss: 0.00766479317098856 PSNR: 25.615032196044922 +[TRAIN] Iter: 623600 Loss: 0.005845719017088413 PSNR: 27.046852111816406 +[TRAIN] Iter: 623700 Loss: 0.004697433672845364 PSNR: 
28.553800582885742 +[TRAIN] Iter: 623800 Loss: 0.006671248469501734 PSNR: 26.171228408813477 +[TRAIN] Iter: 623900 Loss: 0.006501937285065651 PSNR: 26.06151580810547 +[TRAIN] Iter: 624000 Loss: 0.006443898659199476 PSNR: 26.609786987304688 +[TRAIN] Iter: 624100 Loss: 0.004934617318212986 PSNR: 28.473896026611328 +[TRAIN] Iter: 624200 Loss: 0.005404823459684849 PSNR: 27.4465389251709 +[TRAIN] Iter: 624300 Loss: 0.005703039932996035 PSNR: 26.73207664489746 +[TRAIN] Iter: 624400 Loss: 0.005016337148845196 PSNR: 27.617368698120117 +[TRAIN] Iter: 624500 Loss: 0.005344517529010773 PSNR: 27.900278091430664 +[TRAIN] Iter: 624600 Loss: 0.005250094458460808 PSNR: 28.062131881713867 +[TRAIN] Iter: 624700 Loss: 0.005091513507068157 PSNR: 27.321725845336914 +[TRAIN] Iter: 624800 Loss: 0.00735265901312232 PSNR: 25.539926528930664 +[TRAIN] Iter: 624900 Loss: 0.0054993946105241776 PSNR: 27.980714797973633 +[TRAIN] Iter: 625000 Loss: 0.00620325468480587 PSNR: 26.818592071533203 +[TRAIN] Iter: 625100 Loss: 0.007421751506626606 PSNR: 26.164316177368164 +[TRAIN] Iter: 625200 Loss: 0.006242336239665747 PSNR: 26.82919692993164 +[TRAIN] Iter: 625300 Loss: 0.006077180616557598 PSNR: 26.767738342285156 +[TRAIN] Iter: 625400 Loss: 0.007049660198390484 PSNR: 26.208740234375 +[TRAIN] Iter: 625500 Loss: 0.0055386838503181934 PSNR: 26.997060775756836 +[TRAIN] Iter: 625600 Loss: 0.006234311498701572 PSNR: 27.108688354492188 +[TRAIN] Iter: 625700 Loss: 0.007320725824683905 PSNR: 25.96779441833496 +[TRAIN] Iter: 625800 Loss: 0.0055951206013560295 PSNR: 27.067916870117188 +[TRAIN] Iter: 625900 Loss: 0.006232530809938908 PSNR: 26.182405471801758 +[TRAIN] Iter: 626000 Loss: 0.00660906033590436 PSNR: 26.192720413208008 +[TRAIN] Iter: 626100 Loss: 0.006487017497420311 PSNR: 26.348804473876953 +[TRAIN] Iter: 626200 Loss: 0.007217844948172569 PSNR: 26.067047119140625 +[TRAIN] Iter: 626300 Loss: 0.00627541309222579 PSNR: 27.079206466674805 +[TRAIN] Iter: 626400 Loss: 0.006447069812566042 PSNR: 26.984689712524414 +[TRAIN] Iter: 626500 Loss: 0.00569052342325449 PSNR: 28.154115676879883 +[TRAIN] Iter: 626600 Loss: 0.007302134297788143 PSNR: 25.764284133911133 +[TRAIN] Iter: 626700 Loss: 0.006131001282483339 PSNR: 26.989116668701172 +[TRAIN] Iter: 626800 Loss: 0.006458706222474575 PSNR: 26.296762466430664 +[TRAIN] Iter: 626900 Loss: 0.00701336981728673 PSNR: 26.650781631469727 +[TRAIN] Iter: 627000 Loss: 0.0069314269348979 PSNR: 26.021146774291992 +[TRAIN] Iter: 627100 Loss: 0.006061742082238197 PSNR: 26.6864070892334 +[TRAIN] Iter: 627200 Loss: 0.004077761434018612 PSNR: 28.81370735168457 +[TRAIN] Iter: 627300 Loss: 0.006216390058398247 PSNR: 26.75596046447754 +[TRAIN] Iter: 627400 Loss: 0.006975150667130947 PSNR: 26.568397521972656 +[TRAIN] Iter: 627500 Loss: 0.0050217704847455025 PSNR: 28.313161849975586 +[TRAIN] Iter: 627600 Loss: 0.006312476471066475 PSNR: 26.0872802734375 +[TRAIN] Iter: 627700 Loss: 0.00714451540261507 PSNR: 26.679033279418945 +[TRAIN] Iter: 627800 Loss: 0.005999288521707058 PSNR: 26.683046340942383 +[TRAIN] Iter: 627900 Loss: 0.006446219049394131 PSNR: 26.558595657348633 +[TRAIN] Iter: 628000 Loss: 0.006215604953467846 PSNR: 26.953603744506836 +[TRAIN] Iter: 628100 Loss: 0.005476767662912607 PSNR: 27.85759162902832 +[TRAIN] Iter: 628200 Loss: 0.005973975174129009 PSNR: 26.967586517333984 +[TRAIN] Iter: 628300 Loss: 0.007063353434205055 PSNR: 25.765151977539062 +[TRAIN] Iter: 628400 Loss: 0.005213212221860886 PSNR: 27.690471649169922 +[TRAIN] Iter: 628500 Loss: 0.005564240738749504 PSNR: 27.033367156982422 
+[TRAIN] Iter: 628600 Loss: 0.008079605177044868 PSNR: 25.022964477539062 +[TRAIN] Iter: 628700 Loss: 0.00452070776373148 PSNR: 28.42223358154297 +[TRAIN] Iter: 628800 Loss: 0.005974926985800266 PSNR: 27.48224639892578 +[TRAIN] Iter: 628900 Loss: 0.006218686234205961 PSNR: 26.408498764038086 +[TRAIN] Iter: 629000 Loss: 0.007095792330801487 PSNR: 25.95825958251953 +[TRAIN] Iter: 629100 Loss: 0.005302052013576031 PSNR: 28.1397647857666 +[TRAIN] Iter: 629200 Loss: 0.005868698004633188 PSNR: 28.3229923248291 +[TRAIN] Iter: 629300 Loss: 0.005955955013632774 PSNR: 27.000730514526367 +[TRAIN] Iter: 629400 Loss: 0.005708638578653336 PSNR: 27.67391586303711 +[TRAIN] Iter: 629500 Loss: 0.0064427899196743965 PSNR: 27.019451141357422 +[TRAIN] Iter: 629600 Loss: 0.005444979295134544 PSNR: 26.455738067626953 +[TRAIN] Iter: 629700 Loss: 0.004209301434457302 PSNR: 28.620357513427734 +[TRAIN] Iter: 629800 Loss: 0.007236140314489603 PSNR: 26.06753921508789 +[TRAIN] Iter: 629900 Loss: 0.006131548900157213 PSNR: 27.445140838623047 +Saved checkpoints at ./logs/TUT-out-doll-360-np/630000.tar +[TRAIN] Iter: 630000 Loss: 0.00622211117297411 PSNR: 26.78046226501465 +[TRAIN] Iter: 630100 Loss: 0.005442361813038588 PSNR: 27.510826110839844 +[TRAIN] Iter: 630200 Loss: 0.006920870393514633 PSNR: 26.14328956604004 +[TRAIN] Iter: 630300 Loss: 0.007149063516408205 PSNR: 26.074697494506836 +[TRAIN] Iter: 630400 Loss: 0.006119033321738243 PSNR: 26.681116104125977 +[TRAIN] Iter: 630500 Loss: 0.005686772521585226 PSNR: 26.749217987060547 +[TRAIN] Iter: 630600 Loss: 0.008217835798859596 PSNR: 26.001928329467773 +[TRAIN] Iter: 630700 Loss: 0.005740494001656771 PSNR: 28.00104331970215 +[TRAIN] Iter: 630800 Loss: 0.005837049335241318 PSNR: 27.49658966064453 +[TRAIN] Iter: 630900 Loss: 0.006719682365655899 PSNR: 26.166791915893555 +[TRAIN] Iter: 631000 Loss: 0.006140378303825855 PSNR: 26.969417572021484 +[TRAIN] Iter: 631100 Loss: 0.006612839642912149 PSNR: 26.402545928955078 +[TRAIN] Iter: 631200 Loss: 0.005871424917131662 PSNR: 27.576215744018555 +[TRAIN] Iter: 631300 Loss: 0.005820796824991703 PSNR: 27.139766693115234 +[TRAIN] Iter: 631400 Loss: 0.007042103447020054 PSNR: 26.0878963470459 +[TRAIN] Iter: 631500 Loss: 0.006056612357497215 PSNR: 26.47502899169922 +[TRAIN] Iter: 631600 Loss: 0.006200266070663929 PSNR: 26.632707595825195 +[TRAIN] Iter: 631700 Loss: 0.005990904755890369 PSNR: 26.764827728271484 +[TRAIN] Iter: 631800 Loss: 0.006553700193762779 PSNR: 26.064090728759766 +[TRAIN] Iter: 631900 Loss: 0.006461614277213812 PSNR: 27.09423065185547 +[TRAIN] Iter: 632000 Loss: 0.005971075966954231 PSNR: 26.943500518798828 +[TRAIN] Iter: 632100 Loss: 0.006646151654422283 PSNR: 26.33777618408203 +[TRAIN] Iter: 632200 Loss: 0.007143129128962755 PSNR: 26.03523826599121 +[TRAIN] Iter: 632300 Loss: 0.00595127884298563 PSNR: 26.45047950744629 +[TRAIN] Iter: 632400 Loss: 0.007216607220470905 PSNR: 25.795488357543945 +[TRAIN] Iter: 632500 Loss: 0.004843004979193211 PSNR: 28.1395263671875 +[TRAIN] Iter: 632600 Loss: 0.005690976977348328 PSNR: 28.346879959106445 +[TRAIN] Iter: 632700 Loss: 0.006296577397733927 PSNR: 26.43675422668457 +[TRAIN] Iter: 632800 Loss: 0.005766845308244228 PSNR: 27.22112274169922 +[TRAIN] Iter: 632900 Loss: 0.004785593133419752 PSNR: 28.519657135009766 +[TRAIN] Iter: 633000 Loss: 0.006585182622075081 PSNR: 26.288494110107422 +[TRAIN] Iter: 633100 Loss: 0.008793951943516731 PSNR: 25.025026321411133 +[TRAIN] Iter: 633200 Loss: 0.006456158589571714 PSNR: 27.00250816345215 +[TRAIN] Iter: 633300 Loss: 
0.005954474210739136 PSNR: 27.154911041259766 +[TRAIN] Iter: 633400 Loss: 0.004878420848399401 PSNR: 28.06846809387207 +[TRAIN] Iter: 633500 Loss: 0.006057577207684517 PSNR: 27.09882926940918 +[TRAIN] Iter: 633600 Loss: 0.00524830911308527 PSNR: 28.303741455078125 +[TRAIN] Iter: 633700 Loss: 0.0057310741394758224 PSNR: 27.094614028930664 +[TRAIN] Iter: 633800 Loss: 0.007199532352387905 PSNR: 25.691221237182617 +[TRAIN] Iter: 633900 Loss: 0.006309328600764275 PSNR: 27.404359817504883 +[TRAIN] Iter: 634000 Loss: 0.006759254727512598 PSNR: 25.66388702392578 +[TRAIN] Iter: 634100 Loss: 0.0065198722295463085 PSNR: 25.696258544921875 +[TRAIN] Iter: 634200 Loss: 0.0057657379657030106 PSNR: 27.772497177124023 +[TRAIN] Iter: 634300 Loss: 0.007486891467124224 PSNR: 25.691049575805664 +[TRAIN] Iter: 634400 Loss: 0.006529258564114571 PSNR: 26.742794036865234 +[TRAIN] Iter: 634500 Loss: 0.005057178437709808 PSNR: 28.101842880249023 +[TRAIN] Iter: 634600 Loss: 0.005200106184929609 PSNR: 28.042245864868164 +[TRAIN] Iter: 634700 Loss: 0.005128525197505951 PSNR: 28.636762619018555 +[TRAIN] Iter: 634800 Loss: 0.006881549023091793 PSNR: 26.422468185424805 +[TRAIN] Iter: 634900 Loss: 0.0070388661697506905 PSNR: 26.223052978515625 +[TRAIN] Iter: 635000 Loss: 0.005076718516647816 PSNR: 28.824539184570312 +[TRAIN] Iter: 635100 Loss: 0.00668728444725275 PSNR: 26.7469425201416 +[TRAIN] Iter: 635200 Loss: 0.00678556552156806 PSNR: 25.65636444091797 +[TRAIN] Iter: 635300 Loss: 0.0062568713910877705 PSNR: 26.126237869262695 +[TRAIN] Iter: 635400 Loss: 0.0074325259774923325 PSNR: 26.678762435913086 +[TRAIN] Iter: 635500 Loss: 0.007047232240438461 PSNR: 25.831296920776367 +[TRAIN] Iter: 635600 Loss: 0.004943834617733955 PSNR: 28.477882385253906 +[TRAIN] Iter: 635700 Loss: 0.006008529104292393 PSNR: 27.070354461669922 +[TRAIN] Iter: 635800 Loss: 0.007311371620744467 PSNR: 25.948213577270508 +[TRAIN] Iter: 635900 Loss: 0.00506920600309968 PSNR: 28.96086883544922 +[TRAIN] Iter: 636000 Loss: 0.00503650214523077 PSNR: 27.67808723449707 +[TRAIN] Iter: 636100 Loss: 0.00533306784927845 PSNR: 28.372922897338867 +[TRAIN] Iter: 636200 Loss: 0.006277724634855986 PSNR: 27.268211364746094 +[TRAIN] Iter: 636300 Loss: 0.00503528444096446 PSNR: 27.71585464477539 +[TRAIN] Iter: 636400 Loss: 0.005154670216143131 PSNR: 28.5310115814209 +[TRAIN] Iter: 636500 Loss: 0.005059335380792618 PSNR: 28.335500717163086 +[TRAIN] Iter: 636600 Loss: 0.0070534213446080685 PSNR: 26.265365600585938 +[TRAIN] Iter: 636700 Loss: 0.006298845633864403 PSNR: 27.82038688659668 +[TRAIN] Iter: 636800 Loss: 0.006373254116624594 PSNR: 26.597200393676758 +[TRAIN] Iter: 636900 Loss: 0.007297555450350046 PSNR: 25.51467514038086 +[TRAIN] Iter: 637000 Loss: 0.006460759788751602 PSNR: 26.60843849182129 +[TRAIN] Iter: 637100 Loss: 0.005681593902409077 PSNR: 26.8082275390625 +[TRAIN] Iter: 637200 Loss: 0.007531790062785149 PSNR: 26.546852111816406 +[TRAIN] Iter: 637300 Loss: 0.006324515677988529 PSNR: 27.174962997436523 +[TRAIN] Iter: 637400 Loss: 0.005704549141228199 PSNR: 27.274662017822266 +[TRAIN] Iter: 637500 Loss: 0.006561038084328175 PSNR: 26.729236602783203 +[TRAIN] Iter: 637600 Loss: 0.004867122508585453 PSNR: 27.979257583618164 +[TRAIN] Iter: 637700 Loss: 0.006604474037885666 PSNR: 26.361995697021484 +[TRAIN] Iter: 637800 Loss: 0.005703816190361977 PSNR: 27.014320373535156 +[TRAIN] Iter: 637900 Loss: 0.006579785607755184 PSNR: 26.869335174560547 +[TRAIN] Iter: 638000 Loss: 0.006328205112367868 PSNR: 26.3319149017334 +[TRAIN] Iter: 638100 Loss: 0.006701699458062649 
PSNR: 26.089887619018555
+[TRAIN] Iter: 638200 Loss: 0.006219805683940649 PSNR: 27.048931121826172
+[TRAIN] Iter: 638300 Loss: 0.006263070739805698 PSNR: 27.260316848754883
+[TRAIN] Iter: 638400 Loss: 0.006683443672955036 PSNR: 25.924468994140625
+... (one [TRAIN] line per 100 iterations through 639900; loss stays roughly in 0.005-0.008, PSNR roughly in 25-29 dB)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/640000.tar
+[TRAIN] Iter: 640000 Loss: 0.006270472891628742 PSNR: 27.596057891845703
+... (iterations 640100-647600 continue in the same loss/PSNR band)
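The PSNR column above is derived directly from the mean-squared error of the rendered pixels: nerf-pytorch's `mse2psnr` helper computes PSNR = -10·log10(MSE) for images normalized to [0, 1]. In the training loop the printed Loss is the sum of the fine and coarse branches' image MSE, while PSNR is computed from the fine branch alone, which is why -10·log10(Loss) does not reproduce the column. A minimal sketch of the conversion (the sample MSE value is hypothetical):

```python
import torch

def mse2psnr(mse: torch.Tensor) -> torch.Tensor:
    # PSNR in dB for images scaled to [0, 1]; equivalent to nerf-pytorch's helper.
    return -10.0 * torch.log10(mse)

fine_mse = torch.tensor(0.002)    # hypothetical fine-network image MSE
print(mse2psnr(fine_mse).item())  # ~26.99 dB
```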
+[TRAIN] Iter: 647700 Loss: 0.006436706520617008 PSNR: 27.14371109008789
+... (iterations 647800-649900, same band)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/650000.tar
+0 0.0008854866027832031
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+... (render-path pass: 120 frames at 320x640, roughly 22 s per frame)
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras:{'raw': tensor([[[-1.0145e+00, -1.2600e+00, -1.4756e+00, -4.8493e+01], ...]], grad_fn=<...>), 'rgb0': tensor([[0.3379, 0.4736, 0.6308], ...], grad_fn=<...>), 'disp0': tensor([ 42.3374, 139.0569, 66.1272, ..., 155.2353, 166.2859, 27.2640], grad_fn=<...>), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=<...>), 'z_std': tensor([0.0047, 0.2655, 0.0033, ..., 0.3313, 0.2900, 0.0026])}
+0 0.0008516311645507812
+torch.Size([320, 640, 3]) torch.Size([320, 640])
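The `extras` dict printed with each render pass is the auxiliary output of nerf-pytorch's `render` call: `raw` holds the per-sample network outputs (RGB plus density), `rgb0`, `disp0`, and `acc0` are the coarse-network maps kept whenever the fine network is active (`N_importance > 0`), and `z_std` is the per-ray standard deviation of the hierarchically resampled depths. A minimal sketch of unpacking it, assuming `extras` is the dict shown above:

```python
# Auxiliary render outputs; shapes follow nerf-pytorch's render_rays.
raw = extras['raw']            # [N_rays, N_samples, 4]: (R, G, B, sigma) per sample
rgb_coarse = extras['rgb0']    # coarse-pass RGB per ray
disp_coarse = extras['disp0']  # coarse-pass disparity per ray
acc_coarse = extras['acc0']    # coarse-pass accumulated opacity per ray
z_std = extras['z_std']        # std-dev of fine-pass depth samples per ray
```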
+... (second 120-frame render-path pass, again roughly 22 s per frame)
+test poses shape torch.Size([4, 3, 4])
+0 0.0019562244415283203
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+... (3 held-out test frames rendered)
+Saved test set
+[TRAIN] Iter: 650000 Loss: 0.0061669982969760895 PSNR: 26.746196746826172
+... (iterations 650100-650800 continue)
+... (iterations 650900-659900; loss roughly 0.004-0.009, PSNR roughly 24-29.5 dB)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/660000.tar
+... (iterations 660000-669900, same band)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/670000.tar
+... (iterations 670000-679900, same band)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/680000.tar
+... (iterations 680000-689900, same band)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/690000.tar
+... (iterations 690000-698600, same band)
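Each `Saved checkpoints at ...` line above writes a `.tar` snapshot every 10,000 iterations. A minimal sketch of restoring one, assuming the standard nerf-pytorch checkpoint layout (`global_step` plus the coarse/fine MLP and optimizer state dicts) and preexisting `model`, `model_fine`, and `optimizer` objects:

```python
import torch

# Load a snapshot written by the training loop above.
ckpt = torch.load('./logs/TUT-out-doll-360-np/690000.tar', map_location='cpu')

print(ckpt['global_step'])                                   # iteration of the snapshot
model.load_state_dict(ckpt['network_fn_state_dict'])         # coarse MLP weights
model_fine.load_state_dict(ckpt['network_fine_state_dict'])  # fine MLP weights
optimizer.load_state_dict(ckpt['optimizer_state_dict'])      # resume optimizer state
```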
+... (iterations 698700-699900, same band)
+Saved checkpoints at ./logs/TUT-out-doll-360-np/700000.tar
+0 0.0010211467742919922
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+... (render-path pass: 120 frames at 320x640, roughly 22 s per frame)
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras:{'raw': tensor([[[ 1.1384e+00, 2.5001e+00, 3.4476e+00, -2.7730e+01], ...]], grad_fn=<...>), 'rgb0': tensor([[0.6899, 0.7829, 0.9103], ...], grad_fn=<...>), 'disp0': tensor([ 61.5363, 102.0978, 37.5454, ..., 60.2985, 16.8772, 55.1816], grad_fn=<...>), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=<...>), 'z_std': tensor([0.0031, 0.0039, 0.0064, ..., 0.0029, 0.0244, 0.0040])}
+... (second 120-frame render-path pass, again roughly 22 s per frame)
+test poses shape torch.Size([4, 3, 4])
+0 0.0014154911041259766
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+... (3 held-out test frames rendered)
+Saved test set
+[TRAIN] Iter: 700000 Loss: 0.005026957951486111 PSNR: 28.689014434814453
+... (iterations 700100-701600 continue)
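After each `Done, saving` line, the (120, 320, 640, 3) RGB stack and the (120, 320, 640) disparity stack are held as arrays; nerf-pytorch assembles them into render-path videos with `imageio.mimwrite`. A minimal sketch, assuming `rgbs` and `disps` are those two arrays:

```python
import imageio
import numpy as np

# Map floats in [0, 1] to uint8 frames, as nerf-pytorch's to8b helper does.
to8b = lambda x: (255 * np.clip(x, 0, 1)).astype(np.uint8)

# rgbs: (120, 320, 640, 3); disps: (120, 320, 640), normalized before encoding.
imageio.mimwrite('rgb.mp4', to8b(rgbs), fps=30, quality=8)
imageio.mimwrite('disp.mp4', to8b(disps / np.max(disps)), fps=30, quality=8)
```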
0.004676122218370438 PSNR: 28.71451759338379 +[TRAIN] Iter: 701800 Loss: 0.005208026617765427 PSNR: 27.575862884521484 +[TRAIN] Iter: 701900 Loss: 0.0071903676725924015 PSNR: 25.871492385864258 +[TRAIN] Iter: 702000 Loss: 0.0052831461653113365 PSNR: 28.07353401184082 +[TRAIN] Iter: 702100 Loss: 0.0059516276232898235 PSNR: 26.552701950073242 +[TRAIN] Iter: 702200 Loss: 0.006268555298447609 PSNR: 27.00564956665039 +[TRAIN] Iter: 702300 Loss: 0.004876058083027601 PSNR: 27.714704513549805 +[TRAIN] Iter: 702400 Loss: 0.006566688418388367 PSNR: 26.842247009277344 +[TRAIN] Iter: 702500 Loss: 0.004809086211025715 PSNR: 28.926414489746094 +[TRAIN] Iter: 702600 Loss: 0.00720897875726223 PSNR: 26.2213134765625 +[TRAIN] Iter: 702700 Loss: 0.006152682937681675 PSNR: 26.481130599975586 +[TRAIN] Iter: 702800 Loss: 0.005870048888027668 PSNR: 26.41722869873047 +[TRAIN] Iter: 702900 Loss: 0.007452310994267464 PSNR: 25.886280059814453 +[TRAIN] Iter: 703000 Loss: 0.006025008857250214 PSNR: 26.616655349731445 +[TRAIN] Iter: 703100 Loss: 0.006341416854411364 PSNR: 26.925336837768555 +[TRAIN] Iter: 703200 Loss: 0.005208043847233057 PSNR: 28.064788818359375 +[TRAIN] Iter: 703300 Loss: 0.004544147290289402 PSNR: 29.21653175354004 +[TRAIN] Iter: 703400 Loss: 0.007463350892066956 PSNR: 26.27063751220703 +[TRAIN] Iter: 703500 Loss: 0.006295288912951946 PSNR: 26.233497619628906 +[TRAIN] Iter: 703600 Loss: 0.0076333461329340935 PSNR: 25.36140251159668 +[TRAIN] Iter: 703700 Loss: 0.00594168808311224 PSNR: 27.291044235229492 +[TRAIN] Iter: 703800 Loss: 0.006706155836582184 PSNR: 26.63637351989746 +[TRAIN] Iter: 703900 Loss: 0.006513858214020729 PSNR: 26.290416717529297 +[TRAIN] Iter: 704000 Loss: 0.005861605983227491 PSNR: 27.621061325073242 +[TRAIN] Iter: 704100 Loss: 0.006075171288102865 PSNR: 26.58643913269043 +[TRAIN] Iter: 704200 Loss: 0.006083513610064983 PSNR: 26.698747634887695 +[TRAIN] Iter: 704300 Loss: 0.005182494409382343 PSNR: 28.418760299682617 +[TRAIN] Iter: 704400 Loss: 0.006832963787019253 PSNR: 26.37050437927246 +[TRAIN] Iter: 704500 Loss: 0.005265981890261173 PSNR: 27.248886108398438 +[TRAIN] Iter: 704600 Loss: 0.006565853953361511 PSNR: 26.40864372253418 +[TRAIN] Iter: 704700 Loss: 0.004421261604875326 PSNR: 28.95094871520996 +[TRAIN] Iter: 704800 Loss: 0.004327909089624882 PSNR: 27.925758361816406 +[TRAIN] Iter: 704900 Loss: 0.004708223044872284 PSNR: 28.96282196044922 +[TRAIN] Iter: 705000 Loss: 0.006109982263296843 PSNR: 27.516887664794922 +[TRAIN] Iter: 705100 Loss: 0.006815969944000244 PSNR: 26.258296966552734 +[TRAIN] Iter: 705200 Loss: 0.006090899929404259 PSNR: 26.52651596069336 +[TRAIN] Iter: 705300 Loss: 0.006416680291295052 PSNR: 26.56138801574707 +[TRAIN] Iter: 705400 Loss: 0.0064519792795181274 PSNR: 26.272459030151367 +[TRAIN] Iter: 705500 Loss: 0.006487486883997917 PSNR: 26.54766273498535 +[TRAIN] Iter: 705600 Loss: 0.0072124917060136795 PSNR: 25.986469268798828 +[TRAIN] Iter: 705700 Loss: 0.006245192606002092 PSNR: 26.897586822509766 +[TRAIN] Iter: 705800 Loss: 0.006753452587872744 PSNR: 26.139741897583008 +[TRAIN] Iter: 705900 Loss: 0.006405284628272057 PSNR: 26.34682846069336 +[TRAIN] Iter: 706000 Loss: 0.007796687074005604 PSNR: 25.770078659057617 +[TRAIN] Iter: 706100 Loss: 0.005758579820394516 PSNR: 27.754426956176758 +[TRAIN] Iter: 706200 Loss: 0.006302536930888891 PSNR: 26.188560485839844 +[TRAIN] Iter: 706300 Loss: 0.006581209599971771 PSNR: 26.29729461669922 +[TRAIN] Iter: 706400 Loss: 0.006413188762962818 PSNR: 26.92964744567871 +[TRAIN] Iter: 706500 Loss: 
0.005878969095647335 PSNR: 27.42713165283203 +[TRAIN] Iter: 706600 Loss: 0.007854171097278595 PSNR: 25.480703353881836 +[TRAIN] Iter: 706700 Loss: 0.0064783659763634205 PSNR: 26.79736328125 +[TRAIN] Iter: 706800 Loss: 0.0063606747426092625 PSNR: 26.677616119384766 +[TRAIN] Iter: 706900 Loss: 0.006386482156813145 PSNR: 26.583553314208984 +[TRAIN] Iter: 707000 Loss: 0.004939571488648653 PSNR: 28.358463287353516 +[TRAIN] Iter: 707100 Loss: 0.006934929173439741 PSNR: 26.23788833618164 +[TRAIN] Iter: 707200 Loss: 0.00636986643075943 PSNR: 26.140583038330078 +[TRAIN] Iter: 707300 Loss: 0.00639561889693141 PSNR: 26.718368530273438 +[TRAIN] Iter: 707400 Loss: 0.006837320514023304 PSNR: 26.141267776489258 +[TRAIN] Iter: 707500 Loss: 0.006002585403621197 PSNR: 26.56127166748047 +[TRAIN] Iter: 707600 Loss: 0.006384062580764294 PSNR: 26.637407302856445 +[TRAIN] Iter: 707700 Loss: 0.006724409759044647 PSNR: 26.184297561645508 +[TRAIN] Iter: 707800 Loss: 0.005936014465987682 PSNR: 27.31769371032715 +[TRAIN] Iter: 707900 Loss: 0.0058944993652403355 PSNR: 26.877811431884766 +[TRAIN] Iter: 708000 Loss: 0.004349398426711559 PSNR: 28.909379959106445 +[TRAIN] Iter: 708100 Loss: 0.0057523855939507484 PSNR: 27.332319259643555 +[TRAIN] Iter: 708200 Loss: 0.006771955173462629 PSNR: 26.539121627807617 +[TRAIN] Iter: 708300 Loss: 0.007167455740272999 PSNR: 26.332496643066406 +[TRAIN] Iter: 708400 Loss: 0.006015895865857601 PSNR: 26.871732711791992 +[TRAIN] Iter: 708500 Loss: 0.006444117054343224 PSNR: 26.049057006835938 +[TRAIN] Iter: 708600 Loss: 0.006983200088143349 PSNR: 25.635560989379883 +[TRAIN] Iter: 708700 Loss: 0.0062453728169202805 PSNR: 26.445667266845703 +[TRAIN] Iter: 708800 Loss: 0.004705832805484533 PSNR: 27.124906539916992 +[TRAIN] Iter: 708900 Loss: 0.005771121475845575 PSNR: 27.322345733642578 +[TRAIN] Iter: 709000 Loss: 0.004771588370203972 PSNR: 28.12090492248535 +[TRAIN] Iter: 709100 Loss: 0.005099670961499214 PSNR: 28.27474021911621 +[TRAIN] Iter: 709200 Loss: 0.00617674645036459 PSNR: 26.780046463012695 +[TRAIN] Iter: 709300 Loss: 0.005990574136376381 PSNR: 27.193628311157227 +[TRAIN] Iter: 709400 Loss: 0.004823035094887018 PSNR: 28.59719467163086 +[TRAIN] Iter: 709500 Loss: 0.00687874760478735 PSNR: 26.819095611572266 +[TRAIN] Iter: 709600 Loss: 0.005542365368455648 PSNR: 28.10269546508789 +[TRAIN] Iter: 709700 Loss: 0.00623637530952692 PSNR: 26.74394416809082 +[TRAIN] Iter: 709800 Loss: 0.006759215146303177 PSNR: 25.812198638916016 +[TRAIN] Iter: 709900 Loss: 0.006107271183282137 PSNR: 28.268110275268555 +Saved checkpoints at ./logs/TUT-out-doll-360-np/710000.tar +[TRAIN] Iter: 710000 Loss: 0.006125241983681917 PSNR: 26.588987350463867 +[TRAIN] Iter: 710100 Loss: 0.005356861278414726 PSNR: 27.993743896484375 +[TRAIN] Iter: 710200 Loss: 0.006432616151869297 PSNR: 26.19190788269043 +[TRAIN] Iter: 710300 Loss: 0.00628829188644886 PSNR: 26.96139907836914 +[TRAIN] Iter: 710400 Loss: 0.006529686506837606 PSNR: 26.670269012451172 +[TRAIN] Iter: 710500 Loss: 0.006811461411416531 PSNR: 26.18655776977539 +[TRAIN] Iter: 710600 Loss: 0.006909557618200779 PSNR: 26.785011291503906 +[TRAIN] Iter: 710700 Loss: 0.005636022426187992 PSNR: 27.81690788269043 +[TRAIN] Iter: 710800 Loss: 0.00701139448210597 PSNR: 25.89204978942871 +[TRAIN] Iter: 710900 Loss: 0.0050851269625127316 PSNR: 26.9598445892334 +[TRAIN] Iter: 711000 Loss: 0.005856534466147423 PSNR: 27.128934860229492 +[TRAIN] Iter: 711100 Loss: 0.006869119126349688 PSNR: 26.150714874267578 +[TRAIN] Iter: 711200 Loss: 0.006056726444512606 PSNR: 
26.65591049194336 +[TRAIN] Iter: 711300 Loss: 0.0073544979095458984 PSNR: 25.934967041015625 +[TRAIN] Iter: 711400 Loss: 0.005789319518953562 PSNR: 26.805770874023438 +[TRAIN] Iter: 711500 Loss: 0.006342804525047541 PSNR: 26.734174728393555 +[TRAIN] Iter: 711600 Loss: 0.0049441177397966385 PSNR: 27.74073600769043 +[TRAIN] Iter: 711700 Loss: 0.005550669506192207 PSNR: 26.91889762878418 +[TRAIN] Iter: 711800 Loss: 0.007379273883998394 PSNR: 25.619464874267578 +[TRAIN] Iter: 711900 Loss: 0.006056136451661587 PSNR: 26.272550582885742 +[TRAIN] Iter: 712000 Loss: 0.005379996728152037 PSNR: 27.473535537719727 +[TRAIN] Iter: 712100 Loss: 0.005868266336619854 PSNR: 26.619054794311523 +[TRAIN] Iter: 712200 Loss: 0.007835143245756626 PSNR: 25.72502326965332 +[TRAIN] Iter: 712300 Loss: 0.00560110155493021 PSNR: 27.469472885131836 +[TRAIN] Iter: 712400 Loss: 0.005755784921348095 PSNR: 28.19510841369629 +[TRAIN] Iter: 712500 Loss: 0.004878326319158077 PSNR: 27.975360870361328 +[TRAIN] Iter: 712600 Loss: 0.0059695118106901646 PSNR: 26.527524948120117 +[TRAIN] Iter: 712700 Loss: 0.005228004418313503 PSNR: 27.605907440185547 +[TRAIN] Iter: 712800 Loss: 0.006616756319999695 PSNR: 26.530845642089844 +[TRAIN] Iter: 712900 Loss: 0.007444084621965885 PSNR: 25.73037338256836 +[TRAIN] Iter: 713000 Loss: 0.006280853413045406 PSNR: 26.813722610473633 +[TRAIN] Iter: 713100 Loss: 0.006672955118119717 PSNR: 26.319013595581055 +[TRAIN] Iter: 713200 Loss: 0.005337044596672058 PSNR: 27.4483642578125 +[TRAIN] Iter: 713300 Loss: 0.006551465019583702 PSNR: 26.559110641479492 +[TRAIN] Iter: 713400 Loss: 0.006062584929168224 PSNR: 26.776227951049805 +[TRAIN] Iter: 713500 Loss: 0.0060134157538414 PSNR: 26.34722900390625 +[TRAIN] Iter: 713600 Loss: 0.006864586845040321 PSNR: 26.22096061706543 +[TRAIN] Iter: 713700 Loss: 0.007559044752269983 PSNR: 26.39620590209961 +[TRAIN] Iter: 713800 Loss: 0.005493719596415758 PSNR: 27.634061813354492 +[TRAIN] Iter: 713900 Loss: 0.006820871960371733 PSNR: 26.234180450439453 +[TRAIN] Iter: 714000 Loss: 0.006442544981837273 PSNR: 27.916629791259766 +[TRAIN] Iter: 714100 Loss: 0.005946590099483728 PSNR: 27.64635467529297 +[TRAIN] Iter: 714200 Loss: 0.005654814653098583 PSNR: 27.113737106323242 +[TRAIN] Iter: 714300 Loss: 0.006151436362415552 PSNR: 26.181873321533203 +[TRAIN] Iter: 714400 Loss: 0.007762689143419266 PSNR: 25.776151657104492 +[TRAIN] Iter: 714500 Loss: 0.005670610349625349 PSNR: 27.121313095092773 +[TRAIN] Iter: 714600 Loss: 0.005794938188046217 PSNR: 27.108152389526367 +[TRAIN] Iter: 714700 Loss: 0.007243582978844643 PSNR: 25.665761947631836 +[TRAIN] Iter: 714800 Loss: 0.005718262866139412 PSNR: 27.08793067932129 +[TRAIN] Iter: 714900 Loss: 0.0050907377153635025 PSNR: 28.505250930786133 +[TRAIN] Iter: 715000 Loss: 0.006985760293900967 PSNR: 26.145986557006836 +[TRAIN] Iter: 715100 Loss: 0.0057899062521755695 PSNR: 27.538286209106445 +[TRAIN] Iter: 715200 Loss: 0.005965151824057102 PSNR: 26.38639259338379 +[TRAIN] Iter: 715300 Loss: 0.006431181915104389 PSNR: 26.99652099609375 +[TRAIN] Iter: 715400 Loss: 0.0063223023898899555 PSNR: 26.170215606689453 +[TRAIN] Iter: 715500 Loss: 0.004471621476113796 PSNR: 28.50986671447754 +[TRAIN] Iter: 715600 Loss: 0.005167174153029919 PSNR: 28.13908576965332 +[TRAIN] Iter: 715700 Loss: 0.007769537158310413 PSNR: 25.472488403320312 +[TRAIN] Iter: 715800 Loss: 0.005374518223106861 PSNR: 28.151891708374023 +[TRAIN] Iter: 715900 Loss: 0.005637629423290491 PSNR: 27.010770797729492 +[TRAIN] Iter: 716000 Loss: 0.005508163943886757 PSNR: 
26.90904426574707 +[TRAIN] Iter: 716100 Loss: 0.005595020018517971 PSNR: 27.88516616821289 +[TRAIN] Iter: 716200 Loss: 0.007593528367578983 PSNR: 25.633926391601562 +[TRAIN] Iter: 716300 Loss: 0.005400650203227997 PSNR: 27.768827438354492 +[TRAIN] Iter: 716400 Loss: 0.006463217549026012 PSNR: 26.81697654724121 +[TRAIN] Iter: 716500 Loss: 0.006173328030854464 PSNR: 26.635719299316406 +[TRAIN] Iter: 716600 Loss: 0.006564762443304062 PSNR: 26.499786376953125 +[TRAIN] Iter: 716700 Loss: 0.006974493619054556 PSNR: 25.8909969329834 +[TRAIN] Iter: 716800 Loss: 0.006562494672834873 PSNR: 26.590116500854492 +[TRAIN] Iter: 716900 Loss: 0.007360653951764107 PSNR: 25.657588958740234 +[TRAIN] Iter: 717000 Loss: 0.007112640887498856 PSNR: 26.121809005737305 +[TRAIN] Iter: 717100 Loss: 0.00581194506958127 PSNR: 26.868793487548828 +[TRAIN] Iter: 717200 Loss: 0.004564098082482815 PSNR: 29.249608993530273 +[TRAIN] Iter: 717300 Loss: 0.005477896425873041 PSNR: 27.22478485107422 +[TRAIN] Iter: 717400 Loss: 0.005551772657781839 PSNR: 27.1080379486084 +[TRAIN] Iter: 717500 Loss: 0.006607846822589636 PSNR: 26.257680892944336 +[TRAIN] Iter: 717600 Loss: 0.005504156928509474 PSNR: 26.891246795654297 +[TRAIN] Iter: 717700 Loss: 0.006739430595189333 PSNR: 26.893239974975586 +[TRAIN] Iter: 717800 Loss: 0.006190148182213306 PSNR: 27.20471954345703 +[TRAIN] Iter: 717900 Loss: 0.004668698646128178 PSNR: 29.948062896728516 +[TRAIN] Iter: 718000 Loss: 0.006238461472094059 PSNR: 27.56106185913086 +[TRAIN] Iter: 718100 Loss: 0.005491395480930805 PSNR: 28.35004997253418 +[TRAIN] Iter: 718200 Loss: 0.0069529092870652676 PSNR: 26.085657119750977 +[TRAIN] Iter: 718300 Loss: 0.006275814957916737 PSNR: 26.228179931640625 +[TRAIN] Iter: 718400 Loss: 0.005878721363842487 PSNR: 27.114559173583984 +[TRAIN] Iter: 718500 Loss: 0.0059650978073477745 PSNR: 27.644954681396484 +[TRAIN] Iter: 718600 Loss: 0.005940448492765427 PSNR: 26.738901138305664 +[TRAIN] Iter: 718700 Loss: 0.0054012928158044815 PSNR: 27.31844139099121 +[TRAIN] Iter: 718800 Loss: 0.007423202507197857 PSNR: 26.296066284179688 +[TRAIN] Iter: 718900 Loss: 0.00567826721817255 PSNR: 26.75143814086914 +[TRAIN] Iter: 719000 Loss: 0.005159628577530384 PSNR: 28.594900131225586 +[TRAIN] Iter: 719100 Loss: 0.006363191641867161 PSNR: 26.325712203979492 +[TRAIN] Iter: 719200 Loss: 0.00677125109359622 PSNR: 27.797168731689453 +[TRAIN] Iter: 719300 Loss: 0.006484226323664188 PSNR: 26.663375854492188 +[TRAIN] Iter: 719400 Loss: 0.005261297337710857 PSNR: 27.46396255493164 +[TRAIN] Iter: 719500 Loss: 0.004600501619279385 PSNR: 27.879560470581055 +[TRAIN] Iter: 719600 Loss: 0.006889673415571451 PSNR: 26.170045852661133 +[TRAIN] Iter: 719700 Loss: 0.007892809808254242 PSNR: 25.69643211364746 +[TRAIN] Iter: 719800 Loss: 0.006165897473692894 PSNR: 26.780031204223633 +[TRAIN] Iter: 719900 Loss: 0.006226572208106518 PSNR: 27.157257080078125 +Saved checkpoints at ./logs/TUT-out-doll-360-np/720000.tar +[TRAIN] Iter: 720000 Loss: 0.006452927831560373 PSNR: 26.131540298461914 +[TRAIN] Iter: 720100 Loss: 0.008195973001420498 PSNR: 25.758962631225586 +[TRAIN] Iter: 720200 Loss: 0.006687766872346401 PSNR: 26.71735954284668 +[TRAIN] Iter: 720300 Loss: 0.00665346859022975 PSNR: 26.9296875 +[TRAIN] Iter: 720400 Loss: 0.00738955195993185 PSNR: 26.077850341796875 +[TRAIN] Iter: 720500 Loss: 0.006018429063260555 PSNR: 27.304180145263672 +[TRAIN] Iter: 720600 Loss: 0.006515060551464558 PSNR: 26.707860946655273 +[TRAIN] Iter: 720700 Loss: 0.007436257787048817 PSNR: 26.1984920501709 +[TRAIN] Iter: 720800 
Loss: 0.00874145608395338 PSNR: 25.43526268005371 +[TRAIN] Iter: 720900 Loss: 0.006603291258215904 PSNR: 26.43094253540039 +[TRAIN] Iter: 721000 Loss: 0.006536497734487057 PSNR: 26.874431610107422 +[TRAIN] Iter: 721100 Loss: 0.006477193906903267 PSNR: 27.185575485229492 +[TRAIN] Iter: 721200 Loss: 0.005924486555159092 PSNR: 27.347410202026367 +[TRAIN] Iter: 721300 Loss: 0.006337043829262257 PSNR: 28.185583114624023 +[TRAIN] Iter: 721400 Loss: 0.006597134750336409 PSNR: 26.419492721557617 +[TRAIN] Iter: 721500 Loss: 0.004521280061453581 PSNR: 28.569049835205078 +[TRAIN] Iter: 721600 Loss: 0.0064202407374978065 PSNR: 27.237329483032227 +[TRAIN] Iter: 721700 Loss: 0.006054583005607128 PSNR: 27.464250564575195 +[TRAIN] Iter: 721800 Loss: 0.006401817314326763 PSNR: 26.78104019165039 +[TRAIN] Iter: 721900 Loss: 0.005513136275112629 PSNR: 27.353160858154297 +[TRAIN] Iter: 722000 Loss: 0.0075208647176623344 PSNR: 25.816930770874023 +[TRAIN] Iter: 722100 Loss: 0.006336977705359459 PSNR: 26.923688888549805 +[TRAIN] Iter: 722200 Loss: 0.0064108707010746 PSNR: 26.08319091796875 +[TRAIN] Iter: 722300 Loss: 0.006502881180495024 PSNR: 25.857677459716797 +[TRAIN] Iter: 722400 Loss: 0.006137360818684101 PSNR: 27.456541061401367 +[TRAIN] Iter: 722500 Loss: 0.007441899739205837 PSNR: 25.939062118530273 +[TRAIN] Iter: 722600 Loss: 0.005874741822481155 PSNR: 27.785831451416016 +[TRAIN] Iter: 722700 Loss: 0.006310421973466873 PSNR: 26.154592514038086 +[TRAIN] Iter: 722800 Loss: 0.004753581248223782 PSNR: 28.54549789428711 +[TRAIN] Iter: 722900 Loss: 0.007228722795844078 PSNR: 25.284584045410156 +[TRAIN] Iter: 723000 Loss: 0.005398748442530632 PSNR: 28.339948654174805 +[TRAIN] Iter: 723100 Loss: 0.007090125232934952 PSNR: 26.847929000854492 +[TRAIN] Iter: 723200 Loss: 0.00552096264436841 PSNR: 27.52593421936035 +[TRAIN] Iter: 723300 Loss: 0.0063700550235807896 PSNR: 26.248083114624023 +[TRAIN] Iter: 723400 Loss: 0.005731368437409401 PSNR: 27.54913330078125 +[TRAIN] Iter: 723500 Loss: 0.007038461044430733 PSNR: 26.444860458374023 +[TRAIN] Iter: 723600 Loss: 0.00668294494971633 PSNR: 26.293210983276367 +[TRAIN] Iter: 723700 Loss: 0.007157749962061644 PSNR: 26.296615600585938 +[TRAIN] Iter: 723800 Loss: 0.006407229695469141 PSNR: 27.14876365661621 +[TRAIN] Iter: 723900 Loss: 0.006344462279230356 PSNR: 26.578933715820312 +[TRAIN] Iter: 724000 Loss: 0.007052282802760601 PSNR: 25.950389862060547 +[TRAIN] Iter: 724100 Loss: 0.005636550486087799 PSNR: 27.98411750793457 +[TRAIN] Iter: 724200 Loss: 0.0072526512667536736 PSNR: 26.321935653686523 +[TRAIN] Iter: 724300 Loss: 0.006869372446089983 PSNR: 25.845104217529297 +[TRAIN] Iter: 724400 Loss: 0.004811904393136501 PSNR: 27.534698486328125 +[TRAIN] Iter: 724500 Loss: 0.007481396198272705 PSNR: 26.106355667114258 +[TRAIN] Iter: 724600 Loss: 0.007344059646129608 PSNR: 25.770132064819336 +[TRAIN] Iter: 724700 Loss: 0.005198516882956028 PSNR: 28.402732849121094 +[TRAIN] Iter: 724800 Loss: 0.006045207846909761 PSNR: 28.19306182861328 +[TRAIN] Iter: 724900 Loss: 0.006702146027237177 PSNR: 26.703115463256836 +[TRAIN] Iter: 725000 Loss: 0.006315887905657291 PSNR: 26.86058807373047 +[TRAIN] Iter: 725100 Loss: 0.006998442113399506 PSNR: 26.059234619140625 +[TRAIN] Iter: 725200 Loss: 0.006756878457963467 PSNR: 26.01314926147461 +[TRAIN] Iter: 725300 Loss: 0.005680589936673641 PSNR: 27.859569549560547 +[TRAIN] Iter: 725400 Loss: 0.00617218716070056 PSNR: 26.781606674194336 +[TRAIN] Iter: 725500 Loss: 0.0050773415714502335 PSNR: 27.847518920898438 +[TRAIN] Iter: 725600 Loss: 
0.007443238981068134 PSNR: 25.78784942626953 +[TRAIN] Iter: 725700 Loss: 0.006510677747428417 PSNR: 26.356586456298828 +[TRAIN] Iter: 725800 Loss: 0.0067244963720440865 PSNR: 26.952665328979492 +[TRAIN] Iter: 725900 Loss: 0.00543777272105217 PSNR: 28.23627471923828 +[TRAIN] Iter: 726000 Loss: 0.005889233201742172 PSNR: 26.650089263916016 +[TRAIN] Iter: 726100 Loss: 0.00526832602918148 PSNR: 28.744672775268555 +[TRAIN] Iter: 726200 Loss: 0.005232193972915411 PSNR: 27.39741325378418 +[TRAIN] Iter: 726300 Loss: 0.007627072744071484 PSNR: 26.06014060974121 +[TRAIN] Iter: 726400 Loss: 0.005792008712887764 PSNR: 26.715171813964844 +[TRAIN] Iter: 726500 Loss: 0.006371026858687401 PSNR: 26.415924072265625 +[TRAIN] Iter: 726600 Loss: 0.005630072671920061 PSNR: 28.225879669189453 +[TRAIN] Iter: 726700 Loss: 0.006534322164952755 PSNR: 26.281408309936523 +[TRAIN] Iter: 726800 Loss: 0.005146978888660669 PSNR: 28.706695556640625 +[TRAIN] Iter: 726900 Loss: 0.007075030822306871 PSNR: 26.1718692779541 +[TRAIN] Iter: 727000 Loss: 0.006639020051807165 PSNR: 25.772035598754883 +[TRAIN] Iter: 727100 Loss: 0.006310129538178444 PSNR: 26.639690399169922 +[TRAIN] Iter: 727200 Loss: 0.007186577655375004 PSNR: 25.637924194335938 +[TRAIN] Iter: 727300 Loss: 0.005628509446978569 PSNR: 27.25011444091797 +[TRAIN] Iter: 727400 Loss: 0.007059579715132713 PSNR: 26.32726287841797 +[TRAIN] Iter: 727500 Loss: 0.006576464977115393 PSNR: 26.14376449584961 +[TRAIN] Iter: 727600 Loss: 0.006697256583720446 PSNR: 27.30611801147461 +[TRAIN] Iter: 727700 Loss: 0.005572405643761158 PSNR: 26.78596305847168 +[TRAIN] Iter: 727800 Loss: 0.004974182695150375 PSNR: 28.70013427734375 +[TRAIN] Iter: 727900 Loss: 0.005422394722700119 PSNR: 28.4772891998291 +[TRAIN] Iter: 728000 Loss: 0.0052641332149505615 PSNR: 28.216623306274414 +[TRAIN] Iter: 728100 Loss: 0.008076151832938194 PSNR: 25.4554500579834 +[TRAIN] Iter: 728200 Loss: 0.005160956643521786 PSNR: 27.599641799926758 +[TRAIN] Iter: 728300 Loss: 0.0064882030710577965 PSNR: 26.98886489868164 +[TRAIN] Iter: 728400 Loss: 0.005822259932756424 PSNR: 27.889928817749023 +[TRAIN] Iter: 728500 Loss: 0.005174598190933466 PSNR: 28.358230590820312 +[TRAIN] Iter: 728600 Loss: 0.0054819826036691666 PSNR: 27.897907257080078 +[TRAIN] Iter: 728700 Loss: 0.006135495845228434 PSNR: 27.183467864990234 +[TRAIN] Iter: 728800 Loss: 0.006213861051946878 PSNR: 26.296262741088867 +[TRAIN] Iter: 728900 Loss: 0.0059981029480695724 PSNR: 27.132009506225586 +[TRAIN] Iter: 729000 Loss: 0.005881713703274727 PSNR: 26.679521560668945 +[TRAIN] Iter: 729100 Loss: 0.006574835628271103 PSNR: 26.678407669067383 +[TRAIN] Iter: 729200 Loss: 0.006854329723864794 PSNR: 26.079578399658203 +[TRAIN] Iter: 729300 Loss: 0.007559332065284252 PSNR: 26.035110473632812 +[TRAIN] Iter: 729400 Loss: 0.005779679864645004 PSNR: 27.364286422729492 +[TRAIN] Iter: 729500 Loss: 0.006327909417450428 PSNR: 26.391992568969727 +[TRAIN] Iter: 729600 Loss: 0.008045781403779984 PSNR: 25.63389015197754 +[TRAIN] Iter: 729700 Loss: 0.005355217028409243 PSNR: 27.524024963378906 +[TRAIN] Iter: 729800 Loss: 0.005957749672234058 PSNR: 26.87896156311035 +[TRAIN] Iter: 729900 Loss: 0.006363365799188614 PSNR: 27.695030212402344 +Saved checkpoints at ./logs/TUT-out-doll-360-np/730000.tar +[TRAIN] Iter: 730000 Loss: 0.006285143084824085 PSNR: 26.581003189086914 +[TRAIN] Iter: 730100 Loss: 0.004471819847822189 PSNR: 29.16269302368164 +[TRAIN] Iter: 730200 Loss: 0.006259813439100981 PSNR: 26.52428436279297 +[TRAIN] Iter: 730300 Loss: 0.0050080521032214165 PSNR: 
28.750425338745117 +[TRAIN] Iter: 730400 Loss: 0.006582665257155895 PSNR: 26.482412338256836 +[TRAIN] Iter: 730500 Loss: 0.005624550394713879 PSNR: 28.16230583190918 +[TRAIN] Iter: 730600 Loss: 0.005045196507126093 PSNR: 28.914094924926758 +[TRAIN] Iter: 730700 Loss: 0.006688050460070372 PSNR: 26.75687599182129 +[TRAIN] Iter: 730800 Loss: 0.006252219434827566 PSNR: 27.300016403198242 +[TRAIN] Iter: 730900 Loss: 0.007513160817325115 PSNR: 25.729764938354492 +[TRAIN] Iter: 731000 Loss: 0.006531980820000172 PSNR: 26.029552459716797 +[TRAIN] Iter: 731100 Loss: 0.005738717503845692 PSNR: 27.051523208618164 +[TRAIN] Iter: 731200 Loss: 0.006100811995565891 PSNR: 26.420934677124023 +[TRAIN] Iter: 731300 Loss: 0.00606789905577898 PSNR: 26.77324676513672 +[TRAIN] Iter: 731400 Loss: 0.00492355739697814 PSNR: 28.03276252746582 +[TRAIN] Iter: 731500 Loss: 0.0061393557116389275 PSNR: 27.35521125793457 +[TRAIN] Iter: 731600 Loss: 0.006338730454444885 PSNR: 26.697355270385742 +[TRAIN] Iter: 731700 Loss: 0.005362001247704029 PSNR: 27.18332290649414 +[TRAIN] Iter: 731800 Loss: 0.006109288893640041 PSNR: 27.802169799804688 +[TRAIN] Iter: 731900 Loss: 0.005689207464456558 PSNR: 27.904884338378906 +[TRAIN] Iter: 732000 Loss: 0.006675567012280226 PSNR: 25.686525344848633 +[TRAIN] Iter: 732100 Loss: 0.007258090656250715 PSNR: 26.080291748046875 +[TRAIN] Iter: 732200 Loss: 0.006042519584298134 PSNR: 27.121400833129883 +[TRAIN] Iter: 732300 Loss: 0.007719164714217186 PSNR: 25.48201560974121 +[TRAIN] Iter: 732400 Loss: 0.00568266399204731 PSNR: 27.521909713745117 +[TRAIN] Iter: 732500 Loss: 0.005888664163649082 PSNR: 26.251426696777344 +[TRAIN] Iter: 732600 Loss: 0.006093323230743408 PSNR: 26.432735443115234 +[TRAIN] Iter: 732700 Loss: 0.00562745425850153 PSNR: 27.963167190551758 +[TRAIN] Iter: 732800 Loss: 0.007955851033329964 PSNR: 25.489501953125 +[TRAIN] Iter: 732900 Loss: 0.005068142432719469 PSNR: 28.01735496520996 +[TRAIN] Iter: 733000 Loss: 0.007575368974357843 PSNR: 25.9282169342041 +[TRAIN] Iter: 733100 Loss: 0.00604229373857379 PSNR: 26.98982048034668 +[TRAIN] Iter: 733200 Loss: 0.005028579384088516 PSNR: 27.05767822265625 +[TRAIN] Iter: 733300 Loss: 0.006857375148683786 PSNR: 26.512420654296875 +[TRAIN] Iter: 733400 Loss: 0.0056440564803779125 PSNR: 27.339765548706055 +[TRAIN] Iter: 733500 Loss: 0.005934250541031361 PSNR: 27.077890396118164 +[TRAIN] Iter: 733600 Loss: 0.005359047092497349 PSNR: 27.179534912109375 +[TRAIN] Iter: 733700 Loss: 0.006377062294632196 PSNR: 26.788156509399414 +[TRAIN] Iter: 733800 Loss: 0.004674695897847414 PSNR: 28.4492244720459 +[TRAIN] Iter: 733900 Loss: 0.004834570921957493 PSNR: 27.919940948486328 +[TRAIN] Iter: 734000 Loss: 0.006094261072576046 PSNR: 26.509737014770508 +[TRAIN] Iter: 734100 Loss: 0.0063169896602630615 PSNR: 26.019617080688477 +[TRAIN] Iter: 734200 Loss: 0.00776994414627552 PSNR: 25.925559997558594 +[TRAIN] Iter: 734300 Loss: 0.00784444622695446 PSNR: 25.589664459228516 +[TRAIN] Iter: 734400 Loss: 0.006124754436314106 PSNR: 26.61039161682129 +[TRAIN] Iter: 734500 Loss: 0.006560572423040867 PSNR: 26.340049743652344 +[TRAIN] Iter: 734600 Loss: 0.006498339585959911 PSNR: 26.231952667236328 +[TRAIN] Iter: 734700 Loss: 0.00506548210978508 PSNR: 28.04345703125 +[TRAIN] Iter: 734800 Loss: 0.006924463901668787 PSNR: 26.085968017578125 +[TRAIN] Iter: 734900 Loss: 0.007656371686607599 PSNR: 25.43170928955078 +[TRAIN] Iter: 735000 Loss: 0.0063117798417806625 PSNR: 27.479698181152344 +[TRAIN] Iter: 735100 Loss: 0.006060236133635044 PSNR: 26.824481964111328 +[TRAIN] 
Iter: 735200 Loss: 0.006353015545755625 PSNR: 26.544910430908203 +[TRAIN] Iter: 735300 Loss: 0.005300803575664759 PSNR: 27.44357681274414 +[TRAIN] Iter: 735400 Loss: 0.006412997841835022 PSNR: 26.659814834594727 +[TRAIN] Iter: 735500 Loss: 0.00734836608171463 PSNR: 25.99863052368164 +[TRAIN] Iter: 735600 Loss: 0.0049131810665130615 PSNR: 28.59020233154297 +[TRAIN] Iter: 735700 Loss: 0.006808522157371044 PSNR: 26.306371688842773 +[TRAIN] Iter: 735800 Loss: 0.005501389969140291 PSNR: 27.957216262817383 +[TRAIN] Iter: 735900 Loss: 0.0057166218757629395 PSNR: 27.81962776184082 +[TRAIN] Iter: 736000 Loss: 0.00656603928655386 PSNR: 26.592845916748047 +[TRAIN] Iter: 736100 Loss: 0.00584759097546339 PSNR: 27.085752487182617 +[TRAIN] Iter: 736200 Loss: 0.0058368416503071785 PSNR: 27.019073486328125 +[TRAIN] Iter: 736300 Loss: 0.006643320433795452 PSNR: 26.206424713134766 +[TRAIN] Iter: 736400 Loss: 0.00640113465487957 PSNR: 26.549409866333008 +[TRAIN] Iter: 736500 Loss: 0.006206751801073551 PSNR: 25.972366333007812 +[TRAIN] Iter: 736600 Loss: 0.00673151109367609 PSNR: 26.758251190185547 +[TRAIN] Iter: 736700 Loss: 0.005592621862888336 PSNR: 28.271692276000977 +[TRAIN] Iter: 736800 Loss: 0.006082432344555855 PSNR: 27.001787185668945 +[TRAIN] Iter: 736900 Loss: 0.005940218456089497 PSNR: 27.415828704833984 +[TRAIN] Iter: 737000 Loss: 0.0059166159480810165 PSNR: 27.464448928833008 +[TRAIN] Iter: 737100 Loss: 0.0062780254520475864 PSNR: 26.805660247802734 +[TRAIN] Iter: 737200 Loss: 0.005180518142879009 PSNR: 27.917936325073242 +[TRAIN] Iter: 737300 Loss: 0.006197954993695021 PSNR: 26.91628074645996 +[TRAIN] Iter: 737400 Loss: 0.0057800509966909885 PSNR: 27.549182891845703 +[TRAIN] Iter: 737500 Loss: 0.0056000808253884315 PSNR: 27.350284576416016 +[TRAIN] Iter: 737600 Loss: 0.006779716815799475 PSNR: 25.93330955505371 +[TRAIN] Iter: 737700 Loss: 0.006067614071071148 PSNR: 26.48355484008789 +[TRAIN] Iter: 737800 Loss: 0.005766966380178928 PSNR: 28.013212203979492 +[TRAIN] Iter: 737900 Loss: 0.005970068275928497 PSNR: 26.99898338317871 +[TRAIN] Iter: 738000 Loss: 0.005933825392276049 PSNR: 26.892444610595703 +[TRAIN] Iter: 738100 Loss: 0.005399289075285196 PSNR: 28.419830322265625 +[TRAIN] Iter: 738200 Loss: 0.005733623169362545 PSNR: 26.640396118164062 +[TRAIN] Iter: 738300 Loss: 0.006883357185870409 PSNR: 26.681413650512695 +[TRAIN] Iter: 738400 Loss: 0.006795460358262062 PSNR: 26.293991088867188 +[TRAIN] Iter: 738500 Loss: 0.006917333696037531 PSNR: 26.05070686340332 +[TRAIN] Iter: 738600 Loss: 0.00624395115301013 PSNR: 26.57701301574707 +[TRAIN] Iter: 738700 Loss: 0.006282740272581577 PSNR: 27.237768173217773 +[TRAIN] Iter: 738800 Loss: 0.00638670613989234 PSNR: 26.762563705444336 +[TRAIN] Iter: 738900 Loss: 0.007181675639003515 PSNR: 26.13788604736328 +[TRAIN] Iter: 739000 Loss: 0.005724940448999405 PSNR: 26.8499755859375 +[TRAIN] Iter: 739100 Loss: 0.007983949966728687 PSNR: 25.444149017333984 +[TRAIN] Iter: 739200 Loss: 0.005928597878664732 PSNR: 26.446861267089844 +[TRAIN] Iter: 739300 Loss: 0.0064453864470124245 PSNR: 26.65732765197754 +[TRAIN] Iter: 739400 Loss: 0.006966926623135805 PSNR: 25.98127555847168 +[TRAIN] Iter: 739500 Loss: 0.004411828704178333 PSNR: 27.741056442260742 +[TRAIN] Iter: 739600 Loss: 0.005329118110239506 PSNR: 27.752241134643555 +[TRAIN] Iter: 739700 Loss: 0.007403449155390263 PSNR: 25.979589462280273 +[TRAIN] Iter: 739800 Loss: 0.005693945102393627 PSNR: 27.126697540283203 +[TRAIN] Iter: 739900 Loss: 0.007394502405077219 PSNR: 26.06967544555664 +Saved checkpoints at 
./logs/TUT-out-doll-360-np/740000.tar +[TRAIN] Iter: 740000 Loss: 0.006036229431629181 PSNR: 27.289953231811523 +[TRAIN] Iter: 740100 Loss: 0.007218861952424049 PSNR: 26.808177947998047 +[TRAIN] Iter: 740200 Loss: 0.005845623090863228 PSNR: 27.148405075073242 +[TRAIN] Iter: 740300 Loss: 0.004039054736495018 PSNR: 28.56319236755371 +[TRAIN] Iter: 740400 Loss: 0.006749793887138367 PSNR: 26.361309051513672 +[TRAIN] Iter: 740500 Loss: 0.004994167480617762 PSNR: 28.245786666870117 +[TRAIN] Iter: 740600 Loss: 0.006196564994752407 PSNR: 27.090883255004883 +[TRAIN] Iter: 740700 Loss: 0.006042194087058306 PSNR: 26.960981369018555 +[TRAIN] Iter: 740800 Loss: 0.006302075460553169 PSNR: 26.765554428100586 +[TRAIN] Iter: 740900 Loss: 0.004110479261726141 PSNR: 29.498207092285156 +[TRAIN] Iter: 741000 Loss: 0.00729109812527895 PSNR: 26.207576751708984 +[TRAIN] Iter: 741100 Loss: 0.004929058253765106 PSNR: 29.206310272216797 +[TRAIN] Iter: 741200 Loss: 0.006776738911867142 PSNR: 26.418563842773438 +[TRAIN] Iter: 741300 Loss: 0.007103071082383394 PSNR: 26.33470344543457 +[TRAIN] Iter: 741400 Loss: 0.004354635253548622 PSNR: 28.803081512451172 +[TRAIN] Iter: 741500 Loss: 0.00542365200817585 PSNR: 27.340492248535156 +[TRAIN] Iter: 741600 Loss: 0.004799541085958481 PSNR: 28.530488967895508 +[TRAIN] Iter: 741700 Loss: 0.004300080239772797 PSNR: 29.18107795715332 +[TRAIN] Iter: 741800 Loss: 0.005040123593062162 PSNR: 28.26193618774414 +[TRAIN] Iter: 741900 Loss: 0.0056595574133098125 PSNR: 27.38215446472168 +[TRAIN] Iter: 742000 Loss: 0.006751297041773796 PSNR: 26.046972274780273 +[TRAIN] Iter: 742100 Loss: 0.005488978233188391 PSNR: 26.698389053344727 +[TRAIN] Iter: 742200 Loss: 0.006114137824624777 PSNR: 27.403610229492188 +[TRAIN] Iter: 742300 Loss: 0.00539516843855381 PSNR: 26.90385627746582 +[TRAIN] Iter: 742400 Loss: 0.005485846661031246 PSNR: 27.632736206054688 +[TRAIN] Iter: 742500 Loss: 0.006376995705068111 PSNR: 26.85975456237793 +[TRAIN] Iter: 742600 Loss: 0.005751332268118858 PSNR: 27.89100456237793 +[TRAIN] Iter: 742700 Loss: 0.006485472898930311 PSNR: 26.659936904907227 +[TRAIN] Iter: 742800 Loss: 0.006693446077406406 PSNR: 25.91301918029785 +[TRAIN] Iter: 742900 Loss: 0.006185985170304775 PSNR: 27.01472282409668 +[TRAIN] Iter: 743000 Loss: 0.006687850225716829 PSNR: 26.27131462097168 +[TRAIN] Iter: 743100 Loss: 0.005637402180582285 PSNR: 27.649641036987305 +[TRAIN] Iter: 743200 Loss: 0.0063118408434093 PSNR: 26.731687545776367 +[TRAIN] Iter: 743300 Loss: 0.007882855832576752 PSNR: 25.099000930786133 +[TRAIN] Iter: 743400 Loss: 0.005467248149216175 PSNR: 27.348554611206055 +[TRAIN] Iter: 743500 Loss: 0.005793729331344366 PSNR: 27.435157775878906 +[TRAIN] Iter: 743600 Loss: 0.006846906617283821 PSNR: 26.520587921142578 +[TRAIN] Iter: 743700 Loss: 0.00454055517911911 PSNR: 28.472795486450195 +[TRAIN] Iter: 743800 Loss: 0.005578954704105854 PSNR: 27.716903686523438 +[TRAIN] Iter: 743900 Loss: 0.006352914962917566 PSNR: 27.208805084228516 +[TRAIN] Iter: 744000 Loss: 0.007223945111036301 PSNR: 25.313074111938477 +[TRAIN] Iter: 744100 Loss: 0.007134240586310625 PSNR: 26.123836517333984 +[TRAIN] Iter: 744200 Loss: 0.005491574760526419 PSNR: 27.32782745361328 +[TRAIN] Iter: 744300 Loss: 0.00828617624938488 PSNR: 25.386110305786133 +[TRAIN] Iter: 744400 Loss: 0.005962025374174118 PSNR: 26.803531646728516 +[TRAIN] Iter: 744500 Loss: 0.006033428478986025 PSNR: 27.6656494140625 +[TRAIN] Iter: 744600 Loss: 0.006807108409702778 PSNR: 26.31887435913086 +[TRAIN] Iter: 744700 Loss: 0.006559357047080994 PSNR: 
27.661041259765625 +[TRAIN] Iter: 744800 Loss: 0.004639590159058571 PSNR: 29.055103302001953 +[TRAIN] Iter: 744900 Loss: 0.0051847524009644985 PSNR: 28.248693466186523 +[TRAIN] Iter: 745000 Loss: 0.006446712650358677 PSNR: 26.383920669555664 +[TRAIN] Iter: 745100 Loss: 0.0057546598836779594 PSNR: 26.820192337036133 +[TRAIN] Iter: 745200 Loss: 0.0063803307712078094 PSNR: 26.347929000854492 +[TRAIN] Iter: 745300 Loss: 0.008025888353586197 PSNR: 25.965585708618164 +[TRAIN] Iter: 745400 Loss: 0.0066167269833385944 PSNR: 26.043039321899414 +[TRAIN] Iter: 745500 Loss: 0.004854721948504448 PSNR: 28.692520141601562 +[TRAIN] Iter: 745600 Loss: 0.006648143753409386 PSNR: 25.978227615356445 +[TRAIN] Iter: 745700 Loss: 0.0067890603095293045 PSNR: 25.856441497802734 +[TRAIN] Iter: 745800 Loss: 0.0062751248478889465 PSNR: 26.525074005126953 +[TRAIN] Iter: 745900 Loss: 0.005340081173926592 PSNR: 27.83170509338379 +[TRAIN] Iter: 746000 Loss: 0.006207410711795092 PSNR: 26.733989715576172 +[TRAIN] Iter: 746100 Loss: 0.007425599731504917 PSNR: 25.789093017578125 +[TRAIN] Iter: 746200 Loss: 0.006681344471871853 PSNR: 26.289377212524414 +[TRAIN] Iter: 746300 Loss: 0.005404789000749588 PSNR: 27.402128219604492 +[TRAIN] Iter: 746400 Loss: 0.00741894356906414 PSNR: 25.855022430419922 +[TRAIN] Iter: 746500 Loss: 0.0051431902684271336 PSNR: 28.246522903442383 +[TRAIN] Iter: 746600 Loss: 0.006042791530489922 PSNR: 27.77088165283203 +[TRAIN] Iter: 746700 Loss: 0.006316724233329296 PSNR: 26.284486770629883 +[TRAIN] Iter: 746800 Loss: 0.004933252930641174 PSNR: 28.234664916992188 +[TRAIN] Iter: 746900 Loss: 0.006537710782140493 PSNR: 26.053272247314453 +[TRAIN] Iter: 747000 Loss: 0.005151992663741112 PSNR: 27.195384979248047 +[TRAIN] Iter: 747100 Loss: 0.0057655079290270805 PSNR: 27.901315689086914 +[TRAIN] Iter: 747200 Loss: 0.006036392413079739 PSNR: 26.903982162475586 +[TRAIN] Iter: 747300 Loss: 0.00508710416033864 PSNR: 28.53863525390625 +[TRAIN] Iter: 747400 Loss: 0.00547207985073328 PSNR: 28.35674285888672 +[TRAIN] Iter: 747500 Loss: 0.005896051414310932 PSNR: 26.633764266967773 +[TRAIN] Iter: 747600 Loss: 0.0071365428157150745 PSNR: 25.788103103637695 +[TRAIN] Iter: 747700 Loss: 0.007153409533202648 PSNR: 26.043169021606445 +[TRAIN] Iter: 747800 Loss: 0.0064718155190348625 PSNR: 26.30838394165039 +[TRAIN] Iter: 747900 Loss: 0.005867631174623966 PSNR: 26.823396682739258 +[TRAIN] Iter: 748000 Loss: 0.006469111889600754 PSNR: 26.636978149414062 +[TRAIN] Iter: 748100 Loss: 0.007522494997829199 PSNR: 26.107418060302734 +[TRAIN] Iter: 748200 Loss: 0.006418582983314991 PSNR: 26.60866928100586 +[TRAIN] Iter: 748300 Loss: 0.008654976263642311 PSNR: 24.894359588623047 +[TRAIN] Iter: 748400 Loss: 0.007141323760151863 PSNR: 26.798702239990234 +[TRAIN] Iter: 748500 Loss: 0.006778413895517588 PSNR: 25.98281478881836 +[TRAIN] Iter: 748600 Loss: 0.007606284227222204 PSNR: 25.98082160949707 +[TRAIN] Iter: 748700 Loss: 0.007057079114019871 PSNR: 26.043302536010742 +[TRAIN] Iter: 748800 Loss: 0.0056754592806100845 PSNR: 27.026737213134766 +[TRAIN] Iter: 748900 Loss: 0.005995713174343109 PSNR: 27.922630310058594 +[TRAIN] Iter: 749000 Loss: 0.006917668506503105 PSNR: 26.45694351196289 +[TRAIN] Iter: 749100 Loss: 0.004741701763123274 PSNR: 28.994060516357422 +[TRAIN] Iter: 749200 Loss: 0.006305840332061052 PSNR: 26.390443801879883 +[TRAIN] Iter: 749300 Loss: 0.005867639556527138 PSNR: 27.716209411621094 +[TRAIN] Iter: 749400 Loss: 0.0061964173801243305 PSNR: 27.641965866088867 +[TRAIN] Iter: 749500 Loss: 0.004329696763306856 PSNR: 
28.126895904541016
+[TRAIN] Iter: 749600 Loss: 0.006699694786220789 PSNR: 26.64872932434082
+[TRAIN] Iter: 749700 Loss: 0.005205963738262653 PSNR: 28.20186424255371
+[TRAIN] Iter: 749800 Loss: 0.006459193769842386 PSNR: 26.314424514770508
+[TRAIN] Iter: 749900 Loss: 0.007791751995682716 PSNR: 25.168060302734375
+Saved checkpoints at ./logs/TUT-out-doll-360-np/750000.tar
+[... per-frame render timings for a 120-frame pass at torch.Size([320, 640, 3]) / torch.Size([320, 640]) (~21-23 s each) omitted ...]
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras:{'raw': tensor(...), 'rgb0': tensor(...), 'disp0': tensor(...), 'acc0': tensor([1., 1., ..., 1.]), 'z_std': tensor(...)} (full tensor printout omitted; the grad_fn fields were truncated during extraction)
+[... per-frame timings for a second 120-frame render pass omitted ...]
+test poses shape torch.Size([4, 3, 4])
+[... per-frame timings for the 4 test poses omitted ...]
+Saved test set
+[TRAIN] Iter: 750000 Loss: 0.006583042908459902 PSNR: 26.766014099121094
+[TRAIN] Iter: 750100 Loss: 0.0073985569179058075 PSNR: 25.476533889770508
+[TRAIN] Iter: 750200 Loss: 0.00622273376211524 PSNR: 26.421188354492188
+[TRAIN] Iter: 750300 Loss: 0.005720523186028004 PSNR: 26.70693588256836
+[TRAIN] Iter: 750400 Loss: 0.006020631641149521 PSNR: 26.85344886779785
+[TRAIN] Iter: 750500 Loss: 0.006990836001932621 PSNR: 26.55890464782715
+[TRAIN] Iter: 750600 Loss: 0.007125450298190117 PSNR: 26.028656005859375
+[TRAIN] Iter: 750700 Loss: 0.006445319391787052 PSNR: 26.51873779296875
+[TRAIN] Iter: 750800 Loss: 0.005681054666638374 PSNR: 27.503358840942383
+[TRAIN] Iter: 750900 Loss: 0.006518718786537647 PSNR: 27.078641891479492
+[TRAIN] Iter: 751000 Loss: 0.006345110014081001 PSNR: 26.642148971557617
+[TRAIN] Iter: 751100 Loss: 0.005495098419487476 PSNR: 27.242847442626953
+[TRAIN] Iter: 751200 Loss: 0.006887779571115971 PSNR: 26.21775245666504
+[TRAIN] Iter: 751300 Loss: 0.004889005795121193 PSNR: 28.4521427154541
+[TRAIN] Iter: 751400 Loss: 0.006286631803959608 PSNR: 27.352636337280273
+[TRAIN] Iter: 751500 Loss: 0.006437668576836586 PSNR: 26.173032760620117
+[TRAIN] Iter: 751600 Loss: 0.005599242635071278 PSNR: 27.99884033203125
+[TRAIN] Iter: 751700 Loss: 0.0066169193014502525 PSNR: 26.107234954833984
+[TRAIN] Iter: 751800 Loss: 0.006487428210675716 PSNR: 26.83418083190918
+[TRAIN] Iter: 751900 Loss: 0.007190406788140535 PSNR: 25.762310028076172
+[TRAIN] Iter: 752000 Loss: 0.006359139923006296 PSNR: 26.373130798339844
+[TRAIN] Iter: 752100 Loss: 0.005040854215621948 PSNR: 28.526697158813477
+[TRAIN] Iter: 752200 Loss: 0.0048647690564394 PSNR: 28.62222671508789
+[TRAIN] Iter: 752300 Loss: 0.007277995347976685 PSNR: 25.975717544555664
+[TRAIN] Iter: 752400 Loss: 0.006513683125376701 PSNR: 27.810768127441406
+[TRAIN] Iter: 752500 Loss: 0.006211277097463608 PSNR: 26.99382209777832
+[TRAIN] Iter: 752600 Loss:
0.007302172482013702 PSNR: 25.92536735534668 +[TRAIN] Iter: 752700 Loss: 0.007094472646713257 PSNR: 26.768049240112305 +[TRAIN] Iter: 752800 Loss: 0.005812262650579214 PSNR: 27.14673614501953 +[TRAIN] Iter: 752900 Loss: 0.006894925143569708 PSNR: 25.819866180419922 +[TRAIN] Iter: 753000 Loss: 0.005629680585116148 PSNR: 27.457237243652344 +[TRAIN] Iter: 753100 Loss: 0.007432511076331139 PSNR: 26.330251693725586 +[TRAIN] Iter: 753200 Loss: 0.005548019427806139 PSNR: 28.393199920654297 +[TRAIN] Iter: 753300 Loss: 0.006667611189186573 PSNR: 26.555606842041016 +[TRAIN] Iter: 753400 Loss: 0.005698667839169502 PSNR: 27.59255599975586 +[TRAIN] Iter: 753500 Loss: 0.006962553597986698 PSNR: 26.205049514770508 +[TRAIN] Iter: 753600 Loss: 0.007178512867540121 PSNR: 25.68696403503418 +[TRAIN] Iter: 753700 Loss: 0.005111032165586948 PSNR: 27.7144775390625 +[TRAIN] Iter: 753800 Loss: 0.005794777534902096 PSNR: 26.683082580566406 +[TRAIN] Iter: 753900 Loss: 0.0065354290418326855 PSNR: 26.378734588623047 +[TRAIN] Iter: 754000 Loss: 0.0053850808180868626 PSNR: 27.93122673034668 +[TRAIN] Iter: 754100 Loss: 0.006656979210674763 PSNR: 26.670879364013672 +[TRAIN] Iter: 754200 Loss: 0.006044589914381504 PSNR: 27.154590606689453 +[TRAIN] Iter: 754300 Loss: 0.006491133011877537 PSNR: 26.785972595214844 +[TRAIN] Iter: 754400 Loss: 0.007025998085737228 PSNR: 25.591352462768555 +[TRAIN] Iter: 754500 Loss: 0.006377127952873707 PSNR: 25.95444107055664 +[TRAIN] Iter: 754600 Loss: 0.005883790552616119 PSNR: 27.32562255859375 +[TRAIN] Iter: 754700 Loss: 0.006988868582993746 PSNR: 25.658884048461914 +[TRAIN] Iter: 754800 Loss: 0.005901099648326635 PSNR: 27.178951263427734 +[TRAIN] Iter: 754900 Loss: 0.005515643395483494 PSNR: 27.589994430541992 +[TRAIN] Iter: 755000 Loss: 0.006232673302292824 PSNR: 26.506383895874023 +[TRAIN] Iter: 755100 Loss: 0.0062467195093631744 PSNR: 27.380340576171875 +[TRAIN] Iter: 755200 Loss: 0.005094054155051708 PSNR: 28.684497833251953 +[TRAIN] Iter: 755300 Loss: 0.005948504898697138 PSNR: 27.18587303161621 +[TRAIN] Iter: 755400 Loss: 0.005633615888655186 PSNR: 27.113100051879883 +[TRAIN] Iter: 755500 Loss: 0.005858831573277712 PSNR: 26.858200073242188 +[TRAIN] Iter: 755600 Loss: 0.00580011960119009 PSNR: 26.938642501831055 +[TRAIN] Iter: 755700 Loss: 0.0060422541573643684 PSNR: 26.74945831298828 +[TRAIN] Iter: 755800 Loss: 0.007821140810847282 PSNR: 25.890748977661133 +[TRAIN] Iter: 755900 Loss: 0.006930673494935036 PSNR: 26.97197914123535 +[TRAIN] Iter: 756000 Loss: 0.004946223460137844 PSNR: 28.15209197998047 +[TRAIN] Iter: 756100 Loss: 0.005929190665483475 PSNR: 26.432302474975586 +[TRAIN] Iter: 756200 Loss: 0.006335073616355658 PSNR: 26.60435676574707 +[TRAIN] Iter: 756300 Loss: 0.0067010074853897095 PSNR: 26.639665603637695 +[TRAIN] Iter: 756400 Loss: 0.004893434699624777 PSNR: 27.491212844848633 +[TRAIN] Iter: 756500 Loss: 0.006313050631433725 PSNR: 26.578842163085938 +[TRAIN] Iter: 756600 Loss: 0.0047424547374248505 PSNR: 28.686452865600586 +[TRAIN] Iter: 756700 Loss: 0.006363770924508572 PSNR: 26.773021697998047 +[TRAIN] Iter: 756800 Loss: 0.007544375956058502 PSNR: 26.559940338134766 +[TRAIN] Iter: 756900 Loss: 0.007227393798530102 PSNR: 25.88191032409668 +[TRAIN] Iter: 757000 Loss: 0.0046774037182331085 PSNR: 28.13623046875 +[TRAIN] Iter: 757100 Loss: 0.006682766601443291 PSNR: 26.731525421142578 +[TRAIN] Iter: 757200 Loss: 0.006026282906532288 PSNR: 26.928926467895508 +[TRAIN] Iter: 757300 Loss: 0.006340923719108105 PSNR: 26.946374893188477 +[TRAIN] Iter: 757400 Loss: 
0.006925598718225956 PSNR: 26.340097427368164 +[TRAIN] Iter: 757500 Loss: 0.006441851612180471 PSNR: 26.169105529785156 +[TRAIN] Iter: 757600 Loss: 0.006165096536278725 PSNR: 26.26711082458496 +[TRAIN] Iter: 757700 Loss: 0.0058881258592009544 PSNR: 26.867448806762695 +[TRAIN] Iter: 757800 Loss: 0.007539818529039621 PSNR: 25.935611724853516 +[TRAIN] Iter: 757900 Loss: 0.0062856534495949745 PSNR: 26.994993209838867 +[TRAIN] Iter: 758000 Loss: 0.004989175125956535 PSNR: 28.099285125732422 +[TRAIN] Iter: 758100 Loss: 0.0054086497984826565 PSNR: 28.247085571289062 +[TRAIN] Iter: 758200 Loss: 0.005966663360595703 PSNR: 27.559553146362305 +[TRAIN] Iter: 758300 Loss: 0.006091118324548006 PSNR: 26.991262435913086 +[TRAIN] Iter: 758400 Loss: 0.005250016693025827 PSNR: 27.842559814453125 +[TRAIN] Iter: 758500 Loss: 0.0052961139008402824 PSNR: 26.833559036254883 +[TRAIN] Iter: 758600 Loss: 0.005455189384520054 PSNR: 27.714553833007812 +[TRAIN] Iter: 758700 Loss: 0.006214031483978033 PSNR: 26.67720603942871 +[TRAIN] Iter: 758800 Loss: 0.008342469111084938 PSNR: 25.32789421081543 +[TRAIN] Iter: 758900 Loss: 0.006280392408370972 PSNR: 26.73920440673828 +[TRAIN] Iter: 759000 Loss: 0.006108101457357407 PSNR: 26.758485794067383 +[TRAIN] Iter: 759100 Loss: 0.0060807811096310616 PSNR: 27.70435333251953 +[TRAIN] Iter: 759200 Loss: 0.005859840661287308 PSNR: 27.512569427490234 +[TRAIN] Iter: 759300 Loss: 0.004547279328107834 PSNR: 28.26719856262207 +[TRAIN] Iter: 759400 Loss: 0.006600687280297279 PSNR: 26.723581314086914 +[TRAIN] Iter: 759500 Loss: 0.006214791908860207 PSNR: 26.703166961669922 +[TRAIN] Iter: 759600 Loss: 0.0058542098850011826 PSNR: 26.830354690551758 +[TRAIN] Iter: 759700 Loss: 0.006411480717360973 PSNR: 26.275177001953125 +[TRAIN] Iter: 759800 Loss: 0.006184781901538372 PSNR: 26.525054931640625 +[TRAIN] Iter: 759900 Loss: 0.006134179420769215 PSNR: 27.425567626953125 +Saved checkpoints at ./logs/TUT-out-doll-360-np/760000.tar +[TRAIN] Iter: 760000 Loss: 0.005354365333914757 PSNR: 27.721799850463867 +[TRAIN] Iter: 760100 Loss: 0.006934453267604113 PSNR: 26.713764190673828 +[TRAIN] Iter: 760200 Loss: 0.005540193524211645 PSNR: 27.707975387573242 +[TRAIN] Iter: 760300 Loss: 0.0073611438274383545 PSNR: 25.418397903442383 +[TRAIN] Iter: 760400 Loss: 0.004975053481757641 PSNR: 28.013965606689453 +[TRAIN] Iter: 760500 Loss: 0.0065470486879348755 PSNR: 26.900693893432617 +[TRAIN] Iter: 760600 Loss: 0.0069390516728162766 PSNR: 26.179536819458008 +[TRAIN] Iter: 760700 Loss: 0.006203328259289265 PSNR: 26.472570419311523 +[TRAIN] Iter: 760800 Loss: 0.007224747445434332 PSNR: 25.97659683227539 +[TRAIN] Iter: 760900 Loss: 0.005172985140234232 PSNR: 27.905149459838867 +[TRAIN] Iter: 761000 Loss: 0.006726288236677647 PSNR: 26.8636531829834 +[TRAIN] Iter: 761100 Loss: 0.006718588061630726 PSNR: 26.304044723510742 +[TRAIN] Iter: 761200 Loss: 0.007816074416041374 PSNR: 25.943706512451172 +[TRAIN] Iter: 761300 Loss: 0.006814360618591309 PSNR: 25.997604370117188 +[TRAIN] Iter: 761400 Loss: 0.007089601829648018 PSNR: 26.06706428527832 +[TRAIN] Iter: 761500 Loss: 0.005772450938820839 PSNR: 27.300146102905273 +[TRAIN] Iter: 761600 Loss: 0.006865690462291241 PSNR: 25.69512176513672 +[TRAIN] Iter: 761700 Loss: 0.007441017776727676 PSNR: 26.06549072265625 +[TRAIN] Iter: 761800 Loss: 0.006062156520783901 PSNR: 27.71118927001953 +[TRAIN] Iter: 761900 Loss: 0.007953093387186527 PSNR: 25.321346282958984 +[TRAIN] Iter: 762000 Loss: 0.004650570452213287 PSNR: 28.77855682373047 +[TRAIN] Iter: 762100 Loss: 0.007114374078810215 
PSNR: 26.277748107910156 +[TRAIN] Iter: 762200 Loss: 0.00610966794192791 PSNR: 26.99423599243164 +[TRAIN] Iter: 762300 Loss: 0.0063237822614610195 PSNR: 27.009946823120117 +[TRAIN] Iter: 762400 Loss: 0.005853498354554176 PSNR: 27.049274444580078 +[TRAIN] Iter: 762500 Loss: 0.005026195198297501 PSNR: 27.884567260742188 +[TRAIN] Iter: 762600 Loss: 0.005232247989624739 PSNR: 27.35485076904297 +[TRAIN] Iter: 762700 Loss: 0.0052496702410280704 PSNR: 27.055891036987305 +[TRAIN] Iter: 762800 Loss: 0.006093776784837246 PSNR: 26.38269805908203 +[TRAIN] Iter: 762900 Loss: 0.005665394477546215 PSNR: 27.56212615966797 +[TRAIN] Iter: 763000 Loss: 0.0063843559473752975 PSNR: 26.205411911010742 +[TRAIN] Iter: 763100 Loss: 0.006045148707926273 PSNR: 27.66456413269043 +[TRAIN] Iter: 763200 Loss: 0.005839684046804905 PSNR: 27.648380279541016 +[TRAIN] Iter: 763300 Loss: 0.004985900595784187 PSNR: 28.912921905517578 +[TRAIN] Iter: 763400 Loss: 0.007997129112482071 PSNR: 25.30872344970703 +[TRAIN] Iter: 763500 Loss: 0.005518659949302673 PSNR: 27.944181442260742 +[TRAIN] Iter: 763600 Loss: 0.00560541357845068 PSNR: 26.963336944580078 +[TRAIN] Iter: 763700 Loss: 0.006816771347075701 PSNR: 25.99723243713379 +[TRAIN] Iter: 763800 Loss: 0.00552632799372077 PSNR: 28.16363525390625 +[TRAIN] Iter: 763900 Loss: 0.004544435068964958 PSNR: 28.935932159423828 +[TRAIN] Iter: 764000 Loss: 0.00649669673293829 PSNR: 26.412883758544922 +[TRAIN] Iter: 764100 Loss: 0.006756005808711052 PSNR: 26.033620834350586 +[TRAIN] Iter: 764200 Loss: 0.006293760146945715 PSNR: 26.428573608398438 +[TRAIN] Iter: 764300 Loss: 0.006027129478752613 PSNR: 27.32001304626465 +[TRAIN] Iter: 764400 Loss: 0.006678896956145763 PSNR: 26.651514053344727 +[TRAIN] Iter: 764500 Loss: 0.005244948901236057 PSNR: 27.130573272705078 +[TRAIN] Iter: 764600 Loss: 0.005501296371221542 PSNR: 27.5606689453125 +[TRAIN] Iter: 764700 Loss: 0.005582135636359453 PSNR: 27.50852394104004 +[TRAIN] Iter: 764800 Loss: 0.006632216274738312 PSNR: 26.21584129333496 +[TRAIN] Iter: 764900 Loss: 0.008272247388958931 PSNR: 25.528657913208008 +[TRAIN] Iter: 765000 Loss: 0.004953907802700996 PSNR: 28.26416015625 +[TRAIN] Iter: 765100 Loss: 0.00676468200981617 PSNR: 26.5516300201416 +[TRAIN] Iter: 765200 Loss: 0.008424503728747368 PSNR: 25.318431854248047 +[TRAIN] Iter: 765300 Loss: 0.007741954643279314 PSNR: 26.20102882385254 +[TRAIN] Iter: 765400 Loss: 0.007164794020354748 PSNR: 26.2055606842041 +[TRAIN] Iter: 765500 Loss: 0.004602349363267422 PSNR: 28.75614356994629 +[TRAIN] Iter: 765600 Loss: 0.0066271028481423855 PSNR: 26.851320266723633 +[TRAIN] Iter: 765700 Loss: 0.005728577729314566 PSNR: 27.498022079467773 +[TRAIN] Iter: 765800 Loss: 0.006990758236497641 PSNR: 26.403335571289062 +[TRAIN] Iter: 765900 Loss: 0.006708391010761261 PSNR: 26.498037338256836 +[TRAIN] Iter: 766000 Loss: 0.005857170559465885 PSNR: 27.71511459350586 +[TRAIN] Iter: 766100 Loss: 0.005176134407520294 PSNR: 27.722885131835938 +[TRAIN] Iter: 766200 Loss: 0.006499880459159613 PSNR: 27.979087829589844 +[TRAIN] Iter: 766300 Loss: 0.006437999662011862 PSNR: 27.119365692138672 +[TRAIN] Iter: 766400 Loss: 0.005319373682141304 PSNR: 27.81801986694336 +[TRAIN] Iter: 766500 Loss: 0.006046746391803026 PSNR: 26.90214729309082 +[TRAIN] Iter: 766600 Loss: 0.004856862593442202 PSNR: 28.48406410217285 +[TRAIN] Iter: 766700 Loss: 0.005218431353569031 PSNR: 28.460426330566406 +[TRAIN] Iter: 766800 Loss: 0.005675524473190308 PSNR: 27.510055541992188 +[TRAIN] Iter: 766900 Loss: 0.006672794930636883 PSNR: 26.38768196105957 
+[TRAIN] Iter: 767000 Loss: 0.005612239241600037 PSNR: 28.097061157226562 +[TRAIN] Iter: 767100 Loss: 0.006288617383688688 PSNR: 26.271848678588867 +[TRAIN] Iter: 767200 Loss: 0.00451214425265789 PSNR: 28.675596237182617 +[TRAIN] Iter: 767300 Loss: 0.006199279800057411 PSNR: 27.158082962036133 +[TRAIN] Iter: 767400 Loss: 0.007483738474547863 PSNR: 25.611492156982422 +[TRAIN] Iter: 767500 Loss: 0.006333877798169851 PSNR: 26.45948600769043 +[TRAIN] Iter: 767600 Loss: 0.0058919028379023075 PSNR: 27.779296875 +[TRAIN] Iter: 767700 Loss: 0.006302027963101864 PSNR: 26.69886016845703 +[TRAIN] Iter: 767800 Loss: 0.006884458474814892 PSNR: 26.087663650512695 +[TRAIN] Iter: 767900 Loss: 0.006918028462678194 PSNR: 26.420207977294922 +[TRAIN] Iter: 768000 Loss: 0.0068784598261117935 PSNR: 26.15857696533203 +[TRAIN] Iter: 768100 Loss: 0.006868196185678244 PSNR: 25.91841697692871 +[TRAIN] Iter: 768200 Loss: 0.006356279831379652 PSNR: 26.797128677368164 +[TRAIN] Iter: 768300 Loss: 0.0058391764760017395 PSNR: 27.659116744995117 +[TRAIN] Iter: 768400 Loss: 0.0054232776165008545 PSNR: 28.607402801513672 +[TRAIN] Iter: 768500 Loss: 0.0067564258351922035 PSNR: 26.361358642578125 +[TRAIN] Iter: 768600 Loss: 0.0043712486512959 PSNR: 29.03191566467285 +[TRAIN] Iter: 768700 Loss: 0.0066981022246181965 PSNR: 26.22536277770996 +[TRAIN] Iter: 768800 Loss: 0.006254894658923149 PSNR: 26.816164016723633 +[TRAIN] Iter: 768900 Loss: 0.005045219790190458 PSNR: 28.255203247070312 +[TRAIN] Iter: 769000 Loss: 0.005639967042952776 PSNR: 27.5076961517334 +[TRAIN] Iter: 769100 Loss: 0.006077582016587257 PSNR: 26.798778533935547 +[TRAIN] Iter: 769200 Loss: 0.0050573209300637245 PSNR: 28.529895782470703 +[TRAIN] Iter: 769300 Loss: 0.006159720942378044 PSNR: 27.14845848083496 +[TRAIN] Iter: 769400 Loss: 0.006545183248817921 PSNR: 26.366973876953125 +[TRAIN] Iter: 769500 Loss: 0.005554921925067902 PSNR: 27.377334594726562 +[TRAIN] Iter: 769600 Loss: 0.004647008143365383 PSNR: 28.93359375 +[TRAIN] Iter: 769700 Loss: 0.006386477500200272 PSNR: 26.56721305847168 +[TRAIN] Iter: 769800 Loss: 0.0066444831900298595 PSNR: 26.49909782409668 +[TRAIN] Iter: 769900 Loss: 0.008454138413071632 PSNR: 25.07621955871582 +Saved checkpoints at ./logs/TUT-out-doll-360-np/770000.tar +[TRAIN] Iter: 770000 Loss: 0.006963890045881271 PSNR: 26.468307495117188 +[TRAIN] Iter: 770100 Loss: 0.0056333597749471664 PSNR: 27.205951690673828 +[TRAIN] Iter: 770200 Loss: 0.006965163163840771 PSNR: 26.172239303588867 +[TRAIN] Iter: 770300 Loss: 0.007569069042801857 PSNR: 25.995534896850586 +[TRAIN] Iter: 770400 Loss: 0.00742711778730154 PSNR: 26.08763885498047 +[TRAIN] Iter: 770500 Loss: 0.0053500821813941 PSNR: 28.189950942993164 +[TRAIN] Iter: 770600 Loss: 0.006963921710848808 PSNR: 27.017370223999023 +[TRAIN] Iter: 770700 Loss: 0.005182345397770405 PSNR: 28.067094802856445 +[TRAIN] Iter: 770800 Loss: 0.0063947406597435474 PSNR: 26.055328369140625 +[TRAIN] Iter: 770900 Loss: 0.005598573945462704 PSNR: 27.162860870361328 +[TRAIN] Iter: 771000 Loss: 0.005884894169867039 PSNR: 27.118061065673828 +[TRAIN] Iter: 771100 Loss: 0.00677319522947073 PSNR: 26.381914138793945 +[TRAIN] Iter: 771200 Loss: 0.004786506295204163 PSNR: 28.65781593322754 +[TRAIN] Iter: 771300 Loss: 0.006530815735459328 PSNR: 26.67603874206543 +[TRAIN] Iter: 771400 Loss: 0.007299532648175955 PSNR: 25.907794952392578 +[TRAIN] Iter: 771500 Loss: 0.006237629801034927 PSNR: 27.3325138092041 +[TRAIN] Iter: 771600 Loss: 0.0068004196509718895 PSNR: 25.80748176574707 +[TRAIN] Iter: 771700 Loss: 
0.005670764949172735 PSNR: 27.209474563598633 +[TRAIN] Iter: 771800 Loss: 0.007721422705799341 PSNR: 25.40237045288086 +[TRAIN] Iter: 771900 Loss: 0.006715457420796156 PSNR: 26.093463897705078 +[TRAIN] Iter: 772000 Loss: 0.006324902176856995 PSNR: 26.57157325744629 +[TRAIN] Iter: 772100 Loss: 0.0055719949305057526 PSNR: 28.0805606842041 +[TRAIN] Iter: 772200 Loss: 0.0049486407078802586 PSNR: 28.570556640625 +[TRAIN] Iter: 772300 Loss: 0.006897099781781435 PSNR: 26.33961296081543 +[TRAIN] Iter: 772400 Loss: 0.006333775818347931 PSNR: 27.06411361694336 +[TRAIN] Iter: 772500 Loss: 0.007302665151655674 PSNR: 25.780900955200195 +[TRAIN] Iter: 772600 Loss: 0.006504235323518515 PSNR: 26.24329948425293 +[TRAIN] Iter: 772700 Loss: 0.007464549038559198 PSNR: 25.440752029418945 +[TRAIN] Iter: 772800 Loss: 0.0061128754168748856 PSNR: 26.710683822631836 +[TRAIN] Iter: 772900 Loss: 0.0054440549574792385 PSNR: 27.47907257080078 +[TRAIN] Iter: 773000 Loss: 0.005491192452609539 PSNR: 27.114133834838867 +[TRAIN] Iter: 773100 Loss: 0.006617784965783358 PSNR: 26.892423629760742 +[TRAIN] Iter: 773200 Loss: 0.00622980110347271 PSNR: 26.422693252563477 +[TRAIN] Iter: 773300 Loss: 0.005400513298809528 PSNR: 28.370769500732422 +[TRAIN] Iter: 773400 Loss: 0.005522143095731735 PSNR: 27.06689453125 +[TRAIN] Iter: 773500 Loss: 0.006393132731318474 PSNR: 26.18231964111328 +[TRAIN] Iter: 773600 Loss: 0.006068314891308546 PSNR: 26.30575180053711 +[TRAIN] Iter: 773700 Loss: 0.006508751772344112 PSNR: 27.36977767944336 +[TRAIN] Iter: 773800 Loss: 0.005724102258682251 PSNR: 27.78174591064453 +[TRAIN] Iter: 773900 Loss: 0.0049426136538386345 PSNR: 28.497953414916992 +[TRAIN] Iter: 774000 Loss: 0.006128738168627024 PSNR: 27.257837295532227 +[TRAIN] Iter: 774100 Loss: 0.006288031116127968 PSNR: 26.644386291503906 +[TRAIN] Iter: 774200 Loss: 0.005895458161830902 PSNR: 27.117589950561523 +[TRAIN] Iter: 774300 Loss: 0.005046307109296322 PSNR: 27.38957405090332 +[TRAIN] Iter: 774400 Loss: 0.004578995518386364 PSNR: 29.27196502685547 +[TRAIN] Iter: 774500 Loss: 0.006331196054816246 PSNR: 26.59839630126953 +[TRAIN] Iter: 774600 Loss: 0.006953523028641939 PSNR: 26.696989059448242 +[TRAIN] Iter: 774700 Loss: 0.005950487218797207 PSNR: 27.482620239257812 +[TRAIN] Iter: 774800 Loss: 0.005563255399465561 PSNR: 27.593338012695312 +[TRAIN] Iter: 774900 Loss: 0.006382143124938011 PSNR: 26.088857650756836 +[TRAIN] Iter: 775000 Loss: 0.006746156141161919 PSNR: 27.202617645263672 +[TRAIN] Iter: 775100 Loss: 0.0068621402606368065 PSNR: 26.85254669189453 +[TRAIN] Iter: 775200 Loss: 0.0061021833680570126 PSNR: 26.112098693847656 +[TRAIN] Iter: 775300 Loss: 0.0063345166854560375 PSNR: 26.40889549255371 +[TRAIN] Iter: 775400 Loss: 0.007077707909047604 PSNR: 26.437564849853516 +[TRAIN] Iter: 775500 Loss: 0.0049168942496180534 PSNR: 27.917675018310547 +[TRAIN] Iter: 775600 Loss: 0.007020013406872749 PSNR: 25.9122371673584 +[TRAIN] Iter: 775700 Loss: 0.006667240522801876 PSNR: 26.327199935913086 +[TRAIN] Iter: 775800 Loss: 0.007121547125279903 PSNR: 26.462385177612305 +[TRAIN] Iter: 775900 Loss: 0.00658378517255187 PSNR: 26.56917381286621 +[TRAIN] Iter: 776000 Loss: 0.0051474180072546005 PSNR: 28.682153701782227 +[TRAIN] Iter: 776100 Loss: 0.006273148115724325 PSNR: 27.107196807861328 +[TRAIN] Iter: 776200 Loss: 0.00699866097420454 PSNR: 26.338708877563477 +[TRAIN] Iter: 776300 Loss: 0.00509986374527216 PSNR: 28.47878646850586 +[TRAIN] Iter: 776400 Loss: 0.0051147365011274815 PSNR: 27.36243438720703 +[TRAIN] Iter: 776500 Loss: 0.006680718623101711 
PSNR: 26.196086883544922 +[TRAIN] Iter: 776600 Loss: 0.007262204773724079 PSNR: 26.154129028320312 +[TRAIN] Iter: 776700 Loss: 0.006189526990056038 PSNR: 26.81439781188965 +[TRAIN] Iter: 776800 Loss: 0.007282450795173645 PSNR: 25.867246627807617 +[TRAIN] Iter: 776900 Loss: 0.00667471531778574 PSNR: 27.121732711791992 +[TRAIN] Iter: 777000 Loss: 0.004642231855541468 PSNR: 28.301485061645508 +[TRAIN] Iter: 777100 Loss: 0.005993451923131943 PSNR: 27.023283004760742 +[TRAIN] Iter: 777200 Loss: 0.004063979722559452 PSNR: 28.951427459716797 +[TRAIN] Iter: 777300 Loss: 0.006997852586209774 PSNR: 25.70875358581543 +[TRAIN] Iter: 777400 Loss: 0.005095212254673243 PSNR: 28.39939308166504 +[TRAIN] Iter: 777500 Loss: 0.00672965869307518 PSNR: 26.63802146911621 +[TRAIN] Iter: 777600 Loss: 0.006606478244066238 PSNR: 26.338150024414062 +[TRAIN] Iter: 777700 Loss: 0.005854178685694933 PSNR: 27.169639587402344 +[TRAIN] Iter: 777800 Loss: 0.006174992769956589 PSNR: 26.786056518554688 +[TRAIN] Iter: 777900 Loss: 0.005859294906258583 PSNR: 26.74791717529297 +[TRAIN] Iter: 778000 Loss: 0.005194639787077904 PSNR: 28.186756134033203 +[TRAIN] Iter: 778100 Loss: 0.0057546296156942844 PSNR: 27.622962951660156 +[TRAIN] Iter: 778200 Loss: 0.00688206497579813 PSNR: 26.51129913330078 +[TRAIN] Iter: 778300 Loss: 0.00565328449010849 PSNR: 27.034414291381836 +[TRAIN] Iter: 778400 Loss: 0.0060557229444384575 PSNR: 28.23089027404785 +[TRAIN] Iter: 778500 Loss: 0.006679294630885124 PSNR: 26.527889251708984 +[TRAIN] Iter: 778600 Loss: 0.005197701044380665 PSNR: 27.71160316467285 +[TRAIN] Iter: 778700 Loss: 0.005819765850901604 PSNR: 27.563934326171875 +[TRAIN] Iter: 778800 Loss: 0.007033425848931074 PSNR: 26.148141860961914 +[TRAIN] Iter: 778900 Loss: 0.004425272345542908 PSNR: 29.198030471801758 +[TRAIN] Iter: 779000 Loss: 0.0058678570203483105 PSNR: 27.168928146362305 +[TRAIN] Iter: 779100 Loss: 0.006246055476367474 PSNR: 26.97342300415039 +[TRAIN] Iter: 779200 Loss: 0.005728672258555889 PSNR: 26.97718620300293 +[TRAIN] Iter: 779300 Loss: 0.005891452077776194 PSNR: 27.83296012878418 +[TRAIN] Iter: 779400 Loss: 0.006497461348772049 PSNR: 26.58255958557129 +[TRAIN] Iter: 779500 Loss: 0.005656107794493437 PSNR: 27.06900405883789 +[TRAIN] Iter: 779600 Loss: 0.007076019886881113 PSNR: 26.059879302978516 +[TRAIN] Iter: 779700 Loss: 0.006270784884691238 PSNR: 26.123403549194336 +[TRAIN] Iter: 779800 Loss: 0.0050475746393203735 PSNR: 28.11893653869629 +[TRAIN] Iter: 779900 Loss: 0.006310821510851383 PSNR: 27.182371139526367 +Saved checkpoints at ./logs/TUT-out-doll-360-np/780000.tar +[TRAIN] Iter: 780000 Loss: 0.0055762045085430145 PSNR: 26.955337524414062 +[TRAIN] Iter: 780100 Loss: 0.005829683039337397 PSNR: 26.823932647705078 +[TRAIN] Iter: 780200 Loss: 0.007183310575783253 PSNR: 25.661813735961914 +[TRAIN] Iter: 780300 Loss: 0.006710043642669916 PSNR: 26.158817291259766 +[TRAIN] Iter: 780400 Loss: 0.0053110551089048386 PSNR: 28.069799423217773 +[TRAIN] Iter: 780500 Loss: 0.0060804723761975765 PSNR: 26.90970802307129 +[TRAIN] Iter: 780600 Loss: 0.005624106153845787 PSNR: 26.885337829589844 +[TRAIN] Iter: 780700 Loss: 0.006480679847300053 PSNR: 26.36240005493164 +[TRAIN] Iter: 780800 Loss: 0.005890491418540478 PSNR: 27.04358673095703 +[TRAIN] Iter: 780900 Loss: 0.007577140349894762 PSNR: 26.467086791992188 +[TRAIN] Iter: 781000 Loss: 0.006497632712125778 PSNR: 26.308818817138672 +[TRAIN] Iter: 781100 Loss: 0.006198996677994728 PSNR: 26.444442749023438 +[TRAIN] Iter: 781200 Loss: 0.004873727913945913 PSNR: 28.525753021240234 
+[TRAIN] Iter: 781300 Loss: 0.0071258144453167915 PSNR: 26.853574752807617 +[TRAIN] Iter: 781400 Loss: 0.006438993848860264 PSNR: 26.507482528686523 +[TRAIN] Iter: 781500 Loss: 0.006676287390291691 PSNR: 26.611242294311523 +[TRAIN] Iter: 781600 Loss: 0.006707090884447098 PSNR: 26.187538146972656 +[TRAIN] Iter: 781700 Loss: 0.005346265621483326 PSNR: 27.117185592651367 +[TRAIN] Iter: 781800 Loss: 0.005890774540603161 PSNR: 27.01582145690918 +[TRAIN] Iter: 781900 Loss: 0.005686587654054165 PSNR: 27.00206184387207 +[TRAIN] Iter: 782000 Loss: 0.0063368710689246655 PSNR: 26.2255916595459 +[TRAIN] Iter: 782100 Loss: 0.005688939709216356 PSNR: 26.886796951293945 +[TRAIN] Iter: 782200 Loss: 0.0067571066319942474 PSNR: 26.617006301879883 +[TRAIN] Iter: 782300 Loss: 0.004735908005386591 PSNR: 28.667438507080078 +[TRAIN] Iter: 782400 Loss: 0.0055470215156674385 PSNR: 28.647829055786133 +[TRAIN] Iter: 782500 Loss: 0.0073202913627028465 PSNR: 25.887176513671875 +[TRAIN] Iter: 782600 Loss: 0.005171399563550949 PSNR: 28.095468521118164 +[TRAIN] Iter: 782700 Loss: 0.0069876983761787415 PSNR: 26.21034812927246 +[TRAIN] Iter: 782800 Loss: 0.005896480288356543 PSNR: 27.03501319885254 +[TRAIN] Iter: 782900 Loss: 0.005441313609480858 PSNR: 26.711406707763672 +[TRAIN] Iter: 783000 Loss: 0.0043732356280088425 PSNR: 29.07683563232422 +[TRAIN] Iter: 783100 Loss: 0.006315409205853939 PSNR: 26.807741165161133 +[TRAIN] Iter: 783200 Loss: 0.0064304666593670845 PSNR: 26.08896255493164 +[TRAIN] Iter: 783300 Loss: 0.0057106344029307365 PSNR: 27.089534759521484 +[TRAIN] Iter: 783400 Loss: 0.00709195714443922 PSNR: 26.468299865722656 +[TRAIN] Iter: 783500 Loss: 0.006373154930770397 PSNR: 26.870098114013672 +[TRAIN] Iter: 783600 Loss: 0.005758365616202354 PSNR: 27.179733276367188 +[TRAIN] Iter: 783700 Loss: 0.006524574011564255 PSNR: 26.397947311401367 +[TRAIN] Iter: 783800 Loss: 0.005935866851359606 PSNR: 27.226438522338867 +[TRAIN] Iter: 783900 Loss: 0.006659524515271187 PSNR: 26.164039611816406 +[TRAIN] Iter: 784000 Loss: 0.0065048858523368835 PSNR: 26.801652908325195 +[TRAIN] Iter: 784100 Loss: 0.005498105194419622 PSNR: 27.489166259765625 +[TRAIN] Iter: 784200 Loss: 0.006764087826013565 PSNR: 26.507099151611328 +[TRAIN] Iter: 784300 Loss: 0.007163509726524353 PSNR: 25.949731826782227 +[TRAIN] Iter: 784400 Loss: 0.0044957734644412994 PSNR: 28.840377807617188 +[TRAIN] Iter: 784500 Loss: 0.007261732593178749 PSNR: 26.0141544342041 +[TRAIN] Iter: 784600 Loss: 0.005577312782406807 PSNR: 28.56988525390625 +[TRAIN] Iter: 784700 Loss: 0.005934142507612705 PSNR: 27.078767776489258 +[TRAIN] Iter: 784800 Loss: 0.0060496642254292965 PSNR: 26.144987106323242 +[TRAIN] Iter: 784900 Loss: 0.007191802375018597 PSNR: 26.13823127746582 +[TRAIN] Iter: 785000 Loss: 0.006745770573616028 PSNR: 26.79897689819336 +[TRAIN] Iter: 785100 Loss: 0.005959854926913977 PSNR: 26.56359100341797 +[TRAIN] Iter: 785200 Loss: 0.006471715867519379 PSNR: 26.2241268157959 +[TRAIN] Iter: 785300 Loss: 0.005767263472080231 PSNR: 27.1639404296875 +[TRAIN] Iter: 785400 Loss: 0.00572031456977129 PSNR: 26.906211853027344 +[TRAIN] Iter: 785500 Loss: 0.006213172804564238 PSNR: 26.38132095336914 +[TRAIN] Iter: 785600 Loss: 0.006437018513679504 PSNR: 26.813886642456055 +[TRAIN] Iter: 785700 Loss: 0.0056719244457781315 PSNR: 26.74216651916504 +[TRAIN] Iter: 785800 Loss: 0.007396925240755081 PSNR: 25.4670467376709 +[TRAIN] Iter: 785900 Loss: 0.007036385592073202 PSNR: 26.332834243774414 +[TRAIN] Iter: 786000 Loss: 0.00696959625929594 PSNR: 26.153263092041016 +[TRAIN] Iter: 
786100 Loss: 0.0052459388971328735 PSNR: 26.84139633178711 +[TRAIN] Iter: 786200 Loss: 0.005616515874862671 PSNR: 27.23414421081543 +[TRAIN] Iter: 786300 Loss: 0.005374742206186056 PSNR: 26.827232360839844 +[TRAIN] Iter: 786400 Loss: 0.00631642434746027 PSNR: 26.904621124267578 +[TRAIN] Iter: 786500 Loss: 0.0063588968478143215 PSNR: 26.659748077392578 +[TRAIN] Iter: 786600 Loss: 0.006237017922103405 PSNR: 27.082828521728516 +[TRAIN] Iter: 786700 Loss: 0.006507463287562132 PSNR: 27.199371337890625 +[TRAIN] Iter: 786800 Loss: 0.00566516537219286 PSNR: 27.93569564819336 +[TRAIN] Iter: 786900 Loss: 0.0066877552308142185 PSNR: 26.259418487548828 +[TRAIN] Iter: 787000 Loss: 0.00532474834471941 PSNR: 27.346332550048828 +[TRAIN] Iter: 787100 Loss: 0.006243542302399874 PSNR: 27.058391571044922 +[TRAIN] Iter: 787200 Loss: 0.00567150441929698 PSNR: 27.790241241455078 +[TRAIN] Iter: 787300 Loss: 0.0055863382294774055 PSNR: 28.17900276184082 +[TRAIN] Iter: 787400 Loss: 0.005947696976363659 PSNR: 27.371244430541992 +[TRAIN] Iter: 787500 Loss: 0.007693122141063213 PSNR: 25.410907745361328 +[TRAIN] Iter: 787600 Loss: 0.007133808918297291 PSNR: 26.07145118713379 +[TRAIN] Iter: 787700 Loss: 0.005061584524810314 PSNR: 27.806884765625 +[TRAIN] Iter: 787800 Loss: 0.006015518680214882 PSNR: 27.094600677490234 +[TRAIN] Iter: 787900 Loss: 0.006893213372677565 PSNR: 26.1678409576416 +[TRAIN] Iter: 788000 Loss: 0.006296052597463131 PSNR: 26.371023178100586 +[TRAIN] Iter: 788100 Loss: 0.005970345810055733 PSNR: 27.098106384277344 +[TRAIN] Iter: 788200 Loss: 0.007238592021167278 PSNR: 25.616018295288086 +[TRAIN] Iter: 788300 Loss: 0.0057414984330534935 PSNR: 26.768760681152344 +[TRAIN] Iter: 788400 Loss: 0.005737282335758209 PSNR: 26.53407096862793 +[TRAIN] Iter: 788500 Loss: 0.006601874716579914 PSNR: 26.916168212890625 +[TRAIN] Iter: 788600 Loss: 0.0053594657219946384 PSNR: 27.665245056152344 +[TRAIN] Iter: 788700 Loss: 0.006292091682553291 PSNR: 27.25041389465332 +[TRAIN] Iter: 788800 Loss: 0.005564180202782154 PSNR: 28.362451553344727 +[TRAIN] Iter: 788900 Loss: 0.005182887893170118 PSNR: 28.396968841552734 +[TRAIN] Iter: 789000 Loss: 0.005762075074017048 PSNR: 27.03026580810547 +[TRAIN] Iter: 789100 Loss: 0.004996664822101593 PSNR: 27.62422752380371 +[TRAIN] Iter: 789200 Loss: 0.006460776552557945 PSNR: 27.009790420532227 +[TRAIN] Iter: 789300 Loss: 0.005141938105225563 PSNR: 27.977222442626953 +[TRAIN] Iter: 789400 Loss: 0.006360854022204876 PSNR: 26.77755355834961 +[TRAIN] Iter: 789500 Loss: 0.00493656238541007 PSNR: 28.23619842529297 +[TRAIN] Iter: 789600 Loss: 0.005028679966926575 PSNR: 28.429655075073242 +[TRAIN] Iter: 789700 Loss: 0.007099885027855635 PSNR: 26.27116584777832 +[TRAIN] Iter: 789800 Loss: 0.005212167743593454 PSNR: 27.2736873626709 +[TRAIN] Iter: 789900 Loss: 0.005220562685281038 PSNR: 27.647886276245117 +Saved checkpoints at ./logs/TUT-out-doll-360-np/790000.tar +[TRAIN] Iter: 790000 Loss: 0.005371090956032276 PSNR: 28.45018196105957 +[TRAIN] Iter: 790100 Loss: 0.005545099265873432 PSNR: 27.59246253967285 +[TRAIN] Iter: 790200 Loss: 0.005231261253356934 PSNR: 27.86850929260254 +[TRAIN] Iter: 790300 Loss: 0.0059272535145282745 PSNR: 27.30552101135254 +[TRAIN] Iter: 790400 Loss: 0.0066267033107578754 PSNR: 26.1192626953125 +[TRAIN] Iter: 790500 Loss: 0.0054962835274636745 PSNR: 27.21102523803711 +[TRAIN] Iter: 790600 Loss: 0.007205525878816843 PSNR: 26.188064575195312 +[TRAIN] Iter: 790700 Loss: 0.0053070480935275555 PSNR: 28.721172332763672 +[TRAIN] Iter: 790800 Loss: 0.006928908172994852 
PSNR: 25.909692764282227 +[TRAIN] Iter: 790900 Loss: 0.005684772972017527 PSNR: 26.556982040405273 +[TRAIN] Iter: 791000 Loss: 0.006967146880924702 PSNR: 26.997249603271484 +[TRAIN] Iter: 791100 Loss: 0.00524374982342124 PSNR: 27.55756187438965 +[TRAIN] Iter: 791200 Loss: 0.007680888753384352 PSNR: 25.6477108001709 +[TRAIN] Iter: 791300 Loss: 0.005661033559590578 PSNR: 27.439044952392578 +[TRAIN] Iter: 791400 Loss: 0.006447688210755587 PSNR: 26.20869255065918 +[TRAIN] Iter: 791500 Loss: 0.006975365802645683 PSNR: 25.812501907348633 +[TRAIN] Iter: 791600 Loss: 0.006244124844670296 PSNR: 26.558568954467773 +[TRAIN] Iter: 791700 Loss: 0.006582564674317837 PSNR: 26.046146392822266 +[TRAIN] Iter: 791800 Loss: 0.0061446744948625565 PSNR: 26.869335174560547 +[TRAIN] Iter: 791900 Loss: 0.006206068210303783 PSNR: 27.235321044921875 +[TRAIN] Iter: 792000 Loss: 0.006920690182596445 PSNR: 26.368961334228516 +[TRAIN] Iter: 792100 Loss: 0.007688869722187519 PSNR: 25.603281021118164 +[TRAIN] Iter: 792200 Loss: 0.0064318012446165085 PSNR: 26.622434616088867 +[TRAIN] Iter: 792300 Loss: 0.00631469301879406 PSNR: 27.221527099609375 +[TRAIN] Iter: 792400 Loss: 0.00606082659214735 PSNR: 27.191574096679688 +[TRAIN] Iter: 792500 Loss: 0.0057424260303378105 PSNR: 27.68535804748535 +[TRAIN] Iter: 792600 Loss: 0.004939848557114601 PSNR: 28.202909469604492 +[TRAIN] Iter: 792700 Loss: 0.0054777637124061584 PSNR: 27.824729919433594 +[TRAIN] Iter: 792800 Loss: 0.006476262118667364 PSNR: 25.91942024230957 +[TRAIN] Iter: 792900 Loss: 0.007844146341085434 PSNR: 25.9721736907959 +[TRAIN] Iter: 793000 Loss: 0.0058600022457540035 PSNR: 28.066307067871094 +[TRAIN] Iter: 793100 Loss: 0.006458168849349022 PSNR: 26.228008270263672 +[TRAIN] Iter: 793200 Loss: 0.00506087951362133 PSNR: 28.00143051147461 +[TRAIN] Iter: 793300 Loss: 0.006632648874074221 PSNR: 26.661462783813477 +[TRAIN] Iter: 793400 Loss: 0.006585937459021807 PSNR: 26.589996337890625 +[TRAIN] Iter: 793500 Loss: 0.006254302337765694 PSNR: 26.27127456665039 +[TRAIN] Iter: 793600 Loss: 0.005063630640506744 PSNR: 27.718406677246094 +[TRAIN] Iter: 793700 Loss: 0.007007146254181862 PSNR: 26.081300735473633 +[TRAIN] Iter: 793800 Loss: 0.005749833770096302 PSNR: 26.73969078063965 +[TRAIN] Iter: 793900 Loss: 0.006015867926180363 PSNR: 26.59560203552246 +[TRAIN] Iter: 794000 Loss: 0.004428043961524963 PSNR: 28.637208938598633 +[TRAIN] Iter: 794100 Loss: 0.006549244746565819 PSNR: 26.52718734741211 +[TRAIN] Iter: 794200 Loss: 0.0067160530015826225 PSNR: 26.355815887451172 +[TRAIN] Iter: 794300 Loss: 0.006448889151215553 PSNR: 26.605060577392578 +[TRAIN] Iter: 794400 Loss: 0.006112435832619667 PSNR: 27.553241729736328 +[TRAIN] Iter: 794500 Loss: 0.005889078602194786 PSNR: 26.576961517333984 +[TRAIN] Iter: 794600 Loss: 0.004875377286225557 PSNR: 28.037029266357422 +[TRAIN] Iter: 794700 Loss: 0.005821560975164175 PSNR: 26.478723526000977 +[TRAIN] Iter: 794800 Loss: 0.004537075757980347 PSNR: 28.14443016052246 +[TRAIN] Iter: 794900 Loss: 0.005415846593677998 PSNR: 27.33611488342285 +[TRAIN] Iter: 795000 Loss: 0.008883786387741566 PSNR: 24.692447662353516 +[TRAIN] Iter: 795100 Loss: 0.004257040563970804 PSNR: 28.759252548217773 +[TRAIN] Iter: 795200 Loss: 0.007150692865252495 PSNR: 25.882606506347656 +[TRAIN] Iter: 795300 Loss: 0.005988147109746933 PSNR: 26.964017868041992 +[TRAIN] Iter: 795400 Loss: 0.005061912816017866 PSNR: 28.0334415435791 +[TRAIN] Iter: 795500 Loss: 0.006318939384073019 PSNR: 26.857879638671875 +[TRAIN] Iter: 795600 Loss: 0.0067170849069952965 PSNR: 
26.781787872314453 +[TRAIN] Iter: 795700 Loss: 0.006608524359762669 PSNR: 26.468671798706055 +[TRAIN] Iter: 795800 Loss: 0.006735594943165779 PSNR: 25.800182342529297 +[TRAIN] Iter: 795900 Loss: 0.004349645227193832 PSNR: 28.673011779785156 +[TRAIN] Iter: 796000 Loss: 0.006320082116872072 PSNR: 26.77213478088379 +[TRAIN] Iter: 796100 Loss: 0.006932565476745367 PSNR: 25.598880767822266 +[TRAIN] Iter: 796200 Loss: 0.006030704826116562 PSNR: 26.589820861816406 +[TRAIN] Iter: 796300 Loss: 0.0054623959586024284 PSNR: 27.213764190673828 +[TRAIN] Iter: 796400 Loss: 0.0055752163752913475 PSNR: 27.949522018432617 +[TRAIN] Iter: 796500 Loss: 0.005936456844210625 PSNR: 26.3666934967041 +[TRAIN] Iter: 796600 Loss: 0.006152631249278784 PSNR: 26.06031608581543 +[TRAIN] Iter: 796700 Loss: 0.00550846429541707 PSNR: 27.848987579345703 +[TRAIN] Iter: 796800 Loss: 0.007280578371137381 PSNR: 25.798227310180664 +[TRAIN] Iter: 796900 Loss: 0.006501803174614906 PSNR: 26.57001304626465 +[TRAIN] Iter: 797000 Loss: 0.005442426539957523 PSNR: 28.10613441467285 +[TRAIN] Iter: 797100 Loss: 0.006538095884025097 PSNR: 26.07928466796875 +[TRAIN] Iter: 797200 Loss: 0.006211639381945133 PSNR: 26.730751037597656 +[TRAIN] Iter: 797300 Loss: 0.0056098345667123795 PSNR: 26.75899887084961 +[TRAIN] Iter: 797400 Loss: 0.007576615549623966 PSNR: 25.222591400146484 +[TRAIN] Iter: 797500 Loss: 0.006163423880934715 PSNR: 26.475496292114258 +[TRAIN] Iter: 797600 Loss: 0.005889963358640671 PSNR: 27.231603622436523 +[TRAIN] Iter: 797700 Loss: 0.006821721326559782 PSNR: 26.055376052856445 +[TRAIN] Iter: 797800 Loss: 0.005788231268525124 PSNR: 26.854990005493164 +[TRAIN] Iter: 797900 Loss: 0.0069198282435536385 PSNR: 26.00824737548828 +[TRAIN] Iter: 798000 Loss: 0.006970623508095741 PSNR: 25.909957885742188 +[TRAIN] Iter: 798100 Loss: 0.006921121850609779 PSNR: 26.065622329711914 +[TRAIN] Iter: 798200 Loss: 0.0070557971484959126 PSNR: 26.337783813476562 +[TRAIN] Iter: 798300 Loss: 0.006442351266741753 PSNR: 26.502941131591797 +[TRAIN] Iter: 798400 Loss: 0.0061438363045454025 PSNR: 26.69693374633789 +[TRAIN] Iter: 798500 Loss: 0.005990348290652037 PSNR: 26.640522003173828 +[TRAIN] Iter: 798600 Loss: 0.006046370603144169 PSNR: 26.731548309326172 +[TRAIN] Iter: 798700 Loss: 0.006636412348598242 PSNR: 26.15851593017578 +[TRAIN] Iter: 798800 Loss: 0.005428585223853588 PSNR: 27.16158103942871 +[TRAIN] Iter: 798900 Loss: 0.007541644852608442 PSNR: 25.885852813720703 +[TRAIN] Iter: 799000 Loss: 0.007865682244300842 PSNR: 26.536354064941406 +[TRAIN] Iter: 799100 Loss: 0.005840500816702843 PSNR: 26.96052360534668 +[TRAIN] Iter: 799200 Loss: 0.007786480709910393 PSNR: 25.718116760253906 +[TRAIN] Iter: 799300 Loss: 0.00636422261595726 PSNR: 27.569379806518555 +[TRAIN] Iter: 799400 Loss: 0.006151264533400536 PSNR: 26.93300437927246 +[TRAIN] Iter: 799500 Loss: 0.0054981764405965805 PSNR: 28.899131774902344 +[TRAIN] Iter: 799600 Loss: 0.005679129622876644 PSNR: 26.795930862426758 +[TRAIN] Iter: 799700 Loss: 0.005973151884973049 PSNR: 27.277149200439453 +[TRAIN] Iter: 799800 Loss: 0.006573314778506756 PSNR: 26.054187774658203 +[TRAIN] Iter: 799900 Loss: 0.0067143309861421585 PSNR: 26.1591739654541 +Saved checkpoints at ./logs/TUT-out-doll-360-np/800000.tar +0 0.0008523464202880859 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.755643606185913 +2 21.240294218063354 +3 21.286609888076782 +4 22.065220594406128 +5 21.93559503555298 +6 21.64163088798523 +7 21.043962717056274 +8 21.50379967689514 +9 21.50582194328308 +10 21.392350673675537 +11 
21.80809187889099 +12 21.106534004211426 +13 21.441579580307007 +14 21.58575177192688 +15 21.248181343078613 +16 21.48099422454834 +17 21.59946870803833 +18 21.1016526222229 +19 21.68591332435608 +20 21.44623041152954 +21 21.07632088661194 +22 21.120595455169678 +23 21.740464210510254 +24 21.080655813217163 +25 21.651883363723755 +26 21.21235704421997 +27 21.211771965026855 +28 21.700539588928223 +29 20.995128870010376 +30 21.467331647872925 +31 21.679723978042603 +32 21.050175666809082 +33 21.3622624874115 +34 21.29094624519348 +35 21.27890706062317 +36 21.19677972793579 +37 21.266865730285645 +38 21.17203426361084 +39 22.070016384124756 +40 21.115585803985596 +41 21.20065951347351 +42 21.68639874458313 +43 21.088752031326294 +44 20.924186944961548 +45 21.67862319946289 +46 21.130470991134644 +47 21.475605249404907 +48 21.021697282791138 +49 21.20890164375305 +50 21.516332387924194 +51 21.490222215652466 +52 20.88083291053772 +53 21.48447060585022 +54 21.493998050689697 +55 21.48822593688965 +56 21.15140390396118 +57 21.244770050048828 +58 21.62045431137085 +59 21.78819251060486 +60 20.950897932052612 +61 21.47894024848938 +62 21.477140426635742 +63 21.76477575302124 +64 21.281177282333374 +65 21.32486367225647 +66 21.448863983154297 +67 21.610785722732544 +68 21.7845721244812 +69 20.933051109313965 +70 21.117231607437134 +71 21.67285180091858 +72 21.276687622070312 +73 21.549177885055542 +74 21.309409618377686 +75 21.241602182388306 +76 21.516443729400635 +77 20.980965614318848 +78 21.607568979263306 +79 21.152581930160522 +80 21.611865282058716 +81 21.18644380569458 +82 21.740020751953125 +83 21.369931936264038 +84 20.963555097579956 +85 21.405302047729492 +86 21.207462549209595 +87 21.047232389450073 +88 21.115362405776978 +89 21.329329252243042 +90 21.2542507648468 +91 21.936018705368042 +92 20.888089656829834 +93 21.079127311706543 +94 21.587275743484497 +95 21.280270099639893 +96 21.891721725463867 +97 20.93837594985962 +98 21.44105553627014 +99 21.326178789138794 +100 21.207924127578735 +101 21.465851545333862 +102 21.686223030090332 +103 21.26445770263672 +104 21.59291672706604 +105 21.057501792907715 +106 21.730453491210938 +107 21.38821244239807 +108 21.1346914768219 +109 21.651423931121826 +110 21.08333992958069 +111 21.880008697509766 +112 21.633179664611816 +113 22.020602464675903 +114 21.9006450176239 +115 21.985493898391724 +116 20.764471530914307 +117 21.53572106361389 +118 21.617011547088623 +119 21.630223751068115 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[ 2.0133e-01, 1.1038e+00, 1.7002e+00, -1.7205e+01], + [-2.8387e-01, 7.7390e-01, 1.6266e+00, -2.1199e+01], + [-3.5440e-01, 6.3898e-01, 1.2763e+00, -1.8518e+01], + ..., + [-9.6034e+00, -9.0328e+00, -1.6122e+00, 3.8424e+02], + [-9.7940e+00, -9.3796e+00, -1.7717e+00, 3.9278e+02], + [-1.0014e+01, -9.4926e+00, -1.7683e+00, 4.0165e+02]], + + [[ 4.1868e-01, 4.3151e-01, 7.6241e-01, -3.8683e+01], + [ 9.7432e-01, 6.2583e-01, 2.6378e-01, -2.1527e+01], + [-1.3159e-02, -3.1918e-01, -3.2242e-01, -4.0272e+01], + ..., + [-5.9060e+00, -4.6702e+00, -5.7388e+00, 1.2724e+02], + [-5.5258e+00, -4.2247e+00, -4.5602e+00, 2.0748e+02], + [-5.0214e+00, -3.3338e+00, -3.7763e+00, 1.8601e+02]], + + [[-9.2710e-01, 1.1869e-01, 1.3111e+00, -1.8221e+01], + [-2.7957e-02, 5.8708e-01, 1.4678e+00, -5.0779e+00], + [-2.4465e-01, 4.2279e-01, 1.3304e+00, 7.1469e-01], + ..., + [-6.0360e+00, -3.0953e+00, -4.3599e+00, 3.1305e+02], + [-5.8188e+00, -2.5643e+00, -3.9124e+00, 3.3959e+02], + [-5.8248e+00, -2.7525e+00, -4.1014e+00, 
3.2309e+02]], + + ..., + + [[-4.3628e-01, 1.7260e-01, 9.6904e-01, -2.4741e+01], + [-1.5606e-01, 5.1064e-01, 1.2379e+00, -2.0484e+01], + [-6.2759e-02, 5.9111e-01, 1.2251e+00, -2.3880e+01], + ..., + [-5.9577e+00, -5.1397e+00, 2.4490e+00, 3.7526e+02], + [-3.9806e+00, -3.1677e+00, 5.0019e+00, 3.4229e+02], + [-4.4822e+00, -3.9309e+00, 4.4025e+00, 3.2848e+02]], + + [[ 7.8776e-01, 1.2322e+00, -8.7974e-01, -6.2510e+01], + [-1.1728e+00, -1.0389e+00, -9.9692e-01, -2.3676e+01], + [ 1.6748e-01, 3.0536e-02, -4.1641e-01, -4.4779e+01], + ..., + [-1.7252e+00, 3.5679e+00, -1.4493e+00, 1.0827e+03], + [-2.9984e+00, 2.2631e+00, -2.7649e+00, 1.1604e+03], + [-3.6906e+00, 1.8314e+00, -1.3021e-01, 1.2231e+03]], + + [[-2.0083e+00, -1.4532e+00, -1.3085e+00, -5.5855e+01], + [-7.0655e-01, -9.5483e-01, -1.1028e+00, -2.1192e+01], + [-2.0377e-01, -3.3760e-01, -2.8813e-01, 1.4012e+00], + ..., + [-2.9942e+00, -1.1335e+00, -1.9482e+00, 2.7550e+02], + [-3.5088e+00, -1.9486e+00, -1.6460e+00, 2.1132e+02], + [-4.0197e+00, -2.5802e+00, -3.5787e+00, 3.0800e+02]]], + grad_fn=), 'rgb0': tensor([[0.2167, 0.3872, 0.6283], + [0.3203, 0.2864, 0.2713], + [0.4651, 0.6268, 0.8090], + ..., + [0.2582, 0.4460, 0.6795], + [0.3339, 0.3156, 0.3146], + [0.4137, 0.3610, 0.3563]], grad_fn=), 'disp0': tensor([88.0251, 19.3207, 44.8978, ..., 90.2756, 20.9382, 39.5588], + grad_fn=), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.0034, 0.0059, 0.0042, ..., 0.0026, 0.0033, 0.0041])} +0 0.0009453296661376953 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.266806840896606 +2 21.554859161376953 +3 21.28850269317627 +4 21.685938119888306 +5 21.322176933288574 +6 21.337854385375977 +7 21.232518911361694 +8 21.249183893203735 +9 21.36387300491333 +10 21.044583320617676 +11 21.463717222213745 +12 21.099406957626343 +13 21.306558847427368 +14 21.401012182235718 +15 21.605587482452393 +16 21.337236881256104 +17 21.35322380065918 +18 21.473515272140503 +19 21.089324951171875 +20 21.7248055934906 +21 21.473339557647705 +22 21.056796550750732 +23 21.551017999649048 +24 21.386225938796997 +25 21.66890025138855 +26 21.130797386169434 +27 21.379679918289185 +28 21.367762327194214 +29 21.27446436882019 +30 21.5957190990448 +31 21.208864450454712 +32 21.669250011444092 +33 21.459932565689087 +34 21.19104242324829 +35 21.41853141784668 +36 21.4630708694458 +37 21.32747220993042 +38 21.352915048599243 +39 21.54434895515442 +40 21.13356304168701 +41 21.302061319351196 +42 21.43499183654785 +43 21.804967164993286 +44 21.080516815185547 +45 21.481935024261475 +46 21.858185052871704 +47 21.413546323776245 +48 21.29522967338562 +49 21.24627947807312 +50 21.278926849365234 +51 21.488635778427124 +52 21.827894687652588 +53 21.63275718688965 +54 21.40486788749695 +55 21.18602418899536 +56 21.270235300064087 +57 21.520168781280518 +58 21.316248416900635 +59 21.544504642486572 +60 21.479472875595093 +61 21.325297832489014 +62 21.77492880821228 +63 21.48693585395813 +64 21.59389305114746 +65 21.292157649993896 +66 21.33070707321167 +67 21.323444604873657 +68 21.57186222076416 +69 21.17797040939331 +70 21.761948108673096 +71 21.599849462509155 +72 21.671557903289795 +73 21.67140221595764 +74 21.073552131652832 +75 21.24504041671753 +76 21.54638361930847 +77 21.098493337631226 +78 21.409066677093506 +79 21.812837839126587 +80 21.230297803878784 +81 21.338842391967773 +82 21.313008308410645 +83 21.837263107299805 +84 21.670048475265503 +85 21.124467134475708 +86 21.518511056900024 +87 21.217106342315674 +88 21.642858028411865 +89 
21.144922256469727 +90 21.5221164226532 +91 21.16329336166382 +92 21.440966844558716 +93 21.222708225250244 +94 21.48575758934021 +95 21.384326934814453 +96 21.167264699935913 +97 21.342175483703613 +98 21.772849321365356 +99 21.501891374588013 +100 21.145615339279175 +101 21.28138518333435 +102 21.23153781890869 +103 21.340343236923218 +104 21.520728588104248 +105 21.437642097473145 +106 21.465280055999756 +107 21.049021005630493 +108 21.170881032943726 +109 21.995031118392944 +110 20.947320461273193 +111 21.720588207244873 +112 21.43630290031433 +113 20.90927505493164 +114 21.34888458251953 +115 21.43125629425049 +116 21.26635766029358 +117 21.31954264640808 +118 21.733410120010376 +119 21.276773929595947 +test poses shape torch.Size([4, 3, 4]) +0 0.001798868179321289 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 21.199933767318726 +2 21.580344200134277 +3 21.883394718170166 +Saved test set +[TRAIN] Iter: 800000 Loss: 0.005560273304581642 PSNR: 27.369434356689453 +[TRAIN] Iter: 800100 Loss: 0.007075522094964981 PSNR: 26.187965393066406 +[TRAIN] Iter: 800200 Loss: 0.00723644345998764 PSNR: 25.99492645263672 +[TRAIN] Iter: 800300 Loss: 0.006734761409461498 PSNR: 26.04770278930664 +[TRAIN] Iter: 800400 Loss: 0.004985091742128134 PSNR: 27.936660766601562 +[TRAIN] Iter: 800500 Loss: 0.006565835326910019 PSNR: 26.9027099609375 +[TRAIN] Iter: 800600 Loss: 0.004765765741467476 PSNR: 28.71782875061035 +[TRAIN] Iter: 800700 Loss: 0.006437120493501425 PSNR: 26.208667755126953 +[TRAIN] Iter: 800800 Loss: 0.006989019922912121 PSNR: 26.140344619750977 +[TRAIN] Iter: 800900 Loss: 0.007418019697070122 PSNR: 27.124744415283203 +[TRAIN] Iter: 801000 Loss: 0.005469440948218107 PSNR: 28.183435440063477 +[TRAIN] Iter: 801100 Loss: 0.005714992061257362 PSNR: 27.671695709228516 +[TRAIN] Iter: 801200 Loss: 0.006341374479234219 PSNR: 26.199798583984375 +[TRAIN] Iter: 801300 Loss: 0.006726827006787062 PSNR: 26.964481353759766 +[TRAIN] Iter: 801400 Loss: 0.006541638169437647 PSNR: 26.758329391479492 +[TRAIN] Iter: 801500 Loss: 0.008098224177956581 PSNR: 25.348535537719727 +[TRAIN] Iter: 801600 Loss: 0.006054254714399576 PSNR: 26.889896392822266 +[TRAIN] Iter: 801700 Loss: 0.005188889801502228 PSNR: 27.03011703491211 +[TRAIN] Iter: 801800 Loss: 0.005287892185151577 PSNR: 28.12885093688965 +[TRAIN] Iter: 801900 Loss: 0.007162700407207012 PSNR: 26.059404373168945 +[TRAIN] Iter: 802000 Loss: 0.0053324028849601746 PSNR: 28.020814895629883 +[TRAIN] Iter: 802100 Loss: 0.0041118199005723 PSNR: 29.42026710510254 +[TRAIN] Iter: 802200 Loss: 0.006736557465046644 PSNR: 26.747587203979492 +[TRAIN] Iter: 802300 Loss: 0.005644692108035088 PSNR: 26.75290298461914 +[TRAIN] Iter: 802400 Loss: 0.006123579107224941 PSNR: 27.118104934692383 +[TRAIN] Iter: 802500 Loss: 0.006411098875105381 PSNR: 26.633256912231445 +[TRAIN] Iter: 802600 Loss: 0.004532582126557827 PSNR: 27.759511947631836 +[TRAIN] Iter: 802700 Loss: 0.005930301733314991 PSNR: 27.416597366333008 +[TRAIN] Iter: 802800 Loss: 0.007493402808904648 PSNR: 25.854042053222656 +[TRAIN] Iter: 802900 Loss: 0.004836409352719784 PSNR: 28.01171112060547 +[TRAIN] Iter: 803000 Loss: 0.005792590789496899 PSNR: 27.376625061035156 +[TRAIN] Iter: 803100 Loss: 0.00590532086789608 PSNR: 27.17193031311035 +[TRAIN] Iter: 803200 Loss: 0.004830252379179001 PSNR: 28.651729583740234 +[TRAIN] Iter: 803300 Loss: 0.006591701414436102 PSNR: 26.366321563720703 +[TRAIN] Iter: 803400 Loss: 0.005047924816608429 PSNR: 27.775224685668945 +[TRAIN] Iter: 803500 Loss: 0.007795391604304314 PSNR: 
26.35713768005371 +[TRAIN] Iter: 803600 Loss: 0.00644723791629076 PSNR: 26.493404388427734 +[TRAIN] Iter: 803700 Loss: 0.005051027983427048 PSNR: 27.608388900756836 +[TRAIN] Iter: 803800 Loss: 0.006661364808678627 PSNR: 26.140975952148438 +[TRAIN] Iter: 803900 Loss: 0.00693028699606657 PSNR: 25.895397186279297 +[TRAIN] Iter: 804000 Loss: 0.006285368464887142 PSNR: 26.13025665283203 +[TRAIN] Iter: 804100 Loss: 0.004444478545337915 PSNR: 28.38454246520996 +[TRAIN] Iter: 804200 Loss: 0.0069547388702631 PSNR: 26.170228958129883 +[TRAIN] Iter: 804300 Loss: 0.005187527276575565 PSNR: 27.96847915649414 +[TRAIN] Iter: 804400 Loss: 0.005666102282702923 PSNR: 28.413965225219727 +[TRAIN] Iter: 804500 Loss: 0.005207143258303404 PSNR: 28.054750442504883 +[TRAIN] Iter: 804600 Loss: 0.00511801615357399 PSNR: 26.796672821044922 +[TRAIN] Iter: 804700 Loss: 0.005938543938100338 PSNR: 27.006925582885742 +[TRAIN] Iter: 804800 Loss: 0.004736038856208324 PSNR: 28.254640579223633 +[TRAIN] Iter: 804900 Loss: 0.004875281825661659 PSNR: 29.34415054321289 +[TRAIN] Iter: 805000 Loss: 0.006245343014597893 PSNR: 26.162330627441406 +[TRAIN] Iter: 805100 Loss: 0.006044553127139807 PSNR: 27.145233154296875 +[TRAIN] Iter: 805200 Loss: 0.007145359180867672 PSNR: 26.009197235107422 +[TRAIN] Iter: 805300 Loss: 0.006746088620275259 PSNR: 26.27907371520996 +[TRAIN] Iter: 805400 Loss: 0.005386576056480408 PSNR: 28.154573440551758 +[TRAIN] Iter: 805500 Loss: 0.0062353406101465225 PSNR: 26.74888801574707 +[TRAIN] Iter: 805600 Loss: 0.006636546924710274 PSNR: 26.440420150756836 +[TRAIN] Iter: 805700 Loss: 0.006239988841116428 PSNR: 26.514604568481445 +[TRAIN] Iter: 805800 Loss: 0.0068716080859303474 PSNR: 26.198041915893555 +[TRAIN] Iter: 805900 Loss: 0.0050874147564172745 PSNR: 27.099390029907227 +[TRAIN] Iter: 806000 Loss: 0.004548058845102787 PSNR: 28.13954734802246 +[TRAIN] Iter: 806100 Loss: 0.0059740906581282616 PSNR: 27.34707260131836 +[TRAIN] Iter: 806200 Loss: 0.005370922386646271 PSNR: 27.237388610839844 +[TRAIN] Iter: 806300 Loss: 0.007535979617387056 PSNR: 25.869102478027344 +[TRAIN] Iter: 806400 Loss: 0.0064957523718476295 PSNR: 26.979333877563477 +[TRAIN] Iter: 806500 Loss: 0.00654156319797039 PSNR: 26.24195098876953 +[TRAIN] Iter: 806600 Loss: 0.0057250261306762695 PSNR: 27.138565063476562 +[TRAIN] Iter: 806700 Loss: 0.004309788811951876 PSNR: 28.966999053955078 +[TRAIN] Iter: 806800 Loss: 0.006524569354951382 PSNR: 26.995861053466797 +[TRAIN] Iter: 806900 Loss: 0.005095056723803282 PSNR: 27.917713165283203 +[TRAIN] Iter: 807000 Loss: 0.006570296362042427 PSNR: 27.5499324798584 +[TRAIN] Iter: 807100 Loss: 0.007647933438420296 PSNR: 25.939659118652344 +[TRAIN] Iter: 807200 Loss: 0.007107534911483526 PSNR: 25.815622329711914 +[TRAIN] Iter: 807300 Loss: 0.00714511564001441 PSNR: 26.47726058959961 +[TRAIN] Iter: 807400 Loss: 0.005819305311888456 PSNR: 27.82666778564453 +[TRAIN] Iter: 807500 Loss: 0.0064211077988147736 PSNR: 26.1562442779541 +[TRAIN] Iter: 807600 Loss: 0.005745891015976667 PSNR: 27.439640045166016 +[TRAIN] Iter: 807700 Loss: 0.005953799933195114 PSNR: 27.864070892333984 +[TRAIN] Iter: 807800 Loss: 0.005276914685964584 PSNR: 27.82445526123047 +[TRAIN] Iter: 807900 Loss: 0.007587007246911526 PSNR: 25.58914566040039 +[TRAIN] Iter: 808000 Loss: 0.00588363129645586 PSNR: 27.199947357177734 +[TRAIN] Iter: 808100 Loss: 0.005976713262498379 PSNR: 27.343381881713867 +[TRAIN] Iter: 808200 Loss: 0.006469647400081158 PSNR: 26.163516998291016 +[TRAIN] Iter: 808300 Loss: 0.006089949049055576 PSNR: 26.69938850402832 
+[TRAIN] Iter: 808400 Loss: 0.008323710411787033 PSNR: 25.173858642578125 +[TRAIN] Iter: 808500 Loss: 0.0054102810099720955 PSNR: 27.06397247314453 +[TRAIN] Iter: 808600 Loss: 0.006326320581138134 PSNR: 26.452613830566406 +[TRAIN] Iter: 808700 Loss: 0.005985182709991932 PSNR: 26.510391235351562 +[TRAIN] Iter: 808800 Loss: 0.005457001738250256 PSNR: 27.741456985473633 +[TRAIN] Iter: 808900 Loss: 0.0069860173389315605 PSNR: 26.18642807006836 +[TRAIN] Iter: 809000 Loss: 0.006466694176197052 PSNR: 26.56399154663086 +[TRAIN] Iter: 809100 Loss: 0.006039132364094257 PSNR: 28.373035430908203 +[TRAIN] Iter: 809200 Loss: 0.005391158629208803 PSNR: 27.704370498657227 +[TRAIN] Iter: 809300 Loss: 0.0055420673452317715 PSNR: 28.04845428466797 +[TRAIN] Iter: 809400 Loss: 0.005609385669231415 PSNR: 27.432525634765625 +[TRAIN] Iter: 809500 Loss: 0.004896435420960188 PSNR: 28.362363815307617 +[TRAIN] Iter: 809600 Loss: 0.007372718770056963 PSNR: 25.46241569519043 +[TRAIN] Iter: 809700 Loss: 0.006173017434775829 PSNR: 25.939064025878906 +[TRAIN] Iter: 809800 Loss: 0.006346546579152346 PSNR: 27.43973159790039 +[TRAIN] Iter: 809900 Loss: 0.004917996935546398 PSNR: 28.516870498657227 +Saved checkpoints at ./logs/TUT-out-doll-360-np/810000.tar +[TRAIN] Iter: 810000 Loss: 0.004722698591649532 PSNR: 28.463186264038086 +[TRAIN] Iter: 810100 Loss: 0.007329258136451244 PSNR: 25.923561096191406 +[TRAIN] Iter: 810200 Loss: 0.004722552839666605 PSNR: 28.232484817504883 +[TRAIN] Iter: 810300 Loss: 0.0064766667783260345 PSNR: 25.991802215576172 +[TRAIN] Iter: 810400 Loss: 0.005598109215497971 PSNR: 27.629152297973633 +[TRAIN] Iter: 810500 Loss: 0.005940555594861507 PSNR: 26.6749267578125 +[TRAIN] Iter: 810600 Loss: 0.006630547810345888 PSNR: 26.11517333984375 +[TRAIN] Iter: 810700 Loss: 0.006920807994902134 PSNR: 26.223928451538086 +[TRAIN] Iter: 810800 Loss: 0.005982648581266403 PSNR: 27.165359497070312 +[TRAIN] Iter: 810900 Loss: 0.004790377803146839 PSNR: 28.45843505859375 +[TRAIN] Iter: 811000 Loss: 0.006425734143704176 PSNR: 26.381248474121094 +[TRAIN] Iter: 811100 Loss: 0.007409762591123581 PSNR: 26.07084846496582 +[TRAIN] Iter: 811200 Loss: 0.006868235766887665 PSNR: 25.88401222229004 +[TRAIN] Iter: 811300 Loss: 0.006768070161342621 PSNR: 26.005407333374023 +[TRAIN] Iter: 811400 Loss: 0.006860693916678429 PSNR: 26.365318298339844 +[TRAIN] Iter: 811500 Loss: 0.006301251705735922 PSNR: 26.323505401611328 +[TRAIN] Iter: 811600 Loss: 0.006412115879356861 PSNR: 26.367687225341797 +[TRAIN] Iter: 811700 Loss: 0.005854171700775623 PSNR: 26.95106315612793 +[TRAIN] Iter: 811800 Loss: 0.005387150216847658 PSNR: 28.183502197265625 +[TRAIN] Iter: 811900 Loss: 0.0063803126104176044 PSNR: 26.3313045501709 +[TRAIN] Iter: 812000 Loss: 0.005998863838613033 PSNR: 26.71782112121582 +[TRAIN] Iter: 812100 Loss: 0.007020717952400446 PSNR: 26.115114212036133 +[TRAIN] Iter: 812200 Loss: 0.00664130924269557 PSNR: 26.303447723388672 +[TRAIN] Iter: 812300 Loss: 0.004555423744022846 PSNR: 28.683435440063477 +[TRAIN] Iter: 812400 Loss: 0.005199094768613577 PSNR: 26.954195022583008 +[TRAIN] Iter: 812500 Loss: 0.006203869357705116 PSNR: 26.274341583251953 +[TRAIN] Iter: 812600 Loss: 0.0065703922882676125 PSNR: 26.295204162597656 +[TRAIN] Iter: 812700 Loss: 0.005831622052937746 PSNR: 27.03193473815918 +[TRAIN] Iter: 812800 Loss: 0.00682871276512742 PSNR: 25.730920791625977 +[TRAIN] Iter: 812900 Loss: 0.006955954246222973 PSNR: 26.50506019592285 +[TRAIN] Iter: 813000 Loss: 0.00491883372887969 PSNR: 27.919038772583008 +[TRAIN] Iter: 813100 Loss: 
0.006298322696238756 PSNR: 26.85991668701172 +[TRAIN] Iter: 813200 Loss: 0.006022103130817413 PSNR: 26.685035705566406 +[TRAIN] Iter: 813300 Loss: 0.005882956087589264 PSNR: 27.551002502441406 +[TRAIN] Iter: 813400 Loss: 0.00701664574444294 PSNR: 26.045785903930664 +[TRAIN] Iter: 813500 Loss: 0.0067143552005290985 PSNR: 26.03260040283203 +[TRAIN] Iter: 813600 Loss: 0.00705032330006361 PSNR: 26.80820083618164 +[TRAIN] Iter: 813700 Loss: 0.006426706910133362 PSNR: 26.500064849853516 +[TRAIN] Iter: 813800 Loss: 0.00566769577562809 PSNR: 26.57988739013672 +[TRAIN] Iter: 813900 Loss: 0.006411838810890913 PSNR: 26.825456619262695 +[TRAIN] Iter: 814000 Loss: 0.006783595308661461 PSNR: 25.985660552978516 +[TRAIN] Iter: 814100 Loss: 0.006365962326526642 PSNR: 25.86017417907715 +[TRAIN] Iter: 814200 Loss: 0.007051094435155392 PSNR: 25.822052001953125 +[TRAIN] Iter: 814300 Loss: 0.0064133210107684135 PSNR: 26.364944458007812 +[TRAIN] Iter: 814400 Loss: 0.006632058881223202 PSNR: 26.168914794921875 +[TRAIN] Iter: 814500 Loss: 0.006775078363716602 PSNR: 25.920957565307617 +[TRAIN] Iter: 814600 Loss: 0.007003606762737036 PSNR: 26.69932746887207 +[TRAIN] Iter: 814700 Loss: 0.008022796362638474 PSNR: 24.87360954284668 +[TRAIN] Iter: 814800 Loss: 0.0057891481555998325 PSNR: 26.74713134765625 +[TRAIN] Iter: 814900 Loss: 0.007147517055273056 PSNR: 26.577442169189453 +[TRAIN] Iter: 815000 Loss: 0.005734140053391457 PSNR: 28.099872589111328 +[TRAIN] Iter: 815100 Loss: 0.005050827283412218 PSNR: 28.470746994018555 +[TRAIN] Iter: 815200 Loss: 0.005917211063206196 PSNR: 27.139389038085938 +[TRAIN] Iter: 815300 Loss: 0.005943591240793467 PSNR: 26.934907913208008 +[TRAIN] Iter: 815400 Loss: 0.005423820577561855 PSNR: 28.097015380859375 +[TRAIN] Iter: 815500 Loss: 0.006254077889025211 PSNR: 26.247154235839844 +[TRAIN] Iter: 815600 Loss: 0.005897576920688152 PSNR: 26.71963882446289 +[TRAIN] Iter: 815700 Loss: 0.00569263007491827 PSNR: 26.80035972595215 +[TRAIN] Iter: 815800 Loss: 0.005613797344267368 PSNR: 27.36724090576172 +[TRAIN] Iter: 815900 Loss: 0.005975428037345409 PSNR: 27.314210891723633 +[TRAIN] Iter: 816000 Loss: 0.005911998450756073 PSNR: 27.769620895385742 +[TRAIN] Iter: 816100 Loss: 0.007336650975048542 PSNR: 27.069766998291016 +[TRAIN] Iter: 816200 Loss: 0.00847820658236742 PSNR: 25.318716049194336 +[TRAIN] Iter: 816300 Loss: 0.007445433177053928 PSNR: 26.577375411987305 +[TRAIN] Iter: 816400 Loss: 0.005945968441665173 PSNR: 27.193994522094727 +[TRAIN] Iter: 816500 Loss: 0.004290260374546051 PSNR: 28.662248611450195 +[TRAIN] Iter: 816600 Loss: 0.0061821406707167625 PSNR: 26.734813690185547 +[TRAIN] Iter: 816700 Loss: 0.006168491207063198 PSNR: 27.5048770904541 +[TRAIN] Iter: 816800 Loss: 0.006571997422724962 PSNR: 25.892635345458984 +[TRAIN] Iter: 816900 Loss: 0.00528957974165678 PSNR: 27.840187072753906 +[TRAIN] Iter: 817000 Loss: 0.005301240365952253 PSNR: 28.045495986938477 +[TRAIN] Iter: 817100 Loss: 0.005175197031348944 PSNR: 27.511117935180664 +[TRAIN] Iter: 817200 Loss: 0.006080940831452608 PSNR: 27.020118713378906 +[TRAIN] Iter: 817300 Loss: 0.006804465316236019 PSNR: 26.57457733154297 +[TRAIN] Iter: 817400 Loss: 0.004490227438509464 PSNR: 28.838871002197266 +[TRAIN] Iter: 817500 Loss: 0.005905160680413246 PSNR: 27.079256057739258 +[TRAIN] Iter: 817600 Loss: 0.0057188537903130054 PSNR: 27.3079776763916 +[TRAIN] Iter: 817700 Loss: 0.0051160044968128204 PSNR: 28.313861846923828 +[TRAIN] Iter: 817800 Loss: 0.006194569170475006 PSNR: 26.601482391357422 +[TRAIN] Iter: 817900 Loss: 
0.005441934801638126 PSNR: 28.090190887451172 +[TRAIN] Iter: 818000 Loss: 0.005232807248830795 PSNR: 28.116100311279297 +[TRAIN] Iter: 818100 Loss: 0.005832964554429054 PSNR: 26.914339065551758 +[TRAIN] Iter: 818200 Loss: 0.007030675187706947 PSNR: 26.622529983520508 +[TRAIN] Iter: 818300 Loss: 0.007021381985396147 PSNR: 26.735654830932617 +[TRAIN] Iter: 818400 Loss: 0.007358630653470755 PSNR: 26.202768325805664 +[TRAIN] Iter: 818500 Loss: 0.007043872494250536 PSNR: 26.40412712097168 +[TRAIN] Iter: 818600 Loss: 0.006211870815604925 PSNR: 26.701108932495117 +[TRAIN] Iter: 818700 Loss: 0.006178576499223709 PSNR: 27.84078025817871 +[TRAIN] Iter: 818800 Loss: 0.007458607666194439 PSNR: 24.837583541870117 +[TRAIN] Iter: 818900 Loss: 0.006075243465602398 PSNR: 26.927745819091797 +[TRAIN] Iter: 819000 Loss: 0.007040787488222122 PSNR: 26.007190704345703 +[TRAIN] Iter: 819100 Loss: 0.006304485257714987 PSNR: 26.835044860839844 +[TRAIN] Iter: 819200 Loss: 0.006838551722466946 PSNR: 25.877229690551758 +[TRAIN] Iter: 819300 Loss: 0.005037362687289715 PSNR: 27.69281768798828 +[TRAIN] Iter: 819400 Loss: 0.005706490483134985 PSNR: 27.33133316040039 +[TRAIN] Iter: 819500 Loss: 0.006584863178431988 PSNR: 25.84164047241211 +[TRAIN] Iter: 819600 Loss: 0.005739918444305658 PSNR: 26.86467742919922 +[TRAIN] Iter: 819700 Loss: 0.0066489847376942635 PSNR: 26.340784072875977 +[TRAIN] Iter: 819800 Loss: 0.006129147484898567 PSNR: 27.818347930908203 +[TRAIN] Iter: 819900 Loss: 0.005746912211179733 PSNR: 27.195117950439453 +Saved checkpoints at ./logs/TUT-out-doll-360-np/820000.tar +[TRAIN] Iter: 820000 Loss: 0.0068356674164533615 PSNR: 26.528156280517578 +[TRAIN] Iter: 820100 Loss: 0.005025883205235004 PSNR: 28.1928653717041 +[TRAIN] Iter: 820200 Loss: 0.006711235735565424 PSNR: 26.047779083251953 +[TRAIN] Iter: 820300 Loss: 0.005355012137442827 PSNR: 27.252756118774414 +[TRAIN] Iter: 820400 Loss: 0.005763043183833361 PSNR: 26.852264404296875 +[TRAIN] Iter: 820500 Loss: 0.007141865789890289 PSNR: 26.03216552734375 +[TRAIN] Iter: 820600 Loss: 0.00519877765327692 PSNR: 27.89158058166504 +[TRAIN] Iter: 820700 Loss: 0.005661788862198591 PSNR: 27.804035186767578 +[TRAIN] Iter: 820800 Loss: 0.005596785806119442 PSNR: 27.668867111206055 +[TRAIN] Iter: 820900 Loss: 0.006314839236438274 PSNR: 27.4599666595459 +[TRAIN] Iter: 821000 Loss: 0.005200213752686977 PSNR: 28.653396606445312 +[TRAIN] Iter: 821100 Loss: 0.006327726878225803 PSNR: 26.406152725219727 +[TRAIN] Iter: 821200 Loss: 0.006581611931324005 PSNR: 27.0704345703125 +[TRAIN] Iter: 821300 Loss: 0.006355740129947662 PSNR: 26.57096290588379 +[TRAIN] Iter: 821400 Loss: 0.004371010698378086 PSNR: 29.50018310546875 +[TRAIN] Iter: 821500 Loss: 0.00592394545674324 PSNR: 27.961898803710938 +[TRAIN] Iter: 821600 Loss: 0.006723620928823948 PSNR: 26.221242904663086 +[TRAIN] Iter: 821700 Loss: 0.00786854699254036 PSNR: 25.87533950805664 +[TRAIN] Iter: 821800 Loss: 0.005368297919631004 PSNR: 26.77411651611328 +[TRAIN] Iter: 821900 Loss: 0.005970122292637825 PSNR: 26.941577911376953 +[TRAIN] Iter: 822000 Loss: 0.006039333064109087 PSNR: 26.264694213867188 +[TRAIN] Iter: 822100 Loss: 0.0055017657577991486 PSNR: 26.871898651123047 +[TRAIN] Iter: 822200 Loss: 0.005570686887949705 PSNR: 26.62135887145996 +[TRAIN] Iter: 822300 Loss: 0.005892166867852211 PSNR: 28.040714263916016 +[TRAIN] Iter: 822400 Loss: 0.00463295029476285 PSNR: 28.733381271362305 +[TRAIN] Iter: 822500 Loss: 0.004637348931282759 PSNR: 28.86167335510254 +[TRAIN] Iter: 822600 Loss: 0.006510447710752487 PSNR: 
27.416786193847656 +[TRAIN] Iter: 822700 Loss: 0.005955232307314873 PSNR: 27.392581939697266 +[TRAIN] Iter: 822800 Loss: 0.00739931408315897 PSNR: 25.85372543334961 +[TRAIN] Iter: 822900 Loss: 0.006885428912937641 PSNR: 26.445560455322266 +[TRAIN] Iter: 823000 Loss: 0.007107928395271301 PSNR: 25.69342613220215 +[TRAIN] Iter: 823100 Loss: 0.007610555738210678 PSNR: 26.103384017944336 +[TRAIN] Iter: 823200 Loss: 0.007045593112707138 PSNR: 26.364322662353516 +[TRAIN] Iter: 823300 Loss: 0.006201641634106636 PSNR: 26.84104347229004 +[TRAIN] Iter: 823400 Loss: 0.0074615939520299435 PSNR: 25.80515480041504 +[TRAIN] Iter: 823500 Loss: 0.00627459492534399 PSNR: 27.008729934692383 +[TRAIN] Iter: 823600 Loss: 0.005887876730412245 PSNR: 26.890941619873047 +[TRAIN] Iter: 823700 Loss: 0.005996242165565491 PSNR: 26.439611434936523 +[TRAIN] Iter: 823800 Loss: 0.006049193441867828 PSNR: 26.76205825805664 +[TRAIN] Iter: 823900 Loss: 0.005877810996025801 PSNR: 27.133609771728516 +[TRAIN] Iter: 824000 Loss: 0.005513188429176807 PSNR: 27.147249221801758 +[TRAIN] Iter: 824100 Loss: 0.006307960953563452 PSNR: 26.566776275634766 +[TRAIN] Iter: 824200 Loss: 0.007511143572628498 PSNR: 25.79506492614746 +[TRAIN] Iter: 824300 Loss: 0.005529854912310839 PSNR: 27.928529739379883 +[TRAIN] Iter: 824400 Loss: 0.005849051754921675 PSNR: 26.609657287597656 +[TRAIN] Iter: 824500 Loss: 0.008145617321133614 PSNR: 25.493865966796875 +[TRAIN] Iter: 824600 Loss: 0.005817804019898176 PSNR: 26.80776596069336 +[TRAIN] Iter: 824700 Loss: 0.008239362388849258 PSNR: 25.132999420166016 +[TRAIN] Iter: 824800 Loss: 0.005879241041839123 PSNR: 26.952917098999023 +[TRAIN] Iter: 824900 Loss: 0.0050838300958275795 PSNR: 27.83995246887207 +[TRAIN] Iter: 825000 Loss: 0.007468319032341242 PSNR: 26.225383758544922 +[TRAIN] Iter: 825100 Loss: 0.006895022001117468 PSNR: 25.961318969726562 +[TRAIN] Iter: 825200 Loss: 0.0044990042224526405 PSNR: 28.917387008666992 +[TRAIN] Iter: 825300 Loss: 0.0067801764234900475 PSNR: 25.947843551635742 +[TRAIN] Iter: 825400 Loss: 0.005427908152341843 PSNR: 28.92796516418457 +[TRAIN] Iter: 825500 Loss: 0.0054102614521980286 PSNR: 27.48369789123535 +[TRAIN] Iter: 825600 Loss: 0.006974674761295319 PSNR: 26.422134399414062 +[TRAIN] Iter: 825700 Loss: 0.00675565842539072 PSNR: 26.49933624267578 +[TRAIN] Iter: 825800 Loss: 0.006317210383713245 PSNR: 26.87615966796875 +[TRAIN] Iter: 825900 Loss: 0.005815794691443443 PSNR: 27.09806251525879 +[TRAIN] Iter: 826000 Loss: 0.007187011651694775 PSNR: 26.22577667236328 +[TRAIN] Iter: 826100 Loss: 0.0067425621673464775 PSNR: 26.31467628479004 +[TRAIN] Iter: 826200 Loss: 0.0039043163415044546 PSNR: 29.264310836791992 +[TRAIN] Iter: 826300 Loss: 0.006773489993065596 PSNR: 26.276390075683594 +[TRAIN] Iter: 826400 Loss: 0.00690396036952734 PSNR: 26.038381576538086 +[TRAIN] Iter: 826500 Loss: 0.006476002745330334 PSNR: 27.864171981811523 +[TRAIN] Iter: 826600 Loss: 0.006485478021204472 PSNR: 26.314138412475586 +[TRAIN] Iter: 826700 Loss: 0.006205164361745119 PSNR: 26.36337661743164 +[TRAIN] Iter: 826800 Loss: 0.005550895817577839 PSNR: 27.9689998626709 +[TRAIN] Iter: 826900 Loss: 0.006284539587795734 PSNR: 26.253971099853516 +[TRAIN] Iter: 827000 Loss: 0.004731138236820698 PSNR: 27.757753372192383 +[TRAIN] Iter: 827100 Loss: 0.005734095349907875 PSNR: 26.96087074279785 +[TRAIN] Iter: 827200 Loss: 0.005037419497966766 PSNR: 28.845048904418945 +[TRAIN] Iter: 827300 Loss: 0.00622291024774313 PSNR: 26.437376022338867 +[TRAIN] Iter: 827400 Loss: 0.006059113424271345 PSNR: 27.520492553710938 
+[TRAIN] Iter: 827500 Loss: 0.005746127106249332 PSNR: 26.865280151367188 +[TRAIN] Iter: 827600 Loss: 0.005178624298423529 PSNR: 28.371564865112305 +[TRAIN] Iter: 827700 Loss: 0.0072321644984185696 PSNR: 25.257776260375977 +[TRAIN] Iter: 827800 Loss: 0.006546908989548683 PSNR: 26.400554656982422 +[TRAIN] Iter: 827900 Loss: 0.006285316776484251 PSNR: 26.23479652404785 +[TRAIN] Iter: 828000 Loss: 0.006969409994781017 PSNR: 26.425935745239258 +[TRAIN] Iter: 828100 Loss: 0.005579506047070026 PSNR: 27.489999771118164 +[TRAIN] Iter: 828200 Loss: 0.006384196225553751 PSNR: 26.355247497558594 +[TRAIN] Iter: 828300 Loss: 0.006589159369468689 PSNR: 27.394060134887695 +[TRAIN] Iter: 828400 Loss: 0.005770893767476082 PSNR: 26.773496627807617 +[TRAIN] Iter: 828500 Loss: 0.004972280003130436 PSNR: 28.621179580688477 +[TRAIN] Iter: 828600 Loss: 0.00718481233343482 PSNR: 26.181228637695312 +[TRAIN] Iter: 828700 Loss: 0.006461708806455135 PSNR: 26.534921646118164 +[TRAIN] Iter: 828800 Loss: 0.005969853140413761 PSNR: 27.187353134155273 +[TRAIN] Iter: 828900 Loss: 0.0060701509937644005 PSNR: 26.398916244506836 +[TRAIN] Iter: 829000 Loss: 0.005992640741169453 PSNR: 26.690563201904297 +[TRAIN] Iter: 829100 Loss: 0.005433980375528336 PSNR: 27.051855087280273 +[TRAIN] Iter: 829200 Loss: 0.006115062162280083 PSNR: 26.415624618530273 +[TRAIN] Iter: 829300 Loss: 0.006278534419834614 PSNR: 26.387466430664062 +[TRAIN] Iter: 829400 Loss: 0.007073533721268177 PSNR: 26.048690795898438 +[TRAIN] Iter: 829500 Loss: 0.005844379775226116 PSNR: 27.569671630859375 +[TRAIN] Iter: 829600 Loss: 0.006930351257324219 PSNR: 26.187578201293945 +[TRAIN] Iter: 829700 Loss: 0.005905103404074907 PSNR: 26.5196590423584 +[TRAIN] Iter: 829800 Loss: 0.004266167059540749 PSNR: 28.25040054321289 +[TRAIN] Iter: 829900 Loss: 0.005981010384857655 PSNR: 27.016260147094727 +Saved checkpoints at ./logs/TUT-out-doll-360-np/830000.tar +[TRAIN] Iter: 830000 Loss: 0.00419072387740016 PSNR: 29.50674057006836 +[TRAIN] Iter: 830100 Loss: 0.005208078771829605 PSNR: 27.494295120239258 +[TRAIN] Iter: 830200 Loss: 0.006321437656879425 PSNR: 26.702266693115234 +[TRAIN] Iter: 830300 Loss: 0.006977601908147335 PSNR: 26.71976661682129 +[TRAIN] Iter: 830400 Loss: 0.00721327681094408 PSNR: 26.418203353881836 +[TRAIN] Iter: 830500 Loss: 0.0061320289969444275 PSNR: 26.1682071685791 +[TRAIN] Iter: 830600 Loss: 0.008195270784199238 PSNR: 25.82192611694336 +[TRAIN] Iter: 830700 Loss: 0.005999784916639328 PSNR: 27.326004028320312 +[TRAIN] Iter: 830800 Loss: 0.006617868319153786 PSNR: 26.97789764404297 +[TRAIN] Iter: 830900 Loss: 0.005530279595404863 PSNR: 27.078779220581055 +[TRAIN] Iter: 831000 Loss: 0.006686475593596697 PSNR: 26.375856399536133 +[TRAIN] Iter: 831100 Loss: 0.007531235925853252 PSNR: 25.761741638183594 +[TRAIN] Iter: 831200 Loss: 0.007684431038796902 PSNR: 25.797338485717773 +[TRAIN] Iter: 831300 Loss: 0.006074641831219196 PSNR: 26.734394073486328 +[TRAIN] Iter: 831400 Loss: 0.00640620943158865 PSNR: 26.20838737487793 +[TRAIN] Iter: 831500 Loss: 0.007123243995010853 PSNR: 26.4428653717041 +[TRAIN] Iter: 831600 Loss: 0.006538490764796734 PSNR: 26.217790603637695 +[TRAIN] Iter: 831700 Loss: 0.0064268955029547215 PSNR: 26.420156478881836 +[TRAIN] Iter: 831800 Loss: 0.0055368440225720406 PSNR: 27.68347930908203 +[TRAIN] Iter: 831900 Loss: 0.007780150976032019 PSNR: 25.864919662475586 +[TRAIN] Iter: 832000 Loss: 0.006417103577405214 PSNR: 26.773719787597656 +[TRAIN] Iter: 832100 Loss: 0.005768388509750366 PSNR: 27.103187561035156 +[TRAIN] Iter: 832200 Loss: 
0.005594823509454727 PSNR: 28.313735961914062
+[TRAIN] Iter: 832300 .. 846500 (per-100-iteration lines condensed): Loss fluctuates between roughly 0.004 and 0.008, PSNR between roughly 25.1 and 29.0 dB, with no sustained trend over this window.
+Saved checkpoints at ./logs/TUT-out-doll-360-np/840000.tar
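A note on reading these columns: the printed Loss and PSNR are not two views of the same number. In nerf-pytorch-style training the PSNR is derived from the fine network's MSE alone, while the Loss also adds the coarse-pass ('rgb0') MSE, so -10·log10(Loss) will not reproduce the PSNR column. Below is a minimal sketch of that relation using the img2mse/mse2psnr helper convention; the batch tensors are hypothetical placeholders:

```python
import torch

# Helper convention as in nerf-pytorch's run_nerf_helpers.py
img2mse = lambda x, y: torch.mean((x - y) ** 2)
mse2psnr = lambda x: -10. * torch.log(x) / torch.log(torch.tensor(10.))

# Hypothetical per-batch tensors, for illustration only
rgb_fine   = torch.rand(1024, 3)   # fine-pass prediction
rgb_coarse = torch.rand(1024, 3)   # coarse-pass prediction ('rgb0')
target     = torch.rand(1024, 3)   # ground-truth pixel colors

img_loss  = img2mse(rgb_fine, target)    # fine MSE
img_loss0 = img2mse(rgb_coarse, target)  # coarse MSE
loss = img_loss + img_loss0              # printed as "Loss"
psnr = mse2psnr(img_loss)                # printed as "PSNR" (fine MSE only)
print(f"Loss: {loss.item()} PSNR: {psnr.item()}")
```

For example, the fragment at the top of this excerpt (Loss 0.00559, PSNR 28.31 dB) is consistent with a fine MSE of about 0.00147 plus a coarse MSE of about 0.00412.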
+[TRAIN] Iter: 846600 .. 849900 (condensed): Loss ≈ 0.005–0.008, PSNR ≈ 25.4–29.3 dB.
+Saved checkpoints at ./logs/TUT-out-doll-360-np/850000.tar
+Rendered a 120-frame video (first-frame shapes: RGB torch.Size([320, 640, 3]), disparity torch.Size([320, 640]); ≈ 21–23 s per frame, per-frame timing lines condensed), then: Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras: {'raw': raw network outputs ([..., 4]: RGB + density), 'rgb0'/'disp0'/'acc0': coarse-pass color, disparity, and accumulated opacity, 'z_std': per-ray std of sample depths} — the verbatim tensor dumps (whose grad_fn reprs were mangled in extraction) are elided.
+A second 120-frame pass follows at similar per-frame times (timing lines condensed).
+test poses shape torch.Size([4, 3, 4]); rendered 3 test views; Saved test set
+[TRAIN] Iter: 850000 .. 854400 (condensed): Loss ≈ 0.005–0.007, PSNR ≈ 25.7–28.9 dB.
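In the render blocks above, each integer/float pair is a frame index and its wall time in seconds, and the torch.Size lines are the RGB and disparity shapes of the first frame. A rough sketch of the loop that emits them, modeled on nerf-pytorch's render_path(); the function names and signature here are assumptions:

```python
import time
import numpy as np

def render_path(render_poses, render_fn):
    """Render each pose, logging '<frame> <seconds>' like the lines above."""
    rgbs, disps = [], []
    t = time.time()
    for i, pose in enumerate(render_poses):
        print(i, time.time() - t)      # per-frame wall time (first is ~0)
        t = time.time()
        rgb, disp = render_fn(pose)    # rgb: [320, 640, 3], disp: [320, 640]
        if i == 0:
            print(rgb.shape, disp.shape)
        rgbs.append(rgb.cpu().numpy())
        disps.append(disp.cpu().numpy())
    rgbs, disps = np.stack(rgbs), np.stack(disps)
    print('Done, saving', rgbs.shape, disps.shape)  # (120, 320, 640, 3) (120, 320, 640)
    return rgbs, disps
```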
+[TRAIN] Iter: 854500 .. 897400 (condensed): Loss ≈ 0.004–0.009, PSNR ≈ 24.9–29.3 dB; the running values stay roughly flat across the window.
+Saved checkpoints at ./logs/TUT-out-doll-360-np/{860000, 870000, 880000, 890000}.tar (one every 10,000 iterations)
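Tens of thousands of per-iteration lines are hard to eyeball as raw numbers; if this console output is saved to a file, the training curve can be recovered with a short parser. A minimal sketch, assuming the log was dumped to a hypothetical train.log:

```python
import re
import numpy as np

# Pattern matching the "[TRAIN] Iter: N Loss: L PSNR: P" lines above
pat = re.compile(r"\[TRAIN\] Iter: (\d+) Loss: ([\d.]+) PSNR: ([\d.]+)")

iters, losses, psnrs = [], [], []
with open('train.log') as f:               # hypothetical dump of this output
    for line in f:
        m = pat.search(line)
        if m:
            iters.append(int(m.group(1)))
            losses.append(float(m.group(2)))
            psnrs.append(float(m.group(3)))

print(f"iters {iters[0]}..{iters[-1]}: "
      f"Loss mean {np.mean(losses):.4f}, PSNR mean {np.mean(psnrs):.2f} dB")
```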
+[TRAIN] Iter: 897500 .. 899900 (condensed): Loss ≈ 0.004–0.008, PSNR ≈ 25.6–29.1 dB.
+Saved checkpoints at ./logs/TUT-out-doll-360-np/900000.tar
+Rendered the 120-frame video again (same shapes as above) at ≈ 11.5–12.3 s per frame, then: Done, saving (120, 320, 640, 3) (120, 320, 640); extras dict printed again (tensor dumps elided); a second 120-frame pass at similar times; test poses shape torch.Size([4, 3, 4]); rendered 3 test views; Saved test set
+[TRAIN] Iter: 900000 .. 909900 (condensed): Loss ≈ 0.004–0.009, PSNR ≈ 25.5–29.2 dB.
+Saved checkpoints at ./logs/TUT-out-doll-360-np/910000.tar
+[TRAIN] Iter: 910000 Loss: 0.006807099096477032 PSNR:
25.914344787597656 +[TRAIN] Iter: 910100 Loss: 0.006740819197148085 PSNR: 27.27693748474121 +[TRAIN] Iter: 910200 Loss: 0.005877337418496609 PSNR: 26.90011978149414 +[TRAIN] Iter: 910300 Loss: 0.007023331709206104 PSNR: 26.059520721435547 +[TRAIN] Iter: 910400 Loss: 0.0055815791711211205 PSNR: 27.400968551635742 +[TRAIN] Iter: 910500 Loss: 0.005362767726182938 PSNR: 27.330032348632812 +[TRAIN] Iter: 910600 Loss: 0.00650966539978981 PSNR: 26.204797744750977 +[TRAIN] Iter: 910700 Loss: 0.006071324925869703 PSNR: 27.01600456237793 +[TRAIN] Iter: 910800 Loss: 0.0067487857304513454 PSNR: 26.567956924438477 +[TRAIN] Iter: 910900 Loss: 0.006616877391934395 PSNR: 26.383010864257812 +[TRAIN] Iter: 911000 Loss: 0.006464239675551653 PSNR: 26.824689865112305 +[TRAIN] Iter: 911100 Loss: 0.006657211109995842 PSNR: 26.66356086730957 +[TRAIN] Iter: 911200 Loss: 0.006170195527374744 PSNR: 26.6336727142334 +[TRAIN] Iter: 911300 Loss: 0.0052210548892617226 PSNR: 27.493167877197266 +[TRAIN] Iter: 911400 Loss: 0.005217726342380047 PSNR: 27.556785583496094 +[TRAIN] Iter: 911500 Loss: 0.005652338266372681 PSNR: 27.02519989013672 +[TRAIN] Iter: 911600 Loss: 0.006588802672922611 PSNR: 25.95487403869629 +[TRAIN] Iter: 911700 Loss: 0.007331484463065863 PSNR: 26.198457717895508 +[TRAIN] Iter: 911800 Loss: 0.0064288172870874405 PSNR: 26.121564865112305 +[TRAIN] Iter: 911900 Loss: 0.005014713853597641 PSNR: 26.762144088745117 +[TRAIN] Iter: 912000 Loss: 0.005636037793010473 PSNR: 27.257577896118164 +[TRAIN] Iter: 912100 Loss: 0.005737863481044769 PSNR: 27.345821380615234 +[TRAIN] Iter: 912200 Loss: 0.004283651243895292 PSNR: 28.494159698486328 +[TRAIN] Iter: 912300 Loss: 0.00771164009347558 PSNR: 25.43397331237793 +[TRAIN] Iter: 912400 Loss: 0.007400054484605789 PSNR: 25.665725708007812 +[TRAIN] Iter: 912500 Loss: 0.0050666541792452335 PSNR: 28.40829086303711 +[TRAIN] Iter: 912600 Loss: 0.004916397854685783 PSNR: 27.937911987304688 +[TRAIN] Iter: 912700 Loss: 0.007600109092891216 PSNR: 26.079477310180664 +[TRAIN] Iter: 912800 Loss: 0.005645593628287315 PSNR: 27.288307189941406 +[TRAIN] Iter: 912900 Loss: 0.004434001166373491 PSNR: 28.4189453125 +[TRAIN] Iter: 913000 Loss: 0.006083696614950895 PSNR: 26.291000366210938 +[TRAIN] Iter: 913100 Loss: 0.006694757845252752 PSNR: 26.441593170166016 +[TRAIN] Iter: 913200 Loss: 0.005050534848123789 PSNR: 28.465091705322266 +[TRAIN] Iter: 913300 Loss: 0.005167743191123009 PSNR: 28.946260452270508 +[TRAIN] Iter: 913400 Loss: 0.007833126001060009 PSNR: 25.786659240722656 +[TRAIN] Iter: 913500 Loss: 0.0053311120718717575 PSNR: 28.281780242919922 +[TRAIN] Iter: 913600 Loss: 0.005480075255036354 PSNR: 27.614795684814453 +[TRAIN] Iter: 913700 Loss: 0.00690839858725667 PSNR: 26.617197036743164 +[TRAIN] Iter: 913800 Loss: 0.0068796612322330475 PSNR: 26.537141799926758 +[TRAIN] Iter: 913900 Loss: 0.0046762945130467415 PSNR: 28.228763580322266 +[TRAIN] Iter: 914000 Loss: 0.005787288304418325 PSNR: 27.540964126586914 +[TRAIN] Iter: 914100 Loss: 0.00470837252214551 PSNR: 28.853425979614258 +[TRAIN] Iter: 914200 Loss: 0.00590079091489315 PSNR: 27.53944206237793 +[TRAIN] Iter: 914300 Loss: 0.005620107986032963 PSNR: 26.898550033569336 +[TRAIN] Iter: 914400 Loss: 0.00663704052567482 PSNR: 26.030263900756836 +[TRAIN] Iter: 914500 Loss: 0.007136657368391752 PSNR: 25.837194442749023 +[TRAIN] Iter: 914600 Loss: 0.005884221754968166 PSNR: 26.924551010131836 +[TRAIN] Iter: 914700 Loss: 0.006547556258738041 PSNR: 26.387062072753906 +[TRAIN] Iter: 914800 Loss: 0.005719093605875969 PSNR: 
27.712383270263672 +[TRAIN] Iter: 914900 Loss: 0.006395417265594006 PSNR: 26.927587509155273 +[TRAIN] Iter: 915000 Loss: 0.006955795921385288 PSNR: 26.394046783447266 +[TRAIN] Iter: 915100 Loss: 0.006040939595550299 PSNR: 27.0098819732666 +[TRAIN] Iter: 915200 Loss: 0.005124963819980621 PSNR: 27.684589385986328 +[TRAIN] Iter: 915300 Loss: 0.006678382866084576 PSNR: 26.264127731323242 +[TRAIN] Iter: 915400 Loss: 0.0067162043415009975 PSNR: 26.005756378173828 +[TRAIN] Iter: 915500 Loss: 0.005522504448890686 PSNR: 28.268815994262695 +[TRAIN] Iter: 915600 Loss: 0.006579324137419462 PSNR: 26.670001983642578 +[TRAIN] Iter: 915700 Loss: 0.006195809692144394 PSNR: 26.528017044067383 +[TRAIN] Iter: 915800 Loss: 0.006802585441619158 PSNR: 26.446025848388672 +[TRAIN] Iter: 915900 Loss: 0.006680317688733339 PSNR: 26.206039428710938 +[TRAIN] Iter: 916000 Loss: 0.005109308287501335 PSNR: 28.26021385192871 +[TRAIN] Iter: 916100 Loss: 0.005358541384339333 PSNR: 26.8687801361084 +[TRAIN] Iter: 916200 Loss: 0.0044946614652872086 PSNR: 28.16666030883789 +[TRAIN] Iter: 916300 Loss: 0.006904577370733023 PSNR: 25.777130126953125 +[TRAIN] Iter: 916400 Loss: 0.006389953196048737 PSNR: 26.304113388061523 +[TRAIN] Iter: 916500 Loss: 0.0068320054560899734 PSNR: 25.696575164794922 +[TRAIN] Iter: 916600 Loss: 0.005798350088298321 PSNR: 26.580610275268555 +[TRAIN] Iter: 916700 Loss: 0.005614911671727896 PSNR: 27.111648559570312 +[TRAIN] Iter: 916800 Loss: 0.006121918559074402 PSNR: 27.36815071105957 +[TRAIN] Iter: 916900 Loss: 0.0044116852805018425 PSNR: 28.865198135375977 +[TRAIN] Iter: 917000 Loss: 0.004969996400177479 PSNR: 28.54424285888672 +[TRAIN] Iter: 917100 Loss: 0.006410864647477865 PSNR: 26.621109008789062 +[TRAIN] Iter: 917200 Loss: 0.004337047226727009 PSNR: 28.557022094726562 +[TRAIN] Iter: 917300 Loss: 0.006228670012205839 PSNR: 26.26083755493164 +[TRAIN] Iter: 917400 Loss: 0.0046958476305007935 PSNR: 28.480300903320312 +[TRAIN] Iter: 917500 Loss: 0.006011409219354391 PSNR: 26.725051879882812 +[TRAIN] Iter: 917600 Loss: 0.00537949800491333 PSNR: 27.42622947692871 +[TRAIN] Iter: 917700 Loss: 0.00726967491209507 PSNR: 26.182641983032227 +[TRAIN] Iter: 917800 Loss: 0.005015094298869371 PSNR: 28.724878311157227 +[TRAIN] Iter: 917900 Loss: 0.0059128412976861 PSNR: 26.554901123046875 +[TRAIN] Iter: 918000 Loss: 0.0057191671803593636 PSNR: 27.265390396118164 +[TRAIN] Iter: 918100 Loss: 0.0074627879075706005 PSNR: 26.11464500427246 +[TRAIN] Iter: 918200 Loss: 0.00638981256633997 PSNR: 26.847789764404297 +[TRAIN] Iter: 918300 Loss: 0.006587005220353603 PSNR: 25.910634994506836 +[TRAIN] Iter: 918400 Loss: 0.0068283770233392715 PSNR: 25.670713424682617 +[TRAIN] Iter: 918500 Loss: 0.006279577501118183 PSNR: 26.996015548706055 +[TRAIN] Iter: 918600 Loss: 0.006467908620834351 PSNR: 26.260986328125 +[TRAIN] Iter: 918700 Loss: 0.006383437663316727 PSNR: 26.26726722717285 +[TRAIN] Iter: 918800 Loss: 0.006192709784954786 PSNR: 26.98058319091797 +[TRAIN] Iter: 918900 Loss: 0.005151720717549324 PSNR: 28.084335327148438 +[TRAIN] Iter: 919000 Loss: 0.006507962476462126 PSNR: 26.163497924804688 +[TRAIN] Iter: 919100 Loss: 0.006140013225376606 PSNR: 27.497358322143555 +[TRAIN] Iter: 919200 Loss: 0.005226613022387028 PSNR: 27.429536819458008 +[TRAIN] Iter: 919300 Loss: 0.006319222040474415 PSNR: 26.43239402770996 +[TRAIN] Iter: 919400 Loss: 0.007061034440994263 PSNR: 25.84869956970215 +[TRAIN] Iter: 919500 Loss: 0.00578953605145216 PSNR: 26.523319244384766 +[TRAIN] Iter: 919600 Loss: 0.005608938634395599 PSNR: 27.18368911743164 
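A note on reading these `[TRAIN]` lines: assuming the standard nerf-pytorch training loop, the logged Loss is the sum of the fine and coarse MSE terms (`img_loss + img_loss0`), while PSNR is `mse2psnr` applied to the fine MSE alone, so `-10*log10(Loss)` lands a little below the logged PSNR. A minimal sketch, using the pair logged at Iter 919600 above; the fine/coarse split is inferred from the two numbers, not logged directly:

```python
import math

# Assumed nerf-pytorch convention: Loss = fine MSE + coarse MSE,
# PSNR = -10 * log10(fine MSE)  (the repo's `mse2psnr` helper).
loss_total = 0.005608938634395599   # "Loss" at Iter 919600
psnr_fine = 27.18368911743164       # "PSNR" at Iter 919600

mse_fine = 10 ** (-psnr_fine / 10)  # invert mse2psnr: ~0.00191
mse_coarse = loss_total - mse_fine  # inferred coarse MSE: ~0.00370
print(f"fine MSE ~ {mse_fine:.5f}, inferred coarse MSE ~ {mse_coarse:.5f}")
assert abs(-10 * math.log10(mse_fine) - psnr_fine) < 1e-9
```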
+[TRAIN] Iter: 919700 Loss: 0.007163393311202526 PSNR: 25.776887893676758 +[TRAIN] Iter: 919800 Loss: 0.006690915673971176 PSNR: 26.45354652404785 +[TRAIN] Iter: 919900 Loss: 0.006571002304553986 PSNR: 26.7051944732666 +Saved checkpoints at ./logs/TUT-out-doll-360-np/920000.tar +[TRAIN] Iter: 920000 Loss: 0.005381736904382706 PSNR: 26.79144859313965 +[TRAIN] Iter: 920100 Loss: 0.0047370740212500095 PSNR: 27.827484130859375 +[TRAIN] Iter: 920200 Loss: 0.00505732512101531 PSNR: 27.423999786376953 +[TRAIN] Iter: 920300 Loss: 0.006181835196912289 PSNR: 26.155311584472656 +[TRAIN] Iter: 920400 Loss: 0.005268965382128954 PSNR: 27.567373275756836 +[TRAIN] Iter: 920500 Loss: 0.006887484807521105 PSNR: 26.534513473510742 +[TRAIN] Iter: 920600 Loss: 0.007146806921809912 PSNR: 26.056711196899414 +[TRAIN] Iter: 920700 Loss: 0.007216614671051502 PSNR: 25.665557861328125 +[TRAIN] Iter: 920800 Loss: 0.005580040160566568 PSNR: 27.568174362182617 +[TRAIN] Iter: 920900 Loss: 0.006099436432123184 PSNR: 26.286550521850586 +[TRAIN] Iter: 921000 Loss: 0.0066597796976566315 PSNR: 26.41460609436035 +[TRAIN] Iter: 921100 Loss: 0.004845331888645887 PSNR: 29.08492088317871 +[TRAIN] Iter: 921200 Loss: 0.006639955565333366 PSNR: 26.578245162963867 +[TRAIN] Iter: 921300 Loss: 0.005404987372457981 PSNR: 27.169265747070312 +[TRAIN] Iter: 921400 Loss: 0.006246825214475393 PSNR: 26.42658042907715 +[TRAIN] Iter: 921500 Loss: 0.007459341082721949 PSNR: 25.972017288208008 +[TRAIN] Iter: 921600 Loss: 0.005618327297270298 PSNR: 27.87517738342285 +[TRAIN] Iter: 921700 Loss: 0.005868952721357346 PSNR: 26.602603912353516 +[TRAIN] Iter: 921800 Loss: 0.00670727901160717 PSNR: 26.60120964050293 +[TRAIN] Iter: 921900 Loss: 0.0060823578387498856 PSNR: 26.730703353881836 +[TRAIN] Iter: 922000 Loss: 0.00622477475553751 PSNR: 26.773210525512695 +[TRAIN] Iter: 922100 Loss: 0.006461139768362045 PSNR: 26.711875915527344 +[TRAIN] Iter: 922200 Loss: 0.005971698556095362 PSNR: 27.088150024414062 +[TRAIN] Iter: 922300 Loss: 0.0054569002240896225 PSNR: 27.055824279785156 +[TRAIN] Iter: 922400 Loss: 0.005714517552405596 PSNR: 27.093921661376953 +[TRAIN] Iter: 922500 Loss: 0.006102902814745903 PSNR: 26.968036651611328 +[TRAIN] Iter: 922600 Loss: 0.006511622108519077 PSNR: 26.428293228149414 +[TRAIN] Iter: 922700 Loss: 0.006905307061970234 PSNR: 26.03655242919922 +[TRAIN] Iter: 922800 Loss: 0.005800588056445122 PSNR: 27.452322006225586 +[TRAIN] Iter: 922900 Loss: 0.0066817812621593475 PSNR: 26.395023345947266 +[TRAIN] Iter: 923000 Loss: 0.004908975679427385 PSNR: 27.857715606689453 +[TRAIN] Iter: 923100 Loss: 0.006624627858400345 PSNR: 26.403141021728516 +[TRAIN] Iter: 923200 Loss: 0.006655859760940075 PSNR: 26.73998260498047 +[TRAIN] Iter: 923300 Loss: 0.004184727091342211 PSNR: 28.805160522460938 +[TRAIN] Iter: 923400 Loss: 0.005299168638885021 PSNR: 27.984310150146484 +[TRAIN] Iter: 923500 Loss: 0.007705080322921276 PSNR: 25.658519744873047 +[TRAIN] Iter: 923600 Loss: 0.006564142182469368 PSNR: 26.47562026977539 +[TRAIN] Iter: 923700 Loss: 0.004676565993577242 PSNR: 28.626428604125977 +[TRAIN] Iter: 923800 Loss: 0.007008618209511042 PSNR: 26.177942276000977 +[TRAIN] Iter: 923900 Loss: 0.005935539025813341 PSNR: 27.0275936126709 +[TRAIN] Iter: 924000 Loss: 0.005973268300294876 PSNR: 26.444141387939453 +[TRAIN] Iter: 924100 Loss: 0.00641278363764286 PSNR: 26.497543334960938 +[TRAIN] Iter: 924200 Loss: 0.0059773558750748634 PSNR: 27.017780303955078 +[TRAIN] Iter: 924300 Loss: 0.0065559884533286095 PSNR: 26.34126091003418 +[TRAIN] Iter: 924400 Loss: 
0.0053040762431919575 PSNR: 27.14522933959961 +[TRAIN] Iter: 924500 Loss: 0.006057256832718849 PSNR: 26.814170837402344 +[TRAIN] Iter: 924600 Loss: 0.005691756494343281 PSNR: 27.449665069580078 +[TRAIN] Iter: 924700 Loss: 0.005806826055049896 PSNR: 27.05254364013672 +[TRAIN] Iter: 924800 Loss: 0.006310627795755863 PSNR: 26.27358055114746 +[TRAIN] Iter: 924900 Loss: 0.007404564879834652 PSNR: 26.80275535583496 +[TRAIN] Iter: 925000 Loss: 0.006388856563717127 PSNR: 25.933629989624023 +[TRAIN] Iter: 925100 Loss: 0.005860639736056328 PSNR: 27.47978973388672 +[TRAIN] Iter: 925200 Loss: 0.0042856717482209206 PSNR: 28.164583206176758 +[TRAIN] Iter: 925300 Loss: 0.004773111082613468 PSNR: 29.259855270385742 +[TRAIN] Iter: 925400 Loss: 0.0053520058281719685 PSNR: 27.474397659301758 +[TRAIN] Iter: 925500 Loss: 0.004464623052626848 PSNR: 28.888870239257812 +[TRAIN] Iter: 925600 Loss: 0.005690999794751406 PSNR: 26.6997127532959 +[TRAIN] Iter: 925700 Loss: 0.006986694410443306 PSNR: 26.422588348388672 +[TRAIN] Iter: 925800 Loss: 0.004824515897780657 PSNR: 28.58770179748535 +[TRAIN] Iter: 925900 Loss: 0.006116747390478849 PSNR: 26.833984375 +[TRAIN] Iter: 926000 Loss: 0.006299203261733055 PSNR: 26.386951446533203 +[TRAIN] Iter: 926100 Loss: 0.005910823121666908 PSNR: 27.161508560180664 +[TRAIN] Iter: 926200 Loss: 0.005363247357308865 PSNR: 27.644062042236328 +[TRAIN] Iter: 926300 Loss: 0.004320819862186909 PSNR: 28.439682006835938 +[TRAIN] Iter: 926400 Loss: 0.007387654390186071 PSNR: 25.883115768432617 +[TRAIN] Iter: 926500 Loss: 0.005455931648612022 PSNR: 27.97836685180664 +[TRAIN] Iter: 926600 Loss: 0.005851658061146736 PSNR: 28.994958877563477 +[TRAIN] Iter: 926700 Loss: 0.004597533028572798 PSNR: 28.798011779785156 +[TRAIN] Iter: 926800 Loss: 0.006118801888078451 PSNR: 27.64729881286621 +[TRAIN] Iter: 926900 Loss: 0.005174783989787102 PSNR: 28.130090713500977 +[TRAIN] Iter: 927000 Loss: 0.0062111192382872105 PSNR: 27.187490463256836 +[TRAIN] Iter: 927100 Loss: 0.006409893743693829 PSNR: 26.300886154174805 +[TRAIN] Iter: 927200 Loss: 0.006799296010285616 PSNR: 26.649045944213867 +[TRAIN] Iter: 927300 Loss: 0.00714474031701684 PSNR: 26.13964080810547 +[TRAIN] Iter: 927400 Loss: 0.006125876680016518 PSNR: 26.750898361206055 +[TRAIN] Iter: 927500 Loss: 0.006973445415496826 PSNR: 26.49513816833496 +[TRAIN] Iter: 927600 Loss: 0.005022089462727308 PSNR: 27.84197235107422 +[TRAIN] Iter: 927700 Loss: 0.006169413682073355 PSNR: 26.61100959777832 +[TRAIN] Iter: 927800 Loss: 0.005683016497641802 PSNR: 28.611928939819336 +[TRAIN] Iter: 927900 Loss: 0.006165930535644293 PSNR: 27.75613784790039 +[TRAIN] Iter: 928000 Loss: 0.005936275701969862 PSNR: 27.365110397338867 +[TRAIN] Iter: 928100 Loss: 0.005273209419101477 PSNR: 27.071395874023438 +[TRAIN] Iter: 928200 Loss: 0.005583466030657291 PSNR: 28.084646224975586 +[TRAIN] Iter: 928300 Loss: 0.00747528113424778 PSNR: 26.212663650512695 +[TRAIN] Iter: 928400 Loss: 0.0049263956025242805 PSNR: 27.516860961914062 +[TRAIN] Iter: 928500 Loss: 0.005509319715201855 PSNR: 27.13019371032715 +[TRAIN] Iter: 928600 Loss: 0.006672266870737076 PSNR: 26.416898727416992 +[TRAIN] Iter: 928700 Loss: 0.00628680782392621 PSNR: 26.679527282714844 +[TRAIN] Iter: 928800 Loss: 0.006641796790063381 PSNR: 26.619983673095703 +[TRAIN] Iter: 928900 Loss: 0.006843176670372486 PSNR: 26.060501098632812 +[TRAIN] Iter: 929000 Loss: 0.006152819376438856 PSNR: 26.96004295349121 +[TRAIN] Iter: 929100 Loss: 0.006664258427917957 PSNR: 26.32335662841797 +[TRAIN] Iter: 929200 Loss: 0.005868498235940933 
PSNR: 26.930484771728516 +[TRAIN] Iter: 929300 Loss: 0.008356745354831219 PSNR: 25.45538330078125 +[TRAIN] Iter: 929400 Loss: 0.00575781986117363 PSNR: 26.816076278686523 +[TRAIN] Iter: 929500 Loss: 0.006673445459455252 PSNR: 26.37851333618164 +[TRAIN] Iter: 929600 Loss: 0.006181120872497559 PSNR: 26.40179443359375 +[TRAIN] Iter: 929700 Loss: 0.006590161472558975 PSNR: 25.68971824645996 +[TRAIN] Iter: 929800 Loss: 0.005003124475479126 PSNR: 28.17931365966797 +[TRAIN] Iter: 929900 Loss: 0.005738894455134869 PSNR: 27.203508377075195 +Saved checkpoints at ./logs/TUT-out-doll-360-np/930000.tar +[TRAIN] Iter: 930000 Loss: 0.007546840235590935 PSNR: 25.373249053955078 +[TRAIN] Iter: 930100 Loss: 0.006122191436588764 PSNR: 26.93524932861328 +[TRAIN] Iter: 930200 Loss: 0.006068584509193897 PSNR: 26.892189025878906 +[TRAIN] Iter: 930300 Loss: 0.007424955256283283 PSNR: 25.74333953857422 +[TRAIN] Iter: 930400 Loss: 0.005954893305897713 PSNR: 28.247295379638672 +[TRAIN] Iter: 930500 Loss: 0.00626200158149004 PSNR: 26.84619903564453 +[TRAIN] Iter: 930600 Loss: 0.005828934721648693 PSNR: 27.72303581237793 +[TRAIN] Iter: 930700 Loss: 0.00566257955506444 PSNR: 27.022296905517578 +[TRAIN] Iter: 930800 Loss: 0.005874893628060818 PSNR: 27.168424606323242 +[TRAIN] Iter: 930900 Loss: 0.004770009778439999 PSNR: 28.140729904174805 +[TRAIN] Iter: 931000 Loss: 0.005669387057423592 PSNR: 27.23824691772461 +[TRAIN] Iter: 931100 Loss: 0.0053376685827970505 PSNR: 28.08490562438965 +[TRAIN] Iter: 931200 Loss: 0.0055328491143882275 PSNR: 27.20756721496582 +[TRAIN] Iter: 931300 Loss: 0.006857362110167742 PSNR: 26.293790817260742 +[TRAIN] Iter: 931400 Loss: 0.0058253128081560135 PSNR: 27.034772872924805 +[TRAIN] Iter: 931500 Loss: 0.0044278958812355995 PSNR: 27.9766788482666 +[TRAIN] Iter: 931600 Loss: 0.006843209732323885 PSNR: 26.312227249145508 +[TRAIN] Iter: 931700 Loss: 0.00750038493424654 PSNR: 26.442886352539062 +[TRAIN] Iter: 931800 Loss: 0.00612616166472435 PSNR: 26.53938865661621 +[TRAIN] Iter: 931900 Loss: 0.007635349407792091 PSNR: 25.550962448120117 +[TRAIN] Iter: 932000 Loss: 0.006347563583403826 PSNR: 26.91362762451172 +[TRAIN] Iter: 932100 Loss: 0.006084254011511803 PSNR: 26.983692169189453 +[TRAIN] Iter: 932200 Loss: 0.007743041962385178 PSNR: 25.87578582763672 +[TRAIN] Iter: 932300 Loss: 0.00617494760081172 PSNR: 26.63577651977539 +[TRAIN] Iter: 932400 Loss: 0.00793906208127737 PSNR: 25.357837677001953 +[TRAIN] Iter: 932500 Loss: 0.0063901497051119804 PSNR: 26.507768630981445 +[TRAIN] Iter: 932600 Loss: 0.006812724284827709 PSNR: 27.48006248474121 +[TRAIN] Iter: 932700 Loss: 0.006179832387715578 PSNR: 27.00579071044922 +[TRAIN] Iter: 932800 Loss: 0.005617935210466385 PSNR: 27.019248962402344 +[TRAIN] Iter: 932900 Loss: 0.00723992520943284 PSNR: 25.689607620239258 +[TRAIN] Iter: 933000 Loss: 0.007906961254775524 PSNR: 25.768421173095703 +[TRAIN] Iter: 933100 Loss: 0.006464894395321608 PSNR: 26.520780563354492 +[TRAIN] Iter: 933200 Loss: 0.006281825248152018 PSNR: 26.563758850097656 +[TRAIN] Iter: 933300 Loss: 0.006047429516911507 PSNR: 27.03968048095703 +[TRAIN] Iter: 933400 Loss: 0.004248823970556259 PSNR: 27.856229782104492 +[TRAIN] Iter: 933500 Loss: 0.006624937988817692 PSNR: 27.226037979125977 +[TRAIN] Iter: 933600 Loss: 0.0060576810501515865 PSNR: 26.985998153686523 +[TRAIN] Iter: 933700 Loss: 0.006646934896707535 PSNR: 26.07436180114746 +[TRAIN] Iter: 933800 Loss: 0.006991339847445488 PSNR: 25.881179809570312 +[TRAIN] Iter: 933900 Loss: 0.0049655986949801445 PSNR: 28.61368179321289 +[TRAIN] Iter: 
934000 Loss: 0.006207161583006382 PSNR: 27.4213924407959 +[TRAIN] Iter: 934100 Loss: 0.006669282913208008 PSNR: 26.03234100341797 +[TRAIN] Iter: 934200 Loss: 0.0056756664998829365 PSNR: 27.228275299072266 +[TRAIN] Iter: 934300 Loss: 0.005716431885957718 PSNR: 29.00799560546875 +[TRAIN] Iter: 934400 Loss: 0.006084763444960117 PSNR: 26.6494197845459 +[TRAIN] Iter: 934500 Loss: 0.006281727459281683 PSNR: 26.701818466186523 +[TRAIN] Iter: 934600 Loss: 0.006643612403422594 PSNR: 26.366554260253906 +[TRAIN] Iter: 934700 Loss: 0.005686561111360788 PSNR: 26.80048370361328 +[TRAIN] Iter: 934800 Loss: 0.006980735342949629 PSNR: 26.062725067138672 +[TRAIN] Iter: 934900 Loss: 0.005342856980860233 PSNR: 27.94686508178711 +[TRAIN] Iter: 935000 Loss: 0.006967292167246342 PSNR: 26.323678970336914 +[TRAIN] Iter: 935100 Loss: 0.007193321827799082 PSNR: 25.8131103515625 +[TRAIN] Iter: 935200 Loss: 0.0071781231090426445 PSNR: 25.598920822143555 +[TRAIN] Iter: 935300 Loss: 0.00697680376470089 PSNR: 26.589326858520508 +[TRAIN] Iter: 935400 Loss: 0.00662051048129797 PSNR: 26.568559646606445 +[TRAIN] Iter: 935500 Loss: 0.005086213815957308 PSNR: 27.783952713012695 +[TRAIN] Iter: 935600 Loss: 0.005867676809430122 PSNR: 26.910625457763672 +[TRAIN] Iter: 935700 Loss: 0.0071128299459815025 PSNR: 26.29887580871582 +[TRAIN] Iter: 935800 Loss: 0.00616677338257432 PSNR: 26.57989501953125 +[TRAIN] Iter: 935900 Loss: 0.006627943366765976 PSNR: 26.38675880432129 +[TRAIN] Iter: 936000 Loss: 0.008002525195479393 PSNR: 25.106658935546875 +[TRAIN] Iter: 936100 Loss: 0.007351139560341835 PSNR: 25.655216217041016 +[TRAIN] Iter: 936200 Loss: 0.005470565520226955 PSNR: 27.04591178894043 +[TRAIN] Iter: 936300 Loss: 0.005976502783596516 PSNR: 26.649063110351562 +[TRAIN] Iter: 936400 Loss: 0.004736107774078846 PSNR: 28.73672866821289 +[TRAIN] Iter: 936500 Loss: 0.004998985677957535 PSNR: 28.91826629638672 +[TRAIN] Iter: 936600 Loss: 0.005789497401565313 PSNR: 27.648466110229492 +[TRAIN] Iter: 936700 Loss: 0.005445804446935654 PSNR: 27.878751754760742 +[TRAIN] Iter: 936800 Loss: 0.0045470078475773335 PSNR: 28.79403305053711 +[TRAIN] Iter: 936900 Loss: 0.007149025797843933 PSNR: 26.155323028564453 +[TRAIN] Iter: 937000 Loss: 0.0061648511327803135 PSNR: 26.811250686645508 +[TRAIN] Iter: 937100 Loss: 0.005392472725361586 PSNR: 28.338722229003906 +[TRAIN] Iter: 937200 Loss: 0.0071051111444830894 PSNR: 26.059152603149414 +[TRAIN] Iter: 937300 Loss: 0.006710431072860956 PSNR: 26.463180541992188 +[TRAIN] Iter: 937400 Loss: 0.007010219618678093 PSNR: 25.503570556640625 +[TRAIN] Iter: 937500 Loss: 0.005974845960736275 PSNR: 27.81369400024414 +[TRAIN] Iter: 937600 Loss: 0.005124017596244812 PSNR: 27.672054290771484 +[TRAIN] Iter: 937700 Loss: 0.005720809567719698 PSNR: 27.81907081604004 +[TRAIN] Iter: 937800 Loss: 0.004850413184612989 PSNR: 28.334375381469727 +[TRAIN] Iter: 937900 Loss: 0.0044867172837257385 PSNR: 28.481992721557617 +[TRAIN] Iter: 938000 Loss: 0.005314924288541079 PSNR: 28.077552795410156 +[TRAIN] Iter: 938100 Loss: 0.005831253249198198 PSNR: 26.63720703125 +[TRAIN] Iter: 938200 Loss: 0.007702739909291267 PSNR: 26.332807540893555 +[TRAIN] Iter: 938300 Loss: 0.005499100312590599 PSNR: 27.718652725219727 +[TRAIN] Iter: 938400 Loss: 0.003935577347874641 PSNR: 29.184547424316406 +[TRAIN] Iter: 938500 Loss: 0.007520613260567188 PSNR: 26.109403610229492 +[TRAIN] Iter: 938600 Loss: 0.006535777356475592 PSNR: 26.401424407958984 +[TRAIN] Iter: 938700 Loss: 0.005999852903187275 PSNR: 27.29833221435547 +[TRAIN] Iter: 938800 Loss: 
0.005483481101691723 PSNR: 27.830060958862305 +[TRAIN] Iter: 938900 Loss: 0.007219557650387287 PSNR: 26.346765518188477 +[TRAIN] Iter: 939000 Loss: 0.005470461677759886 PSNR: 27.51024055480957 +[TRAIN] Iter: 939100 Loss: 0.006264125928282738 PSNR: 26.22773551940918 +[TRAIN] Iter: 939200 Loss: 0.005234138108789921 PSNR: 27.923097610473633 +[TRAIN] Iter: 939300 Loss: 0.006065574008971453 PSNR: 26.771724700927734 +[TRAIN] Iter: 939400 Loss: 0.0061464980244636536 PSNR: 27.226280212402344 +[TRAIN] Iter: 939500 Loss: 0.004996726289391518 PSNR: 28.158193588256836 +[TRAIN] Iter: 939600 Loss: 0.006852090358734131 PSNR: 26.790809631347656 +[TRAIN] Iter: 939700 Loss: 0.007323912810534239 PSNR: 25.748876571655273 +[TRAIN] Iter: 939800 Loss: 0.0051261428743600845 PSNR: 28.817386627197266 +[TRAIN] Iter: 939900 Loss: 0.006575331557542086 PSNR: 26.37161636352539 +Saved checkpoints at ./logs/TUT-out-doll-360-np/940000.tar +[TRAIN] Iter: 940000 Loss: 0.0075553422793745995 PSNR: 26.028038024902344 +[TRAIN] Iter: 940100 Loss: 0.006113354582339525 PSNR: 26.905054092407227 +[TRAIN] Iter: 940200 Loss: 0.004974255803972483 PSNR: 28.379270553588867 +[TRAIN] Iter: 940300 Loss: 0.00688452273607254 PSNR: 26.190223693847656 +[TRAIN] Iter: 940400 Loss: 0.0070608798414468765 PSNR: 26.21436309814453 +[TRAIN] Iter: 940500 Loss: 0.005479296203702688 PSNR: 27.873579025268555 +[TRAIN] Iter: 940600 Loss: 0.00640364782884717 PSNR: 26.597667694091797 +[TRAIN] Iter: 940700 Loss: 0.006419064477086067 PSNR: 26.340328216552734 +[TRAIN] Iter: 940800 Loss: 0.006963963154703379 PSNR: 26.391542434692383 +[TRAIN] Iter: 940900 Loss: 0.008305458351969719 PSNR: 25.15693473815918 +[TRAIN] Iter: 941000 Loss: 0.005256353877484798 PSNR: 27.450057983398438 +[TRAIN] Iter: 941100 Loss: 0.007211263291537762 PSNR: 25.929513931274414 +[TRAIN] Iter: 941200 Loss: 0.0065136379562318325 PSNR: 26.352252960205078 +[TRAIN] Iter: 941300 Loss: 0.007306921761482954 PSNR: 26.038209915161133 +[TRAIN] Iter: 941400 Loss: 0.007067753933370113 PSNR: 26.89425277709961 +[TRAIN] Iter: 941500 Loss: 0.005033574998378754 PSNR: 27.908344268798828 +[TRAIN] Iter: 941600 Loss: 0.005313113331794739 PSNR: 28.253971099853516 +[TRAIN] Iter: 941700 Loss: 0.005658374167978764 PSNR: 27.7930908203125 +[TRAIN] Iter: 941800 Loss: 0.005701362155377865 PSNR: 27.2091064453125 +[TRAIN] Iter: 941900 Loss: 0.0077045210637152195 PSNR: 25.966733932495117 +[TRAIN] Iter: 942000 Loss: 0.006378878373652697 PSNR: 26.653549194335938 +[TRAIN] Iter: 942100 Loss: 0.005171702243387699 PSNR: 28.039812088012695 +[TRAIN] Iter: 942200 Loss: 0.005748788360506296 PSNR: 27.429887771606445 +[TRAIN] Iter: 942300 Loss: 0.005348626058548689 PSNR: 27.443201065063477 +[TRAIN] Iter: 942400 Loss: 0.005131829530000687 PSNR: 28.033279418945312 +[TRAIN] Iter: 942500 Loss: 0.007614678703248501 PSNR: 25.9191951751709 +[TRAIN] Iter: 942600 Loss: 0.00569806806743145 PSNR: 26.835323333740234 +[TRAIN] Iter: 942700 Loss: 0.006851166021078825 PSNR: 26.202547073364258 +[TRAIN] Iter: 942800 Loss: 0.005638925824314356 PSNR: 27.253097534179688 +[TRAIN] Iter: 942900 Loss: 0.006227794568985701 PSNR: 26.872817993164062 +[TRAIN] Iter: 943000 Loss: 0.006761754862964153 PSNR: 26.790668487548828 +[TRAIN] Iter: 943100 Loss: 0.005694265477359295 PSNR: 27.753564834594727 +[TRAIN] Iter: 943200 Loss: 0.005830284673720598 PSNR: 26.93497085571289 +[TRAIN] Iter: 943300 Loss: 0.006162560544908047 PSNR: 26.85531234741211 +[TRAIN] Iter: 943400 Loss: 0.0064897360280156136 PSNR: 26.970977783203125 +[TRAIN] Iter: 943500 Loss: 0.0055504729971289635 
PSNR: 27.244346618652344 +[TRAIN] Iter: 943600 Loss: 0.00739919301122427 PSNR: 25.36736297607422 +[TRAIN] Iter: 943700 Loss: 0.006384623236954212 PSNR: 26.12961769104004 +[TRAIN] Iter: 943800 Loss: 0.004957740195095539 PSNR: 29.17513084411621 +[TRAIN] Iter: 943900 Loss: 0.00512043246999383 PSNR: 28.166067123413086 +[TRAIN] Iter: 944000 Loss: 0.006866804324090481 PSNR: 26.486873626708984 +[TRAIN] Iter: 944100 Loss: 0.006672424264252186 PSNR: 26.285099029541016 +[TRAIN] Iter: 944200 Loss: 0.006511015351861715 PSNR: 26.787261962890625 +[TRAIN] Iter: 944300 Loss: 0.004985850304365158 PSNR: 28.423704147338867 +[TRAIN] Iter: 944400 Loss: 0.0060383714735507965 PSNR: 26.7952823638916 +[TRAIN] Iter: 944500 Loss: 0.007053902838379145 PSNR: 26.60883903503418 +[TRAIN] Iter: 944600 Loss: 0.005911823362112045 PSNR: 27.55398178100586 +[TRAIN] Iter: 944700 Loss: 0.0056836167350411415 PSNR: 26.707944869995117 +[TRAIN] Iter: 944800 Loss: 0.0066529130563139915 PSNR: 26.201383590698242 +[TRAIN] Iter: 944900 Loss: 0.005284994840621948 PSNR: 27.46356201171875 +[TRAIN] Iter: 945000 Loss: 0.007499115541577339 PSNR: 26.014896392822266 +[TRAIN] Iter: 945100 Loss: 0.006036259699612856 PSNR: 26.599830627441406 +[TRAIN] Iter: 945200 Loss: 0.005452772136777639 PSNR: 27.82415771484375 +[TRAIN] Iter: 945300 Loss: 0.006289640441536903 PSNR: 27.184101104736328 +[TRAIN] Iter: 945400 Loss: 0.004932207986712456 PSNR: 28.027912139892578 +[TRAIN] Iter: 945500 Loss: 0.004659844096750021 PSNR: 28.07343101501465 +[TRAIN] Iter: 945600 Loss: 0.007857260294258595 PSNR: 26.03753662109375 +[TRAIN] Iter: 945700 Loss: 0.004736721981316805 PSNR: 28.706642150878906 +[TRAIN] Iter: 945800 Loss: 0.00618570763617754 PSNR: 27.580684661865234 +[TRAIN] Iter: 945900 Loss: 0.007154623977839947 PSNR: 26.204444885253906 +[TRAIN] Iter: 946000 Loss: 0.006768419407308102 PSNR: 25.90907096862793 +[TRAIN] Iter: 946100 Loss: 0.008254536427557468 PSNR: 25.517906188964844 +[TRAIN] Iter: 946200 Loss: 0.006245510187000036 PSNR: 26.535110473632812 +[TRAIN] Iter: 946300 Loss: 0.005675372667610645 PSNR: 26.99298667907715 +[TRAIN] Iter: 946400 Loss: 0.006827524863183498 PSNR: 26.34368896484375 +[TRAIN] Iter: 946500 Loss: 0.008251255378127098 PSNR: 25.396940231323242 +[TRAIN] Iter: 946600 Loss: 0.00546324672177434 PSNR: 27.026575088500977 +[TRAIN] Iter: 946700 Loss: 0.006825635209679604 PSNR: 26.939167022705078 +[TRAIN] Iter: 946800 Loss: 0.00639775674790144 PSNR: 27.102691650390625 +[TRAIN] Iter: 946900 Loss: 0.005563798360526562 PSNR: 27.26156997680664 +[TRAIN] Iter: 947000 Loss: 0.006206570193171501 PSNR: 26.24315643310547 +[TRAIN] Iter: 947100 Loss: 0.0062487926334142685 PSNR: 26.21485710144043 +[TRAIN] Iter: 947200 Loss: 0.0057812463492155075 PSNR: 27.31686019897461 +[TRAIN] Iter: 947300 Loss: 0.00530875101685524 PSNR: 28.341760635375977 +[TRAIN] Iter: 947400 Loss: 0.006529570557177067 PSNR: 26.324405670166016 +[TRAIN] Iter: 947500 Loss: 0.0068490635603666306 PSNR: 26.069664001464844 +[TRAIN] Iter: 947600 Loss: 0.005279772914946079 PSNR: 28.162731170654297 +[TRAIN] Iter: 947700 Loss: 0.004628695547580719 PSNR: 28.290163040161133 +[TRAIN] Iter: 947800 Loss: 0.005616462789475918 PSNR: 27.38681983947754 +[TRAIN] Iter: 947900 Loss: 0.006880850996822119 PSNR: 25.841880798339844 +[TRAIN] Iter: 948000 Loss: 0.0057649165391922 PSNR: 26.308441162109375 +[TRAIN] Iter: 948100 Loss: 0.00568293035030365 PSNR: 27.398990631103516 +[TRAIN] Iter: 948200 Loss: 0.006009104195982218 PSNR: 26.834056854248047 +[TRAIN] Iter: 948300 Loss: 0.006283571012318134 PSNR: 26.905759811401367 
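The numbered blocks in this log (one follows the 950000 checkpoint save below) are the periodic video render: each line is a frame index followed by the seconds spent on the previous frame, the two `torch.Size` prints are the RGB and disparity shapes of frame 0, and `Done, saving (120, 320, 640, 3) (120, 320, 640)` reports the stacked arrays written to disk. A minimal sketch of that loop in the style of nerf-pytorch's `render_path`; `render_fn` is a hypothetical stand-in for the actual render call:

```python
import time
import numpy as np

def render_frames(render_poses, render_fn):
    """Render one image per pose, logging in the format seen in this file.

    `render_fn(pose)` is assumed to return an RGB image [H, W, 3] and a
    disparity map [H, W] as torch tensors.
    """
    rgbs, disps = [], []
    t = time.time()
    for i, pose in enumerate(render_poses):
        print(i, time.time() - t)          # index, seconds on the previous frame
        t = time.time()
        rgb, disp = render_fn(pose)
        rgbs.append(rgb.cpu().numpy())
        disps.append(disp.cpu().numpy())
        if i == 0:
            print(rgb.shape, disp.shape)   # e.g. torch.Size([320, 640, 3]) torch.Size([320, 640])
    rgbs, disps = np.stack(rgbs, 0), np.stack(disps, 0)
    print('Done, saving', rgbs.shape, disps.shape)  # e.g. (120, 320, 640, 3) (120, 320, 640)
    return rgbs, disps
```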
+[TRAIN] Iter: 948400 Loss: 0.00409859512001276 PSNR: 29.177080154418945 +[TRAIN] Iter: 948500 Loss: 0.007673569954931736 PSNR: 26.211755752563477 +[TRAIN] Iter: 948600 Loss: 0.004525449126958847 PSNR: 28.246076583862305 +[TRAIN] Iter: 948700 Loss: 0.0044164517894387245 PSNR: 28.865224838256836 +[TRAIN] Iter: 948800 Loss: 0.006990153342485428 PSNR: 25.85389518737793 +[TRAIN] Iter: 948900 Loss: 0.0054013533517718315 PSNR: 27.771142959594727 +[TRAIN] Iter: 949000 Loss: 0.00607751589268446 PSNR: 26.596454620361328 +[TRAIN] Iter: 949100 Loss: 0.006150692701339722 PSNR: 26.543601989746094 +[TRAIN] Iter: 949200 Loss: 0.007160339504480362 PSNR: 26.457002639770508 +[TRAIN] Iter: 949300 Loss: 0.005210153292864561 PSNR: 27.458772659301758 +[TRAIN] Iter: 949400 Loss: 0.005301097873598337 PSNR: 27.454833984375 +[TRAIN] Iter: 949500 Loss: 0.005661993753165007 PSNR: 27.085987091064453 +[TRAIN] Iter: 949600 Loss: 0.006367468740791082 PSNR: 27.072298049926758 +[TRAIN] Iter: 949700 Loss: 0.0046056960709393024 PSNR: 28.223711013793945 +[TRAIN] Iter: 949800 Loss: 0.006252179853618145 PSNR: 26.447711944580078 +[TRAIN] Iter: 949900 Loss: 0.0047670695930719376 PSNR: 27.297096252441406 +Saved checkpoints at ./logs/TUT-out-doll-360-np/950000.tar +0 0.0009813308715820312 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 11.735342025756836 +2 12.147017002105713 +3 12.091358184814453 +4 11.808449268341064 +5 11.490576267242432 +6 11.911224842071533 +7 11.659850358963013 +8 12.14599609375 +9 12.294636249542236 +10 11.7887544631958 +11 11.881711721420288 +12 11.903687477111816 +13 12.071394920349121 +14 12.053841829299927 +15 11.968916654586792 +16 11.618177890777588 +17 11.680816411972046 +18 12.159302473068237 +19 12.130424499511719 +20 11.49152684211731 +21 11.49488377571106 +22 11.889399290084839 +23 11.895051002502441 +24 11.648394107818604 +25 12.169163942337036 +26 12.110082387924194 +27 11.550334453582764 +28 11.981581926345825 +29 12.062995433807373 +30 12.047542333602905 +31 11.617431402206421 +32 11.93230152130127 +33 11.649025201797485 +34 12.081089973449707 +35 11.697811603546143 +36 12.174269676208496 +37 11.577473402023315 +38 12.10837459564209 +39 11.61184310913086 +40 12.172461032867432 +41 11.806232929229736 +42 11.942864418029785 +43 11.619634866714478 +44 12.017042875289917 +45 11.503573656082153 +46 11.502513647079468 +47 11.971794128417969 +48 12.021707534790039 +49 12.146212577819824 +50 11.566007137298584 +51 11.881637811660767 +52 12.131627082824707 +53 11.809340953826904 +54 11.510398626327515 +55 11.8762526512146 +56 11.644288301467896 +57 11.703779697418213 +58 11.868376731872559 +59 11.67140531539917 +60 11.941019773483276 +61 11.609448671340942 +62 11.789744138717651 +63 11.85303521156311 +64 12.162088394165039 +65 11.519825220108032 +66 12.210452556610107 +67 11.525046110153198 +68 11.67043948173523 +69 12.146524906158447 +70 12.102843284606934 +71 11.606692790985107 +72 11.69262146949768 +73 11.662641286849976 +74 12.08521819114685 +75 11.858769416809082 +76 11.978135347366333 +77 11.59401798248291 +78 12.096557140350342 +79 11.654597282409668 +80 12.05958604812622 +81 11.943629264831543 +82 11.817489385604858 +83 11.70413875579834 +84 12.150411367416382 +85 11.784837245941162 +86 11.97790241241455 +87 11.984064817428589 +88 11.902649402618408 +89 12.086466073989868 +90 11.641557455062866 +91 12.198177099227905 +92 11.58706021308899 +93 11.869949579238892 +94 11.99246883392334 +95 11.527601957321167 +96 11.508845806121826 +97 12.012782573699951 +98 11.525180101394653 +99 
11.676141500473022 +100 12.132468700408936 +101 11.64182186126709 +102 12.168106079101562 +103 11.79240369796753 +104 11.863199472427368 +105 11.887890815734863 +106 12.088240623474121 +107 11.879527568817139 +108 11.647782564163208 +109 11.664523363113403 +110 11.705225944519043 +111 11.682180166244507 +112 12.166600465774536 +113 11.560817241668701 +114 12.069509029388428 +115 11.985307693481445 +116 11.983772277832031 +117 11.697963237762451 +118 11.669809818267822 +119 12.153822898864746 +Done, saving (120, 320, 640, 3) (120, 320, 640) +extras:{'raw': tensor([[[-9.8011e-01, -8.5946e-01, -5.9247e-01, -3.7539e+00], + [-9.8719e-02, 2.8095e-01, 1.7892e+00, -4.4377e+01], + [-7.4718e-01, -9.8466e-01, -8.4206e-01, -1.2214e+01], + ..., + [-1.4773e+01, -5.9865e+00, -5.2923e+00, 7.0234e+02], + [-1.0663e+01, -1.1221e+00, 1.1679e+00, 7.4445e+02], + [-8.3510e+00, 1.9211e+00, 4.6087e+00, 7.0356e+02]], + + [[ 2.1762e+00, 3.3921e+00, 4.6612e+00, -3.1081e+01], + [-3.5654e-01, 4.8083e-01, 1.5217e+00, -2.1908e+01], + [-2.9823e-01, 5.6912e-01, 1.5367e+00, -2.1367e+01], + ..., + [ 3.3565e+00, 6.1408e+00, 1.4136e+01, 5.4252e+02], + [ 4.3028e+00, 7.4348e+00, 1.6239e+01, 5.7560e+02], + [ 3.3279e+00, 6.1969e+00, 1.5024e+01, 5.8075e+02]], + + [[-1.2874e+00, -1.0431e-01, 1.3995e+00, -2.9734e+01], + [-1.0453e+00, -2.6610e-01, 6.4850e-01, -2.1927e+00], + [-9.7560e-01, -1.8648e-01, 7.2652e-01, -1.2625e+00], + ..., + [-1.3886e+01, -1.3349e+01, -2.4556e+00, 4.0353e+02], + [-1.4077e+01, -1.3588e+01, -2.4633e+00, 3.9921e+02], + [-1.4107e+01, -1.3778e+01, -3.3889e+00, 3.9855e+02]], + + ..., + + [[-1.6387e+00, -2.6254e-02, 2.0217e-01, -4.7625e+01], + [-7.3329e-01, 4.0965e-01, 1.8462e+00, -4.1247e+01], + [-9.4708e-01, 2.9833e-03, 1.1445e+00, -3.4653e+01], + ..., + [-7.5097e+00, -6.9093e+00, -4.5608e-01, 3.6505e+02], + [-6.5338e+00, -5.6210e+00, 4.1515e-01, 3.5363e+02], + [-6.6975e+00, -5.9986e+00, -2.6972e-01, 3.4817e+02]], + + [[-1.1690e+00, -1.7733e-01, 2.9530e-01, -2.4069e+01], + [-1.0693e+00, -2.8745e-01, 7.3658e-01, -2.0815e+01], + [-5.4480e-01, 3.5108e-02, 9.0873e-01, -1.9608e+01], + ..., + [-8.5084e+00, -5.9245e+00, -1.7990e+00, 5.5639e+02], + [-8.7214e+00, -5.9610e+00, -2.1355e+00, 5.5917e+02], + [-8.1299e+00, -5.9601e+00, -1.6444e+00, 5.7152e+02]], + + [[-8.2407e-01, -9.0866e-01, -8.9772e-01, -2.9845e-01], + [-1.0370e+00, -1.2423e+00, -1.4224e+00, -8.8643e+00], + [ 3.9065e-01, 7.6511e-01, 2.3950e+00, -4.8723e+01], + ..., + [-1.5007e+01, -4.8526e+00, -3.2309e+00, 7.7973e+02], + [-9.7751e+00, 8.3210e-01, 4.1151e+00, 7.5100e+02], + [-9.7922e+00, 1.2293e+00, 4.5201e+00, 7.9313e+02]]], + grad_fn=), 'rgb0': tensor([[0.2245, 0.2071, 0.2266], + [0.3678, 0.5377, 0.7218], + [0.2582, 0.4278, 0.6595], + ..., + [0.2810, 0.4749, 0.7062], + [0.3715, 0.5240, 0.7349], + [0.2708, 0.2357, 0.2358]], grad_fn=), 'disp0': tensor([645.9869, 52.2470, 54.0105, ..., 80.7778, 53.2481, 580.8880], + grad_fn=), 'acc0': tensor([1., 1., 1., ..., 1., 1., 1.], grad_fn=), 'z_std': tensor([0.3124, 0.0037, 0.0048, ..., 0.0045, 0.0034, 0.2699])} +0 0.0008454322814941406 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 11.616820812225342 +2 11.682847738265991 +3 11.843897342681885 +4 11.470674276351929 +5 11.597589254379272 +6 11.771100044250488 +7 11.67703366279602 +8 11.890712976455688 +9 11.66837477684021 +10 12.082982301712036 +11 12.096890211105347 +12 11.65434193611145 +13 12.16369366645813 +14 11.979231119155884 +15 11.965043544769287 +16 11.610087633132935 +17 11.918352127075195 +18 11.943112850189209 +19 11.565428972244263 +20 
11.872986316680908 +21 11.899627923965454 +22 12.179458141326904 +23 11.553953409194946 +24 11.95689058303833 +25 11.620047330856323 +26 11.911049127578735 +27 11.66315484046936 +28 11.650083303451538 +29 11.880302906036377 +30 12.181101560592651 +31 11.817535877227783 +32 11.614338159561157 +33 12.117663621902466 +34 11.672073602676392 +35 12.093090295791626 +36 11.63050103187561 +37 12.191184282302856 +38 11.570719003677368 +39 12.095319509506226 +40 11.897087812423706 +41 11.851146221160889 +42 11.954004049301147 +43 12.096599102020264 +44 11.577987432479858 +45 11.672546625137329 +46 11.711666822433472 +47 11.670909404754639 +48 11.678704500198364 +49 12.001405239105225 +50 11.516236543655396 +51 11.8910813331604 +52 12.127930164337158 +53 11.611127614974976 +54 11.735725164413452 +55 12.133546113967896 +56 11.56894063949585 +57 11.916675567626953 +58 11.634127140045166 +59 12.142159461975098 +60 11.644175291061401 +61 11.865737199783325 +62 12.129846096038818 +63 11.633377075195312 +64 11.669060468673706 +65 12.079366445541382 +66 11.879257678985596 +67 11.890104293823242 +68 11.655473232269287 +69 11.870456218719482 +70 11.695963382720947 +71 11.92727255821228 +72 11.627110242843628 +73 12.183845043182373 +74 11.566659450531006 +75 11.871804475784302 +76 12.066084861755371 +77 11.91842007637024 +78 11.992023229598999 +79 11.539597272872925 +80 12.129818677902222 +81 12.120932817459106 +82 11.561212301254272 +83 11.502758502960205 +84 12.220677852630615 +85 11.518246173858643 +86 12.141711950302124 +87 11.633155345916748 +88 11.889221668243408 +89 11.665103673934937 +90 11.86308217048645 +91 11.681755065917969 +92 11.694425106048584 +93 12.167213916778564 +94 11.83508849143982 +95 12.035823822021484 +96 11.908304452896118 +97 11.74797511100769 +98 11.575546026229858 +99 11.638254642486572 +100 11.904550075531006 +101 11.897032976150513 +102 11.861239910125732 +103 12.125560283660889 +104 11.615126371383667 +105 11.691056489944458 +106 11.676148653030396 +107 11.677475690841675 +108 12.25339674949646 +109 11.483638048171997 +110 12.166354179382324 +111 11.949247121810913 +112 11.615838766098022 +113 11.993391275405884 +114 11.889051914215088 +115 11.885834693908691 +116 11.874499559402466 +117 11.668907403945923 +118 11.675294399261475 +119 12.26676344871521 +test poses shape torch.Size([4, 3, 4]) +0 0.001096963882446289 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 11.976029396057129 +2 11.947474718093872 +3 11.769622325897217 +Saved test set +[TRAIN] Iter: 950000 Loss: 0.006125898566097021 PSNR: 26.779672622680664 +[TRAIN] Iter: 950100 Loss: 0.0067462376318871975 PSNR: 26.3758487701416 +[TRAIN] Iter: 950200 Loss: 0.005325422156602144 PSNR: 27.315353393554688 +[TRAIN] Iter: 950300 Loss: 0.006826599594205618 PSNR: 26.034841537475586 +[TRAIN] Iter: 950400 Loss: 0.005441286135464907 PSNR: 27.101207733154297 +[TRAIN] Iter: 950500 Loss: 0.00553504005074501 PSNR: 27.630020141601562 +[TRAIN] Iter: 950600 Loss: 0.005849309265613556 PSNR: 27.247825622558594 +[TRAIN] Iter: 950700 Loss: 0.005971293896436691 PSNR: 26.828304290771484 +[TRAIN] Iter: 950800 Loss: 0.005115816835314035 PSNR: 27.370594024658203 +[TRAIN] Iter: 950900 Loss: 0.006319970823824406 PSNR: 27.298147201538086 +[TRAIN] Iter: 951000 Loss: 0.005044708494096994 PSNR: 28.27821922302246 +[TRAIN] Iter: 951100 Loss: 0.006761317141354084 PSNR: 26.540802001953125 +[TRAIN] Iter: 951200 Loss: 0.005193657241761684 PSNR: 27.679433822631836 +[TRAIN] Iter: 951300 Loss: 0.004668597597628832 PSNR: 28.118188858032227 +[TRAIN] Iter: 951400 
Loss: 0.006112680770456791 PSNR: 26.55536651611328 +[TRAIN] Iter: 951500 Loss: 0.005757233127951622 PSNR: 27.0118465423584 +[TRAIN] Iter: 951600 Loss: 0.006366287358105183 PSNR: 26.800334930419922 +[TRAIN] Iter: 951700 Loss: 0.005679527763277292 PSNR: 27.93496322631836 +[TRAIN] Iter: 951800 Loss: 0.0062212590128183365 PSNR: 26.411989212036133 +[TRAIN] Iter: 951900 Loss: 0.005990619771182537 PSNR: 26.465282440185547 +[TRAIN] Iter: 952000 Loss: 0.005846226122230291 PSNR: 27.792882919311523 +[TRAIN] Iter: 952100 Loss: 0.006805076263844967 PSNR: 26.63094711303711 +[TRAIN] Iter: 952200 Loss: 0.006201086100190878 PSNR: 27.099205017089844 +[TRAIN] Iter: 952300 Loss: 0.007458120584487915 PSNR: 26.580434799194336 +[TRAIN] Iter: 952400 Loss: 0.004052798729389906 PSNR: 29.266401290893555 +[TRAIN] Iter: 952500 Loss: 0.0057349251583218575 PSNR: 26.833999633789062 +[TRAIN] Iter: 952600 Loss: 0.005089627578854561 PSNR: 27.47045135498047 +[TRAIN] Iter: 952700 Loss: 0.0059000225737690926 PSNR: 27.410371780395508 +[TRAIN] Iter: 952800 Loss: 0.006155465263873339 PSNR: 26.358314514160156 +[TRAIN] Iter: 952900 Loss: 0.007592394016683102 PSNR: 25.844186782836914 +[TRAIN] Iter: 953000 Loss: 0.007343114819377661 PSNR: 25.623355865478516 +[TRAIN] Iter: 953100 Loss: 0.005854859482496977 PSNR: 27.117658615112305 +[TRAIN] Iter: 953200 Loss: 0.005433353595435619 PSNR: 27.30133628845215 +[TRAIN] Iter: 953300 Loss: 0.006541873794049025 PSNR: 25.965280532836914 +[TRAIN] Iter: 953400 Loss: 0.0069919973611831665 PSNR: 25.58442497253418 +[TRAIN] Iter: 953500 Loss: 0.007843740284442902 PSNR: 25.36309814453125 +[TRAIN] Iter: 953600 Loss: 0.0065604024566709995 PSNR: 26.09440803527832 +[TRAIN] Iter: 953700 Loss: 0.005829926580190659 PSNR: 26.30717658996582 +[TRAIN] Iter: 953800 Loss: 0.006243897136300802 PSNR: 26.524776458740234 +[TRAIN] Iter: 953900 Loss: 0.00536414934322238 PSNR: 27.2330265045166 +[TRAIN] Iter: 954000 Loss: 0.004681231454014778 PSNR: 28.084762573242188 +[TRAIN] Iter: 954100 Loss: 0.004507923498749733 PSNR: 29.008373260498047 +[TRAIN] Iter: 954200 Loss: 0.00466598104685545 PSNR: 27.974987030029297 +[TRAIN] Iter: 954300 Loss: 0.006618172861635685 PSNR: 27.5018253326416 +[TRAIN] Iter: 954400 Loss: 0.006787794642150402 PSNR: 26.13282012939453 +[TRAIN] Iter: 954500 Loss: 0.006082979962229729 PSNR: 27.659076690673828 +[TRAIN] Iter: 954600 Loss: 0.005594881717115641 PSNR: 26.98853302001953 +[TRAIN] Iter: 954700 Loss: 0.005347671918570995 PSNR: 27.22296142578125 +[TRAIN] Iter: 954800 Loss: 0.006206773221492767 PSNR: 26.619340896606445 +[TRAIN] Iter: 954900 Loss: 0.006582074332982302 PSNR: 26.110912322998047 +[TRAIN] Iter: 955000 Loss: 0.004241462331265211 PSNR: 29.053247451782227 +[TRAIN] Iter: 955100 Loss: 0.004722507670521736 PSNR: 28.746442794799805 +[TRAIN] Iter: 955200 Loss: 0.005145491100847721 PSNR: 27.57731819152832 +[TRAIN] Iter: 955300 Loss: 0.005533011630177498 PSNR: 27.265380859375 +[TRAIN] Iter: 955400 Loss: 0.006767631508409977 PSNR: 26.38860511779785 +[TRAIN] Iter: 955500 Loss: 0.0051277317106723785 PSNR: 28.553247451782227 +[TRAIN] Iter: 955600 Loss: 0.005763933062553406 PSNR: 27.158695220947266 +[TRAIN] Iter: 955700 Loss: 0.005705936346203089 PSNR: 26.863100051879883 +[TRAIN] Iter: 955800 Loss: 0.00699202623218298 PSNR: 26.23191261291504 +[TRAIN] Iter: 955900 Loss: 0.0063831862062215805 PSNR: 26.699010848999023 +[TRAIN] Iter: 956000 Loss: 0.006735306233167648 PSNR: 26.439817428588867 +[TRAIN] Iter: 956100 Loss: 0.007041040807962418 PSNR: 26.271577835083008 +[TRAIN] Iter: 956200 Loss: 
0.005612011533230543 PSNR: 27.09124183654785 +[TRAIN] Iter: 956300 Loss: 0.0071724154986441135 PSNR: 26.036287307739258 +[TRAIN] Iter: 956400 Loss: 0.006210948806256056 PSNR: 26.65223503112793 +[TRAIN] Iter: 956500 Loss: 0.006448325235396624 PSNR: 26.53167152404785 +[TRAIN] Iter: 956600 Loss: 0.006065421272069216 PSNR: 27.513784408569336 +[TRAIN] Iter: 956700 Loss: 0.007120454218238592 PSNR: 26.06322479248047 +[TRAIN] Iter: 956800 Loss: 0.0063374522142112255 PSNR: 26.869609832763672 +[TRAIN] Iter: 956900 Loss: 0.00487481290474534 PSNR: 28.42340850830078 +[TRAIN] Iter: 957000 Loss: 0.005819786339998245 PSNR: 26.706275939941406 +[TRAIN] Iter: 957100 Loss: 0.006099049933254719 PSNR: 26.83574676513672 +[TRAIN] Iter: 957200 Loss: 0.006378873251378536 PSNR: 27.409587860107422 +[TRAIN] Iter: 957300 Loss: 0.006021721288561821 PSNR: 27.363643646240234 +[TRAIN] Iter: 957400 Loss: 0.006725873798131943 PSNR: 26.303194046020508 +[TRAIN] Iter: 957500 Loss: 0.007325365673750639 PSNR: 26.50849723815918 +[TRAIN] Iter: 957600 Loss: 0.004854509141296148 PSNR: 28.292041778564453 +[TRAIN] Iter: 957700 Loss: 0.006906459107995033 PSNR: 26.05760383605957 +[TRAIN] Iter: 957800 Loss: 0.006750493310391903 PSNR: 26.11008071899414 +[TRAIN] Iter: 957900 Loss: 0.005659898743033409 PSNR: 26.830257415771484 +[TRAIN] Iter: 958000 Loss: 0.0052631935104727745 PSNR: 28.183177947998047 +[TRAIN] Iter: 958100 Loss: 0.006748731713742018 PSNR: 26.100839614868164 +[TRAIN] Iter: 958200 Loss: 0.007609684951603413 PSNR: 26.049013137817383 +[TRAIN] Iter: 958300 Loss: 0.004790082573890686 PSNR: 29.000593185424805 +[TRAIN] Iter: 958400 Loss: 0.005939808674156666 PSNR: 27.14583969116211 +[TRAIN] Iter: 958500 Loss: 0.007772802375257015 PSNR: 25.952518463134766 +[TRAIN] Iter: 958600 Loss: 0.005052846856415272 PSNR: 27.846389770507812 +[TRAIN] Iter: 958700 Loss: 0.006900085136294365 PSNR: 26.234500885009766 +[TRAIN] Iter: 958800 Loss: 0.004968490917235613 PSNR: 28.420705795288086 +[TRAIN] Iter: 958900 Loss: 0.005455708131194115 PSNR: 27.304967880249023 +[TRAIN] Iter: 959000 Loss: 0.005091894883662462 PSNR: 27.987897872924805 +[TRAIN] Iter: 959100 Loss: 0.005830142181366682 PSNR: 26.729337692260742 +[TRAIN] Iter: 959200 Loss: 0.005047322250902653 PSNR: 28.03571128845215 +[TRAIN] Iter: 959300 Loss: 0.0058725918643176556 PSNR: 27.24321937561035 +[TRAIN] Iter: 959400 Loss: 0.005867115221917629 PSNR: 27.363985061645508 +[TRAIN] Iter: 959500 Loss: 0.006086289882659912 PSNR: 26.585180282592773 +[TRAIN] Iter: 959600 Loss: 0.006223730742931366 PSNR: 27.132308959960938 +[TRAIN] Iter: 959700 Loss: 0.007321390323340893 PSNR: 26.317413330078125 +[TRAIN] Iter: 959800 Loss: 0.006514281965792179 PSNR: 27.390867233276367 +[TRAIN] Iter: 959900 Loss: 0.006130955647677183 PSNR: 26.68623161315918 +Saved checkpoints at ./logs/TUT-out-doll-360-np/960000.tar +[TRAIN] Iter: 960000 Loss: 0.00534487422555685 PSNR: 27.10392189025879 +[TRAIN] Iter: 960100 Loss: 0.004623282700777054 PSNR: 28.577720642089844 +[TRAIN] Iter: 960200 Loss: 0.006076240912079811 PSNR: 26.86029815673828 +[TRAIN] Iter: 960300 Loss: 0.006447948981076479 PSNR: 26.361862182617188 +[TRAIN] Iter: 960400 Loss: 0.007445985451340675 PSNR: 26.209232330322266 +[TRAIN] Iter: 960500 Loss: 0.007469626143574715 PSNR: 25.526668548583984 +[TRAIN] Iter: 960600 Loss: 0.0073073431849479675 PSNR: 26.20951271057129 +[TRAIN] Iter: 960700 Loss: 0.005883918143808842 PSNR: 27.117395401000977 +[TRAIN] Iter: 960800 Loss: 0.007150398567318916 PSNR: 27.241830825805664 +[TRAIN] Iter: 960900 Loss: 0.007509668357670307 PSNR: 
25.406448364257812 +[TRAIN] Iter: 961000 Loss: 0.005372314713895321 PSNR: 27.687307357788086 +[TRAIN] Iter: 961100 Loss: 0.0071449787355959415 PSNR: 26.31259536743164 +[TRAIN] Iter: 961200 Loss: 0.006489108316600323 PSNR: 26.736541748046875 +[TRAIN] Iter: 961300 Loss: 0.006292711943387985 PSNR: 26.62285614013672 +[TRAIN] Iter: 961400 Loss: 0.006073185242712498 PSNR: 27.83125114440918 +[TRAIN] Iter: 961500 Loss: 0.007019725628197193 PSNR: 25.777799606323242 +[TRAIN] Iter: 961600 Loss: 0.005556931719183922 PSNR: 26.968524932861328 +[TRAIN] Iter: 961700 Loss: 0.005917818285524845 PSNR: 27.214208602905273 +[TRAIN] Iter: 961800 Loss: 0.004983983002603054 PSNR: 27.812583923339844 +[TRAIN] Iter: 961900 Loss: 0.0057701836340129375 PSNR: 27.469114303588867 +[TRAIN] Iter: 962000 Loss: 0.0060004061087965965 PSNR: 28.31900405883789 +[TRAIN] Iter: 962100 Loss: 0.005731161683797836 PSNR: 26.866559982299805 +[TRAIN] Iter: 962200 Loss: 0.006321433465927839 PSNR: 26.15068244934082 +[TRAIN] Iter: 962300 Loss: 0.005897842347621918 PSNR: 27.359983444213867 +[TRAIN] Iter: 962400 Loss: 0.006708958186209202 PSNR: 26.225284576416016 +[TRAIN] Iter: 962500 Loss: 0.004740719683468342 PSNR: 28.52193260192871 +[TRAIN] Iter: 962600 Loss: 0.006151418201625347 PSNR: 27.04401969909668 +[TRAIN] Iter: 962700 Loss: 0.005073047708719969 PSNR: 28.620758056640625 +[TRAIN] Iter: 962800 Loss: 0.0067646256648004055 PSNR: 26.1539306640625 +[TRAIN] Iter: 962900 Loss: 0.006117619574069977 PSNR: 27.481157302856445 +[TRAIN] Iter: 963000 Loss: 0.006080853287130594 PSNR: 26.503211975097656 +[TRAIN] Iter: 963100 Loss: 0.00545979430899024 PSNR: 26.89580726623535 +[TRAIN] Iter: 963200 Loss: 0.0046568517573177814 PSNR: 28.57975959777832 +[TRAIN] Iter: 963300 Loss: 0.004554676823318005 PSNR: 28.456119537353516 +[TRAIN] Iter: 963400 Loss: 0.005689781624823809 PSNR: 27.530654907226562 +[TRAIN] Iter: 963500 Loss: 0.005704127252101898 PSNR: 27.47835350036621 +[TRAIN] Iter: 963600 Loss: 0.006103695370256901 PSNR: 26.650251388549805 +[TRAIN] Iter: 963700 Loss: 0.005765237845480442 PSNR: 26.890867233276367 +[TRAIN] Iter: 963800 Loss: 0.005945302080363035 PSNR: 27.489913940429688 +[TRAIN] Iter: 963900 Loss: 0.0068898880854249 PSNR: 26.408658981323242 +[TRAIN] Iter: 964000 Loss: 0.006210788618773222 PSNR: 26.263124465942383 +[TRAIN] Iter: 964100 Loss: 0.006753402762115002 PSNR: 25.96259117126465 +[TRAIN] Iter: 964200 Loss: 0.005688555538654327 PSNR: 28.31406021118164 +[TRAIN] Iter: 964300 Loss: 0.005447600968182087 PSNR: 27.71221351623535 +[TRAIN] Iter: 964400 Loss: 0.005163009278476238 PSNR: 28.43905258178711 +[TRAIN] Iter: 964500 Loss: 0.0058836983516812325 PSNR: 26.772598266601562 +[TRAIN] Iter: 964600 Loss: 0.006147951819002628 PSNR: 26.37834358215332 +[TRAIN] Iter: 964700 Loss: 0.007364542223513126 PSNR: 25.970888137817383 +[TRAIN] Iter: 964800 Loss: 0.00595217477530241 PSNR: 27.43221664428711 +[TRAIN] Iter: 964900 Loss: 0.005941922776401043 PSNR: 27.766740798950195 +[TRAIN] Iter: 965000 Loss: 0.006135948933660984 PSNR: 27.409475326538086 +[TRAIN] Iter: 965100 Loss: 0.005762163549661636 PSNR: 27.563478469848633 +[TRAIN] Iter: 965200 Loss: 0.006082221865653992 PSNR: 26.44025230407715 +[TRAIN] Iter: 965300 Loss: 0.006374377757310867 PSNR: 26.31609344482422 +[TRAIN] Iter: 965400 Loss: 0.005221232771873474 PSNR: 28.338844299316406 +[TRAIN] Iter: 965500 Loss: 0.0071122488006949425 PSNR: 26.048118591308594 +[TRAIN] Iter: 965600 Loss: 0.005852470174431801 PSNR: 27.142520904541016 +[TRAIN] Iter: 965700 Loss: 0.005683680064976215 PSNR: 26.623666763305664 
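The `extras:{'raw': ..., 'rgb0': ..., 'disp0': ..., 'acc0': ..., 'z_std': ...}` dumps printed after each video render above expose per-ray byproducts of the renderer: `raw` is the network output for every sample along a ray (three RGB logits plus a density), `rgb0`/`disp0`/`acc0` are the coarse-pass counterparts of the final color, disparity, and accumulated opacity (the all-ones `acc0` means every ray saturates), and `z_std` is the per-ray standard deviation of the resampled depths. A sketch of how `raw` becomes those maps, assuming the standard NeRF `raw2outputs` volume-rendering convention:

```python
import torch
import torch.nn.functional as F

def decode_raw(raw, z_vals, ray_dirs):
    """Turn raw network output into the rendered maps seen in `extras`.

    raw:      [N_rays, N_samples, 4]  RGB logits + density per sample
    z_vals:   [N_rays, N_samples]     sample depths along each ray
    ray_dirs: [N_rays, 3]             ray directions
    """
    dists = z_vals[..., 1:] - z_vals[..., :-1]
    dists = F.pad(dists, (0, 1), value=1e10)                # last interval is open-ended
    dists = dists * torch.norm(ray_dirs[..., None, :], dim=-1)
    rgb = torch.sigmoid(raw[..., :3])                       # logits -> colors in [0, 1]
    alpha = 1.0 - torch.exp(-F.relu(raw[..., 3]) * dists)   # per-sample opacity
    # transmittance T_i = prod_{j<i} (1 - alpha_j); weights w_i = T_i * alpha_i
    trans = torch.cumprod(
        torch.cat([torch.ones_like(alpha[..., :1]), 1.0 - alpha + 1e-10], -1), -1)[..., :-1]
    weights = alpha * trans
    rgb_map = torch.sum(weights[..., None] * rgb, -2)       # cf. 'rgb0': [N_rays, 3]
    acc_map = torch.sum(weights, -1)                        # cf. 'acc0': ~1 on opaque rays
    return rgb_map, acc_map, weights
```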
+[TRAIN] Iter: 965800 Loss: 0.005280359648168087 PSNR: 28.55413246154785 +[TRAIN] Iter: 965900 Loss: 0.006977633573114872 PSNR: 26.395360946655273 +[TRAIN] Iter: 966000 Loss: 0.006953477393835783 PSNR: 26.364276885986328 +[TRAIN] Iter: 966100 Loss: 0.005900429096072912 PSNR: 27.908443450927734 +[TRAIN] Iter: 966200 Loss: 0.0065434579737484455 PSNR: 26.10251808166504 +[TRAIN] Iter: 966300 Loss: 0.0061164903454482555 PSNR: 26.351825714111328 +[TRAIN] Iter: 966400 Loss: 0.005636264570057392 PSNR: 26.945837020874023 +[TRAIN] Iter: 966500 Loss: 0.006790433544665575 PSNR: 26.638933181762695 +[TRAIN] Iter: 966600 Loss: 0.005694694817066193 PSNR: 27.288738250732422 +[TRAIN] Iter: 966700 Loss: 0.005345101468265057 PSNR: 27.234342575073242 +[TRAIN] Iter: 966800 Loss: 0.006324991583824158 PSNR: 26.66228485107422 +[TRAIN] Iter: 966900 Loss: 0.0065634665079414845 PSNR: 26.618806838989258 +[TRAIN] Iter: 967000 Loss: 0.005353197455406189 PSNR: 27.371379852294922 +[TRAIN] Iter: 967100 Loss: 0.006333403289318085 PSNR: 26.369009017944336 +[TRAIN] Iter: 967200 Loss: 0.0077963280491530895 PSNR: 25.742387771606445 +[TRAIN] Iter: 967300 Loss: 0.007808148395270109 PSNR: 25.699525833129883 +[TRAIN] Iter: 967400 Loss: 0 +0 0.0010445117950439453 +torch.Size([320, 640, 3]) torch.Size([320, 640]) +1 24.913986682891846 +2 25.283764839172363 +3 25.142056941986084 +Saved test set +[TRAIN] Iter: 950000 Loss: 0.009838364087045193 PSNR: 23.72943878173828 +[TRAIN] Iter: 950100 Loss: 0.009716866537928581 PSNR: 24.251604080200195 +[TRAIN] Iter: 950200 Loss: 0.009068582206964493 PSNR: 24.308982849121094 +[TRAIN] Iter: 950300 Loss: 0.0080677829682827 PSNR: 25.76264190673828 +[TRAIN] Iter: 950400 Loss: 0.008509504608809948 PSNR: 25.00338363647461 +[TRAIN] Iter: 950500 Loss: 0.011526435613632202 PSNR: 23.730772018432617 +[TRAIN] Iter: 950600 Loss: 0.007986105047166348 PSNR: 25.03352928161621 +[TRAIN] Iter: 950700 Loss: 0.010073158890008926 PSNR: 24.14609146118164 +[TRAIN] Iter: 950800 Loss: 0.010061411187052727 PSNR: 24.504440307617188 +[TRAIN] Iter: 950900 Loss: 0.008879126980900764 PSNR: 25.279605865478516 +[TRAIN] Iter: 951000 Loss: 0.011654950678348541 PSNR: 23.050636291503906 +[TRAIN] Iter: 951100 Loss: 0.010350564494729042 PSNR: 24.0596866607666 +[TRAIN] Iter: 951200 Loss: 0.009923812933266163 PSNR: 23.55295181274414 +[TRAIN] Iter: 951300 Loss: 0.010600554756820202 PSNR: 23.852680206298828 +[TRAIN] Iter: 951400 Loss: 0.008310101926326752 PSNR: 24.49367904663086 +[TRAIN] Iter: 951500 Loss: 0.009002771228551865 PSNR: 24.864133834838867 +[TRAIN] Iter: 951600 Loss: 0.009722720831632614 PSNR: 24.48979949951172 +[TRAIN] Iter: 951700 Loss: 0.011510932818055153 PSNR: 23.176475524902344 +[TRAIN] Iter: 951800 Loss: 0.009026114828884602 PSNR: 24.51157569885254 +[TRAIN] Iter: 951900 Loss: 0.010114332661032677 PSNR: 24.215551376342773 +[TRAIN] Iter: 952000 Loss: 0.010657310485839844 PSNR: 23.483020782470703 +[TRAIN] Iter: 952100 Loss: 0.010157467797398567 PSNR: 24.08388328552246 +[TRAIN] Iter: 952200 Loss: 0.010739728808403015 PSNR: 23.692277908325195 +[TRAIN] Iter: 952300 Loss: 0.012010042555630207 PSNR: 23.093957901000977 +[TRAIN] Iter: 952400 Loss: 0.01149878092110157 PSNR: 23.501079559326172 +[TRAIN] Iter: 952500 Loss: 0.010499948635697365 PSNR: 24.11112403869629 +[TRAIN] Iter: 952600 Loss: 0.01003970205783844 PSNR: 23.816970825195312 +[TRAIN] Iter: 952700 Loss: 0.01080283522605896 PSNR: 23.561817169189453 +[TRAIN] Iter: 952800 Loss: 0.00892695039510727 PSNR: 24.96711540222168 +[TRAIN] Iter: 952900 Loss: 0.010001717135310173 PSNR:
+[TRAIN] Iter: 950000 Loss: 0.009838364087045193 PSNR: 23.72943878173828
[... training log, iterations 950100-959900: Loss ~0.0075-0.0126, PSNR ~23.0-25.8 ...]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/960000.tar
[... training log, iterations 960000-969900: Loss ~0.0078-0.0125, PSNR ~23.0-25.9 ...]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/970000.tar
[... training log, iterations 970000-979900: Loss ~0.0077-0.0125, PSNR ~22.8-25.4 ...]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/980000.tar
[... training log, iterations 980000-989900: Loss ~0.0075-0.0125, PSNR ~22.9-25.9 ...]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/990000.tar
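Each of the .tar files above is a plain torch checkpoint. A minimal sketch for inspecting and restoring one, assuming it follows nerf-pytorch's checkpoint layout (keys 'global_step', 'network_fn_state_dict', 'network_fine_state_dict', 'optimizer_state_dict' -- an assumption worth verifying against the actual file):

```python
import torch

# Path copied from the log line above; adjust as needed.
ckpt = torch.load('./logs/TUT-hikage-doll-360-np/990000.tar', map_location='cpu')

print(sorted(ckpt.keys()))  # expected: global_step, network_fine_state_dict,
                            #           network_fn_state_dict, optimizer_state_dict
print(ckpt['global_step'])  # iteration at which the checkpoint was written

# Once the coarse/fine MLPs have been rebuilt with the same architecture
# flags as the training run, their weights can be restored with, e.g.:
# model_fine.load_state_dict(ckpt['network_fine_state_dict'])
```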
[... training log, iterations 990000-999900: Loss ~0.0080-0.0143, PSNR ~22.4-25.8 ...]
+Saved checkpoints at ./logs/TUT-hikage-doll-360-np/1000000.tar
+0 0.0009012222290039062
+torch.Size([320, 640, 3]) torch.Size([320, 640])
[... per-frame render timings for frames 1-119: ~19.4-26.6 each ...]
+Done, saving (120, 320, 640, 3) (120, 320, 640)
+extras:{'raw': tensor([[[ 5.7524e-01,  1.2828e+00,  2.1163e+00, -1.8420e+01], ...,
+        [-3.0652e+00, -2.1982e-01,  1.1842e+00,  2.3332e+02]], ...],
+        grad_fn=<...>),
+ 'rgb0': tensor([[0.2949, 0.3865, 0.4930], ..., [0.5662, 0.5079, 0.4630]], grad_fn=<...>),
+ 'disp0': tensor([  38.4833, 1413.1881,   20.9003,  ...,   10.6705,  152.8532,
+          20.0804], grad_fn=<...>),
+ 'acc0': tensor([1., 1., 1.,  ..., 1., 1., 1.], grad_fn=<...>),
+ 'z_std': tensor([0.0038, 0.2699, 0.0060,  ..., 0.0044, 0.2353, 0.0059])}
+0 0.0008363723754882812
+torch.Size([320, 640, 3]) torch.Size([320, 640])
[... per-frame render timings for frames 1-119: ~17.4-26.7 each ...]
+test poses shape torch.Size([4, 3, 4])
+0 0.0008020401000976562
+torch.Size([320, 640, 3]) torch.Size([320, 640])
+1 25.106626510620117
+2 24.97494888305664
+3 25.868245124816895
+Saved test set
+[TRAIN] Iter: 1000000 Loss: 0.01056873518973589 PSNR: 24.07119369506836
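The extras dump above shows the renderer's side outputs: 'raw' holds the per-sample network outputs -- three unbounded colour channels plus an unbounded density channel, which is why its fourth column ranges from large negative values up to ~5.6e+02 -- while 'rgb0', 'disp0' and 'acc0' are the coarse network's composited colour, disparity and accumulated opacity, and 'z_std' is the per-ray spread of the resampled depths. A minimal sketch of the compositing step that turns 'raw' into pixel colours, assuming the standard nerf-pytorch raw2outputs formulation (alphas 1 - exp(-sigma * delta) accumulated along each ray); shapes and the function name mirror that implementation but are assumptions here:

```python
import torch
import torch.nn.functional as F

def raw2outputs(raw, z_vals, rays_d):
    """Composite raw outputs [N_rays, N_samples, 4] into per-ray colours.

    raw[..., :3] are unbounded colour logits (sigmoid -> RGB in [0, 1]);
    raw[..., 3] is unbounded density (relu -> sigma >= 0).
    """
    # Spacing between adjacent samples; pad the last interval with a huge
    # value so the final sample can absorb all remaining transmittance.
    dists = z_vals[..., 1:] - z_vals[..., :-1]
    dists = torch.cat([dists, 1e10 * torch.ones_like(dists[..., :1])], dim=-1)
    dists = dists * torch.norm(rays_d[..., None, :], dim=-1)

    rgb = torch.sigmoid(raw[..., :3])                     # [N_rays, N_samples, 3]
    alpha = 1. - torch.exp(-F.relu(raw[..., 3]) * dists)  # 1 - exp(-sigma * delta)

    # T_i = prod_{j<i} (1 - alpha_j), then weight_i = T_i * alpha_i.
    trans = torch.cumprod(
        torch.cat([torch.ones_like(alpha[..., :1]), 1. - alpha + 1e-10], dim=-1),
        dim=-1)[..., :-1]
    weights = trans * alpha

    rgb_map = torch.sum(weights[..., None] * rgb, dim=-2)  # composited colour
    acc_map = torch.sum(weights, dim=-1)                   # opacity, cf. 'acc0'
    return rgb_map, acc_map, weights

# Tiny smoke test with random inputs (4 rays, 8 samples per ray):
raw = torch.randn(4, 8, 4)
z_vals = torch.sort(torch.rand(4, 8), dim=-1).values
rays_d = torch.randn(4, 3)
rgb_map, acc_map, weights = raw2outputs(raw, z_vals, rays_d)
print(rgb_map.shape, acc_map.shape)  # torch.Size([4, 3]) torch.Size([4])
```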