From cc5d1851a79e63e6fc31e2853b8853e8be52bfa5 Mon Sep 17 00:00:00 2001 From: LutaoChu <30695251+LutaoChu@users.noreply.github.com> Date: Mon, 7 Dec 2020 14:34:07 +0800 Subject: [PATCH 01/52] Update transforms.py --- dygraph/paddleseg/transforms/transforms.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dygraph/paddleseg/transforms/transforms.py b/dygraph/paddleseg/transforms/transforms.py index 1fad4c0559..7f285ed340 100644 --- a/dygraph/paddleseg/transforms/transforms.py +++ b/dygraph/paddleseg/transforms/transforms.py @@ -742,7 +742,7 @@ class RandomDistort: contrast_prob (float, optional): A probability of adjusting contrast. Default: 0.5. saturation_range (float, optional): A range of saturation. Default: 0.5. saturation_prob (float, optional): A probability of adjusting saturation. Default: 0.5. - hue_range (int, optional): A range of hue. Default: 0.5. + hue_range (int, optional): A range of hue. Default: 18. hue_prob (float, optional): A probability of adjusting hue. Default: 0.5. 
""" From 2f3c138d533cbf916da7c858feb7ed7b17385fc9 Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Tue, 8 Dec 2020 15:10:02 +0800 Subject: [PATCH 02/52] Update README.md --- dygraph/configs/gcnet/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dygraph/configs/gcnet/README.md b/dygraph/configs/gcnet/README.md index e330abdc6a..9af86240c7 100644 --- a/dygraph/configs/gcnet/README.md +++ b/dygraph/configs/gcnet/README.md @@ -17,5 +17,5 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| -|GCNet|ResNet50_OS8|512x512|40000|80.32%|80.39%|80.54%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_renet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_renet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=86cbaac3fe98fdbb635e246c2c02e87b)| -|GCNet|ResNet101_OS8|512x512|40000|79.64%|79.59%|79.94%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_renet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_renet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=73f0484b034f6c27bf481c7a3b05e9ae)| +|GCNet|ResNet50_OS8|512x512|40000|80.32%|80.39%|80.54%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=86cbaac3fe98fdbb635e246c2c02e87b)| +|GCNet|ResNet101_OS8|512x512|40000|79.64%|79.59%|79.94%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| 
[log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/gcnet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=73f0484b034f6c27bf481c7a3b05e9ae)| From 7247c795c46bf2fe4024f0a5582d26c855dff4f4 Mon Sep 17 00:00:00 2001 From: wuzewu Date: Wed, 9 Dec 2020 18:59:59 +0800 Subject: [PATCH 03/52] Add danet pascal voc config --- dygraph/configs/danet/README.md | 2 +- ...danet_resnet50_os8_voc12aug_512x512_40k.yml | 18 ++++++++++++++++++ dygraph/configs/pspnet/README.md | 2 +- 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 dygraph/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml diff --git a/dygraph/configs/danet/README.md b/dygraph/configs/danet/README.md index 47b1a4d83e..9cc775ea36 100644 --- a/dygraph/configs/danet/README.md +++ b/dygraph/configs/danet/README.md @@ -16,4 +16,4 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|DANet|ResNet50_OS8|1024x512|40000|78.55%|-|-|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/danet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/danet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=51a403a54302bc81dd5ec0310a6d50ba)| +|DANet|ResNet50_OS8|1024x512|40000|78.55%|-|-|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=51a403a54302bc81dd5ec0310a6d50ba)| diff --git a/dygraph/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml b/dygraph/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..3e1a140236 --- /dev/null +++ 
b/dygraph/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,18 @@ +_base_: '../_base_/pascal_voc12aug.yml' + +model: + type: DANet + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + num_classes: 19 + backbone_indices: [2, 3] + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 1, 1, 0.4] diff --git a/dygraph/configs/pspnet/README.md b/dygraph/configs/pspnet/README.md index bdb54f298e..a48415eeee 100644 --- a/dygraph/configs/pspnet/README.md +++ b/dygraph/configs/pspnet/README.md @@ -18,4 +18,4 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| |PSPNet|ResNet50_OS8|512x512|40000|80.76%|80.92%|80.91%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/pspnet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/pspnet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=d94fca382566d823dd23a84d380fe0af)| -|PSPNet|ResNet101_OS8|512x512|40000|80.22%|80.48%|80.36%|[model](https://bj.bcebos.com/paddleseg/dygraph/voc12aug/pspnet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/voc12aug/pspnet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=5fe5012cf0bd58a3574c95e0fc79306b)| +|PSPNet|ResNet101_OS8|512x512|40000|80.22%|80.48%|80.36%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/pspnet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/pspnet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=5fe5012cf0bd58a3574c95e0fc79306b)| From 
9e58f1765b94e914ce5cff98f7a4e82a4ff0de21 Mon Sep 17 00:00:00 2001 From: Morgan <30824839+x12901@users.noreply.github.com> Date: Fri, 18 Dec 2020 18:02:33 +0800 Subject: [PATCH 04/52] Fix the bug of filename replacement in the create_dataset_list.py --- pdseg/tools/create_dataset_list.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pdseg/tools/create_dataset_list.py b/pdseg/tools/create_dataset_list.py index 8dd4c7e9a3..a33bfaad51 100644 --- a/pdseg/tools/create_dataset_list.py +++ b/pdseg/tools/create_dataset_list.py @@ -128,12 +128,12 @@ def generate_list(args): file_list = os.path.join(dataset_root, dataset_split + '.txt') with open(file_list, "w") as f: for item in range(num_images): - left = image_files[item].replace(dataset_root, '') + left = image_files[item].replace(dataset_root, '', 1) if left[0] == os.path.sep: left = left.lstrip(os.path.sep) try: - right = label_files[item].replace(dataset_root, '') + right = label_files[item].replace(dataset_root, '', 1) if right[0] == os.path.sep: right = right.lstrip(os.path.sep) line = left + separator + right + '\n' From 7aff74e41b1c1652aecfaa2b6bdb6b1a62e34013 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Fri, 18 Dec 2020 21:17:04 +0800 Subject: [PATCH 05/52] add release notes (#708) --- README.md | 2 + README_CN.md | 6 +++ docs/images/seg_news_icon.png | Bin 0 -> 118424 bytes docs/release_notes.md | 88 ++++++++++++++++++++++++++++++++++ docs/release_notes_cn.md | 88 ++++++++++++++++++++++++++++++++++ 5 files changed, 184 insertions(+) create mode 100644 docs/images/seg_news_icon.png create mode 100644 docs/release_notes.md create mode 100644 docs/release_notes_cn.md diff --git a/README.md b/README.md index d6ff9d63d7..4a77d36e9e 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,8 @@ English | [简体中文](README_CN.md) ![python version](https://img.shields.io/badge/python-3.6+-orange.svg) ![support os](https://img.shields.io/badge/os-linux%2C%20win%2C%20mac-yellow.svg) + *[2020-12-18] PaddleSeg has 
released the v2.0.0-rc version, which supports the dynamic graph by default. The static-graph codes have been moved to [legacy](./legacy). See detailed [release notes](./docs/release_notes.md).* + ![demo](./docs/images/cityscapes.gif) Welcome to PaddleSeg! PaddleSeg is an end-to-end image segmentation development kit developed based on [PaddlePaddle](https://www.paddlepaddle.org.cn), which covers a large number of high-quality segmentation models in different directions such as *high-performance* and *lightweight*. With the help of modular design, one can conveniently complete the entire image segmentation application from training to deployment through configuration calls or API calls. diff --git a/README_CN.md b/README_CN.md index 55e40d568d..558a6fbddd 100644 --- a/README_CN.md +++ b/README_CN.md @@ -8,6 +8,8 @@ ![python version](https://img.shields.io/badge/python-3.6+-orange.svg) ![support os](https://img.shields.io/badge/os-linux%2C%20win%2C%20mac-yellow.svg) + *[2020-12-18] PaddleSeg发布2.0.0rc版,动态图正式成为主目录。静态图已经被移至[legacy](./legacy)子目录下。更多信息请查看详细[更新日志](./docs/release_notes_cn.md)。* + ![demo](./docs/images/cityscapes.gif) PaddleSeg是基于飞桨[PaddlePaddle](https://www.paddlepaddle.org.cn)开发的端到端图像分割开发套件,涵盖了**高精度**和**轻量级**等不同方向的大量高质量分割模型。通过模块化的设计,提供了**配置化驱动**和**API调用**等两种应用方式,帮助开发者更便捷地完成从训练到部署的全流程图像分割应用。 @@ -87,6 +89,10 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml * [API参考](./docs/apis) * [添加新组件](./docs/add_new_model.md) +## 联系我们 +* 如果你发现任何PaddleSeg存在的问题或者是建议, 欢迎通过[GitHub Issues](https://github.com/PaddlePaddle/PaddleSeg/issues)给我们提issues。 +* 同时欢迎加入PaddleSeg技术交流群:850378321(QQ群1)或者793114768(QQ群2)。 + ## 代码贡献 * 非常感谢[jm12138](https://github.com/jm12138)贡献U2-Net模型。 diff --git a/docs/images/seg_news_icon.png b/docs/images/seg_news_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..30ec26f5fce25550dca2fc2c2dc2ce626ac514e9 GIT binary patch literal 118424 
zcmce-$k&^E2k_Ks{ySuxTP`VpLz*2YY6(Su^VuYrcP0l16_+_yzy~pv%fgssaFT!T#+-~1)c?$}s3yQ>U4sG?O_ns*n^?I~YpR@lc-;JlsfTv6NK!*iShXXH* z)jMhBwpw8GY^68t;wYh471s;Rr@YiZn4Q%ayPt7v28jJP?EjAsqaax5ChK9z6?_(2 zfh2}?fvV^eLXI3lj)y)FOSTL6p*nmd4hbcXxC>=+LwJ4v`s{oqd)kx!0yu{HLw!Zg zw&rd@o?4{^-*>t{gXSg^ZI~a=$DbYotNyrCA)A~y;e%lR{m{iXz-K1BNJ|h6eWo6r zC#Qz3DIMG7NlZ+|iF`o|GZ$B~cOGk!y)98|zU69gXa%bnRxOgX-`_#C)djDn)s~l` zFP%tJmgA3k0`(}2FtL0UqLc3S&Y!AWP6ZtRY3zM*07bey;Uv*4iO-yeR~hv z(b!fAt9M2`1oQ8jM8>3oAY1;cEYVRhN5f6UjfLG#(g8J-L!IOlFs!#xDDiU9to>n= z;+MH2W-+JY>wTk>1=8Fn(7QA7#$fUYhOh8s|6mi=U@56K<=H^ScizFG-G>s=WG8b<@n1AeCS1Kn z8aN4l91X_L>M-A6Ahx>h-`?s_p8)Qw0r12I*rN0JCRV@w?}9l^MZVDYa_GZZ#xN-` zAcv%({BL-2&Vc6DO<}DFq}2OoM-vwp64agqxS@2Kd&!VEfCL~yM{TNDoq*Ks8?Rq` z^WA5PTJ=ns}&XF&Ozs;zs9DxYG+7}_+5@($fKA}WO`x#jt=pt{i-u~1;P3?IRXC* zOo-KBx;R@P+qQ=~NqgQFPA!B5Eu_5@;#;{W%#CNzvC?_KB)BKQfU%j_7AMWjckFKm8*s0gT$ zppXG1yIG9N7v5)b%tND*&o2JBE7dnJ3$6O^k>3HZdj-Y*mw3JAlO4`eh>pMf47a;O zMI6tb{N&z1(0oV?b@olL09lUAmm_x9h2`b)FU1dky2wVA$52B4#kc9iyV(EE0_7+v zW{M`rg3cU99PjsA1UhP@&iA`81f{I~{lxvuRpbOlohKUj<79e!_qaTmPo54gZ7zHQ z&g!kR0I6KCxOfOdFC2L z$38?z!Jr7gMzz+0Jkhd6n3PK!WT(T;V?VUAh z5Wtiau0&y&9oGcj*h}t>GtHLXw_^X;^%5_J47)?!hZ{bm$fpP zpgC}%M~ik5pf)9PW>X)DAXdzL8^lpm^rZp%&4rrBpXv9tWRh1VUkB-PS|Q?y-Wmf$r0fZ|$z*oD_%)399&E z8-ec{9-0A?aYJZ76n5s_d+^C}r~0uYLrfPN0!6Q{+LDzOj)q|>98M8CalAK-v3~i! 
z>u>$peD|}Xbu*obh4_9>kUN;%!RLQJ{EZ`Y$K9B~0pIu*V|=bnAp$4y13J0bUVQ(r z>Uik4=yYy59{1>r5;MDR;p;g%%sf>3H?)V$=r2#ioXOho#LEr-^{)4q9(;Uvhr!Um zCOWh!j86N&+=$=K`uM}?aWQIm6#|^FN3!+Ice228d#L@x^>I3s8@qQ5@m~(KUQE_p z*O%!9h=#3~eoIp3zXBj+^wWGPsz&(b1^Q(0j(f+!FHQ+lwm)E42x|MQ{P-=oayg%` zhnI(rJR_b``IkkN?6kShBxEl}1`$Yl6bq`VAOK@mo6@9LbblRy^#qix&V0iOHVg1p z8Mf@4;}Ads`&`VDhcpfS+X88x20F9P*iq4K*`e$H6I{znmKBQ23Bm|~uCzFN`xn}v zj$>hg)5v){j_RhlDZNKyVdN8r`vB}FKQ$g9R*xsWUpv75Fu8&NK;=B2*Y%@c{cD7| z|3!EwI&mb`5q7TUaib5+N*+mPh^(e<{kBt-<~je}Dwo!X?Ba;Hp;8HL(w*+pSB)oi z8&;=mO9vt%Ur2pphIZ6rNwfC1LqV7|wUL35<)Am6XXhc!IHACPU|iI9dRF4Mk-ksw zNM73S*1BH!F~`UwGT;15BJcI-^KIREvn!a}=gc(dF>I1(q8l0~yK|{w`~nV}T1`qJ=B5A9sHQU~P2*szV zNLwD!@@wQ&SxGCO0TI(paqRQvyOLW|K}00_dp`?{q?sO#d>Oc#pF7`{2B6vV`9-WK zN?~zy8VaNQSB~|B&^`jZ(xPKugfDDj0idQlmh%IxgO|k{R)6OD4&ZL=hkd>_G-|v3 z-ASx&-cB`DU~`fP{HZ5k#bqX2l+nf`N8lkdpNT2Zy;z+e*q=#vDQQYpwfo86?BJ&P zZL7b@FAgo{K$_Fm4w%y~UwBBUkxdX~HYHw%9;GW(-M<2~cNOR%4f-XB#Mswa+~1~! zS;;<&G*l`HI#t!>)Nu)3qtYk}y$!-1y0cD_%Y~`(ow;)@tSHxt=<9c7XwhsDtD1VZ zF~yXhe@KjNVH~ZmPY#I|nrY(N4e38V}6ceT^a}|4&qUi!_@H{hOVDYk*O*5k*_CxZhI6nIWnMA$WkfSr^419i}fzrj_cw&3F=-DQe}j+Y_wq2 zD4qAdGHzzfLozLtmftl>sPlI(m1!t`gM%+2j=$&8s#bbxKXxxV?7yv^MM1s7{Qd!J zN}<%`G$#W5_;G- zS;Aa^wT>f$`h$j@GZ$_RX8h^f()_xgKxUk&n@tKnsvwi{*#oxgPhfW_(r4J77la^A z5%@eYpu>NKCd;X{ZPT{+{8s72`w*a(R%H5yup#gLADL6OsYOzW=#?{E%bhw4{pk%g zSQr9ZN%Y>PQY$0r%s4H-!1SqIICbKf|}kCB!&upGA47Fv1* z=tyHh$li`;yaKm04XV>P_-CXb#wqys+)svbPn0sIz%JVJXgb(9uQmAh7{#v+BL zi26mT#5d?BYk<^F&(2d{tp#a#7{E=I!0xi^$jR zRNTHMm}R)Sy5r5)H4eZEcO^Pi#39<`?SIs$w!b*$onvQjuW6Z%KTqaL+pMBTf@ukz zT`ZY<`5A5lpY9Un-gZvr_a@5b^>&x2BvdwX8XK(L zE-5_SF2`C@8;;Xz5wUyS-~8KWWfZpcB<#rCFwTmYI5FSblZg#qRhyt}us8Yom3a=_&JED=k)875-rw3% zEm3vE(=(q)cY`P;8+iFa-$U1F2~L50R)}JHkho%^*Av%yEK6@<9&s&k{Pc7Y3$gUr zGfFb;Y&u~@87B`_oaKd-j6>VQr4zO-Y73mcx|wgN$i9#WFcW@>WzT&XKj^A`aZ1;h z#zHKdT@7$#b=vX#T>k7p7?pV;Ddj5BEm$Mb&qD|WeBNFnQZAq$Vc*w+;ho_UR1m|A z$Rz!@@(=}WX3HLzJ25&*x*yyHuy7xrzGn&NaKDomgy6f&ow7zl4p4>p{JC^6F`t$oPno>FHnZf$D9jlHw54T>V}abgNO9&o)Q 
z!QriPsM(;tdN0Ola#fwb8*9JV5I$q$z^gLf_w^?{-6W{JpBo$DDBUblOl~mt-1+xb zOwzt=|4OHSZtc49O=!99003=eVhVRu_bZACDMj>u9M##L)vfT`TslS%Ba#Bb& zod-ewMR9m@ET9>GyHjv-0!2t474^&6b7ds`)pLD5(e~}v?k@&Dy5-B>UZT4{eIhWY zivX)793CdBzdIDtYerLBnljXC2s!>3FOKi6VBGbV_yY&0?kcVbn*<@aq4 zq+u5c$q<|w3UQ`$6I02RkiVi!N%_>A#U zd6v%jF3Xueq>H5XbJjb{P2`EK>u58XTU@5^aWJh5@QZ|L*{LjmXKRdG`wvFxaRroL4}Tt>nP{%S{|;r$q%pVX zwC0dn`liUEkcgtZN0$#u15+2s2h3BJWzek;C|Mq)9kEk2C1s(`&n`9`Df=#_l*<>r z5o;C1c?T%mnk;yUvb#2#^qRUVlYQT7NLscUy$c7BXV#m`Vv6l>1ea()+ZzzEdawRR zP-iwDh{(-#pL*Rccn~nGpQ5h|-M?}c9I`3SrB$1$g*kx4vO$Hrg(67De2dy+26&&! zWv-D-Skc3_+yst%VAqI0I;{qFm(R@R4w*%sDN#kFfB8a+su>d-Js1-ch&!*K948lB zx~jw zZvzI*o>o$ePQT)!ebY`LbblR{AZZT^lgWsA`qIx`ji00Zhnu;`0Pye^QO&$DR+{^0 z-92kscN|m2{@FeOm9~)HX_$Kb{TsS!<5~V(ZMUVBTBvVN-$pERJ(Cc5Ia`^uXYD|X z!_V}$;-sHR#V4GT8j4lF=o7Grz^+B&SY=i=;PmQ={>2;4eOV6n$3DgG`-ytPW$llt z3)L|$chycJwBn(*OqvqKg!zA>hKXOlE?oaJEug}|0Enlg`cT(qon#$j?PMx!`UX$sV30~_ z6%r%uYRia`?#{khBi2$0sSnV?th5hX;NI*qzR##gOjuKGaPH?OgKD4cw8voZtd#Qb zgf}|qzjV)*6XAnMdH4nH?}wGf4lOGlVsQxGF5rp&E>uDM33{*h?MDOw{_Knwgg8Kj8|GbVI1|+9`>W`#B}Vlq9(>)$vk5$X~Hje}E`-)~_8ltm9SKK+j7$j**-V zaMf5EP|79YZ)n%JtKOej1%y3rnc)-oRuRJ6`$ATqO##Wr12N1c&I(I&asB^Ea!oEY zG;($(CJaC>;hh9I&nG0K2xd8mNr#$bU}fxyqUo+JVKp0N`Fo!QPQFM~de(|XR3X>Y zB_-R;gSI8tAb8Y}nCSxfSRk{$j+@d(a&T=Wno{mv8T>d5FUFcQpdEP)22hUrrI9FF z!xE`g0}jrY{?OsUr^;?#P0ljEKa;(iQ$lm`=8~`QvLSj`yn?+N&crz zG?Ia+B0f)-2u06zowphZ*L$;%yY@p2y%T(Rr%Qh2~vfY%OhV@(T{YvzuhUW8n+g2WH99 zmVhY*nBd@kyQOCFxdxUOm^;1n3kWn23tzT1+k6lkrTJH!w0ei$WtYwm4%+NIRJ2Rh z7(QCxFS05wbxvz=lrh)lgeLP(uBWwgq_va9_J#GR zc8%i$vRM7zW_37$-1mbk)+6Dx;g?*kZ#!AE%8Krz%IUnGdt#uTRXXLUqFO&L*l${c zM^F14UM8K)0}m&SJG6-p2}q6+KJSA#6?W87maZOy)ml>Pp`ZAl+IHl&mt!O~dA6(J zdKOIys=kKr72Efynxcjj+SV(v@ij-X(HP8ujtPR=1$?e2ZY2LdpjaRn;&#t<6)2h@ zgfc4N+%+g1>-&@jN}=lN8((h6!52MJ2VurC$#m(!X5953zITjd9-Idk_;)0|>9NOg z!NJrEFtD}Z|M1x`g7YCDkCUxh*z;rw7dfO7?v_1B8CIJi_$Wm4 zp5Jme^g>90CF{i~y1E*``uO|kY(=Ij))!p;?R`PauZyGG+u5bT!F<39?0rrIQ4kf2 
z@_$;qm5ihOK7$fgtFV5l`zMIPX5B3+jyngtF4iCPo+I=0Y#sn*?x5DkVOc`oDUM{z z9a2miBcd^i>u>wWF}unzmFm1vUE(h?WlKtepi2x>#cYP>Bi(PC{5HSMpJ!* zZ^TSq>kW3r4!gVTl|1Ad=eaJ^Sv)Tjz9RsV77f| z%#3?S5ZZn}W0sqPtP38lvphl%ZZa;UUQ<>QX4Lm=8uk_e;P^!x;;_3@zS2kLNVO4x zH-P7$*r--idIog{3nsI%UUT#7Ho_ z9mX+|^0cbtx*qO%dC3Ue^WvDyl&geu;$))sj1yc5PR-4{7OWZY6wL00_bkD!m})Fn zZFa@Tl)=(M|Hm`Uom>AbVXUoA<}{|Ds3`oH!ZTFb-`oF1siyo36%QDn=JTPQH!;Fi zJ3%tK2qPLwo$MI-87KoBz1tAaUUm_f%w(rJ&&+sM`8~DTeARsyX!XaNN625d!Gc1C zP++Ac#OvgC&Syx8 z62Q=gln|7QB$jy`o5BgS&#mKcaJHD_{&trIz=he}&J$lZBsgvM4PznA`bvY-t0D1E z9y0Ix+KEtwp{gY1$lm5Q?edZN4c%y8!Zua*Pg)ydTc&o;!o%RSNT8pT?lyLNl0T_@i(GpV|{^|7v~i==qCXsj&y#AYQBThsxV_aswT=^Qm>(JIlVJP`J@zMcv?5G& zBI@e;bG}<26pzl^`4=*sEPl$vAxJy@Uzy!-mpls0pbejE7 z?lO=Xv{Ugt@sd&w|m;pw-j08`Y%B(K&2jsk=nWOWU}8eW>5T zTwi*P!vZRSd;!z$x6yjY*bA+GBrDX^Kh~rtAqM~il&@bn7aJ~XaSt7ye3+66kpb~` z?H}V)ChgCj#CN(wX<6CM-UAE)r-*>QyUM%n+E+okdJG9Ozm0<*U>-dm~`?HT6E- z`ZB=`?ZSm_(_wB0w?S5SDV)Y3%5>KZl{Sb8minc3u88EmMj3LFhA28(l9??pM79o+ zFi>m9H0o$L*}w|5Lv->QNvT`(71ZB3x=N_-u&PvOaF$t-DdoM&e#45Vr%(tU@e5G) z*d7jC{8I~M!T))WD!00z3v~|cY=UjvpKj5Si=~{&1B`Z76ZMWw8e( zFDJgk?B(>;(V2zus*>G^t;vDpya|E*>1*EC0WH~7Pv1lX6|2AS4cy6cr!J~7))Vgw zyA=gK>$1LN=>_`w9=|%yQdp0&#(sWGfQs0ubht31dxHdRGhs`lr%pG*b>1EWW=}?q zkFB|dOH-vvSp?~|h`7DxqW^ZvsMpG2Q&Q$~@kd>sK}%~!k6*TLfCU!)iS$xQvCnlR z>N+e#XxK)+%f0ed_@+KO0tNQhz#JYa-j-G~5h}dH$fk)L53O%s25uHZ+-#O3_idgx z!xi-{S{?f`H9p(1U21f{6UZ&Rqtk;vazFL3Q1jne%*L?wxmrRbhP~%cv zX6R+f%mWQWVm5gMLKx>?2QsR43us^>3Q@4IiXN{FwH;fW?bGuY8f|mn=;){Mi9VRx z6a1xWb&wq8@AFh3AIbfvOX1S4X>(|LYfD$&a=dk2F84~W(J!z5DgvG~u&*;+%~hr2 zFuo`K$VxLp{L}D0I`q?~BloUT?`L{0n)eUWv%|fCncHP38$ZbZN;2^>f&8_#BH4GI zh;_7w9)Ky01=hsz@Z1I&(Mrf^&+4pQ915oaAf3b7f9`3j#v1GM7&0a9Y1uU_SvJjE3RGC@r5rylf zK92<}W(u>Dy)&@t{SgyrF@NNi=I9#GMK~c_OE28Mnf6p(Xpa&i-tWHJ-coy53z@_S zY0~_sP?!*dw>@S1Ni9`ZkjTrWy;UQB4o*`K*&)uV$`#a0CEho)dq=KvTyRZMOF77M z)2WdPywU%hsF}XA5vwR-OSnzFU5ZrIASJRaA7Xu_y6sF+lTfX7*^opEi*0%sk00jc zdLb4#lmPu`E7S53H^vOP+ryro{iTy z{J;$yiG9jCW_Vj!Fg!4=x8E)sEfl&T{HejnyhDiE 
ziCQR9({@V7*&9WXa%Y;L!5M=tlg*j{9!hGk25n0%emI#$)DvG21WsGX@G95wgx_f` z(;SmsbKpDn?}tR^f5m6uJ`-mu z)v0*@g`8dlBUbFJ*$&vnius=Mz}Wbxn~IJadE~D3Vf7LF z=$;0s$~4uuq1c%yaahYfRTNy(xr%7=Nojd<8HNrLn=NqD(~7lU_qO7R1on)rAdm+Z zD}Y~!x5j0g1MRl$n1z%m1M+DUZUDN_y4>uxPvPlw>MCnGl~a11Rb#pS(|Bp)P-mZi`4@Y8rK6ieZ|2@qSU)T=4VL$MR?8NlrN@nb$*9 zb$_Xu#LnrI;~}-7?P9B6fM?WFV22RVql88hY{kq&z|Q@Sd6`>D&9vcJVUj=-oZf*T zC&@n0!A@!pcr~4IV=iv_`bl#RxpBY!LrCy3@8pS^uqwC3Tc^;!GJf4^SWhj#lI7f5={-L zt0DK|K@FMJKI`Rt^?KAbuW||OlnS1^b{~rgcah+xdw8TrE|3=)?C(U>rHjoeQkKZn_iK6?~D_wi!{-;Mp0 z=0j5eBp5#NJOIgSdu5TQEIWG^yLbDal?HEMNB7in=$-kxOsYPpLh7~We`MKdYgaEE zGCb+rII5`<*`~gY2W)?=bA6zfzeQ!@sg2rFoYAklU zaYJ|}#Y*i9wsQ~&%58)fn+@HPvr_g!D6EK7TUACT-)a?Xe{QIhTN}~EAQcpRVM_Nm z(5VFih8i|U_bWw*(RWh5@$npczvZ!y1oON4$(suH4A-WNT*Z1+c;OjA7d$RySW{nF z1M**ofYHP!vQM#Xj7elvLCHbsE&w}XafkLejl(|J6neD)WCP(zLSkho{U<#VoX9x%c9$3`wI`a8h*{u+9 zqn7bc&;pV?EA{S3_)xxe;*L)XwK)=E=fYg;TaQwyT@>de#jU9VmqmfnBb$y1QU0i% zceZwB}YcBh`Zu-%F}^;9TzSZ+{)p8+IlKv1}=9>b=R#zz+-8)!#@=uE=7Kqz9m$A z+1;1$s^bR@BI0^_s(cydlJe|NdNZS(8?y904k9?`)m&yizQ@}I%QXIuexQo|%X~mP z=0?pIE?zGxSNR*mZHN7UmVq=ZHOSb+Z=~iw16BN%fJxOR`L93Ma)h5eyZRyx5l<() z?MI27ifv~)3zGFkG{i)HPvBp50A-3y#{;?Ajk7cR9XvWQ!?qJ=8cb*&gojRxzc_pl z>m}cRHF!<1om-3dVIodR9I)`3v8;XXexWsPwDSbnlmG6xTD}DQQPergHuJYOw{<4C zM9_9wRv$JyPGmQmeByV@#jqB1JhZvg;b4+(I{|*lq5G32Z`mU3L$>#1wd0tHTuuIw zZ~I*Foq=}_0j#Nz$xuS&tJ|y3jyyYWs*TbQ!o)Ko)Sivy3wm{!5O=qRE_3P|VLN#| zFG)nWUdA%sVE}IZ0D@b9r5rVz3$a`n_l=9$9C;6;w^!N|WrAMwIcDDtinWNt#j~ zcu?e)K`nY#8kY`%%Yq4n4;v$A&Rq1l{=Rs9!_&7Wj z3U66PWMudizHKo)jpHw-Y#Q1hG2_V|G`lJMYE;S~Nf?1QuZoKRpw6LUda~zWt2dh- zYX;PRhKzxxo6k$)`rC?*n&_h1C2i~u@NQ&(WNyf8&T-mDX0Fa{ryoNul0FHS8RvMQ zv9-Lo$Knd*?$b6AzxQq#Te*qsH*wYWSp zNJ^f}xsoby%d}(0xa##_ zJTrFE;q@@KJI&RmIQ%I6-Pfn>u-KcRfj_D|u-KpP?m>?tGe_j}H7^vMJ-s7))J*9v zM)&)p?`XA($Ycl7Hj)|tsVm*GIzhubK}Xl&3t^w4Kdpyu13wzP?dP~$JXg#`@uq??NijLx^x(G= zmxb=}=nICrSyK=4hWF_(#bd&3`CHY!pc3F3`_-Z+&{?SvrTg@4qZFOKsoAw#Qej%!-1q8grdQc7Wi4C=3lb)z z`Pv~Hk@+pL*ezOq-zBItRQ?f{^@Z&-s)GPKZsv1oib$GLaSA_>^|!67-35U@73&lY 
z5((=QX2KzmW%`nP-<&G&pgSTPJbE0Sj7d1H2(jo}w2kyA8TT21gOq=Zxfp$IIf1+y ztk>HYWZO7_py``P(a9TJ$On^yIXs>GO?uPd0dK?hmWLad=7*GgY3ZFt4J~Cu`g&ZW z7ehI@aonAFq4wb0GWi1$a%+q+PURpQ;Z>2EGJojufT5q?IFS+~y+hjae?8P2+kDHa z3(uUS{IJU}=vY6gm#h!0I=K2TGG-%RXh3*OOodEMpVdOCoNiLXYO zm}Xq$&my*3GqxBCiJFR6(RFFznV9N)Stm1$v3|Q_I+e~`Z>-IexI%PHs{i)e_bPS{ zZIkT3DyFlttASYJf{1I;Xlp>8@B87fX%FSBF-du8I!A}zk^}eRE4|!wao@FOgz#ES zATGeAtE6a`8U#TBL$!zAvH{o?Oyz6rgL}%UJK5h*du;yg zd(hte$k_%v3YsJ3i9RTwoZo&`TCY=9KGm+ektFHoJX=S)+`BTS7`)`T1v``|j6>uG zStK^XwABq`%T7et)E#UEKI^vZkeT!Qg92tq$)fueC!6_)VTo~ZJD5hcZ#d{Y$L!@T z!ms12FGi#X+R-S&5Q1)R8w2?^j+HYu?;T;v)Q4^ecmYUGFn3abn{g#nl)pB01X0fO zyX@Fu@>?__(q}5%ZXM)IpC4O1iD`skR2Du;1515ev7EwIy;m|!XuIt-512LRebDcp_*X#b$xBFrjD&o8goT*(6nK$0PP1EKRCgpui zHT`i+T9g1FPnM9wA>D@Vy8{KtyY%P4p^K4tc74>IY)X6$UfG=alsLb*52}J#H zqOhI5UX(aFuI~9{lQ5t0)TUvl+W8<@Q5 z&~RQB0|dX{>Abf6+F-@gp|+?M4JQL5T5t|;Qx6$*@xCbLvYjfNEo*at7|QGD1f-Z8 z{WQ`8oOv5ZKJKt6P-0K?XNj?n|2jXg?q*%0FPYK0c39TjTLvA2j(IlvgAlP^CGhAW z3A9^pAGb@Gfg669`Xi%->I$(5{G)pVTS|$P--8Nr%Gi+e-sQBJbE#vv5ySzDj|gqA z>&2ME0Sm0)sb7V^r)pKq7L zF@$|%s{MB}GZ2rtKXK@YzerUD8je;$hg@aw&yRge7*T%ic^6#k*i!~I!h9T8<@hf3 zqvg-LS9P4cB5DQL9q3U~58)SggI$}z=`koOro1qyvzx+a2c`7@kQF1l^6)5p9Ll;} zq`@2&mh4wj_vb^L50kcJ1%D)DIP#-?bf{N~$S2Pbig8enbrzsB6Q1YC0aLc-Hi-)B z;72^Wrz8U{Ut0Zo(MsLRD9UfB<1NaKNu+y(B;JG^r;qxcLZe*^95Jp2u*$2mwct#; zrw?Y5a*rl%`;(TgxZCCFLpHA>=+ADOJ#kr|xrmEc>1I;AgkCKbG#cEKPQ&RxXXiI#= zA?Dxab_c9`jwX)}WM7}V-{Y-3GB7o&UJh(oW0_*qp%06d`DgNbUl^K5gCy(0W` zpf)}ujxfpB*c26zR3?o@~P(ze}+JXq+2ifQ|%w|;=OM1@sCx-mo^!!=kT8|=} zhvZ%l<$m$+GU>`%rI}xf>()cd7l&V^Mxu=5Fq}w?e?cPV2ly?c!=_4{Tb=!6+0F0! 
z_7s4vru=hq0o4_S+zjA5CLu}#ch~pguvLM_!QKF1mqMsVOJKzpzHj;LMiQPrix-n* zg>M)%4|?zGYyla#u2TuYv5kxMu85jHVn2G;YNl)Z-1>5ylCJZy#jj;Mr22|Ng0!bK{B%qP@ zEC~GcXEwA5ngl1vVh-w`vxIr14id@8)zp7~=zDzy1#g3U+)KyA;@p4Vw@pYr+_wH1 zXk_ZmTu^hxn7=010+d#<(=i{&Hy?2KZKyN05-JgIWy94nvH zlCpKIV$0t=;&HC^_D3^^oWeMoZMi@1?S=m^PBT{PgP# zj^4WmN8Mb+Q%GN}CG<5PmWM8=tOGKM@Eh3lRKZls9Deh_^Y_C$h;ZW_em8S_f$5t& zxIA=kmx5k#A^-N)kMWqlHQMG%Um=17?c+W_;o>Nbj-C|Vk(B4sXN7`yfL@S0R>xiD z$F>6vWLkzaL1$Pwq`VNJ#g?T52RCATOm+SA*^s8R{M=G@_3>A5hQDNinsat<7^pyh?qKC&}Y1_lPrT4IQ=V{huQk%u)|!XgZnO^_-2U;|Z|~<@O^8C+A`0ZY8tlJb%$Zuv*C0@e zJn+#dU*_>7F z+5Hd5KNyjJz*ME6vZb9rnGz87e8cj+sn{d7v)5NW5q7guN$V-+^K?J zQ7Sap!wMz}=-|libI+OZmW!xkkSxZ5C;PUE25s^0xwDs!MFT5FR%E^2AHxGYU&+us z=q6m)$VIe1qb6}`275guHp(STjk}F^#-aDv0h%3tsd{5|vQz-|dpG_WZ4g?rFlb;* zY|Jw=T#a1q&+Am8_s07fZ-i(v?m0d4BoM!mJ(6b#pZjXx=_-rLeRn!#3<(ZK)OlC% zG$B8YZZuudSl%qNLf1I;>7lQrpak#i57?PWK0kZY%WyAOmY+zWQI2Qr6=-+}N7t#y zg6HgOxR9>fy;~Jf0L(;WtD$FS9(ss3Xj$I5wI0V0Bg@Ny+(|<*URk~>O3XpsW(E27 zw;RkJ>)nPA^$VT=KbR;XH5m$@2*ngi7~0FhkN6%@~d!4q`;jS5RuG4K^n-B(ze|g z_ocohAeOCaLgh3bn^Jk%+;F?Rj?<&f?M#6w?MU02!?*5Zu=1)_!w3(4id-%ZL?qqc zgb;4QA8$2RY2mmbU#HKVtHi;B6q$@ z+#YOeLtb6R6k8!hiO6IB~U86iFztPgJR8-bRcaKa96je|q+#w36A64X7RPafT z+9y)e_!(0ob&U2Sr;DiR%X4y@iy|w=rV({T)#Zhp`c*^!uNMICP`9o1(^K9gM^Jc7 z3$;Rl(mFeQuI>$#A)j=}-e`3=xx50a$Hm2E5VPljgl_6Y9eFbe>kHtEXv3~;y((~L zd%IT>axy-iW3gsbYbK#@3D)C5y_)Z*W145OslS$mEGMIfgb3r=b11r_ZO-Xk4-C}d zhDm9bmMfy?_>UTw?Yiv_tLei*q;t6orrPqbz{>neT)1KTK9aHmXaSRc90;BdjYE&_ zlL#N&$4=x?5HW_DvF6X#*H-~fqyQA*Uh`Tj_5a#NcmT2D!z{mT>OE+r2>(dEuV?&h zK?Us0KlHgY)vt>XmOzYnIQe>^tNio03kf|>S`MX9GgPgdFfu?tF23r<*!(D``K>x^ zJm-0^J1N4OxP|8V04whU@eqHO5HXe>MuCd&Ds9D)k+LAF8Jb$~jL_9LPM5ArR|}om zHXIM4XQGwrcEBrTMZb}6h!&M8T1zxs+=(=3=9*>)i zKH4lB_k4Jr-FEZT0j6-wY|xhXz^AEwwyw0#n^O)Q4ktD-nYV&m1MN=&#^Z|q%nFXO zP4#EhJhS}+Ejys6-Z6x~^c;0?e==cws(`w~pFUYQ@%jmD5kg*h_~DY;(XFC4j(=yP zYZ_B>PoUD69^vfn<(<69UHG2Y5gRLz$>{gDBpf=h^ui5MQXWxj-<8rRsPg~O^i@%9 zHf_5BTA;W?ad)@iE-mgZMT-=7hf<0Y+@(NqEAH;@F2UU`I6L3_uRUuehtI*RnOiQo 
zhivsA`g|E;M-nmPl6rwujjzW3G>*qLCZx+ure7D;2!6A_y{AjNC{g}=0u(fc_j|Ou zN2v47D?j__27IK81hHjOqCl)8MYE~2!CHRWiDEddE4@}RxTmHg+-X-ecGPjC__qXNc78{4zIPe(tW*_F zYLGI{>|dXoZ?^BiTePEz)NKe);LPRf$F~yBy|6f$Oxt%EwkSSP6(Qcttd2=w6396; zo=zLYbsn9jG95MVhcgq!$P6$kS2;xIuadDiiFAb@CK-bIU03Ar+3W=}hu`|UakI$0 z{AUf(jjWp&1v76r4^F={>j-TZ&Gzo|!T(s7@c!&wf$aZI z*e5f|%@_UbM6)edtCOa$IRf$34TJ5^w_YYEou_t5huIw_gM(ueG;Lw>t~Hx!k*TAzYt{R1H29Bw4!&+>oC(>*4WgueOCenBTw>U<*6g^N zyQ+)sz->qWQLS0ezzM4LXc>1eS$A3|R)vkKM+Uzu&VlWXyEPprFZ+cj-P}>5B<7&TSLH>Kx|l7_h5)IEaAg`-v}l5HHql-@R1;UQmU$KYa+3iJz*a*8-@V+- zds$n-oqNAMasVbtC!prux>m}ul^wrAC%bo>)a_ROjeOUu6L%{_c+B4!DrIpXbf@M2 zLM_0_Gs8(*#>--eWj9S1(lM&ZaD1ZzL3_q=c&-C$Av~ETMx=v zUck2~@h1^hzmwHV1o)`+d1^bQcveQnS^Ho^pfw3yks>Qq$!qdwg&$YeDKIS2>)OM1 z+@tK0SG0}bbKrAQndSQJx`PWQ3CXfupR1lQ0z8FOMrv`JxCU*d&Q89|WYw1Q;&pkTeSXpQnZc)TU$Q51Wtx9*>H)+Z9(yoWiej zJC>&w#CCIn$oPBzbp%t9p>}m&fkY7b3Q+k8CN=F9myNqHw7y72MCF zJxMc$dKJk?Z0)zz|2i)wJv`j}EVPcDvQ+e|w?76f*A(rT>G;jI?6X_9F^L1es^lEY zGt4&Q8~fHANxiqTu^3l-L-)#y8z1RC(c(u{#k5)l@3`^!)sG2=8=%yrtc~>;KTZ8R z0RUn1OTohH@%^f3ZzFs1!l|tOZhzZ^Y&mmWf3d=L6Ej1V@&ohWRb& zG&D&jh4UV(<35E$xXQLC8XBK1F@ih;5nqkk?2i?AzmNc>G8W?HxvHCa=cJp}+P@{o&+M5jC0)Oahd4h5 zLCrG=XcY#Jm$~B^Km%JpJi_TjQF^r!GOf?ZAIKF@vq-2^a&6t?6}2^6O5XM=$?Bd> zE|2BshfjJyG#TUPsJ{-&l-e&BHVE0rSz%m6R*7Ku>3_NxnoVx$d{%2{Ns%E{FeDr{ za)AraApH+=*pGdiNP00pQS)C>Y(}|;qT6(92K=i6z~ zn!U#R8iPM;SrajHK+zsc%gW}gl}i15Uh1Tx3m>|JKYB@Zf6W1ZciNEE&ZBT$aZeA= zglqB}A#Ab&0`AXgw_bDL0HWE4LzzG|hkL(k7d#IAPMx^RZwIDY$em|4hVw8nH_Nq$W?<ZIYdW?4<(GF+}Lx^qNPr%L#{_JkH7;X->XP~5#X{bZGS&zxqQ`kyWCi` z$3g!c?8V6QZ9*M)K+LctApHQz5c{u87$FA@n5hBj3P8|tSEj$n@*R!Py0JN;#C?W64&GsVehM_qdzh#>{*(gWS6y#0bxtse95=3{kK`<}u;Hcs5JdG*4kHh!DGI06fWyoY;u4&{hc%Fzm0x7*d~fsV&(^FF zxxWUX#K(LFES3d*3K}S+FFa6kl?H_^54CB9W_l2y5?&=r|r+p z;R+vq^F7|~cZqR1l_=r<%%?=|OrK6W6PG4NN|ZIT8?bk&OR!@1XSrM}_#3RJA4#r# zb=$uPP)JBfKj)@vy`HeTA!L)G^Ag<%<3(Q-Wwh{PBf<7x`;O39%Np3S$h=?vvp3T# z)o}U7k4W(6on+Gev-#d`eLm{N8Q8yWBG7d=)puD?jpZKIym)l0q@gf)k!his2sjGS 
zj-Dyvx_+s!FeL4f&q;UV*7|rS9#}Z)J1Q8dL8GMs8>S|!7sFPkFDHD*IXt2%r=R!< z@PzFck0_C7L+40V=;9$pd^c>O7s)LyVQSb6KNGRLg-I;Gm z)wN!P%ww5%``cY)wqU{t8;HXzEt}`9ubSMOYX<8>*EftrAK?o_kRAByq>Nmj7U^UW z;qB=T>}cS@xuuesP!gqkBhL*Vj$=+G9UpF{zCWF_~a(d3rHjJW2Ll@iW|3Ni>fp_t>86o|M*#D@gUpV4Ui?N1zvnfXbmRG#4)m zSkF5@JMUk}$Tv)>yeHnq32AJ*>7VnbP5`#E-SX-*ZLhfzlx3Dwu6nezROZFCD6Bc& zk1dDI1R#+dfQ~EmL7&1k<~8f*im>W(nxb$cRqNVuLK+u>H_f7_r~TN7`8@Z4R~|v@ zve~KOks%4?RTmrp1&yaULx&^d*#~t+I2)#h3*x{hady=X&#QeX-7=tTm(@fm%ctIn zfdPJJ{C@g_Stvyk()XtHZo0K1)G<4g*Ew&V&R(aAgDO~CyRc&x_o?hb2f7U4W9~E= zH&5Li-&C=rFK9E-!;ZGm8=$CpqnR^x35&^vg26cOb8Nt%D&V8)H}rU!ABLEm{ zGn4nyPW3~n=&3@#moI{uKkD5T(l&pln2+S*GX8KW4ged#b0q(YOnMtOB~Yp?Ta;Dk zFv5K_6IzXg>g>ifch^_|a4ubRe3y$lxmWgADE7-$txsjTwg%AE)+xQV!2)?`-Jv(x_!XCLo^kWpkOAoRMZ|IhPswEz5_2f~d2IG* zvrP49w_w=#1>=2Wb+6WoYnmfPoj=|}4ZloRM6j7>P9Ur+$7{89Bz&w6g+NOU!~EqbL972wPb=|RB-Lrse_}0GDceK zHAFi4r0$md+4-T~rK-*De2u9$>!Gpf>6@?!9&{OQ_Z?bcX8!PCGbA>3@+Cv?LeF|9 zw?3}=TJd;Rblv%Bu5CMsKRQqlEiVp}4;pG_^uZC`WN#+1YQqF83l%A{FMk*gh_7DW zvR$Vwu3LsoXbZ`!e>g-h@mfxX%4x52R%jfZh~*p(sBvR%ivVp+GuL_?y80Gb~8-{ zVZ9(;3#>T zu?-(|-j|*tj_X|XpZAcA=F^#HGqqG5VtG-N;cdzNSmecHVQurD@j%_f)Y%z<)icXV zIJNYaVhyjL9+1t~$#b9^ri~mn%ab>M(xQ8M+w&JYV0L%p2gofH^}WOpEH>Czug=-6pCM311^lecBH`hdKMwCO?!6$?_+_L$OeZ!_pQmjGA z?a6;uM~Qw2C3gC+B?A)n^Y>Ln^~Ct6K?o`jkj~(&&MP8%MTNg!`;v3XaHnhhQp!RU zb8PR(EYgkp5k;kjXwczqerSb)x~f#oFGJS*Z`e1^X6YrJcisxh8Ug~Sk4EU7qqaG& z2DH``jy_B==Tn@lq7yI=oovz3uZuFqU@l9MJOAnDyyzhL6nAukrpsyBfx*t}p+ZGl zZw?}RVjlrg!gNSS*o{=q`;$GNOS%$MJ1fUNz9FRWLP_D}(Swe*a!4E@#oH$X`kQ2z zpzIRDyUF$#N#P0T^T8E4;3g&XzEFR@f49<3(lg-szamE(-st$OER!L{Bj*)&ze7Kr00e9 z_W|)Mn<%93yHYLmm=$M9kti5QCQPfx`W1SyA!AceKv%F`BEDpLJc$5kh&huim|5h!l z7i@FfYN7dLan%y?f0YdiUx!KE82vKRfCKzN=eE@JnG%jBV0mR@Ss+bFlWLB_ux!Ep zyeVwd;#USR|0O{*yu;^0+UsCKdiW0`pVtP31fnN#@W-Go<c?(sG6!LV=)CeTQ$O3Wbvl~f?>2m9H_RfA% zaT3a^gP>D81+o0%(_dR%M|ZttHGCouFSP+&vPYt0x3@y00LKuZ(~rXqw#PK0!GezF zrzY${QN^8yec6G5zh*m&NNB>4Q|!00syD?i)(LP55ciVE`?-vwnA}t4RmNAs!9BH# 
zhAgL!vMs)OTr3vs5Su>)kIga_XUU+hd{0n?p;ca9U8!v7J(P`uHai!PZxvdvSwE z+~5DVVG86A8VX1Fm>&Xmos9}_GLmL@`(th1&opEVCKNy*s;6p%>YJQ5%=S>U?lLVG z$rS}_T4fB1zK2W~G`tUCHat`Zx}(v|&-RSU?tGxPG2&!e)~A}oRC>$hbj8P7iGC?0 z8*m0sA|k+jjWQe_Ve7Xg49;xfwSptk&Dv;TEc=cR4^Gi$TGa4LET=hZ^T zq|p>DK?iqz>zvIiC&HUK&4VGD0U?Q<5h2N|(rF8iDy7??a&nXxd8{lf`^6oMV~Nho zTEE0rWr0(>tAWSu(A7x{>0?D|^=C^Jr$ z;sMIH%}h*8D7FyxMyAl87z54~lsgBlxf>dDEJL$Wdm0r+ciUQbKf#G>(0{w15ahTW zRyN(0B!e%}2=tPsVf%aL4620(;K_uY+!=_t(Qj$!(aROZFPm5$Ek-KXW#Vop-zSyQ z?uNAQT_x!J`E*;rgjYVRYob6Y+H#!8(oTr0-5hJGLbYba*@U06G}C$Qq_>KbSJayhm}mi)Ry2-im0c z-++EE&X(5J?nqWFY+(D`6YF5@q=ctQn9w?|6+SSFDzGTV9jItsGJi|A)wTNq(*^m> zW=GuWW8=G%zCmPJ?frkj=3^iiT<{JXo{#-)&n65o;=Dk$E`gIb)|;iKywZ$>l!+%Y}N;)xT)qu-NoUS3y<-&^;&(=W$R z>uw?y4V(P8^Saerc^vdeg>%djy!0Tk@WNV=OFm&rniBTswawntHWBfjRqCgA%HEN!T z*`M#vt;ClsF$L^-@^ugr1CQtr1!X2F)z40KKK3?G5x$Fb^hpz(i&@Pcu8Goa-^Oqc)QU{6mEETAh$ALfO?ss=l?aFCRGp%zzPph~ z_u4u|8}+DE%Of>({8^+?>M+7`z2U+&TwGB-KReuSN@nx!$v;&+V<*ENL*vW*bdt9Bu`K8faxmr;$vCh z{qO!bY3_ueQiXOu77Z83HwtE^z!B_xkbYS8 z2t=!V$+o-eEg0?~gQg=?p1JY5e-pN#y(=ZRnqy0^If0}RCERBjjUa(}({qSY+ert( zd`~-H`m#?V_orfHB~|*48&~C{)eOtR>}jM*48(_k(va@q<+qW_t+eHei5HK5Gp;Ud zrL_vg;jj3tYZ%FLdvxv>8!}@4{_kAMT;-UFg9C3aS4rBib$S;tw;V~0?<@{&88KU5 zrtm)`IpZj1E;YWx0MH!-z0LbYw94`VEbrxw@*gzFng72QfJ00yD%YKHv89G?QWy~u zd%c&TAtm5Xj2~)K&#{C^7wNSW^v%S?X?|PB;Za<#%x^6#Ll9*G0dOAau7O*aC0s}% zY{{lVG;GQZKh?5Ncqw?%^&1pG{1!vvSQ`Atf0dnqRO{c;#jWcI+wX~RB#iGULJE+a zNDAA$nPkJ;T$}GsAbiy+wu&f)=L~ScB*-6$@IhDhVV!w!uy<)9dD~DcvC!TnI4v zI0PggTesjnA?u;2P$^NS+X-ONP(`mMF~L5zo^I%tge;tPM~wL7R>0G#?;GP?@jAC5 zI%*;=>UwJ~Tgac?1%3@5sz`{&^w$%`d02ISTwUJpafRoa@h9=ZKfO^c`K3TX)-O&p zhUzmq#ujlqrD97BV9k?9zP;oe5IX3pX(NR5e(}CeEo`Tp+Q_ zD+~CIcKX;Rii;vw54F+kzLn=*>VX?MUjC6nY^8*|7GmLxCm!+a#DX2~U8Q!%WTKF3 z^Km!h3LhhuJr-q|E4={V(BkoPx0GSQTDA#}n1UlZvLqCSSW*EZBS)5@HNtw5A&6{; z=9+4LP#8tw*;GUc$Fi&5Hi|137704ZDJyKa;eHO?m_ja)Tx7t&?r8a8J9r_Dt z;diuVr}8XVn%{0m$csNBnfB!^9*L#o_s9Bp=)q113!Sh24~xd=jRwg_?kYbNB2*WE 
zUM-sO*Eb_bIVxq&i_5*Hh1u;GBej6W(e&OKfjbR%n+*gWb$<=En5VHe;Fk%eeds_x zZCWFR_V~pZTE5KI4^8_85?7Z4S~sl&&}R_P-i&S0Dq@{4?z>mkEX6W|mpd>t#qmbH z0Cv!vZhJ6+3iANgih7g|1SkVuxKKPhN#(40(Tc4DZS*j3?@pcASFP68g*@!>%8DOu z6XgaoEpKuFYkv#F<@6ypoSiK3#LnI`oJTh!YisOuQvO?d21);N?V)>6Wt&-9NyM*t zF6cVLCUp2~%aA(;ojgV+o`fKfSdz^fwl6@@M`cDBTWU1tg$Vm+Qx*DEckrRtAlza5 zg;An?$7lcgmVY_lgElBdRnkN3af;Vm6$f_p?1N6-u^{ZJ)*}n{Ze(Kd_DaDTp+*y# zY@J4$8az6 zJ*?HLBjos;r~N({?@NRa6bV(hz|pky0%dKG7B)Bqf6FVHZo0p12;aiR7!Q-^7TtjX z%RgxZ*kVzEZHz~ct}_?FL^i12s}D{F(8(A(5Om_T4ucxKHeM54GUy#z6ubvKu*1Bq zA^*%$B2I~acoI74cyh3jOl&8jbqGxb=HXPtVM?J3|Ee~Akk)|mmUUH=Lx*ZTYYNZq z0=ZDonAPLjz&>cMm12}7dA^-s`K++8tYPbRV~&V(-QA0_AdFK?p6iBck?lZ{d8zOl zC(o(E?YHpk=v$*ihE6+I-P+k}bJJ`fCN6U0Ceqf^@UTO&rKRX>9=9m~+e^Mu@rGL$ z30v9}K<|8h@7O`oh0n*{;XSi!R%#%E-(pWPbr8D&+`p>2uSzfU{_?C>VG~eR|GBFY zig9XLv^Tzn(<#Y7N%XA0%C-9?-gfZ}iYW(mB47cK2l}7CDofIVf+^w(k&EpAW$zrV8e z@Y{6uGCU)y8k;h9_H%2YA$Kbuo_QRrkq6S&$yRG9lk zr%A21{@-O)%VliE`?18|EIiDgjwf2b(A!9MIxmsw!LHbu$51UNAv*q$#I;?P(vaXs z`)LefGVi7@jib*mmqd3WOg|L|Yzy;AfamOU4Kc$6YGc)hwS$b6S_> zdI=RNTz}GVQ#iO{Y6R{T*#EElkFbQFF{Srluv5M&wBXU#E0@No{v89tbj*ia!W@pP9 zHe5#99K5qM)!F3U$OHrlL@`9MRZ(PGT-A{rXWjrdC|J%ixRLR{M$$69 zCI0X4l9J<}4^@QQv=W+aw-`kHv0qY#i95`G6SC6HDz4HPc9q_acl$Fcmm26f61ed} zl%rzoML0NXYz9dqmu`0BzD-V3f$f{lxq?oz?QWPSrCsO~{0j=bqX#Ujrui@wC)XOr zey|b#FBM~>ZYqqr*@`VZQy)2GbCBlNM=8iVP=!{TRxcCV-$NYyT({FTmv44cf>*RAXV6M}fSJ$T-DQZw$>TR!7`@agt4MyT0 zZr1_eg$)=J&<)zbB4^W^AMd6d)3;5Kmd-B9dkxt2YU1OK7`b@!iSmEDS-RaqM{dK+ z-_B!{G|jeGa?n?DoDe5Ls9Q36PwxrW_(<(ls%L(am{s|xdKiXG?2;2kJgiY8Vbv#4 z(s^Yh7E?v?XUp7qf=epLVn-0LG zKK|t0HU}TiI&o(D8zE}%Ff&b?aK`urd;-4uFTQ8R3)m#3pRkdyZ}g=Az7Jj5m+nbD zHf{gQOkQnM`fosA?KcQ!dJ(TbM)`t#KW@xNk{16CemF z4KF+=*`86?9H8d0jk35Yea7!6ymMk?!&7V^VvqgFP?yvbgviMkEM-LBkuwzAAw)SelMhu??a!*@8UYQPEFRGK< z1fzr$+X)kkg$-8u-27gtaAjEg{oPcs*K>6{2Db#r-Uq!5UoFvT_u?C-EJlt45=4#O z^EBl`RrOkQE;l}I5(k#_k62!v+N@}BZa_&7`8b+mR%)Wlfam+gu39W_;%V&hK zDqTnw#tg4FL`lJ58TNWs!jkveeJ(~v`E$GZtl|%8Rqc4TPedd6ISWSL_CkKd-zgbJ 
z`aZfkXeOu6N&nHwu%JR}5*b)X(}7U$r@reGqTq2IKzk8#K0dsHZF=m3xeRGRXD`TS z#Gyq`Qj$%U8<%&!3Fkb?=22CuH_&=^33QR`b^P^v3wQm5%jwpN#W{1QT<3)r@sq+v z)xEDmj%6bd?;6XIFK|`cVNGlQbjk$*-3ENA$AUu)q0T!zf-iu9OI?~hE1NLmD0{yV z3M3{Dfzm|=kQF+}>3SnpP56@QJu{K$vs#5Fvr(9k1FcWP=uJR_{#(^C3{Ue4nIg}bS`VwA zOyP@{nvW0yMxtEmXOjNpNniMhO>(P}#C@QrWz9?=H_imkljZesYGPKoz;g6^RQJBbgBdQP< z?jTSb9F6leS%hz^MrAr`2-`m1@=(smWbAWqm0Pn%TxMfy7w_o~(* zW`ev57Zy3(=2=lF32Hjc^~bka;q*rJ39`P(Cu_PLoJJ&WA5AjtFmp$)ta+ zxG7~4rLFAiPi-3eNg1lUE9wr z=Hn4Y*<{m_r6e(*ErdSVps1AHmLqGTnJk~hah-|@)++r(K3p^*TCKsy?;7I3NH(h&6YFw*_8-W#uA1dcB-f$r zo)I=5zkqx1u$yEK-`=C~?ayzGM1yKpG5pkuO0xlkdqUSent!FI2nfV>x?sxRD<(}J zx*@qW9-z#kUWOmK2*kx+E!}cimoh@=F`LbtYznt@$8Y#f{4<|ytRfW#4MNjGnI|;l zylu3;-Wj1Xb0v+I%Kkc}QfrR8`3@12peu{{o584uJkMz(&OJ8w#6 zNrZVlhAKVqB1f#Pe7YL&@UZbpQ_jwJ0H%n3f|wrKG+Rr3&rndPNJxx({dxA?l4|fL zB+_Z|;$-!)r{h(d`E+`Fr2D@T!j^P!BuUbW_kz;o!N~D#2hGgvmy2kaIg@~~{;Plr{c>j6o}i#E6I$ISb z$>mu;YV~7|syX;uDi(@0 zt^v#spQn;gvI;E+XZox)b|5gj@G$$|xr|!+V&u-u)ZE;8aU*1bAk!r2zLk2(^?Z*u z@O$T~cG@Xw@3>K4l3CJ?OtOf?-Bpq(p6RzgT4jD1zkK1&62`S~xH|BIZdOzb5d7i#@GLt8*?E+r;2*4%bQ(!|6j4bK&=!N3bbj_2zh0d>EPs#)h>zSzf21(=1}~w za%1%Glcasaen1O>H8#@H<2u*WIqD1>;G2z()M@7Hx5ly)(LY-{}7E!qVtp^V0|0kE;Ua zwVotBzgu*5bjlf^(HQmR&l)75PRj=YwO7BpI@Iwz{t=>y6=RNc6&Fk$mQ1^kc|`pv zu_$fph%q5ZDz*oWD$eh(^rx!`jd~&KV;jh;M>~JLo_%UW^j40bYUS^dTk3x$h0yCX zMi9TM!hM+w#NnZrW-EfvO&fuB4;I^lt}LOF!#jN%IR?Nu^I!3n`|9O6^blJ971>Ly zigE>Y)Dx@|g`**cLVvYY#e&qSE%L+|60*pJs|(D{AmakQQd*Hm4{bal^b%!n9xh z*^%?aa7!le!NHMT-TKItQs2n`O6R!XccsPjp=;F)h8NYZKsCm?%r+3Uow$o9K4x-y zDOn%R8s)S57)^idKcVz}v4IySN_EwnoV~F{m+dCqV!8xT3dXkA8#yi{AJ4AC^dJcUJ&Qa3Et)5!&HN+>YPz&HE)j)}>@}({E_xrK`5f5g z#C`{@LLYlY`|1OpfjR*PS(jz=l{kcdYUAT~bvM~|bdH%XK%cVi;nL0(txc^+3UKC} z@Y287PyYzZj?oChJRYX6V)MEckyq1*Y?ugssrn*Yhjp?e!4_9o5}}n|_NQ`F;45t-|NFLs*G=DQ&)o48#JioQQV-={!?R|D z{MW@l#>Uo+L_N-2m~lbVb~bGoo(962Ioq)JRlzAlE492C914d3M@lxsD%oBD(Q_58A#wSojkmswEC`MA4K9eGcMG~1R$d|8} zX77HlGa~urvhGIUyV%O^`M@5rvqL^&lx;7}W3!@me^TCOZAWWtmwke26k$xQ6SmE2>aYi(FI7K`0D_#fQ=-6`plve}rVk%FM 
zH&L_=dI6Jv4N;&#m+}H}k&$`%uFL+hKdG+M( zZ~vignuFE3cOnS)xR&4LnPVJwehGcqZlYf8UHy{GYW<)r2(s!_51_AB(P|8Hk?0n} zcQu8bn8>1E^Q7_)*p=P1TLNtJ5mx2J$L^J2_HHS7)!^g_G2bON*pFB+2#0S|u_ny& zDwv($-7BRmh?jg25s1i+Hbd;V13B9AOle`8@};&6VM?Z$7f9UBAC7>+8@W@+GgOrB zI{v!G>;2hF;!nUu;>S^{k|b`dA*5YxRP-Hb)=aZ-(?GpQw3u4eKr7MPOi>%~Rij7A zIXVg-C@MN``q^}T7zgN@rW^J9E;hs}ic)GyRO(_ZRpbE!69lJ%gp;LrW@ts`{I4_u zq|T^_H9)$v2^{rH2&8YfP&);KRQPYj_^c0roHm5fUhQ2kvK?4~B{3u$0g zOVq1=fcTL$mF`l3@#n8On0s>d+Xag)A)r8VN8&jIgzkXz1NNs7wr@K{A|C^Oj1}Jk z)_qJ`G;$TovNS1@M$a)(-AZHp@%<)B>;Den@aw&^sx(GN zY^UA-tr1#6)-SREn&7>nHKsX^NFOx+q@G)=GRC+u(aZJMEsvPt?c(Kk&Fn&qK#V}o7ByUur!WEarn-oCDlUgH-b5sm`K&v1MR zzNFh~hIP$!o14&M)};D@lP?VTaz$qL6yQ(f0)C;rEDAsZai|O}=*|-|={eUv8$)8k zv>fK$l{iOAZfnkGrPIbhBSp=b$5~Sqx9fWK(k(|5l$0uQ&xPu(;$Lv}87|u(&JwHu zPk39n&Xe)6Oyr zaFtd}o@tPaHpy23u+A6lc5yp?id_|1k^P&xRa%t%>H(&N#)?c&2Bi6J*|mEH8%eJl zFgIAyY-LXAtQ9&8hkrbSaInDBa^ZK2p~Zjlfc}V`rEhLr2mKK8@>EsxxZvY zuc4KGb`8!Wkt@DT4rMBU0hwu+*^KJ*abPZ_3+7?9F*vd1IZC<+dnVcCeZu+dN>?s5 zGc`vQNeQv*4lk+d_P)>rk+7dUCb(w^(Hs0AGnGx*^Cx5h{6;eLcymdRFaq!%LQZ6= zSDKH^zgaBkH{KR+=hdB2^e83Fy^xN_FXudC-l!Sq)RRv(1doA~)NLQhQ^|j!e2dTE$1SADb_epr3nLhJ24apmnt zpsJxNs|GYv&^DJoWEjSV?!xxk+7f5j45kQ$vY=C&ayhsTj!Ax?91`739ko~YIFPEW zUvZ2jUE>-3e+(7V&jhOcL^ulp;WJrPSHwjBh!5SC0Q+n&79UKjjiv0uNRjZz@4~`} zK$PH)EA|MocLohKVgL)y%q?q-|9Ksd?gS8A{@m~TP?vGwMc2!MTfGB_JNl*l8Vl={ z@(CuIa*EGrGr!J`DT0bfW%VD-`0V$t7~&uX5uJ>pa+Rgy^&}^Xoj@ zS>X|S$>x7))=H6?{D7bbpkI1>Ag&)j`w;UgWIQu1xuew1@QUU`H$foJU35aTvja(2 ziCh^^AI!=)0mh(`H(T~y9YGIGF%>~5Q=>rD1p95z2!4}KuYxnLcxIpF zh-kKL2#98Jb{9W5);0zOOb4n1PP@7(1FfF1K~+oD6YZ+EDB*d?dBinB+t|W}x2Ivp z{(>aP)-BhGSu%XawxM1+<*+}#>qDzzoRJR?uz|AeGCQ5OW8;b#)J2^2)7IgYp?1`Z z0)1q&VHk5h-YU<1gB;f>0pVxHPXX+gg}|?J=zo|{y(0Qe(NTt4Iy22m73CDK71nlY z=Oi)rKLz^OuR;x*agrwf9Q_1V#1t_*AN@1cKD{m)gIEHtEN)*195#Cn4}HU3on-3@ zKut~4b_?s5e@hYiVg;+<-Ao$(34yHH{=XK0L2txCPi=3WPHw$oKAqlrc!sScxAlJN zk$v%mL(5gStq>*u)y!-V0s4CM2eB#t_;q_u&i#aVEkNlH|Y4bMUL^ilZLh7?iq^brZVYColm3Daveg@PdVN1sL*bfIo0~}OWceo4crH%<@ 
zcQ7A*>K>6Oa-FA~gc-A6{k@UlFfg=w4tswt?X)u>eHGALRAM+G`&DpH_Y^@5Uw!cB z0g~9M*JhnyYpNdd9L}87;jrd8c_QCojeWRj~o5QqpiQH^IN1g1JS;b<| zve5>K0x7Z6QSlSzb#L^T&MCdV97&e7<>eOs#>4j1({p2{&toTxZHW8_mI&asz^O zHt&_tgFbKXDtlC<$1D4TM`tLM4d3KB{_U@fizG%07mgs+zOaj!%#00 z*D<84res$J{bJzo_qj6z6`<9qKLfq%?o+j6jvQm7Gb06^xnSfD5Jm?(VlrlstO}0L z>+g){(Z?Y9ykb)=fA#mwul~)7FeP-KnVzEe35bk3Z-!a)f40kljC8D<-9_FA;2f`y zvS+06t-PHy;(G;WKL#uWeLjZw$82dkc7hdKvpR*>+8D*fk95NtRBxLvFWoWBIpb^$ zw2xvRhrRWJC?giCF@JE0FEVHakr4sWzd0p~8MkE5T<1>H)S zR&;mz`@W|eD*{0j1UHr}iTy%P{wUa@H7VkW|Bt4xii&INnr)om1ot2b?$)>kNN^9X z!QG{C4-UcI-JL+=?(Qy)yZhyQ|G4|1_vqI>)|yhYs^$d8*{x~Df$0R{40{9P&F6^g zF-7dt8AKg}?_$SU+j!&8+B}HDNrJ85+NTvbGcuz$8Tkp&PmtG5Bo1b`Z;QjZ3V8%m z;)kC{x9XkSf!?H;&66oKL7JhO6O@@?|lo%dG9&5d{t3iV@2>%je zLg6)eto`+QD=Nz0O?Tm?3hq2Sw1C(Y%rG1!gV8pP-sYiL zOVw26#2h`uOENY_?s3`I$geS{QnvW;L0{Y)v+W=9d97l77^b)DK{Q7nQM375)N_KV zQn6z$rz{ANNX<x6G&PPHsDENcAr5seCZ(N2h95lD&&|72)lL03B$OlFQ+*^c z1&KIoHN;QOc(m?VdZFs1<{im=8~dww+H|p(v_XP(1+Audd5KWQ{Lrw>-p0kp#I#?x z8|SsLb_YHaheCWJiX<-=b<>mA+xXd}2J&b2tmZtK0zjB6%2wG_7=a(wTj10*V0o!V$}BTA#cU!jKFl^5KvdE=fX1TYaEo10&*o zWzS+NdlYW&f!mt%hdD~eyoTV+AGeH^LhN7|;~2}RbD?SwoOtY}vd6U1iMu^Rs++WA zm(r6iN$V*n%W>iSkU;2zJ#=S`C*!+emVQkbxnDbwJDLPk=BeG2+J|G^RW=`%M?_2( z8)5xwX;ZFMj$$SGkxiWidy8}AJ0hpZQ#a`z`Dn} z7b%=D+2&fAQP~u91|EI$(g-D`aZvBuxA6Pl5YGu!t~wg)1=*ru;@=nq97H3~=4&RT zi*feXXAZ7Bxs+M8m+V-gCUm({$IkoSwdVEx^)uI7Sr(X~071PCx<>Qaw{~*Oetx7Y zpw$3BUm?b-@LwN?hD@k;=$$~-97Yxp%Bc&>UT4w*hqB|EkLJBsa>^+^sNQKQPSbJj zsgD|fvP18cYAlfSpm-IE4V;FHZ)=V}UZ@zO75eyiP3l#DxG$v|haPASMD8e(YQR`^0a}>PS{E&W! 
zb9PXF43DC}?ZzFcSjT2Lp2m+;Ks~(D{PVD~<%`?So^)JfWF7lo<<3j<1=QR-a~I^c z`nKRK_pmu{>zhZmOlzKf zWNuwy3oaxiML=PL*IgxEG;0V@gJt1-7$al^BA4TQn7yovratrmE=o=2(NX45Wm;Zy zUTFWw24tiQDW4^rkcPUatqkOPZfakYm#7c_kkTNsg|1Uz{rYy`G~EH{{6s5Z;D&E` zVpIqqbulr`13dU5%avU8|Ioj3O98uY1Aor6saa;%4ujIW9a=tZQkj^1>onZpfRE$* z`%8@E9O9L*?e=rPf*7`E>gFblzPZAX71}OGy9zy^Ptf5v$D@YxPV&hXwvT~K-N?;K z50JTr6Mx>khOL^!`eQWLi%dz0Uhy$OC{6&eb}0*Y{&fA zlx<{`-A6hYUdDeOP^^X#LldZ6=Dc_LbMxk=bE5|4>p1Hz-aEniF4e(mdj3T|XIS|A zi+FoxqF#U7;_Rz5oy=uOhK1HQyOD(eWiV%$qr zQAgoD{Kc8w-LO(dMsfGOo0>->-{ZLmK^(+`DsSFaJ~^*)j4B#zapZ=|r@CHv{6~cT z6W0tofq48{$g>f}&I3dVFde$NQR8HkX2J}AJdHIkgKvq4-zgt#`2c=!7o$FaAN9_m zshVd@*!#<+6f$RKZJNJ#e4>gTyLM@9W3z?T$X-SsPvsgk6vSkb8%08qr%fF=L8wH+ zo_#cnU;kY~%I|Vu*CwHAapDUC|1skBIxDt3n#iD|k_)%G&U#BT3>~U@E!E2H;Omw( z&_qu_TTk>fbUc*-cV6_8EjU;%t~7=mtd|ecgA)%2M9U;G26wPA2EO4I54(%=)zWH9 zG0;xSm;p*Xs6v8=PfKkExq@1T3hE$49P87blS+GB=?Km{&7qvlWX-NUgDAcoP8JQ( zU!n$Nub>~#DOmO5e#sa`B=~=Lya@Teqj%QPGb+2g+)?;;z!{P0Z>l=;|BhU7!aZ^% zmzS_td$vEp>Q`H>`$cmP4~S9{y@M)FMK9-?NR#Ujcxr>%bzytIv+EB}zvSfZXH=#aZR;`W@nj@PIe>EF}7w&FfE#++zqJrWh-TIf5Q7>hHXxgW8a?- zbauhetlo%SEL0&dy4>@v9y&EL^!;WX7X4*lD7xM^ttaK(Fg`-doapRHW=8`Al-^v6 zzCCu-;!){%03*nwjC@%q#fe$b05%O_Z=kbaNyP5=3$jhtZ!}~cX5p!YPYNpX((50j zo0Q)wj*x;w_c}+DDUPwGwpxwbG)M`tLc*@}8I&5`+LNKJt>eguDOnF8lzlp(ZNWWG*B?(+3^h-ejK_rfq)M zGE{k?p6ycoAeMG6M)=r)rF4)dzq*=nituy9@o@aaoHg2w(^NCSD&&`RX`W<;G93=s zXId?`W!}8h>3cK*6qIW^nPEf*OtlGi)`z;h80KNTE@&GrdV0`!bRmJauIrMf%yZcenB(V*=+8&%sP+5X$Rw=&jhobqKkBw1_{KXVd z9=fe;6$FLogBvkNp!6&EXh=ms)4fCB$n(4~onxKHm8Xd`s|>C&kCCBL6eAnD+XuVz z*QAyAQcA`vfl$=Qc-!>Mc%GIw)`2u5w0&%>bx@*Dd6A?9M%!_Q`uki*_nqFmkRo^| zDoP4{a*e`==ZZlhKGeeqQK*LbFP4(8QqL~Z*ofgiKxc6F8I<2Avav6J5-NR$j zeoX?)+|foO_}T4vzDvLj;~^yr9x@O}Tur%9CiL@qWbgMgP;puY91D;C$3hTOsjgz) zK@R$dC&ZmXrPL(#Tczr|teS3y$uifr7x6%A9?L&?OAPja0Mj%9fxo`)=_W&#!^}ku z@2-IQr{@iABH}jl6!o*v>?%*DJ1}j=j^}xw;X0tigM1WEDx7g)h16|*dON(e6GFzn zu=?_H9nHa$g@h`pS+pub7>&#?MQcHqucGWzc1;2R?lYBdJI;0fC#kKZXxqiJwP+d7 
zs4%%s6vdZ&=tKnGiz5>O_6BlLr_`82VyV8WShkvR%lQmL4~PJPi0?gQ-Tf;zViQp$ z7X$si;(=m}Chy})J>$3meQr@q2pYq)4w-UElYg}FVvnH}5tk^TLcL^!zdS2e$K<&~ zP^9==m{O9ugHabBCdGiaw^cpvkmrMxe^d&1_ZNeCD!U4*!>Su0<%wx8BDJLyAnum$ z^13p^`{M>Vr#Bk8jU{apBDb;yUu9l%X5x9U?#2I{iUe|~ehb{f__G65E`t61{9b?| z-UA>**uAqAYoEXO7e2Oa@akDw5-_Nv&8n~Qzj#83`e(o)LpX-nu;IihOk7l0e`EVx zviiP#uqj-F!{G{0XYHdFS-VaR0@~ni7w1RmsY*=YnZKQGiQ0I{jTrG3)G7G0W?>oP zWR=1GFEk?Al}`(yZGKxfdx^o?k&qiC?RM8_NHx~L+#^%t;YBhX zK)ca(C3yjziJY&4FZP8*|KUgX8Mg5hzH&<|ex8k~mdkPZrptu)Z5^)+)`V8-NZ?w4 zWdkAAyf>T{QO28TA8{P)?`Bh(r~OSAs*@Ibvd*5;W8;pi_lrKdP{hr3;gf?zh29>W z(Dw%eq&Md!Yl*l=7QW&4l^bQsbv~oD?!;Fk4k2|G%nI)DOsZZt`(rEDvazlz4QK+448iYmq+C}bwB^l-m;Hs%5NNx zw@gPNn+Az$B+H)3drMt>F3bVsq6Kr(;voCX&X*OVFaeecFV2RHo)2hT=Rat?Z_dvP zAc!zNkO9msWNGYGv(j}*+1}bC(_QnYIsc^(o4f`l>Ny9IakcG0O_sJrsYjuEh8;kJ z1Nvw_pvrNFj4unc6MNnd9JWF+n%wl*ondhL{lwzvJNKdCSUB+5LY0i-_Z+=;VG~Ve?{FJaLs7NWnrFqTeGyPp zXYwy!meiJJ5Ip-nmH#q(LOq%mZdm#x$CVT`qS=L^mQ$rsqrQ{AFloW~f>A6LR#|`G znHEnlgC^)cKFE>szv9H19ubrq zB97*b*X}B`mJ1Q!6*~59;yW%;PhQKa^mi%A;0cIjjurU@3Sa4moHKu!(So$q9I6yD zwWUj>6NH!pZE2sqXJOFXe1z0}`2G!L3(~ok7G-;pdA~pF<1zaPHFNVK-6d*GJxn67 zyJHO0loMhaYnSnP-^8Zn|M|f$HRLQRc}t#xyQ1MpUA8t2r$?PyDBhYpP<|oekqL>D z9(JeAS(&&&_dJk!?@ZO=);?lfV7|S}{F$|jkB(cptDwyjt)1x)=-bZxpgO*ue*7>J zgfY-J3SN&knFP$wu*btv{Q4Bn9qv%HK4Q*K&1^b4u(}F_K-BnW$7qu#fbBv{Jf`~_ zq-zW_Ys9($BPLU;u}I7Cgs#;3%@5Wn11WRDGdysAB`vV6Co&hRX$n7-q)3W@9y&c0 zS4g-qLAfE%x%PK>CaDBXt|-(?W0U^o#VyU*M#yv&$(PKg;;~nH61m3N$r_sjhCQkl ze&cW9-nwhjJe8mvD_es4RW1k9j5w;IAzdVZ9g_~MIfM*tZj5y+5F@_H7NOs-Ybi#-_ovrKij>`MWpLwc0Epd07FoM4g>g z`w+1S5WDaDtlyz(h;oTG-~!*GVNAFZTXZF#F}G;w%VsD+S$$!Vt5L$dtlu#lY+IDt z<_P6L`EFzE`77piavM#c<594|r!swPSAlcYkm(XDLGf|k*Sq4(RtADPbEdb2p6v#t z|6E$yg6#`^eEsv54KEc27fqsT#NwyXF~>s?6&$wlS>DV-M~Y@Yo4m1NB-I~688*e?)dRlY;0q!J&k|Qga;k=KrL|((O9bzhDc_> zkb1(b1!a0+K~6c|h%@l}_}{DD(NSO6k}2M*_gPa5O?xYp%H!>io@|p^4>l`>i^)P} z*!SQ$_DHlyTcq~@sf z>5Ai>XUXjSsmIB>L$LfsoVl;#LU6k!(58q{ObCxC>|l>xA)r{zTN3_-B< 
z647}_poKV-scF?Prl^{+PdT9|uvtvZF5!)xF?BX5(CvHR5Mepn9$eaar!Hx2OBGPUm>Ohq=vrEhoCRNZVY3@8P`}(RJy0cR} z*Erl}e4o;8E#yOd@CeoPNkJ7k!wJ7NX1?N$&iT!HSX6i2N;$Z14*Guw2mU`a z5k|4jU78-rmf>ScmlZl|b^#<%gw z$CKDeK?wHes^c^*cDeEo^&*T0o2a_8ewdon=JXy^rU6HjT;&h3A`#IHL(UaYsi!rr z=#Ve^6qyp~`$D07P=;U7lSHgGvgRShdf3d}fkH8OV7{_D(*xRXr_ZckZ`qqZ%J(yXW`pIQvlJjD&U{t!}Sz|#1!Q*BvXDaqe0 z41Sr8_D>CVw>ZSLnR;7!o7xP_X5|ge6hqkhU}oLltxVN{xXp4-U5AmyN2IV463QJY zzBB(=uNhmTkyuM-NGeibl!#@!G|6pEzGyG6muo9!gA%sC{&rEJHK~cOf%HB-yNa65;LaYX&d2Jp=B1&gBkelvu7ODHa}j{Tf5`)O8M8Zg46 zz!aUX*>O7hQQUGjP;^7tK_K6>>^(Cl=mlx<9*NUTemGRTtCjHV?hg@XqDR+?CxN7mQL*yMY6Z#tYC#AF;;;=EioBio#2&YGe6z8BbSOe>dhuuDsm zVdg~n*KLhamQ!z{(fZ4RcEC54XPKxpI#^IddX8L%w3LEU>72_PD*-Ce)h8)kXI78VYc8Pexgx7KG4-)VUaSMWpQWs}%Gj z?(V0ORs>1t&H;#?QEFnB9uHGuK%C5In04sD_Of0id&Z4gO5=k*UeuZ9s4 z-8mTzGJWk9R~SaRMo9P_Df|~ot|Mtb=7+ct;&<;54ekFc;0WROo5KNQFm^&-x2g*` z#Ehz7qAceEjTLLl-WS+p72<1^G-&F zaw+#mljdafVsccT7!cc0HVt6JeqqlkohPj(t0E45P}P^CG72~f8fdG-lkV)KBojk+ ztRnQJt%VbO zHCKoxnXefAx5rQtH1STBG-7tL9+~6XK@GbP!3qBy#fWd>;Xc z0exu@T>>z?U^bRJDJy{92;@|B7cT0V>$v9cg+_svkG=NkiO?_Es(YQK>7KYAq%bT! 
z1ZBQ;xuo5+_ccIo)q`{I>*Vdt&G@_iFZ6GWS&NR8qUi|e|DOv0;DiiPNZ?Wy;qMz)Chsd6yP*&IIe}uxoV88aYAHyzan0OI9Zp9WxUULM_AOw~^XXs;}X^34&~A zsuO>cOUXQ9&|`HqnNaOjwfrTgV0}5=2fG^cB;^F=l>Z$7FCo>4Aj=>yh1h8ql1(8r<+y90cTF z;XlyP9xtIyVbojRfGc}zokS!cUFtf{NRbTG=w{vXpXR_4!3Rx0Fxa3^ z2MDd+z#XPWXU}Td2n{otd9)8WoL~QN+H^&a+XbHYdZ(!Btj%06E`2-*E_lfLt~G!z z9Sqn{7jU9fva#_$ep1N`{-l;J`ddCgn~$7@9Tx5w=(T0H=!%Dw)icjSOdhf`o}-YC z(|Eq^l!KVXs1e^Sc_@QPA%aPTtA2vK*vDjIYO+5ST`KcFrL2>p=dGc-HnTRfIonh- zqnhyXKPBOD?|!&^eWtv9Y)NC(`!GEDy&KOC5`?$tA+dEiv{Ki-wppEl2_%x(~@A;|Ij`5*ju74uVkN0M74%v!|mw?4x{pPmIZF;@6kRZWk9l z#kMx&ZLSlF@IXE60=|N|4=tIfN6Vo%&ou#K(w}5Voi5XOno9r3-;zv?aG*hQon5On zcT2b=IcG6m#<)Ey$5MncV5M#>l15Igre(=#qnVM*@pXA5uOgFU;HUB-2Ipf8JQFmH zPvH|rwy9|XU>%!}Pdc>KaTR}@tcjuse5fUVUaJ`Mqb4TwvcpKH$~6$<9*|y_S?4%W zmg!QDi=G&TBs3EfdPAm+Y>KnQ9; z@n-$iC1B3h#(R=va{`600}WgL+NS1&(5E{CHMb9N+7DIl{t)bu=Cdi}|2~p{Il7_UJmu@G+>ebCa+O)yk*=%|Yx^)fxO;-q z0stS6JEF#gp7(!vpWl0&{fU)}KRT)~T?zeNR|l2NLgWdJ8j&RM}LOb#Y69(xiKxyLrecWjYai-KjI1NfI*{vI0Go>|WFY0e1l zBE$)*c3PGE@-P~Ieh=)J6q(Isty6s_P@DOn3y~oxt-=v|_~Zn%9&ol)x2U1sdD7o+ zQ!!g+9^zQ&E;CZqD&2`qTv8e*umk2&adSk)sDI-y4W7yGyUVq|O%o6ku={2^I5N`U zN2jD@`DycEtk@o+QslhmPX3G%i(&g+3e4aue|_%n@Jmj^X`qJSOQIQgb$wk>sNVzt zun=&0u`Zbnr_ESxuDh?k>Gc*m=f-h(Zt8%inNt~hz{$#1vxI71C`+48Eh!T}pL&;z z#E|WV8GS?r~3jcdQL7c_BhS6H_ixV zVfM;aB-!sjtbUTp=+UE7gwE4Mu+Lej+i`-Q#4)>#=rKfuVLctK3JqbO%nQ{qomyi~ z)yQ=MXxdEHpTy9iOvkftC&kRp=ma6eKT3w*y4_5gK5asBEqNf=TxZgZWOFZnAT=|nHE=GW=P^zVDBR?{%vmg5bo!UVQ>ByQTg<(52=a-+@`y@Xh zrc7BNIHnCpgB0Mbvk1mbs=|L0udo={g5aJJ61J5^;C4dZ;X8`}=_M7FQb*QM&Elgz z9Om$C58v$}b&ep`MSf?br^o%v2cKv8FK8rJ9&XL3``VcznU3Z?yCkZiIWLLr)o*NJDd*XtE%KgZy)rPD z6K}*7e#OQnQg;hpQ=`Yh{F=b*z2UKY3T^D7G$sI5WIr4y_dSYB z<9ii(DG`0T_fF(O2kVUX_lidM;uY&VL8zD_s^*15EL%O7R^+bv`NimZ!Rxj2UO@Ym z2cw4;bSdQ1z!klS`_78cTO3eVVeM@3{$Vz75too#8>ty5wseob*)y!erR{q{&wi)y z|6*C_)>DmG||a%l)4c^oQ6I_>!~#;huCaGCBbx5;NE3MjQrvVGwkqNJb_QLH)bq3Oix!ctbI$ 
zFJ)NGZ5I(&@bc5iOP)^~=r@PHM)0581fp)F}7+skJx+bIk)D!xUV^5TXd|f{cJ>)4J1y!V0va4BCT6(JHmGNwHgl1jsm%X<#R&iN_m%Vx)RlB-2oBS8)g# zhUn`UNKvN0Wr3PGpV^FS%JYQvWY%IF2-i}04^f}3z~$itBuKOxp}N7!pKNYqLD`J45ftyI-v*VGO-pW^( zN5f@?fN&-01DAobOMKj$Q5lp?*{SQ{?*LnlBauu!fouF!chL%CfuDrjXv~a=`p>hk zs_O+j6bG{wjImUVHLd6UXi(C$w~-z zr4qJD`0zWed{2;{r@QiEYL?`c7qj7E$NRb6ONdXhst2Isvv5Lo_dn^t0n}>&xFb}8 zz|1z5>RNKIJTuS|zX>?cw~giB7ep3zc9^eU>$9P(S5v`81V47TLNMT?X`SpRynsczs3&5H1QtE#E?#@96#} zwBoAQQSFn{xu$cPeuaVkS68>_wgs2|_LwzrvL}rgMIx5jXQ@IODRPan1C?uFVBB7# z9B7G;e{Vmp{jfsQmEVCS9%B$%z0&agvxWO@r1+Xm-tDk!w#$PUnkUPTm2ML(^!Hu) z&C{wER?S~kx$Vzu-i}*U~_{PH~8m2Pd)0+mf z`Wt>)kQ9>toBJ8wtoLca+)(p+IG zHs(81NMFy?FB^ues}7}|7T{5Zi>XA}Fdyw;pnc|oL*cTag%gi`j{eXKjuWDh@>Tc; z!2B-(eBCb#{u6Qnl~!vd=+ZDvWgcMewpv;>aild$U`bP7M)+AKik3`(^Vpo_Hn$~Bw^8EW z9pCSVQCG93A(dvO)Lm?{cOt}^3cn=Y$rY$idFC-)tx?x?y`=a=(TEM0tH}UVIAO51ZLcP6K|@^nb|Un> zO*3cQ^>6c?mX_eCD9T?KHhxkchYlKjRhxhH#B5DLlly38h3&9EYfj1wNmD3*${hCl z!H+InV&HSx#M5z|Xv)NBvyWg~B$KB(EhFsBFw(`#HKNe72J!n<(xMBxBZ_lsNKY3D zpX~Vv9lrZb#4}}4JJ%jbp6s>K=CtQ4>)H8}opWKwP&f4NJShB)of_*Z0M6T(Wh_-E z7$m%)>N!lu{VjudO;jp-36@FQ*ZWrWK&LNc9Mcs&qGC{^`6toiEBm3IY-Y>eD=ECX zYa=G?c!G;r(_zuPPo03tU}DPlQ|O6TP`x!^w*iY94+{Kvp=BHC96geKG1#PQ1Ld~2 za{FXLl?`3-_0%@XRVm5_gk7w}&~4Mi7pu@zt*eKee?kVGopPW02B{FRdDh+&24HB0 zh#ms4{J%>TG^mMQOSOGE1MNiXxQ?7w1`Muu;D~bfg?Yl|1^w?^3VEV+-sibu-!9o% zk7?M@sE_1+{k)U|SGhLdxVU79+Ax&zb%sw+jqmj|n4Bw3HvLiW9gc8+f|A#RgPzYn z+D%EkH-!0Ii!lJ{K|C`Y9sT`}6yim!Xig2m@cti~A@Vx!vzImpj&zS&z*@?cT z>t(fYJs0h*^yCl25N3Zpz-#wS{4j<5D-^`xN!p#_Rjp*ezXzvdJu2ur!gku)f`s3gw%@uaCi15<^H+T2cFRzt5a5n+-iJ$EzP=# zoA8vYm27B$`w(p6@yn8=KTRvzLjPnyklQa!B(5!1aZ?ZZYM#`C2928u3M!T=XBBlN z6PAY7Fe909Q=m#44HS6-C1eAV_YAXgZtm*(zrzQktHtvsK6UE(-O5_GbAG-g6=Q@@ zy-<))=%G!R|98Z40>C&@EZcV=q?BE^tdEJ^*}@L|~lB z#$u%ta5hHkXmzK1K!Dx9MPR}IgumVOcyX_6z>{K88yjz*cH8dX?bU>JdSgu=gT^L_ zd9S@E6c+!yy8?!z<3hoFbhm=%1L?V62Y7U4j`w?61JyTj$xoN4RyLVZlYFtmO-$9| z6~`3q)qIBanFl8C-{Bb<%wNXJQDnV@eS;34n0T4iO1+N$EO3&9L1LE+u|<)cpL 
zzFngQ-$CvK4bh(XWAdJ#9kX7jdS5(qOarlAMu_oQB0`L-27VRY;LIr}lA@8}&8r2P zPH(W#)iIaK2(<6t{-w@AV+3XnVIPDRvcnJ}?%?zOY^zUH1di0pa;3!#M2QJJQeBw( z!1wC(hsI1nkTf_HX7|Txw9KESJxN)t6IiUT(u+cs(x!H%5)eveZfp5~{w)s0BP;=UY* zISTc6i$*C3ISuhxDzHxYjZVY^KR(B8N1e90caJIS5tR(of8+z@k?N1Olp^~Q+GER# z85&4%a>9ZnSpyzwP`re~7Ke_q4jQhTH-^TNN6)t4w9@_;A?dw^$NGNqOpb^9z$+c7 zlCX5m9(GY*%ktx3M)w+(G6J2x9=!}d;S!io60%YU3yAznJ*fi>%&%H(-H9hUXSte> z7hR3S{Pro|$+(n4yxfW?2m>*SOl-$7E8t;y0r0IYYT9Hbqi9jVzSecWRO~K&!eRMQ z#yBs6QKsER^6;RmLGUzqczs@eQ zPrAmrQ`N(n6i=VzdM-U;Ys5qBz85%euxP^^d^$cmj5&6Dn-p+*%KaH-gPUhZVm_`} zQF{ANy-%5Lbkor-Cn`-;rU)eW&~;Wuw?u_z?jvCmh+B#dN68YYFUwF$GwbBZ@Wg<4 z?@OJJH~%ZEHbJD78Nf@=>9cg@jI+Gk`SG^(MK@eg?LeiG+^U!3b_FqOgR?G}M$O;w z@pd^BOW#b`+rHLm8LROUMW0qKs0re$B;OMfu6_8UvNN}brhL2|TwJI?Zt$P>hRdpC z5EqL@ArfBAUDfVp8xZ$l&(mc~MD5I?K?IZ3@;P5kRC^2xC6Y7Jd4oQ_BU6 zwRWAE4dv>3Fv+SR+2o0b-Y5o}Aaew<$@7|f>LqGjx}`Q| zpyS zp(!+4zA2qeKoZAb*x>hcyPSP}w%oA_56i{xgx9S2nv;<47(o_(_Q180oB})j8QkGF z?+{m%KE$$$A?mc+Vm5)m8*-4`xKCP6yvonI-(7q!uEqlXuTAol?@rju3#a&r8&Fa4 zqkbhT@y}D}{1E{ZLpU&aXuDV(ZyX0z-tn|ips*}*ua{q%nqF*3r3&c^uNyIo>0bn@L&h7*>eL~tt%x{JcH zBd)sl#(Ip_(aRWcPoIZYh<@Ky#eb$l!H~q{iA%jUFpEo!Tso`J3O!#pHm4OM5;#g5 zK?YEukauHTQbvDSU0xOlUTJDbDMhT;={Em4PINLM&CW53od|`m?N1j)tjgtmJ-o~I zVJyS6V1Ld-Hwykq97N~~n~&xnL?kUeHeDQT1bpqEYtpmfP!s>MV`Al_oq8wQ z{zwNRgU~6=#84n6IWuZPv#9kUlx)M${0r#wq!zb~UPF6=fP7r<8|6N3ZZHw_L391T z`YHFG4sO0NRO*8Jgn|7pA|E~cg}^7#OG?0kYEkR!tVr>7+Nti>@buTJeO)ktE=;RbZ|A8^5R)A3v5%zMK6uDq^w%l1# z>h9=Pds?CI^B*Yrkm-9oAn*Cjyh*5-H)B871q5|WL=9CgTr%0{WuFU;M+2Z2qV`pQ zni$>3`s{hz5^mc+&%;n+m+y+Vz56T_u>7IV$D!;WCZMs$jN(jPG0eofG(|`gAUr}>S7$!s@P7H^ z?z|h79Mzh23G;`a?adEpnXTr>Qi1Z%qAB@bo9?yS-hm9 z#_$e2-}L-hOUmbhPO#4V;t4nONa>#k(<=!1;gDt10lx3$Fa3rulm~=!x ztmpM1Mr3`XV(1BH9o`u*#Mas-=u#LJL0|o@}robyve|W&*Ii&f{I)m&FEH zgeU;s@zKc8|?^FJu<;tbAD9_oRIy!WqJ!*9S~ z=Z`}BNih8a+b*)}rYdDC!+qeBV{t)p|Lw}24t;qvm$2Bv@qzc_2=EFP+9S4Kt6E%G zCV`zN&2iv(JR7P1stXJpuQBR8qC05Y0RvY(>ni%(A#wb~ji*a!lS54wpgvB>pR5f< 
zv~As%m*@ebdfl%hntw`EJ&(p9@Nv7SUpk`K*6+Z4d{uF^CEqSE7c?=P0IN_K&;9mO z%RA6Q6yH$&I)i=B9tLukBwDS-t#9lp-|4iw?s-+hSWYo=Xd-&N*>L;H0}MvNH;rpHGgi7FJG4#_3{tX@^|>8K5w2j zU2Y!h4Ro_lpvY>GDg?DXt|G+pF9P1KeL5eEui6+Sz!ED!zeV{3ui#|Mc^ai0H@TQ5 zgwPeA@0NR4OK-nBLPie#5Q_K%&RC)BEj{V2(MjiCMnI<0^dh!~Ve>o)xnu$m*hk^)q;aLI%lV?Ljt$YIF<#h2P#3@mH`R9rptL!+OX^ ztEXSOb-P2IDH6xe%srolaeRy?x*4Hl@e_aw4?D+liTa-Q<4-S^~-ZM=n|J$JKf~+e!bLl(yXp z)8TxoC{jyAT;OhurE0K;cfvkRHgB zKEJ5_kKc)trULI>){nsoFoAqcSJ5ucXoE%cgr8rDu^JAsOfwE=>RK_^LS*jVQvgEj z^3r2`;#ElYEkbOs(ixLBZ^&r^REKJO)4>%GUW5O1wH_BOd?GUm7V3t5BKvYGTK`gv z%;7qMmDmEGKkr6D0l!Z$*FZo{JjcNJkT@Zi8*z8)u#7S}L;UnW(WwU`Xa3+c&&bXf zE5B~zwGE%3ar93vGW!e8xp+R=+YyM=t7<`KK$GLar-`HhO~*p;LkT zs*NWA{(N_;@0?~^gbwGBHt}os+?;j6Y#NF1oJVxq0LMOPMAio)$y_6y(v2$t9#&i?cl80VY6p$K>75NhlQnK zwA3F430fY?O+@%z7J%npQ8B4quzJ3nZzysu=ok+3lG{fs<2_>Po$O1tx&w3v8A#N$ zDE?*AJlGHGA>_h^*uM$AK8KR}wAtB86W9*3kgEr5(k|!KkmcC8Bk()v=KOyy0AhT> zz{$Bm&mz3_GZoh+bG7Ztih2dZ1G9W-Q;s|K@mO=t;s${gk@ERX6vD9E9%0Rtn?DMKiTsPN-pFV(K zd~TGg{ERshLTXk1fjL=GA{yqXP@z_l-B&j66Ofs>xukPU9Y(F~{1yF$__aYT=koHj z&2?|ZN8)@;@%yhq`+!By>XL*oW!_@fskIPkBYrnZUmYe}v|n~H`W7jwa0m#MBtqVY z%fmH#-W5F01Amm>l@~2uExa6$6-%6xYk;6j+-e+B4+`bsMXTeOF<9A}FW}lFbP#!9 zZ2$Z6JdM_YT}|@s(v3N#>9<5GQHtgVpxoPxPuQ|ivyJCfkp|*Od`?ty}O8314Kl2$)blIj42>)T1 znjCLN;DKPRa(R=E%6vUSUSTF^#7L%HpN_Vg=I8eVsNB^6cyM-7rTMX?%x`um9uMpt)vYL)xXPyquH;7L z-zXayeX#Y^vMRnW0qb6tErLeo$7(N2lqZvzjqY0ZznGdngFsKsj(0J;0?a)ZAsX25 zIi^+4ds~Cbox_MT#s{=Sgumc7bnl-d!gL$8@hLn{6hlq0&QRq!}Wc}$GuOJ|POC5E%(Mudq z^`)_9vSmNqxm*OIp2x&X{{+9~C!Kf&=c|b+m04So8-i4(?`6U8hd`G{r+3{$3@KR# zk8g==XAM31Gi~Eqpyu0PBJ01mf$5q$Yf*l{!(Ss6&+QG%nYJ${!)I{HXvq=WVE;p3 zy4ec?i&wZxUQ$3 ztX9hYV9i%mFVcsE1S=-*(df7c<(~+;UwsF5{&rveu_~%ItIT(K_fs0E6TyQcUs$^u2hpsN&&E z$T~CSm%Tm-Y=YQswclSL}+jJ^!+qeWNFC3mmy*aS*X}BLBwK1fIA5D}jzqgBq z*lB2Jt(R0BKjk!hNO~4$;}1TXl&>pXr=eu zVdp+@R~I@qOmEO+HJ@0>I2KX??tJ0KFwbF9;N~fpX0F2%*7PdaH>f;56^tFO9A5gB zpxu5fSX)~{t#(et@q#cmW(tFK7cE2rt~Vvc!`XSva`vweKVNQ8DB4cyTBxY4H_g`* 
z+U9@irx88*B*e^2hI1eH@AT=*IUTJmp1mAr?So-FLwn6rbjsT(>f&&h>C-Y$ioPH~ z$IqYkrKcD{@$B8&U5Kl`HeYUTg?=RO+jhs7wp9=_Rts6S-j{RFU!-A~U6o7j| zh!*W}Zq4z(FSzeWDG4<>j}O9d8&Yy(GFFq%24Gm7dI! zW`={MkmjMH0&i936)A0`*<~|Yvbq4D3dUKJb_w%*t2aFd=SbLZODi!+C_tmMiM_GUf8hmw8uQ zh+bJdEKVs=J8}eSW(G+ZBHW$N0G%7XyKL;6;X@jM*4TdiD#E$BzT%U2f?Hl2eQi#` z%qLHufjn^%nmJCJo2g*=0?lc1812kwGlw1SDoB2wFZwAnYclzmT}Qu<`uDm|i%B`x z6ZWs!eo{i~2ywE2DTScNH}6THWAl1@?XlO1?CsEgv)-H5lU00bwM`*qhaZFXo1x=| z2W~BzwAuq`A(RGG-nh@6nrnLRSzbH+EB09ezghWY$cMQJF%RYr{3`&hXik(Y#azD)IukZSvLUx2~o1x0_9Do;-=O zANyEtofLfDfTHj*AM-ETM>4F(rffJ{5zawFwuZq|w%j1}m#vQ~Zk*6xCL}M9wmgp& z{p~bo(3xqV(d{}GRqB!DycYt%_O)whKJrLm+w=N^yH9E7+SI&!37t2tLVWeBKFrR{ z1x#fm6S~Z>FqgM+m^t1>xlNWT5i%OoV7H? zlkFv-5%SrcYrV2hd4W^XWvo1nAo=K1_go-8pL>duuf)s;p2f5LQ`;L3MTbaFL zMc`}0kr!XYq351MR|tnE@U&7-i1}-ZPTXl(JjT5HBI~@}!+B-3m5;;grswM#rum$DyJ%nd`o?-P6!sh{%3Hzr zCRB<~1w+HtdZVE_-LoPLeihJ9$wV#4B(ane`WML9OKQI{@9DLx>bT;9iH8#eI2ad;Mz;utfA4KpU2EYCozBVB0~vjV}UT}OBkrL2arG`IYQJB%jAKmSHr}p1$m!aD?mA~iUz|=CT3f>j)Uj*CRUu&$b ze>h0gg1xBsAX`kdvFO?^YxCYN=iZx}N>jIXR<9if*DEJTxhzaU7?`O*VAf~iu|)|J z1FU?sO|H;mFIbd50Rj1gKLGLUv+zOD(Z^<k z4SJk)nf4UHpx(2x<&S*qV_5pgNBZF~_INi#QVm?>dgk^WgxfoX^cB&a_nd2+UOJ5U z@?}6~0csiFNQHP1-jcS_oTtJnXDS`ke(v^1ijD6#(}Qf^n_MTO$E+X7_!Q-nS!(v& z%w^JI*XQzR6OZdJm@W_->ku338&Ila3%z!`=$Zt;>PL=e1AFI>gFq<7J*j&kd*-_5 zYm=?;x@qQpf4m6+J?2ON3R*~|rBswpxOAH{uwUAlU#>!E&lxKuUP=Wit5e+$2vmws z1p~qB$4^|idgA0S-w12J8z#vYg5A?>7?H_(=G#!p*-R-=JefdZF1Plw&o9v+`CV`= zM#C|G;k2IuU2uh!5(7B=fe&Er{Q2y82}Q@MA?Kv4LGYM)%Hk;_7H$kh`foKP1NuBTDVr)NDd=@HkW6W#Ye*COA^FG0;)l~(p--vWUKWf0{F6kz90c7HiW`fEHcY$W?$s2Z0P4PZIxMVbeS@(bLKUr zh2rDf=F~n4>lxi9?9#2Rzrw<4_Jl^6azh9c}L6tjppU>hU%)aNzUTBR$k0x}x*L418e}$EZz^#Ha#4lG~4dd-N!B!@FA^xdpxHNPTjx176Y9GJP%KS>woG&v&spfG$!{A z7^(HP&^oq9q5a-`YmdFgrK=D-#+915$4Bh;W&a7G)jg2Jd=vfklgwjx2Dl?aYZ2wzY_@6K95(L;@SU`C4A?M?cCKL&%;|PZ`VO$3z3=+aL44y%8j>7{r zOkKn7<0`^S%a=VsAc<;dEiGYgX9r@}Ytx(4rd@Tne#Sl8G(OB_sez<|60!Fa@u)jdr>l0!y!jlh2pSzNS>U_Z*kwc^}2W;|7z; 
z+1FkEo&e_rFty+FNp`;#(>sj7=M}K8+@2=ogiw%^vmpnGYW4Oix$m!N?V&Br)?ZjT zln_vpRs^I>v{K6L@7{I#SCe~p+|%*S&u>dU5V9kxP`3jEmB*)oLb%dwo_ej>{APV? z`-h`edxein&+bexZH=aN(u_N|Ei$5?M$4IH;FPoTnE5CzW`69}KrEM5JLtw$+nVdP z**>?4(w17Kd~^d?SO8vl0hn#t^$ro{G>ZK@K2HeeftnOdQ(Z-#H}iD2z1Ed?Kt-95 zyfRSQ^E1By{p6FtZnzsjZM610ussvKJgw{3u=&a>1JkybHiU-`q51gZ=tL0`Dajmm zW}~6>=stH!1u%O?n>Kj6y%`?*Ua0SdmZf~j>|$ktKWW})lcw9-RYcF zozB#L&!;1xVj&dQH(bAgNptRneGK!sLGJ8qLaB~|`%a729`hcs6?-#PNf9SnA!0E>uU!TE3zgziK`vaaH(m^quaw-FN|pRnP(8p%vkSYm@ub7_LYnsLih2 zd-L7c9xpmh`MF`d!%FcvAW$hj6?ozGsJ;TNe@JNjN|>mIbjFKqd89y954zaePcxjh zofJm8CS|kSDbvfSN@*)TXq5BX&^DxkjrrWQ-D)FWrU1E?C;uX;0m!*oEWH02NTT>S zd57GaXX)n6;jK1NcOtdql{9+DTI0bSXBS0yl=i|dp!kikoT{(MtJBDW-ncW z2m))SaWjt2@)XqSDw5lGa_cHfV_qH(XZO${sK}PlP2-F`Z#meDGRb*{GK@tYn(Qk+ z)F6{rABQL7($ur$_w2ZwuWuqsk_C|@QyW6Bg@m~uC$GoMHAR4q1#MEQ$JIPtGNd|r@efR;ijDJSN|0su&$x9%k@tSO zIWa>J3hd{mrODq{u@KOUhjIAD7ZJ?P=2AVZ5J>l$j7pfKdG<=*D2{U;rru$mk7Li# zps-i34plhwJV-(aG)|w!i68qhs79l&@&JGvWaNw%txpT2fR9EdPKCXAszxL2EMdxZ?=?}t|4r|@3aDz#*Kd8lPFV?)J@ zzJ|RwCk3h$acTE3QUi-BHVyi9cT%p*B$)k87>v+G$##{5~!quGP?uBFNp% zvU}avGpcX&aQj%O7h{xl73Cdd-^^aQN zKaNG=+{~cXYNNirT`KP&hiQ~)!`%DchuX?fJKbXursA&6F;b3{;qcOBc~D#)geIGy-j>wLnX;HH#x_mQ?4UFb-3<9k zZBo4_*z+~Wf*PR&000nh77YdaAr?I(uCD^nwH{Cq6la- z`if5;Oq(ZSm^_>?JBY_;9G)K^ZCsdJk+Na#@8S0PaBVm94C6(kFvuRGB0MRW?dvQq zLaeVN+KdfeVa^j%QLvAt4dNI`eT03@tX&E{XO&8W6*~V5x!vasqEUo*}u2v zx|zoDFBtjk{27n(*GKtArh-wvmF6`$EDf0>pdY5$PR*FVqPo&5JD)i zdz>{l{n=x0TmVOFHA3ks$kS`4#1b8@f#ZYVotu_0Pt&} z)?vyRI7hl)Au+E7!@&47x2&%Y*G47CUz16Kp`F%eT7c~Og@u)tv%^BTR`~nIcc0A6 zAfB7rLy;^PwJ<$xP^5U3O&_HqI_#dPLHfBYz7%Ko)F~tjy%UVN@ovvQdqxFXBS;d& zam!S8a+alvHXWzSV@uo!+7M?6GRkTbB z(mNnygwz8?X-o6k!VCS@4u3~~v)WyoFm03TncB^J^eDv1lcki3N#*f*H-=GKM)mEV zQMG+kbxjJ-IQKnKJZ14UKm1{YYiq!pZ(4IKf1YH{{brHdCi~aMGtHnNlDAhHQ$ZX{vg;!Ei4zWIkI*D5>S+;lv;p) z-_(0(u6yQ6D8g_XQLQzp_5c8+h=ml&KWO%+bo!&SllL8bV7 z55XJnd1_H9b@H8e?tP|x@7}{RN;Dpf_2G;wi-$i9r9NA0 zwN47HBUT_c?(IdFTETp_CHJ1X+HGs3na})7sma*2Y2&fWa`n+R`rF}agR3A>rtE7I 
zwO3_N&^?A+bc{Po?-CP{we$3%H*4W{Bb0I z{^z^G7{@1EPGMdrtrl9}eHFUX8Hlz)Xl3V57(&%*h{F(#B(c*xv<}PPT)vRqn|WP7 zxE~MANmU^~-{MMVe3W{HxHj?h?v@RL$8OBcT|B*Zzx)T9CI&9u)BtDu4)~SMY`+wu z6-+oROrv*5!=URaeW!Gcr0basf(;pllU00#03eiq+|n^i$0{A0ejKI@lfFZUN$+W@ zV85VJeBR~o+U3gQpG?gW)beQ?6m&R>} zX&>{q*4^z?Czf2`5jlzg!=s{yfwRtav*bZbcQIC)A@vc3FX1N}s^jb=}8HAJd_y`CkfHv|=iLvB06qiY7vqhMi?i+a@(N5ReeYV%%=~Hi=%fD(FMZuA zECl{Rm?R$!TCJ12PX?;oRX{jPq@9;5&pk22J2+**%tLFWwmM1;t<$I6(Nwo>uEmM> zJ^ARjp?=~xqO)fU6`D~Bu4T$|c#1aaPZ}t#MTZD*&-3u+JsXv0ttfw<5;p3Pyt4Ra zpLz;ACr+SMt6{Fw$rV}HlvWGk$`zp9ao$;31#{RwNt#W>3k$%52M&##kZbSpp{f2h zQeI}?yY;!{v%ef(nwrlaIsC;maCLHNDh!uZqj9`c#Zv@!|K<0!c^?Se3&UV-oU}2- zfEy1*YY#=@TT4SJS5d2N?vcuYKm8q+<|F?DE57X3JbMs38E+zNx zzaW%4o0ZD9v`|ZBUf34}Q&6Bwd7h1!7eIKcqnXbv&BX$9?d9)XJU-wQLA!nE=b$|T z0F4V5(0KakkqNWo+y|8B1@Yk=*M6CCdYj6;i%IS|^1-mUeF|*Z@h5~pyfBZ>(PiM) zn{dx$1bh})4Gj^;h*no264&6mS5H-8{=ZMPYZt!Z3Z5cVu=RZf_k&jzB#8FmUTlt~ebfB9002^{ZK?FWhPu&^Llg4WRx^asEE{OOsVD3K@hfNU zdTQq9#xt+e$BGs z;lrpMPouJI^^BjxZW?`7WSutc@*sFJ4rZHi{QHJ{wPa7I8HyT>QtoEw@dxwetvLc3 zTI;4(Y5`LS^rUV6a2k$(n~JNwZiyt>36f;;CxQw7z2~hx#cOXbw@i}tP)}Hyv#xE0 zhoduFnQi$Z!^TM=*__e4cM5=9y?+m3PD7ZX%`<}S^6a=s<7?%}ZhY^$q%~LkSYg8VeN#c38 zl?R&L`}3!!ct1z z{---V~~+LcBizab!60B~dFSg4^tsyDYTiX>U)>_vHQV>h?0Bo$fD zIO`3iAspKuAa_N48&P+!k+FoQh4Mm;}fdd4mLt?k$E<98H% z__=&(4ct3~Sz4EC{l)Km=X&}~cRC?} z9|+?3QK8fv;4XK?wpv?YF@^a&zlIy?fj%Dk%vKOMkCQ+AgVN>?5iu7Cf22HD#t&Tl zzT0M6NBZuR5D?O}PWzOJQG}C~!DH@?;mUJ#4kFwSBXEz)dopZVS^A4Y?R<(1m%nJ7 zl%bvCEBh|!SPT~y5FI;)PNRX^&JIMv?|k7!^&Yfee;x7d+iaWi+Thf|w8K}fcH{_> zhfbme-ta<8QS*h}F1Jltx_ifQ8B=ie@l5-kv*8cJ7hFZuhcoRXZVeCsgwml9Ni+lz zV+Y|Vo%UM5DXK1xOhWGn5hF=fAG`h5h9~>j!SqJ?(|h2y*B)aEP+cJuI-Pruym@P~ zZ6cmoy5eWoZgdFE1b5X zd<>|@*4F>{#QNGlHxcXY?Fhi%(Mn0X?BHNCm#lt1Z$@*GN?(-e->m|7xhn$8*xYikJ!N?}{+Tp`516ae|{vmF7?bxwT#7g~G z{r;Nu&NHPX-!sC2NrOpW76`yIBQxEtKgE#!x~hv=#%{YweU`?#zB8|pT_^Sb@NVdOP{L&pA`SusacuD>%3sWT zn^RDKE*A=xayOG}yS{Cz7SRx%v>eaJ@&wyFFS*I%Tr*SNbN31m8lg~XI%NwDDKw=0 
zhzp7CrS!nlzd|4mf)=JCOp4B{ytT?7;h=?3-7wh1zPBka^imr+e5V!y-Zu8vX$=HH zM>QI&02ChVo71j>3Bz(c-*;Qov}OSUP9b9-;nH1jvF?xHX$ zGf&y0hB@4Ww4LTJBP7%I)4I*HqGPCw>Z0`)_0O>TmdYqyWt5__A#UD2Xf!bY?E4|U z@)hW}%m5r3T56O*HqUCE4&>$obXQkVTUsg{bAu?lxzD9Q5=iKzcfy>_8>moa@*9`O zalClqpiF3P*t#9$o&GMI2-prw@cN$0;v%--0V~fyc#S-E zx&Vaehb0J!C7*Uq7q zYvKQ)wRj`Vdu4f925aj}0{Q|~CR`IYXs$EvlE86QYiDRe-sece}PM+ZF^B9!DMQJTVQy%2K z1GD~Ng?DaWPgy9dFc$T9P_N_g2R?uY^YciMaWTpx0Cz3|K$IlF`Ucv!ZlSicluPR< zPODipVW$)zM~Q(h07 z>YLdU0>a-R`wT*BnJPb=m&X^Uz#Z=mA(f7z4eW!GAjo{@eUE9H-&jYDo?3`lNA-Pa z6O@)3Fv1XR$QzID+HM+lsS;?Y{8Dygqj3-W?%}Fnl29o=6NJ|y6 zHx2U2am?AZoKo+LdxWgt666QJNRV$W%CFC-KM43clq z`i6xf#nc2&NiDM{rwrun@yTm{norj2n7ecd?;JUTZWN)}=?uhAB!Je98`%1z{|9sL zeXrAA^S~@$t$0at<95 zu*1ow1$HUT+2d1C(kLf{h=aNL*ERGu?%Ww6m1RKpL-vXcm3c-*8h|Z@=mfK~*Rc;? ztR6wyJ8tZ?lIJ9JB=ci9lOqCPqtR@>42VkcIY3Y;J`;eO$4{KPtMxPY-+JpqE8E-8 zA4-x7>8WZd@NvfGG7IaO%jyKeFuYwmwD9LxyKmKB+uAxMlVl0T;>`zZaKOjMy7al> zU|jp?wW~ClDiR;J%qdzfZP^n}P4u;s!(sPXo}y-luyF0=`YJ82gy>&S@;K`y;LR)n>Gmel%29+kn!Qehz}hC?%s8H0=NIgoE!1I zduV;*8(!RlhPIXRWC{TlhR|#tTdA?RlrPYXVxWc&)9*$1@%V*rzVF;sg5ya3a38as zuu9tZ8RnwBl|26G0e6LP*Ejxoue7la#bFd(2QcxEmkJHQ3foJifc&+`2BwWdboQ%F zW|(+(?@Al)YU_@9AYEsBPk6;0kXmoofNQ8cJ_iaa#b=E0=V#8&3E)h8fA#5l5PUKQ zJ|Wuev)l&`vIW&6guz|h%n7Xk2qA7phY$Y|u3ZBFyng(6Q)_)DcxUxEfH^=gcw9zL zeJ*zBho7NRuZfT8n(k&kRzAaIGBKMzA70zF@2ZKeGL>iNW2~7TTG3w`X(=q|f zw&VVE0&VHY_Y}?V(s;qzde|DT;1jKRiC<(1~Ji9Gy{aIev!p zKmpK7ZPXyHpi+De6jX}O7(obJ0Pvft-F|*fB@Kye;dh07*IGjvlj|wTpbf-or}grC zUV7=bGo<<~g!r!@<%&>>wFIS%kqdXQL++?JkjGHr;IvcnJU)IVW|rkWFs|*?U6^4} z?$XHD=WC_A9J=-O?#;JqKIeOIP(j-#KyyMjz25KB;YI~qySTJL>DlY4cY`RId+DFO z_UowLP2=)qbZ*=L{_5NPXL)nl1o7Yjf@}Rxj}5(#ib9^^tA&87*A2yIw+zdtvO8Bz z(M~xN8>c!^4|>M6Y_B)Rhp6Ypx2~Ap8`lU#TfwVd`AQl!{|IPc5dxnE@cRI!=D4g~ z0lzO`)AiO}`?z6)I&wt}D|8V4usbH$1QhY`X-+b!55rAI{u3!I9_|{t&>O!27 zN<{)3dvyhHpG%fYGw-*X&3oM-*ueelfD*B>u^@ntLa8RrU(OZI(~;4gSI(uC-{VuJ z{)~x%X@i-E*6r)xGK!3uzsN7-tB=FW?ePHsnI!#9tD_Q-%Lx2?!BaNRASZ{7WADtk z^OS)y9;8v#HO}L!i(vU1O@BncfLrVWQtwst`LO1Gw 
zOyyyo7jyiR%b(|scAxMnA?GOrcyBSK!{vm$LdWr_Ox>a_%u!b1MiM~EP&FDW0K#cH zCRhMcOlv1h{zWw$a*ayW&X)X1x z;=>zgO`xkk-)dLZ%I-Ts(CG>hPx^aB73@D$iqDAP^(UV?^WfgyM?2TAKe-r1pGCKO zK`ONz0@4)Niw*(k<#C@EDF~v`cpXvnmZ|szaUuaM0Fb;#iN{Zcci!6P3mS8k4s+*A zkr>3&XZKf`80L&&mQ`8IvaU6g+kHjZZGP;#*&5Gcs}#&I48Rp&2T>q=P-dTv>7@X8 zp}5SRo)l;s%HkZ9R$o~&?95m9UZ5!Ks!XWiAZ;C#o;=9ge^vH!2vx$sxtnz*+I0HoGZj-YSNcEtb^ zV0)(BzKMNMe7J)ReB;m^(>Z7jZ>bqNufkwE4ub9K!=(ogD#d4y@U`>jrPewU-OeSc z_2-3BKU`m1TS*r*bNitkh{Z*TrwWH%&a^nWSayYtX7jsZe*QXc-T(kxSveNT`}Z5r zI;0am^Pt#P%JiwNF4&=E!YJq3#57xu*1@HjcJu|vbjoaSLN=6l&%+{Hy zG$tMa+W$U9(ZtL5p`EXHC6xknzCbsq5XwEHLT;XtQg#mvR|f}!gz%LSZ1ZU4(m0yS z(py%Uq3y_{lkNc;=G_;Kfg*(=4^Lj7`e>E2vrzN%*qNV4v(>T$e70TItOUX&#^y^e zA#5}d&d(QybLP=f1VV_O!c$K@J|>rN%^TVL&v9$lZ$)si6CWte8K+9``Ry`_t@Ep^Z%&{IF7&)u(tbDY5$k+A=t!q)xVc96N~&74 zkwmpkY)p?wvVzILR57J@BWU1)0Dd>Pe*Jfr?%)59S6ZzVb3x0lGa;RKp4&pWKsHg$ zMb*4d7by*(`OZ84_o2IYUovq0o$l=HKQ`;nn1g+swOzmY*S@F08ChF6MULWe6$HM1 z+IH7xLzh@H&8}52Plc1q6hwqYp<6@Vd*gt%H)Ggt3jzSps`P1MWdvm3v$D!f9?(Mw zMjcu@|#DnQ2B1LWBV3G{jWN7!rWc8nh3&z7b9@W&NYVaJw@zvo>yeeGmW$ zw3ht1$?oae1JIUbDRCIK+EKJUR>Jc@>rn3+YE9tYjV(jDbC#~*+*}N2XTMuJa^%$! 
zwxEJ{2}~7JdN;w%!%JtsKELow(Ob9w!QtC?o|Q^9seqmVUCIgs3WCM90A_du00pQA zL9o58)ODfty?&XUonz>BPxeaLbw8h*TjyZvw^J!VDJZ2_flX^|33O$Ix*|NjJZ123!go>GvUK~((rX_s z`n)WC!=(H2_-H8Gey*=3r3?;I7nd(uS;3)?zW@Y5;p}nJmFgXgu)bcZt~{Db_m?oa z8I#;kGr5{3soWeV@6UR!NxEr)FVknVyh&L`XF1^WYxc@lr)*ioeyKk+L-R_*wdeJPE;dSxY)cQiRxG*gak82Gqs*A(qvu*lt zalk1}_S@icnl#WO73|Vx8W+5hmJzva+D+RPbjyT0745~K`SYHY@iGh&a+a4?mhPgm zv>(di;lk+l(zZch(04az-J_y&(6BMur$PwSmX~qpx#zGI1Q6Z4nas=ytr6V11+g=l z*!w)?xF6~!%gW~^<}t%pTz<)6Fga+!m?Wx}@Rs%S-i#Kr6t#_)qb;MSZ$^?@>t=2v zCJZ;1mgeG(2O-y{^gdgUxK$~8zaj`WWTWx+xM}9#=E;)_-MjZ@(tG=D55D&F<&rSm zz|73PDRWw>{^173Q}E`|qPgNPr_no{B8=nL5Cm`Rf#eF_rBEq81A+h^1@LJV$Nz9n zYZ(}6++n0ZI@vPoAis%Kd($!@+hk(9^~s}@KAa)rNhRRDRb#>9=oJ!j@t(=wfs9v(|m%5`EDhI zq`-{i!zWwqz+ET%E!&JGUryag|>C#Wv-g@ir&%N@>--~)KbC}Di z=(9y?-Ak1WzvYE00(KgVG#jTBVmn$~{QsiG#ccq%HNPNl%`bdG2yu!EQpCvzXp>p*a^hlkk1?>^eCk=XT9(r!RDWnW(k8|MiZ}Z&%;m*#L+4c259TT~^pl4?r zK@dcl(bc8ZzRyd`?(5y|cbgh7VV^)NC83mcoOKHBd&U%_}{TV}A^ zH(5JURm5zO-BuTmTZaY+MOiNik}u6A`nYM`)C)(`?N$rs8Kop;xW3-9l|%a-_nvg$ zSw;Ky?Sc1|nEOG>?!EEl8AOmCRJj;-zsxA=E`y`YUL6kd_DmHW8j#nHyi@1$_6+4I zP=oC6@{OgUeoC){_dE%C2I_X?#g-<)y0;SC0NOd8^Bnd^r5IK?~~`y8f_s|H%aaVoz8Vnbz=n;MGY-w-!sCs0M-@1_M?B@JIY#jyEj7M%0wE$ zfmX72s$2f}-py>Sw3@V=TZQ|1givw4elM=q_w7`^3icN&#isyXd-~}x=yv~q_TDr| zjwCw|{M;k*sH_8p0#K+%qr1`5(|1oddS-Zt$A}`Alo@3dSF{x+TWNP?E7O|Ev?glz zk4!&g{I%M(UHy@nmZqhdNE5kh8qMk;C6SyN&d!j-x!9aRU(@Jr41fmExKXGoRAuEA z5$^jVGt!?wcaMzBy0RPf+8UJ+?*9C_NBFzlV_EjPf2F#$^;2=D^GSgE=8QMY=dE+Y zto^&aS`Dkq%h>qgA4dDpM~8JOE*iJU+Y3;`L_lcWYAF5vhSnVb$l_RLar}~&a>>x* z^Df$S(>_g^ZzOhHi;kQAhSknQO?*+OJf*|nbyMFAJ%V8t9-Q{9`MjRR*S@@H>O;zZ zX{)V!W}izj#lWc-5a_M1p?l|!l{Q47=;W?4`+3Gul4+YVMn;(f1w!K*haQT5)Zqu? 
z!lb9OjXFgwl)^U7$!WZP(X%ZwRB7thI48=Ds_QGSz4|Jm<>mf8>;3nwKT}IUpm+T` zIv-pqq$$*QnfIPFfEXA)_F8p$jAnQ2dN%)|Wpw;>#I#IXedoW!Jk`bT@@d;475)#f z>E<{!pw=p9HXRfkejp=0PMf{vmXLP$wwknAzn)L>17NRw{&_L+CSe;~#j0HA-kf#lD?eHZg9-}~;$S!W#!k*Y@H zwrVsE4WEY;rh+L=Iefa>+WxO+FJ1ckB2AaM*Di*ANeQ$ z1S>!DGs90mYmj7t0Iq(CKnkdy)|+SEedjA@&cCw>04hnKlHPM#YxCtl;`gv=H9h!M z9`0$R{k!c%ur{9?x6EeDerhP)+{Mpz_fvSbs#p^^VBB zpWMEK?zIntp8oxe=Fm2i7!pN&m5Fkc!C!Rv!EQyDcB9V7eI_IDl-c()sSz`(@14M@ zU-$*gJoOZ^3EF1wz6Ky`@w001BWNklB}>q%&RK;`pJC{(0?+GE zET(^Vl%d+8zHPzd$7A0!-YnZQX^+G9bkaVHgq%yYYdDPpS75q$ef{P2)#fJD+SBh2MdRWPXWAkV)>^;lK#0UNuKMA2A?NTSoegthOIQW=I7B{UdF*g~-x5C>{IaPA>QuKl4JDXy=a^g(F7A{TE>gL3s1+ z=$3l}A$okq?XXR=3B60o-fqCS-`&}3m?l~%>?U0YJ;t5(PmPRdeje(?aik|sjEyJm z^$=}sp>y*lRGRJzZ5)IUfY#7T0aN;J9TXI0E%>hOLJepgiz%5?B?Xie4TzneG8fhB zSE8AN``(RGdQJ;bEvhs2tk29`$IQ&ZeY_Na&|2$*MJL@kW_~%(AK&V3v<4o%@TI!h*PLp9WQRvQc+i>yj}YZX4o`Kn z4O31#Pe-wPpj6wV@JDb$K>I6>W7#t}>}PWmVr^||(zN)VkZGW&EbJ%EWuReuw08m|6c&~co$bm*cgi=xr_JoZ#vfUcI+*hkBW!j9Q z=IPtFE}p)9>(KCdNMS0N(zL<5ue|!gyJye-&npieJlE*;7L55|P9f`qCyeSXvC|Fa zKJ_Wgz5Jjg5h}5A8+CvD;nWT|dhrLx)XfA{bZ z9%7h^kBdzk=<6Sz2Jn}p)-j+ZkG8nR5C1r(|6`pRKn9?8;yCKZkDCswHJ~v|>e`>T z%ljPCH>fA2Y(~|p<)s;XC{R*kjRF^-ZZw6D4+UeQpWrjG8$s?;oZR^FMS=SF?LKTg zX3p(6k*^M{wxM~XqxU~ILj!-#HVE){#TaCx=T<`_$}&Vrf-K7*rF2dN<1i{h5Gtql zi$0o}mrdHwr1dz2Xbma$yq_}j;49<(hoH>->;|53VcZDWZaiaNr?#>J{q!fX8po(+ zDTL_yLfV|Cs66kt?l%tbhE1S*?D4XX^OY{{Zqp4ZkQJ!W~5UX!cP-5k6fC<9xO4WPe5iFWRf6jw;>vR zr`{yr$|>{x65sD}@f6|kDazv*N2A(@jJI))>@Iss+9>eRHHkqPsu#MZZ9?=@MP)95rBh_|;Pvy81BKMw%_(dj^b_#u$GdS-|wY7mby+=uT006_H(+lAcl z2|XLzac7rhQl_n1t#2H3+IvoO2nL2vl{;Jk+Yt519{#*CAyh}o-isIBzq03qj{vE) zv{Ur^5MD<`&(Xh5rIc%Nqj6<7={|&y9!!l#W?Dc1zba;C{_cWORjLT*rMZB)F;mB- z=byvnmtRIxNGS9dn;tS5E;#!w+fjs>XP?FVv(FC8WvvQ1XZHBG0tmx03bDXitzNUj zwu1a`L|^V3a=jzA2NL8S_k_9Ly&|K8}@`SwUyo@G43yoNa z>CiECsmd@kY(t$;L06_P;OFGLMKErsF?BQi;9JiYD zb5oJ}6=^NyU{9Dk2Hz90uPKDy+gh5ww;0ENxN~X`;iC+P5uX8kcyy^MfX6#`@4gf% zb%vIisDVaXM%b*?@c2Lf=ct@Mjn3^mxcra)5uUhn2Q}iA^eN98Ku=0k>vhz~DQ==t 
zft;BcwDWlY;gkV@RC+T8hI@RjojCakrPXKU29H?tLm6BTq|bX7j)9nacAUdy!L~Wz z_H80RwqMHb$@SiFmTyy7p!9i!McBI<_9D4d+IQW#4-QzPVvNCBjXFvD3>_*yFS$fJuLv{<#BzPxDq1%nLyBtTfuz;h#^j8qw zya{Y?7q@}!Ac!=u`iFmrxlRXjFTS|@EFY@ek>E$>+UT@=c zoksAqp%_i<-i&5>K^<#jgqkL8fgiU z>NLGp1rBU~)*(y>OocaQ8epwbY22?=emc&w$Kxy=ZXDE1nN`P+BRP8(i$C)-So)D4 z!Tbx)qqDRG9YsOhRBZI32qKOVk@_O8Kvdo1tbNG`R{SYVn^Bq#tsE`Il9KX8LpP_v zj(QaLWtSQXcm#wpn4&m}-7AkiQHB=w!udUyHidQ{xyO@xCwZ3pC<&djUpt51j$-9c zC**lGHQoEPT(#S1fB&7~NmdlbRg5QzR`?7WpV76`zC!otb?En~KFc0_P*#Z^b-&Dh zpiyHfi>EAo6eitQHR*|V{C$aLXL01mehgw^0hzG*Nr-FYza@a?_s*en_3FfiSYbPj zVJaPuxNFY}_0egg8(r7;AykgT=hGI9r&2tjCnZ2Rv71pq@o*(sTmAVsmre8J9~IbJ z&}j;trgK_@eC9#3`Ea>({#OCeO089@)o)g+l>;?=WSZ8Ybk%;2MUg@Ci@sMH8WFHj zsn^f%A-Sa?)*2Bz1yB>Y?bC)vgL!Ye)yif^%KK;D`TpUDOCNGLjQ9+o8_7ige^*Pn zA_phY@O>x%AfI{))nEQ)%$_>cZy3kG(h@K*WPIMEAs!*ntHe+|w?2*`t5v7Ije~pE z0GTAsn0$mwOWAO6V`w-%IhU?QL|=or7-MdE^HfVJWkRXHq<6QkKfL9fy{(u#$n<+NAs4W>f8Jf1MmHm7()H`t)aD! z!%r$KClWxZG`aok8?S$PAE+&aILqo%DJOE3|KbpO`T_!xQg`Pnl|S7JI>&`d{|I!; z@bNtXb>?uzg!pCFjE1_(j}*O}TwnkUr=>CIs*Z3lC4eqqXe*_?#LakS~%H zW{kcu0k(4E;&i>7w!e_iai}*qqvk3>el5FQR4-qK7#x}rf-+HqGBBxEf7Jez-Rx@| zpc~uE*4?c>_Zm-;@wZc>hvV7JRKOhCryoV<#Bpog0O5E-9%HRsx`g=dy;9LMN>eF= zRH=UY9XZ*}&kF#11?TkyyY{v7yH{NPOF4(lLv!3^IaIlN{s@7eZ-LODoiYcz)>C8m zh;A36+g%V!RejO1(0KFfR=vKqF*`REk%CH=Rn1-)1Af_S?(i|Sg-BaTa#;xRJ9|iO zDY(A!$l0n8%N0O|o{js~`LicXZM&?tc=Wr~rDNB3m-0jSD8ixP17NjQe{QqU_!$XE z8uHIGy>o$;oO%S=$z?% z_y?DVA4a}Qw{k1vaw(&Km#t+}AR zx$R)p_EE%f`o{=z$EYpb3uRy1b7y1HE&$X!9fP_k$umt#p&WS`ZMF* zUPJZlHq1NCpSRYiCJB=D4WtDd!N=WWG6{sDI-{a0R2ko^Xozn=QrWhX*4at@P}?&5 zr$+TX?lYe>ac-Th^-5)a{>mPa zR|>Aa@M3)Rg%{`IVH)AiAg$F_dyU5WZqT+<(6w3vLd10Tb4Q|6*Q+}ieZ(&9*UkV{ zZ@1r??e*^NGU>3`dfDTK-+$&^=cL&W8!{GOkO^Lr+B&3LqJ=Mh!> z!Z=+z1iKTGOJ;uETAX9~F!&?#yW`~8ZKx=Pic+!ETH2%ZJE8df#`gBNkG9+2-D^6F zKx@4O=+D)FdR+2Z@lsk=o1cHHe&WRCJs@>#$ZEAk9Yu9le!9=YtPB38n-)}ONrX@j z;;8fB!Gkv*|KRdr#ODwW4WA2V&;ER)T6?CUbnu`D4WI*H{)s2hIJxYUi>z;dP(ek# z`a>Zm1azYT#7yW*2|3&&6pCEfWmy!ZDKmU@mc>xY6Y-%&Mt+Af6n}(=3jvqSl=qC% 
zeM?-K;GUf#(Mb(0T8@U|(g@ zdnVZ|Uz9$!lfHJd&L{|NX7@!(M(vdW%-Obf+OT*sY9yEkFgvRj7v8OLLc zsA-Au*igwbRI)6VT1!6TKG~CxR{?6hQfWR=>LP$E`$Vo6l4`Zut5i<`h(pf0{#97^ z9YxLF!ja3pg@rPK=0kW0;Gs8s zI?e4rr-e8jdFCQ#f(pQP9HVw}8MUKF?K)aRXDOf!%gp~8<<@vhWuq}X0WJq21cdZ? zKy@(UpoCDGpQHpxXk8`O1PeEv@;ZKDN!#(r5v}9H<9eJzh7-N(FXh)fa^zA?X%FLY zy<~piDSX_kJ5IF9^ke&^e0#b8K!0!rxO1lvU#?7#d(c6dqm|3x85efa*KX`RndD@+ zvU_Mo8IWb@FEWmbB17Yxc2*R@+7IJNEG;2=`Q<(>o;78n9Q0oe*=!=deG97Dg!ULJ zg?)~g8<}MgjEDPXf;+S*_|s&=oW+d3C~9MzPkvl<{=&fLHcShrBOGbc2RjrsZG6|Li(LK|G!Gpr$m>dnmD>{hF5k6rnowfEG9A%U(Y(8nQ!%%2(HlXZR| zZ-DAZ*;BpV<$9+x6(286l{%q?XxL*K;piz?WAyD5QZ_}S@t$Zj4ty`uAxsB6^oCEj z-Tu);>r-KY04O2QtW*#$FQangh^-uG9TdV&*jxz6#tcLh*=a9QLJ(t!hItDj4hpU? zmxKQ;)n0h;6p2Afzt0P3=%je4DdYFngv}HitGma@_G*^(LfZXs&nS;N72&ZrRoeJn zzf34g|(o?3+K^|v43BF0^wFp8{sBw&z zQO9AAyEkjx_aHub5{)1K@jfk{H3hPEX@n*2^-$m3LbkpRUGl`kGGh^1Bmabfb)VuJ zDp`h?QHKzBD~0%k?eGalT3lNEd;-?|g&`D4qO=G$f>3=Ps|rW005J`g55$6$@=QhR zxPO4r7;;01R4*=G)QgJ;YV-g=rgb&d`h*4~b)NCOhY;j-ln~gC(7a9_jriGYiOt_ zLTh#walMY19YqbLfKI3X7a_4SxGMzW#YKop#rD(-WBAZ?0qXVOo+>4TQeb<8m6Ca? 
z8~^m2p~Y6bsrv7OVy_E7HLj>8g7nzndf#}2$M_Y?OVw};$zY^?`1&7>)nK!0g*}W# z`PhwK5BTsp9t_fZ_R1SSZO2(pTLfyF@a*<+);m2FZ<2oJUwR2o{!jk^T`8@PnGPso z&J?pQBdrm4I=J&;0_8gKECl=BZCmxT$BUr_<4FH7Jt)*J+|6HoouI(j_ zWguy_8)>Wk34nftmA|Jpcx7Z1HPy<>H`K|KQ}aQaq|-U2v~HA<>Bi)9-$}l96F{ZC z-i7Dic;kP2{*5=LX1s@Ru<+0tJ~!s)8#m?`PS2%jqoQ>jVuAww509pU)7TsVs`c=z z-ys-^Qz0O0H3)L5TR+WB3=JQebDVoUyb%r|d=Vdua#wLTcOG32M;!4oPG?S0=fe-N_D6r*FB>xzX)ia* z`#kA9?XKwLvPtL}br8WM2Gd?1XFbU&al2)_l!e`j+64l^#1))G>=MC%nhGdDaRS)<8S_t6PGu8d~uSZy*ch(-fuW!Z1+BjMmIv` zi(qlAZf_(4H3dcw03=dYQz;kSIW3l-Jt5MjluaST_k|Ffdram?$hw_4>von*#Ah`4 zo}1LVRjX9KU8_{4#t=}w-hx)D?q++GgV5UT#&K)AQaSKw-62dDJhX;SIyc`)=jI-- zDOHWM9x@W``&UY+dfk6)wbsyS1`NH0uE&gu6j`ZQ5`XtmQnSREU;>p=_k=MTsi3wL@af_{=fdH z0OxKY@3fC=x+$?`o>fsSCzlN&JeFhC?BSH6iRo&|?L2FsgISKGFW&PZg=4`F@o!MG# zY78GF$pVN2y2we&_q6$Tr{&jarLxhj*KKBT2oF63Pcr+sL+|9t1!%l9v$`q;`F=xg zr1W9nDH8%{GypLWkJK}pM|{}ESUVbFPIZe05uY4Kez1WYd>crR`aGgxrB?`{io8CQ z8tS*p@$kRCeA!VRN6MP0!4!@W!KvZ;BExrRp8RMpr+*x7uDh^k_hB;qyWnf(j)3FS z4jsPidO$$lx`nvi_Vip1_VT1P9--KaBbIlvhiB4MtulLac9Wi>IrJG%doj;GDBBmW zSGL{4U->1(pZ`1r`6y{M_YUV|R{JTieUdDlgVv7%ql^SrR%fCO007zUR5frMgO9NKV3h3XNSStpSo_plZ+>;J=$r_8y+y55-M7iit6BCUT7(mmMr3<&7PmIqHcRq%6J|_o;%vdsI)WMC&xYt zT*aZ-GfUfzd*V%~|FYgox_@dskEa{25&$Ymg4ykDY`pdwdaL6z@7>&cvR7$h(+&r@ z4=6O}zDU?kpGVn!F`;(j!l}RZ%cy?xljsFL03DjGdbf+t`xnrB|H4R`MvbMrx{CJo z>mzC9P93glm7RfNSCTJU#68)20inp%c)fXRPXUYQbvkoNr}Ox@?doaz*1VMe?_SU} z3Cwi6aiiN^5aa+O4x?m`s@HFK7Z<1E+dUL2z22P6vg+uXRPPACpLt2W+dVg{)Pa6~ z=@6z59$LdE>vS4fr*ldJ(%$Uv@!<;r)T+oMK9&&G8p4YB_`u+il`1fZ_zXcx$T%Jf zwopnKV`lCi9|3@*zlF>dJ2cF{bnI*48io0_KW$<(m{2x1y~3^Ch>8^j0T3 z*lN;Lt|ERLda6c7xuy}vSbPwHkB_@GO@zzW#^w8zzG+Eu%RzTK4b|xmQ{C)zbgL%v zd1y=7>uG&`M#`!9&J6%mm6BDZs^&cyEwjODMNxDunwfcfk4T&tqAZJr*1=DT_<@>v zZq2roy(~>XsA_d(H)uSBj}JVwhL7%ctGe4gj=}MkQ~@>2egINJ)@p#XbMUp+1H&gF zh7V<`RN3zTj;huEG5iEX6hTztVYtTW-_)cR%3<3=0z#yD+p;MXwlU!fG(WiMc;kSZ zMo}*Eqn&GC^e3<98>MjS3D12vwcOv79&OF|Xk*ZrokhOm!S5`?`XBrOdN*%cMo8%B z;c?T?%6Rz8?2Re%vKQ_D;bSPGe;lKz%y>8*9RH+8SyTFo498K%;W!3bXm0rLN%swv 
zo%^x|G5{n?OEG+zxc~ql07*naROp>NY0AjI^l*bAZfHmVtqT{hO+M~B=@`=o4=lsS zXI5~!@?T-c2Iu`gu=gG!SerX}IJcx9Rn6~kCz(x;OLsoc8g%Z%4k=$gLEhdjn}%&? zy3?(rFY7ItWquiMoI|rxX>JN}&}SyFQ9DA?6@EohEo?yfyoKd+BB+5%#lOuqT~D!J5h<5y5{@i| zB87fK%(MwJbWWcJl){nkdm8S7!W(zyg!N_ZiJLos>e~V z%hcKJIV>}-qCGsE*A(>~qTTnq%&B>0@D$Ng1hnn^PyYoR{pG)E%E-U&re(7h#kq43 z-!&O5qsACK++ckj>8)GC9w^7Pxj8p}Y{SBK!cci)#$u1LU&~|mTJ`y%12#RH(Cp)f zE~R|$so|A;J30Swc9?IPc@h>q0Gyw5rCsZ~XLOl&PcH zs2K>6CdezR zl2S@UafB4M%FewW)RQN)mQc!iep(0xDTilkb5R@i5c#E3*dK=A9a`qHChAS(N`gUZmevNC=7aE^m#H;z>8+v#`obB?m`BHxEkuAFcxVkDnPt+yna<-q5dsk5qw~e!<9F24*T)E;A<^ygiqcdR{{42>80{wjrowwgc>wDh|jMxul z4kRdZirG#hN0S&cJKe_;>TBFoz)|UDW$7HXhpMQ~CUC{ zGvySNv-v@LesQ46f04n)qewg&r}-~Y?$GYNgexz;v{;pLuAj=sO-_uP(q~L zxYCO5-#_=tyXQxrda+NCr0MY#7(}h?=gyx6z~7U>uSVl7Jv%$uhPFL|bY`ZJ&djXH z!N)?}^;N`!{BL&#s1)eU&CGmbZl=Dri{u`{#{?c)!za>OM#Olap9#%jsMrcXR4aaE zLTgB+hAn*xv~|~$_aDbGYV|r&A-qhCVF=z7QYaz%hL2Aq(wIwVtL!+crfpCZ9q8&M zcX*G+XWSVKC9RE6%E({Au6(fysI?@b!13*jkiyzI1JHPb-nrI-5OO5?bl>ptEHKzjEc z?tbY@P~C2jrjcoP?H0t)@Co~nHM^^wdVy!l$|!H}@iwq>-+hC5^d8HQgDZr$>{H8o zr|k_zCbe7d-*WeZ#>>@O?Wy`e*Wu1MTQ&%!QPf#Ka^%|Dks~{>rgtCGPG>ny(?P_C zM?$Hy&eSlaeORj1{>`yQW17*6?0AB^7Nf4h4^sI4e!KKa(0Uw`tg zx2DR_JA~1Ca-(10RpxVbTf-#OdDK~UrGsU5oW^gu&~bRoz#%>M9UHiDk5H3W7-&m^#0hS|9ILaA-zMhvSfCvob3{CkLx9rHM1?5F}g z%3iImqW9%51KsY(gEm4Cb$h6^^KVmz*Q_xNc*G?XD=`>B%5#2acv5Iy{O%q*t`efx zo--GpTh0$!C~gI(Z#Uip1waLdw@=Wn#Ggnjl@$cXX^$!v;^y43W4}Fr>{x4mDJ=%o zY@Seq0~-2J^xoLdrYmJ{wNlyWwpw3@)>9M30)Y1R_F{W``$Y^+*R`R@<>}W`G&EGL zw$@oZa&ZsHJ%o=5JhX;S9`OO}xPn&^*1G?_W1U|bMM$G>{0z%1k>Msh=qv+2>L!XJ zRI62FLJSQfLl^ht4E`z==aD8-KNS$IetlP6&(&?3tIkE8F%%39+}mkJ$4wG$Oc}$^ zO?%^%*==CmvHheWGs(A4#`rM>t3$_U&w_3Ea^EKJEi{rMZ>)JYj3Q)X@cF%@*1)}c z==|RAL$9p?X*$u#X5*w*apktC?OyD28MTijWR!&V-Hf8SC>4Af40u1(Pzuq*=ZdGixzQYVZs^FU2}+q+j+dH~luCQ7 z^d6lx*I1)Q5lule)Z}$B>|Hzk(J^tekJFU@Hgm$3YeMx7j17Rnq1z)vucPD3OJrJr>lkfDx$e8j-tBs{;0M- z7eXzoMpqFN1&C!a)ebP=f6QQdA@>y`3Ty6&PhMdQH(sI@hyW5*CJEQ~d7 zl_Zd9f+)+pJT0fdR=He0mD9;ru#a#1944;0Vk>es9t`KfuNc}q!{(j5jPq7MALTND 
zQb^?lEzv;#u$=>jq}Mx{X}w6Fj-l6nI^8I4b%nTj_Vsh$IT&igQ0evRk=6}YE}3iY zz{~%YQM4wOmijr;_oi#FzwUJE80=l*I>)dMc^{3;w69G)fmn!Za|=hly?>)PgpV3L zlo6kI-+i|l&CXV1KpGZk3qFg{YYlX}fKtUC;h`uI1PfW5T0CmJ;y=RZn9<%-1rh$7IHT;H=H8)oOwg=Mo@Z@!j zK+b_OLZ6?9(`W>`G4?18yhkbq1;bN=4lc@MQhyn55I#4Z`4p%>Gvs}R?7_wv)$6SQ2#m^< z$fHMJ6Hh!bl?wBbG_B8OS)I?a?^yum8EXi{oz8b&egD0`^XmKW^)O`+Z?_tiR_hdo zpXUh4XhDA7M*TWbh{tYK0|nn zXN7x-{OvUPn~%>{*Qz)c&AJ@f5)%2F192j%zrMb245L9RJz> z0`q6jB17?7y5?+rW9%_mDMaAp04Y@!@l>{zh77b=+4ew@66BL z-y?Dl;bQ_1rQw5idqDtmHj{8q#<$8dBaSU0Dg^ZKsB0r4bMwXW&1ntQ?F`e|`nLv4 zSqLPx8nl$8wO_Rs0Aj7H0}OvgNb?*!LCOMc4KhS z_=o!iix3{4m=(^y{+6^qQ(}a-TSj{nPCaf!x-IQK@^(Jv?T*=2$4npncG2a_NZx)6 zTW^F@nQ0VYH#X*%{a%f)t?zeEV^RI0dy~Sr$48er#=2|@->B%ws3Wh(*}qam$0#}{ zX;(x~nfGSyx#v(ha|Yc;!*WN_b?IHF7cT;L?m+j(b7*&N-bA*&4Lj~_@X~ckofdvR z$=lg$+WfX1V=f-B@!I1bju~+9&^}z8z6dr*FUE|q@%!ZhrPXz4X#h|VBGtQe;Krm- zOOi%B_-q9=UD-HPw^rN8()7YB=iiyCP4P*Z#=SKCw2q=h`g~n^>zbGXKvu0?>CHB7 z><_i$!PO@}u}~Fap)&Z!5|x9#_Z`D>5iG+@{eqgEovwVoLzp^vI1QgTmJlN9n;3oY zr6CMKYamU*4epc!lIHC8& zF_f+TDRx_AaYfD_>oTTS-nyEl*#6VkaP9y4b(DcodqsANqX^G#M203&ruUMbNnz5| z!=lK=xQ7XhN`)(%+82UJjnvAC6UbIpu(jO(TnSy>{WZ%^k?fbw+`fa#>MC@zITlUZ z@0>?+|2|o9W;NQ^u7Tq!X7-*xr+;fCwj+b$@fk#6;89^}av2PqPYSEPX1vRk3a73a z0p*%5j~rY{sf0XO6=P1RxT-bV%bY*40sTU#=CNbvo5zk#)yF*npqr#|H%(trLM*!O z!yIh$ls>D*?CfjvCs(F*qyHzWapkd^&W{>*fxcrl@H_phxyJ0DEG#WemGOEA(+3Zw z;iCZwA;`f7BNX_2kB`smDg;!e0xf)z3R~HRQs~}agX$%AEv=!W2Tt!T)X&0zGku`|)Tk>diBLF@OtZt^m;f+%mtH)X?Q)Vy17G>E!~}CzyR^ z2v?W{qt&$ZZ4sq^L+^@q#%o{u3>%}CxaLs)m;0m5v#4LaiX&h98a7^g4au7MP8*FZ zjI)`)DAF?O1h1mBud?ZJMKqV`hu;t3nS`EE5foq4CUg)$Xl_F7d~|dALhZ|3=MZ|v zzI#PDi|~w!=G9(&0Z0DU-|FLm>)rM7>Y7s6K7Ss!zWBvpKfdSOJNG@Pdv`rN+bG7| zVd7UXT_*?|q4hhD{}!!G+bv>8i*h_RJ$^?|UeD%~hQAvX+P?#d)-9Hk7>;WIN(<3D zr~?k(edX-(yJuhh2Z-ZkZYIzt2nayy&e0_K)uTyre!uA)1-;D&viIP@OIemJxKfb* zwPjBRsP#y8+Usk-3*e9Th1fBnv#{`qtkHP9uZZPt09)6v-32-1b;v1HcxdT1u!|4G?`HFXc%_CeZmE%{QeeSe1w*LQ$$0v+R&(#Tl;&wNwy4yf0PFmvn z{p@QxCR(OJXO7ZC6XIz9g0V>c42>x8`7|j#)WqT=q_d$}JDV+=GFmS*ejD8RHl}X4 
zt_C2p403Z58^8U3qj&KVbfBcw8fbCg6q@F=s>;-L{!y_u)EyKLJTCGoMpl&#->*h+LsMD<1R%=?nSJQgx zB0i$ik)qRCf>PCx)V5{h;I|NIIyZkYsnyq>edG11I+{C&UXrY2z;W|t^Ysn(X3@V) zrbbaGot?eXlXCOv*I%EyLmUoay5QkF&RPiB-+XC%+BJ-K%o0~D;$wl7P&0K5Hggu@ z(OUO6qoq_MjEyNiB!;g|_s;tqvWUN#KP-be^2_2zo zZ6Zhx8a};5g6x4j zY5$nyPin2JoEPW60U(K^jb@{9`T5(oZXN`kqag0@TU!)b$F|G}J%fQP6{3?Z9{r9G z*u?$S1EGBsB)#6riq=b1*RdVPT%x&BjBGJ0C67FDk4We2i|p9KWH6D-ecK?X=8Gq}EtkTf^Po_)T2@SHFcxV4SGX zPVkIdXSYr;D{42cQMVt)#<+d#r(wEVC%6@YlxY9tPas=f_A2P+n(TRd@Bp*du0h>h z1w4lw_-LxP+K7{sC{u>h=4(@s|3YipHae&S#iO8EcWh%wI5=!t>^?!tZFxbtl8GF< ze9}aMa!M%A_Kn@;moCrJd7(liTZs7wY4-=j#)GiZPQa6vy)PHlSUJq~dJA^|Rz(Z;Hh%8H?6bZX%&o2x`g47z-?QN(e zL6(^hkZP?Vs#QeG%Mfu)*s7;b2nelFSzm`p5=RirI7Z{qM-kO)4h6O^#0c3;h{ax( zE>k>ON+=oiAcS&9;}hQ6^OqV(zH38|{ns|p?1&1yzuozwtu*);sOOjC31i}iH_$yk zHXS_4%%wG?)D^cY+Ee7Vv*$MCjiv8Jm|MvaTo6Y9gahsM|6s3?ffkPFd0YA@9&`aYsShTDjaf0pVehQi$7a-23LAptrsrOt;foBiY!%*7v`U zI87n7d$5bW>ih9Q`_WVln46uA!udwR{|v z2t=>F?Or9wT-#?>zHzgTdhUe>n)f&q4&*|i?rr;6m=d*c^43`2_dF3t?v+~ z3m#6xCrMH$m2j0l|7DDWfk06T^==nga~oN=Yv@LDGHvoP`+(Ex{w`Lpc*H2b=bC+7Q5tx> z9z1}!@B_5|(|<;G|9<~Nxp|!eW24OJU%q`ET-o>Sa|szRMf*d_?%B!l6zwqzjWJ}P zmDwBP>u;2CInD`dqxMqy%0dQas6F2UHfkRiZR4ZW_pHm%9V&;EnE&i&fRz;_)!`>T zEQQZr7l4EYx?OC3>sv@SHi8DE5CZAe7Fw4tLn<{~X&`@X9vLcgl}}+|)7Cv&%Qh+~ z$KgCcvumFV$8dHK+Nc4;`{#X7ddz5D%6+`kro}Rrln|mN4&ZQu_s^bPju#fr)cR2p z31ZMuUYylx-%o0__doI0n^SrGaVJezy1-d;W-Oie0^>7^u4j#zxAvI49Yd|xYs6VL zjL6w7o_ z7qVV*%qA!TqNt;Vup(Mqi4I}q=@}s7;2y}3I_DFau;#{QXIzP(v(NFC@ksj34BJ!e zsuZ*rZWP(2KH&E`ZKDj$kfOqCg+-qRu3ba&>%WfV$`z<)b1dcGSI5FWhC;qmD3dC+ zoBfvAE3_L>6xrDgm;~o2!(@{Fb~;}|O3Xa*1d`>GXe})G>kz{rce?0K2isrz60&>u z3h}2~TWDUq2%&Q>!Xp`e`t7K8sNC-a;XJQTy{g3!)jhFT8>`%sk^vRNZF6NIM`Mo2jPEDygSl6i@~y>V$NymODcl*%W?8kWly%5d4tzI{y?$(`fSjGZjOFF`_nO{A`1rs> zY4{|6=YP1YfXfO{RE5^V|Llbp0XU)+npdx2`_3IJ?W4M|fX6=fIq2D0q{AgiL$XHf zNB~EY1Y76NWBtANEIhJW#nLOUptp1s?MiHkK->H=_*9ZCs_mwg>V`@Ms8m+_Azvff z`XGkw%`exGp!ZFQIcuz3J9^DG)!O5*nVbC-xJS~=iOFrJ3W*sn72tILjA`#C)!Adj 
z5WlHm%6xOd6)AJCs6V5AXvX@DT4Jk2G=0T9PpLI#(iGSJ`d`P|um3vK`Z_3tQnh%* zq9}zdzvsi7+iXQN%1nP*ya^BFQ9u`IDfT`5y~e7Tf>yIn7Z`0ZMblQ$H)acn!$NfQ6ILU z!Z}J#CoH=8Z#owk;?fX6Dx}!J0YrQ@*VbNcb~>N)%Mgu3%HFNn*_-Q|oBt7ddXGNV z_f}Wb+WO089D@`4GZ{=I&A^Meo2 zzIDq=`-o~a%s=)Rwr3k?$1!UIg0l}*tj*-^tNGesUwYIR3$Viqo zL@$}MXpJI>D7pzDI(EDu|7DMmXPRqrB}m_NhG-cdr@>-J&TQZ@8UJrHZVm!I9NgsU z{~CCMEy3)jtkHNAFQa(qTrgG5s>f5;c#4YcSLNe3rj4yg*#@000`e!KQMJ$&?p(zU2zMEm1?Q=`CF&hgNDMJZyT zISkDOr+rlVS(&m?^oD5l;o-(pXg%@9F=tJ>dCU z6{jh--@kz7rAxv7#dZ_&>QzA7Yl!n#$_tE=Z?8E&@CdGpm%6fg{ zsW)Hu9I-kb5cj&(c(4b?9*3O^qDiTopZ~6$pTGX(+iy+nxH@SXce3o0S_qQnM*5%9 zxc~ql07*naR1G`x-WssAiCii+YxT8O9N&zR^!7fGeh42Ecqk2@r=NaWrAn!kq?A$5 zA2kj%xwC!w19WcPwv@N1QbA*R8QTjB=+vr~B1=;a@)i{Uo!fWNzI|tiBNzTsN~Db$ zr1iQbsJORph+nCyOp*p2n~Wr6B(DjenbVNB^RW`w=rgqY{_@}cGAfjFq0h}2cfS#z zHy5uh!i?|BuICD6b0Unko~Um=Tyx83it+zKW*#+Oisvn4;<+AX)1UcE>8|&BNWS|$ ztpDrJqx1gzP+QIZF~0#AmD)s=)H0C~-}N|9HifDT-Q(0Ndp!vbevd~&b{j@TdVF;8 zXS^3M&fGT6Jb}h@&!V@m(BHf0I8+Q@$p6M#WBdGhY`-7Qd=c#qqFc8BZ991kP@ZD1 zbNi?OJCf~Ni@YqyX#Gi$Ac z)|*1_uiNC&OYiT-HNs~GL z_T^y9NC5?*;v+}?q;mZDwS%F$9Q1lgwbx6Y(m)+>bpranX_&Qck0)9y{b~>c(kWuQ9`?(&?&7r~9WO%T^5zo{m7r8}?gjr#@HQ z!@xbGuE}C68s&hRAntgmT4AdgJjcG6a$)sj$GJw)XIQx1UwDMJC)QQ%G#eKc3VvJw z(Ae6-{Q2{E@4x@s*!s@5p|`h#6qHeP%S2%;N=g_D-5)9^gS6SKrp@MaO6!LGKD+Oi>*TN`bUJ_f^l#6desro0y+fEjcxVlu zR0@?z;e7DHPAsqm;q2NPs`u`pd;h-UL2bPB66%jX?g+{pTfVF{G@|PtBD#Lvn@it# z<{8X9^$9|a$#1oVt43ApcuuLg>no2ucYWoNIslyBSXTgkA4*;KdGqZ2a7rD$=E(sV z%!4U2f}i^xU2vl^2CpknxgtXG+Iw|;ep8!bcMP1?kkVsl3(W?LaCmTPi*CD}ohxrE zKVT)kBMr>>Lkf+C%rY!}@B!}q@BasG|JtuXZESd>HZ(mc6v3e>U7>v&9FMOqSMq)R z?IgN1E}l@7!)K%v?V~BeL+57{-M)GD_2(OduP$%L=?cw7*@#hSPI)~a{h{&DzqLmm z!DD~>Z$m6Bu+_*{rMYfwZJ~PS4qBHkK@UDZkdJNS+&OH&a~^Tl4@6KtV$^@zAjwPV z3Uy5q+d;wUSrznoXv3bT*vm|ql+nR`la-kQrAXx&4n zvx-hl&EU0L4;?kwc%tp}l+QnmnmUO*fgK8G+CJhX<7iXx?= zs0To~3aWrDSo;6YbvlsuSFv&Vvg1KreC}D)PMt#H#Y4r1Kz(xy)wQ+$32`hRzVX;& zn0e$>|085%@!M$9L&r>e%iGz{hR-QW0qEb+8r4<{c>8U1{=ctc{onrw=&dbakoLy+!cg#e 
z{e1?GZ%+-y!S~T{gMz17Q9MQaSjt3CI1M2@d~Zr<{AKsi`1<#Ok8a<*7Dd-Wbo=NU zwO5DtU38t(>Z6T5Z$2hp|3zbs=4LVX^2qdgyb!>i9Gluh}0NFq$zr3v}vw5501r!Ixpq=)4|2Z$Q zTL`P2J6Du$+NwOywmBeI$N^~G5lX!$l-k$ts$|>E&t_U3&*|1=4qBB;Q_1MfnT3UO z`$^Lz&}+4xO_f^7-$fqb1B?tw0i}TJn4SGy%r>Up@TqmXvexY`NUaB(qRrcC8{0Wh zgS-DsRU9|F_4>_c-+2Azvv0iK+DGya;bQ;~ui+C{l!_}IfEyL?{O(^bWEpVxF7CYL zo`8AmrI*ln{Bg8}QDwO|Ef;sQoet_753qgf7PL~9LNI^&G-gjd0&1wZYB9knWxY4s zcp;gc9U49WpoLIsaCn0;zPLA!3urrM*p9~d=f(|D`;81rJ2en;T5LR2A+Yh%U~^!bG+-F~DJ%Avcz?$9;mU@Y0UNs+J5`?e98yfP4<0~#``h^N zSAP}Rs{hOOL(C=9pvKuZQ#O67sLmv*bRmOa-2V3>)ooJP?bN~|Jfl+N%Fg9DI2}CI zDr%=rp|!Gt?))5C3&sF2>11YY_HN(Cy)S+dYVbX)eC}KCzmLx44~A_>PjC$t`kN~I ze7#WviLPa{9_{sQn^$)K6bG#SIBx=MjBd1w%MLSFHV>il!HkJI$vCg|$q6%~0suBo zZ*6|(^w#FS8a{e+^JSH!$LV-;SZg%yt^wyi@#dTF?I%r>K-z3RlcwpiPrd^78;}U0 zdTDz3m3Pno#w+i>JN1UoT$;+cG;K(&C4I;EyJ>(r3;T6cwYrfUJ9g2B{}2vg6g;$s z59a5Zn4h}{(T|GwR1({_-@k6X_dag^#&0@xk397hvM0~r=83+u$#}ysxHhb620(V> z2HyMEpGVrU<-(C?o5i=?@B6XsxgR?DqPab-G(xiF>LNcPIuLOL2uh zhGmpy`JD25Q9@|AJN zhs#enE;9z+19hI*hKy|}amS9!8*RF6dQ5Lzu{Nc=E~nbh-3|smTv<7OedUo)pJ=t0 zXR@qLW!(VM6DLmm%JCB?rh5NN4l|pZr{ZpR$-e8C*}bKubFI^-U*8|fb_N$;dG*u! 
zvB&;${*5Ir&**y5#5Cu1X1d+)EO)#A>weID2p>O)M_2iHLZv>_1aM6N$~JAN5^k%j z0zjqN#KMOkqPw<+cy<<&d|6x^I}Y*O^T6G^L@aS%eE%xj+(hf0cc9bMO2dib7^u&n zvv>r$(*d;d&_u+)QZbs_rvO-+Aa$kE(plC-lFV_Tm@^#XGc`cVd}_aaEr$QvvgM7Z-O<`?u_Er5lA)6MJI!k&+ei+cKO?e?0Kxc6EqEj7pJXA z@x(j-90&l_P6zn<*U^6P0C(=+$Kq!`gX;0)K&{rV?@40{?IZC)D5cGpCKu|Dr>60G z^xD^FD7DSE$H=!ICDcANcAoC%!{ZynC_H87(MRv7IWC&F5N)Gy7CqxK&)-)!FPney zMQlEC2AeZ8m~FQ}*9To4`P(nE3^Sb$Hs5$1GtWPd`pOEl)~If8L$uql*Pm}a+WgjU zSWvFOW43giYG)N=h>s>?`Ec^RLzJiF_wD^Xu1QFv;O_W*3AUy`cb@$GImd#08d>+16_HUNAgyL7P;X+3aE=WmD|aRdZX zKt;XYf6{ZaUp^?B$AMP2vl1)yd=+%H*ap6lU_L`$W4lt_R8e$$9=N<;bRWXU3m#s> zr&enLxCB56CMpbLw?&yoUvXSmUB%`HA7K88Cs03bPI8M*97pY?m!Q7>$D}Rwj}hUt zy^ZX`1?X-U&>En5_>CE)%gex>yMU%*#uh>vfY3UQJDp|lXBk%>09XUCLg}|tB76r6 z_!M?{>bm~Wu-RxU?XD-0CRzBH9mi)Y2X_BC-S)m4TP<+=b(IN2g=@~`WwZxcpT9Y- zionNf3_;D>+jGw8aqIa!!5G%AWEs@?cadJdfmWr0N|Hdo@B%7NJ_%Vj*^)j5d{nwv zQJrzFeS3u_rE65G)~G!`ql}kH##Izq8I@8vYF^6Dg|F@?W3lY~j@mO8!o&9l(XyG- zr_p@kar7R26tfq9V3qrnY^fD$slw*Bz6CiykJ|C$=&h|G-f9k0J$;TFHyWOo*!_#FOQ*9y1B!~Hl)m$bAD;_d!t;tJQfU_u-fPM6`=G!f4DSB z(`5iZ4wl8Dp^-^>w^gsd@xu9crhXHAPhbwf%QHX&Y%E;*YCt8G%BJjezVn$&m!^K- z5&$IK?y^qP!Rbx*`^8rSmBm!{cIx$;Nfcdu{^pIl`$6*|eEi^{O?8`9YGYQZbI@8* zQ^zoUzTEmAh?-V7nx=UF-+TcNKDc7xoPXj8ocfucMgjrFaQ3}mM!BxET3Eb%3EA2j z6ias#i;Jk7Jqtt;>7R-pqvT4hpOWvprdjE3-Z>S?w3VCxPM<07&mMv8qox0c@0oU?F_~OuuyXz@ z_n*>n*@_087rQ@Lqsx1h(5P%~V)nOw8@K-cui(Z%{ikSOya;%_{kxt1mAXImh z5mpqvDC(;$ZM^P5rp4`c-{b7T+3kLLe{^iobI+mji@ylYe@x02V7NekeXaki-$Lij zH<4~_;pXQ*4|V4bV$I_>#$7>qBj;Hz|I2@y4gY>V>A$uOFB_-3$A{LTriQuW?h_OP zK54tO-1I*4uPJj~fTky#|2gq6A)xm;ZBpI4x1#Rd`&^$Q!?iU8YH9k~N}B%az7jMc zw1CBJ;4f$)8etg%02)fl-h-1XuRT~PKwQ7tphhJc5%y z^h3BcH-}Cf!xkvET3k~K%e@3QzWGgTT)k@b(Rkz$oc{Tr2jY0Z0I1lr#i{^M(^|@v zM}F?Y*;hYH(ELu+ZvQ472Nm=*(qJ1A_O^L@>^=p{cxxyQyM|`0n;I~7KlD@=qx8F~ z1UI+1H!&yqv#h+;}UhYpz_goMUyLLH>)5XnuVimEZe) ztpAO_iHraEA7lM%UxV7*?AIhm4f_lUeq9s^;&nntB8Sq?%BIb$ny;wd2ob)N?8`<P~Jw7Nh2*<(0am_#X93K5Ie-W)p1*s5h4N*19 z@QlS9H&D586Wyyr)*BG`djQVwKc$mFwbh!fwENFVurX1f 
z?&2I$Ky5@(C)wQmcK}}7duk8i;|dRLs@s`&zMlfve7iB*lxfNb9uHnejPeTZSs9Hs3VF8`TP9wW<19~&{DCov#R@EAJ&z(c#%o*VM=ZEFh zg#{dY@kQLJ)llztQB!Kzl1E{3FJ&*wUhOrTmjQei0G_;cbM?cxa$Ax9Xe;}-f4t_d zG2JMHX36zCQrfwf$1W3~WAv#KKJStxX|3SF{!tDmI02nnJCuL9MSt&&;5`u>pPI0`TICn1B9x#0v`$l}f+mh_tQ{ zlueNuCq1eta>P@vLPp7`RJl=Ua---e)1KD}rTI;o8d&DK%oxhx7p!iWyZh| z!&Pc{pJ}Zb zJi4^hZnYML)`OgCF2AXCwAYP*0Ho}sv$L0>)a_T^czqLxa0nB^!yNGe0CZ}#%`A#~ zR5|3CroPRe%k4Hg7k+?y?=WYJlt`a`8hUQt5-YyH0I>PJ@1c9+hUN9F&d+1<>8G*Q zXrLV%+til>o44vE$xFRf>kK7D2?0fNt&Q)9kH?O{aGzvI+~u`6e%DA6pk6qzQhukL zDx{tHpmf;d;hF2&8Jll!k(>5n8xcN3N3;8sBHB;LKkW0RQZFnwVO_7Rts)uoiW^?0 zt^^QkjoEYO(D|3YfxG|9Kf&#P^9A&-UWIBlp#}$V>_$q}xRIL>_@X&wB2K(+8O6KI zxJT{V@%=kELmxMs` zYhT0Kul*Vt*RCPzb}c^0*R!ABn^L-tUA|6v3^j_}oq(g?x|~xPKE{nB%As6`j?LF7 zN3o45ZE**?zOb$Z!_B$hknahRqW_J&eSu!J`q{KrdqEOf6rg%Sq_vHW-~OSiSN~|g zXxRz8`^u}bktEgGZg){?EvY2oSrTcjc6a;Ov5!oo2LK?%>4w&e4V6^^TMu#5){U{< zF9#4sO})JQnqFT1NEoq)Fum~58$QvA<=3KG?PgdhcSS{BcjL=n#_AWoVAYm!jK_cZ zm(h6qF@hnyNp|aw_Nvu1 zsa7}4MpIX4b3AmkJe7fq>yWS1Rh8F}rOwY=KUDp=0R(HtK=#Ov0&a@VcH; zSLpkF!^RhDk4Lk1+;eQF)cAHDnen-!wdcY2ENS2NUI1Gut7Rz`Zr#G-zxWrp{bN6l z_doMly!i`%8QX8aJxCJ@!#E0k8G~b-4f>PzwOjjR%BE|LqNfN(wJVyhGW&Ky2R4jL z&p2~YgvXa&8U6q4y?JyU*L^1V-CN76x8MPI06`E0L2wmCQWQl>vSrC~yo_bXPA2Ze zabmYS)5&y_nV$5SK0Rl85+BEt)01(glT1#>&PgXdP8=sr9LI6u#J0RDl5ES0M2U+i zlA9NdDiA1$B*I3pZ9QT&(J0TY?yp_$(?TzRH065NcWy_W?RJLqcG5cRY z_ZiVhELtS_mQoK4t*a0sOwe$oI<0qhXY<5Uv#q27v^{&lieE@6GvpXIk~Rl zl%_7daVitTq?)}&9jDCl$Rdqimsr44%aJ!z%3GD?Q}CdbAyOZQt8JJ%rb{KaSxk(7^#jwnpMH(@F5 z-KkznX$P?mrae|BIy<$Mv0VuP?dd^d(teOewhJRz?+(5jxXHO1Uc%_u6f_ccvE%P9mBO1^Ey-9##r@&Pq9{eF$lwV z`jiF^3tN7*@rF>rl|NTWUtsqD09VNs`NbtzRB^9VBQuM0ZdKu^5H>%xJ&Jewdg9dTT- z{<3BlLLAv7Z;r=>FjJjX`b_--Qe086(H97n9yKRZ9~R3O^AO9hIR{@@fU~dw|7%}E zEt7%Y)rD~5M&QB)(A~RXUAhEDDY#{U+cx03fa8GJ4v1sJ@;uOD5!7{ISysn=H-OAE zzaF6{%0rB^)cr9@g2=6k6VW4vzmYvRO9}6kuG3zl>nZPg_xXrrnA(hFv#@sF4f@(^ z;O&E9OnYoZkv<;pD#NeNY4O}33O~9Mh)X#`N+-`prm?LHOs1J7rnS9;MysdHMym6z 
zJYSgSDfL&L*~B^#rA{)F+I}`eDZPT5j9RUp1Mo-@C`Ef>0D{oACmW7^;=U8dzuz%~ z+YD@>vYz>kk?7v!e$N2b$mgC6UH9}#6MR^uvYCafM_;@2{-yRV<~U~?`TWUL@JVqa zu<8Y$;qi+n4s`dNvl*LNQ?2&vY`Cr)j6!U1)F&r#_OqYG@Y~*oOivF8A;|Rgp*}c> z>Y9F_TuxYa<2=j9j$tmF1%B}tr4ly1``x(w!)GyPStx~}kV0M#BQ$`~O1b=F0RA%o zxV73GfNz8IcN3t8EAdT2hgx%8KvAElZ6hzhrgl@8^_VWbj5xiSrJVi93?4=^|gx^^w>zCPG%`jOkT3Hdj_8Q$B;YRFLj<~LyRxc`bQe2W^W>J9N)6*pw7wW=0DJ4_vY zs?tE7k@Pi0p9N#V#yt1RGm0p9iSwvB8_EKVNj)VCY-*jvxN4n})*%{(lZ3IG?vAvu zuuuf>p&U?h1bW><;Tbl({wp|m@Me>6L*dTO4q1$ih<#Ihn*qRTlmyv{%b&cfQD1TH zUd}>gkOV)?SXYBi4KL3xmj@lG=GFH9d0|3szzzMrndlJ>^Pnp7HR##l$ z2;7AQtbg$!7G8cC#Pg6JY<@2^zh@6{?i?U|>qlNj;z`Y$ngW0IBJ~S ztA?1C)ueoqV6t1t48CgV=vL4%PPHi)(JH1g4JPKqB*LhRf?h9`W*spjrY@rD z9s+s!iP%KZp-0vzizO1Y4n?RpkiR+&er*OA8v|?`gi;XO0vs1r&qK{|FheOgAqXh} zqZR_&2D5E&+XizT@N5>yc;J}~NFfhe>;}zdn}7KnsOQ1TW?|*?u=nkQ-Q5k_wvowX zVB0pV)_1Rf#hLgU#pJIv+z$0la)Y}bWQK;&*l`!G?c9aZE3d!`d|_0@y)+)VI&-h? z_fyB2tSFIHS8`Z$oRALbgxU*v=ys~4IZzhlOjdf6Yo6?=3ndS^y-anI<0j3~a(e;@ zf@xB#-E?zA_tcaNV5b#^?KN*QpC2b-`0Xt4)tg=Zjl!8d_fi1wrl(Ggk|1br{MY)C zo0+VyUHg3iHJmxUyhL9I#!96kfFXarT(mfMO>0k*QkDa_lFyG3N)Pvfr{C^LaYK*_ zJ^*MYbClL7zWnMgw$i4p(1!1%11AA>+DD=D@OG4GqEGwiP&gMxjO4SrQOX&h1PX<__=L^)3z^ z8-DraaR75?vbi}c3~OM_i_4dnM>$`4fsevsU7)O4G^LAa74uP9aj|sW9WJh8JTb}| zRVaCi6R7U$p|TnBo){A|A=4n27gMPU15l_VS~@vQEmLfxIHjqMY8k4gET&Tyuyg?} zrlnIpD&0*ADd!*|2f`5CZwT)*ZX*`;IYQch#IitJZgOY|NpKu+#{qB#wEX}8AOJ~3 zK~%|PK(g89NnWlCWHNy3fH@A3$$&q90AVhNz_JiHHi%_`(B{^4u?%ieaB4MwEvxB@ zXFGsnw}h%3i08q|dZ3vMXm2lQPY1cHWzhjb zzmCVPnrEB?G2Y8|CUI6$=ZTm)6K%4Hr!$aZDdu$IoD#eEiFK0aMM(KWyBBK2*aa|l zQ#tdOcHgs|Pfx$ao1FAW>yRb}4mi#~aNyWo2UpH<$Me-H1@Lf@kO3Qt+ZURsm!>;J!eP|@YJGJvLlh3g zJ5_XP{+G4AT%t#6s$y;VhqNN+{eN|Yohs4im>20}h#sXiG4NYgX7=WPlS3~pYc1}q12zN@sDMd&rnCpVsHkj*z zXI!Ar1$Z8i&BE#{!R}uJtG5?UsRXCH2z$#G*rTIxGZ{FJ1Iw~nmLwu|G)@T6`Xht@ zls2zhRxLtl^O;ze4ttkOE?|p&pGVDispypNVs3V-5b}Cl+Ll`XB$}dMcikY?eDH&q z`P}Eg7ZzY~)*cHo^B<2RwO<0wQJ@Mqa{niWd{m`{YA2D$pPY|6BSAp-`_Xm&E$30+6goX_dwK`3>dyoBDkj9)^3k<2>fHg&x8fA*?T#`*T9+ 
zDNNn;Ek-vC^+tmNcqAmG-;vrlejjy8YYooN?md3&i^)0Np76bF5x~wYAvS3*xN3is z6@jrG6G3t9+J6KvhVd2W-IwAvMJo8T(AczT4#1c5=gz$czCQqn8a|n66{n`?di_gB z;T$-Csl$gcuzx@DLqlk6+Klnu9&Elg1Dl133?2bNST1AYbAOHvZ+a8RNPB-H2KMen z0l+LW0 zb-915wUYBRhs!!(Q?KF1^7OZ;(-Pfl=kRk5-ePGvfRBzr3r42r2Zwsajr4 zXx17E=iiK;} z1_8Xc3$S%Xo_U=DfU0HH=3IAX|G|S_xHXifSS?5epB6ay3<3F6mGd1AAtjG9TV{-8 zrzLgyazRjj`DI-G{O2*Se}6M9#E>Z_I+ zmlmftg)R1Ls=sWLQ`u3ypSD7HtL|Y4&KR69Y+89J1+Ue>7s|lZtAOo*oIMBPd7xef zJfDZ}x(KpaRC9TtB`|TzLTKAynGCq+f#(jE50 zBw1aSa-6marhHz{vrgf!mo^ikbp0RxFv2hWE%-OTp}pVeO7AamNGrG2|9DI$3p>%Q zBab^`>{Ib*kmt!bciDO@<{!m1&u=EhkX$#_DlH0hqOv3E_pt>iY72AurnYQ?5z+u~ zQw5Lnn>O$8FJB%eK~MxZD%5KDVR(oG|Ihs^Oz1hc{jPrQ`E z01muV>^^4s{vdpR1km*ns=+_HwgF&l4F31NhlR_Rk?HORTo-&`06237@ZCR1~I#(QYPDGl7v9@{mM+B zESBi5KU_NStVXOx25Hn`&eZU(fFhn360%ID$s z^=W%+785>HVJQhqIhNb>tkd_GuGh_Nsff}QAAT6+BS%nq`e|gURc*ZLqPzM$dS()g zo7bvqt2l?oVY)V3?%%2nN1VCM^PsFYQiUpREJwAd#_>(_t4UEJ*U7y0se6`MwW0K; z3qEDW?#}}w7Go|z5ID}+kkS|Kf9=#WH?!QEfrT&}B*6Ve#+xS^>GL;hc-|p|;nDjJ zA3A@l>3gFX8(LqOpPA`ljP(vaTn9mAC8{steqKM*RCN~tzrBJKfvIF4}y3eoRJN{fdg7Ts{0WIM{gKn^7-dc zUAq=N+qSjCtwI5P@B2|Wue<_sE&dR7C%34eQrQ>OYh?hx1ptjqwnD(C>`G-sUbtm? 
zbQgx?ds(}w)=kz-CY5K-SC6CCIcCXaK{P2R)%p?(6=q>0s!x&|UT1W6ClhH_87bAh zCT4I<47}<%lO(vwea*E0K)hB3P;)IhwT>RDYp2z`n)#b-ePURgUZquSbA4ge+f{dU zQ`4H13~Jn%wTx|@5&afp;4B0Qe4tK%g>szR$&>AL3 zfU2QZHwG2pXC2Gh9{Gi3`buB5`ue`V(vGhNfQ684rks7Ch=jF=&|3G%$R7bXy)wUH z!-osNC=Z%$e6o_w%nt!T*|KVW5S+O0(7_eArzyqlk5ur{@zoj2V!Tw^Jup4J8s_F^9QonB-6rxP5-m}bbM%alWBS|Q1{)iLKR*x7WS0OF%M|6%Jv|Nk!Uasf@(OylZfzdZ zYgx#YN~mwx(EKjY!~}Gmzs!76aLU54giDtmBfwt}U>ux38xj&w&M86D-xlo@W?hV` zbr)+wr1DjJfe2j(s|qgSJu%P~r8H??)pB$SOm0y6!k(0$Nf=aFAa(lI-))>){%;bH zWcL?2Uo*Y>f+EusvqmOb;&^I1skM_?LtWU3%TxP?sfA2d9hGXiTz$-$kkmh7m~`lX z$Y4e#M5U`tU$VYZ)vue|Nf$xm{@pwx9AOBAK{K>oYxg$>0F=UV9Jtvm$hB**y1Rki z9#jv!fX13N;JGY9#{nA{fU{-*PJbUV0|Ri^t_5)%qs7Q{ly=g7ounhMlMZs|;#1hK1muoSz_+d~M6&!(!AKT> zc^y>yB4JfZ1J3z(Nhb5+Ehaa`DnTmv81TUO#ghO|9?Td1E%E(fi}Ml{CVBDKq=oCR zRx$S7@1p$5D{$7Y2U-@IX1KUes0#;2ceR3G>>?(gdk)>3Hi5Q|$+dDhu+3Y5Q?G$f zTmqDIs`iotKo}Ot?CgV~=M7%&D;@2-G;su$HAlj*01j8))*9_fKs#^IsZ14&k_8E_ zJkgnOvGruuSY%jD&Rh|-riE7)rDX9eX83i6T%}c}h$wlBwCL~8q(v5mAXT7*QgYn> zst6t350rx<;msCbVg5P7D#2<SMg>dR2NWBKkTx*w72jEi* zKa+u<%_6*e2fXcfA-i=e3U}`S*|`%mmunui?Ybb21L`=S7j_O*sn48>E z_8+PDCIKiZAE&1Wd~_7`Et`=!c@liqm)?`wH`M(!GW!q6y(ms0+d!ju;#2({nk;^D zzcHm#=P07(DbuNraq~PO&y5gjYz~X+Y)+@o4%D(r>5W-)ejt?z$%Ica>O2+Io_S)(ze_WXEN7S}}% zMhL4q&NOCczrSbv;<05TJjET2VUBM9W8|JE{Mf+>CVXyy;!V^$ZD6w;eQcIrzmQ=)(`Ac>nz< z+_x9q4?P6iwr_}g+^ss!S_P}uG4tGWsQt(P1MKiSE6 zWW!%=Iehr*H>EObP2C~;R_i2ksm?dJ@)_rN?W#IW@IBS+cx?I zE0PSI+3uR2-T~Xbm-h8-<<;7M2^kwEgpS%gW(iCJfjTMa)74U$8*^Rd7`kATwAjk+ zZ-TCuO}0?U&rEe*jGl<*rPNJ*rfQz`ctrRg)1<~z1%>3ci&-Heq(i$Ly?;lgsf>5B z;3eyl&Fw9+Z1S`CPVCDj5q~jdf>=L2Pca{{{i8bTV_Vje>mn)9p2T^YYpmN9)O)dC z=(C0#&onmUIFtjKAb`8D09LPqJoOapLx<4lE@G;;ghnolu-FaWS3>@whtRcuKfHD8 zS|4y-I^M{~RF@iNEZ1>R*tH9@BOBnq`YH-DGtxT(^l@r_Cq#RmG+#Hv)Kq2xSD{wu zL-A--MI#tPip3NT5+o3wOBJ>e|v(J%s=jI9kM(6tbpCPvWlmi^N z$wl5&u-Ya=@g>2EEWeUmM9rdWC} zl37|^EKutu_ZQP!SA|a#6>{#PKqc2%POn>hM8P+XWwP7I0+nemoIV0zh}TrYP|ktC zZ~k4X0hh+XKoAmwy5}J*c7c~lu&-W)Gd6~B-4Nh9VEH^qp#ZC^3%QXIkW2=oB@l^6 
zYcEAmSxnf8@>%NlfZBVDD1}=p!MI&pju!x+=Uw`BnFJnP)DOl}(?Cbn8x% z;>9FT=t7K~CI%6HO?F(p{7!S2K0@_5RTQF>wSw`R{P9wr%S{6~R4Wvo0;MnBefaR? z%`ER`;n=-#(vP+IY`-f2>=y|{n!@`|;1)cn8&aDNveJ+!r-*u3_{ zV2(5CyY7j5j~q!)G)Qq%kqSPGVB^`-6BkA|l{1CHCuSxmKjj9&jsjzzDXv+35W`gA zM_>5F#b@A~-@>Jrj^fp!AvmL>2+p2G&-ge-TGpOq{jUrPpa#69%Gg@~fWP-WsD1r$ zWDgt=(k3m$GW*t<HYTft1VF-;3HG z#5`poT&`jK4&=PeOdy2V=2B&$!_2bk{a+S?{u_XoacD&zR0ja4`@Rd{;XENdR)@7moYCxD?wGx9-49mgvE30;VM7NL_%+hXPGLSj-nl`#=% z5gAMqZIIeDsU14yW2Q}ZD1(wt?nh?pmELY>r&Qm!sn4l-i8P9NsQIXx@WtMn+NKc7 zTdXgb%QrEH$>U9ZH)8vXHPS7IF;k}tElJw%TyLA|LZ&ILoR2ER>hqRNo8GJCIVCwCQ~y-M+HJn* z7zP5%uK12Ky|p~Qp~I_rL-@PD`#TDNe&7QixTylx&BpVkzC8f`c1^iF;s|?(=Q7@O z)p4H8jgEd4oY(F?d}zfAcP|vXhX6d^TdS2ajCqT#J=2a`=9K&XbiaVh% z#mYiz@mV4^oIMi&nEU5k_8G$Q$!d4c|Ku(#j95WX5|ak-82G$^a*E_+V{vK$ps^ycEC)yJjZ^tpPN>W@= zGpUx*DIamcm6!QAH!hW->r}-uWlg1=P83I=(ImEn%DBpmbM(X{zNqO%$nQ30d5LhX z(9&w$r!fcAdGHj8Q>JyGXE-fmktu1Sc6F-*LNkdNN;Ku8Uab5mWrq701mYdulWeb}8 zrsdLPk;~LCO|#fn}9lLcW*FWS(43`fdac@7k5KCNB4Tvokrl$Lebj2Vsm5=B{1)IJf9oT%K4) z%2TWsbe=I&bj11XckL)wDvvHyD!)2VE)Q8ObWLnCATzjT-L3966o;vcoty^7*!r_5 zeiZ=r?nT%C`3X$^+Q*PNe;)a={>?ePyd)Oa1UXE%dM#Fuq+CVT(RYRRycg$cNt?e^XWe*cwheV&j6sD5D%2(8AlH% z#O0j30E(P*4~)6s+~Z+b1Y;gJ_Yj6TFy>OmY|0p=fQ{xydE52UM}XSCs<5oHn`#bf ze~My?BBm}hs0^cN6v^+y)c;J3uJ}wBGR>l~Drl>8L{zF`(_&*^9p6zIRm(!HyBopK z5Hjz5FZ$p9Bgk*xj_k&bTKOit(W9|n-nAQ1pwfMEnENROtt!< z-pR>Ff%Y3`mp6_61AyP#|LUtxFE52}9EW%Ae81Ibe5m*AxqqpKsdHV;vT8G#%nS!M zJ}@`ixYe|#Se-~MK5q=?0els}k;dAAp9~f%4~CV>zTO}xm;@D32dIXb7&etj(HUu# zK~7>2i1h0MsGmNK3;)+|V$DZB0`klc5Pbb{*uv>dYI)6jwTxtjL|ryJ<-3@mW=bpS z&h>oMcH&YSCRxd%CnD34#E{1M>jDlmy7kr&^{-8;lGw`R_7-V1GbJ)>oFtM*_W`91 z!uh8O@D%`z0~G=^00aPPXj*t!>wae2wry=8=j6$g)Ib0LVC&YcZ5Nee$Bsp=Dh&W% zYF#%4BSP$!HHY%%+(cW{QMB$`&uHtJQYLAiw#Y6g2!Q|qAOJ~3K~&w$R&H7MGTR>F zKo94JM^BYB_Oy(bY<>uX)~0L>QhTAS?P~o3{ea1dV zLyU!cqWey3k7HRUvc0{33xHv6_EwXcVwEBleBJ;?PQFqBPysN0Y{RhU2jK-)ub&JY z=fN=W2T90!T*h2cs~;iR`jNy8tF{WW<`~*jG2iAD1z?xUuwHrzmAAbOc6Se~UAtf% zJ=#hy>%q-T`(heFkD{#hhEM#3^B!`f>1aJvp(=cocRIPUF;e?i8+a(U< 
zIOiS!mvinB&M7#jg!7i*CBJ=rH(cyGC-*OTG>h*|6=vdorE2ZP5vRJ6F3zZ$Xfutt zcm#B^2SBS$6mEfo1_2QGAPWl!FI+%qTL>Hn!I`sYjE|#w@+8>HFC*+K!s_dTy>Y!o+<)U5`GB>ha@fyzl~C-xt_?+2ux74%|hp8f}&q%VQu6oR>$gtlZ<5Zv{^7-MgKie09k3{T`q-7g*Lr z=y^x?zxwLY<)!Y%!L3w!DCa}bebU;~uTfHg@s7cBQhj)M&UO!=o}OW&y~=ek5rC^_+CbE*J!W!$J(y8IFJP zizqzs0Q%nle&FcQc3wJ7o!Byc=~Q>z$ZWZ8fk~>noTs{1f=i*vg2Li1ZYrB%rVR!< zW#&XJPozvvpG2S97F@YD3888o%rwR6RG)u<6T&Rc{>@0kza!iYHe8uJkETWTx-eGg z+6_wgh57t_VHl2Z#`;+plt>VkNEmvAF^@Lg>;%_3+5V`$b&~}}jukW7Nwxuf4ls#R zVk*^Y$qiu!P}C8@vY| zK-b$IMSjyJ`5Qe;JRV!UFFo`SE`96USjcAK`o2VqYPE|)OzotOMIDQ)Da6#etD7qP zh+mh6^-X({Y>=xzW+ja{#LO7Ro(+B5HY({;x=_2A*d&)jMnQ><<*Wu8uPojA64uU#LzNDI9^gO=+) zIz2P{3r@MbHB+e!bhS@8>Fme76u*BtohaUzm>Cn}+)R08<3>zv*^I%#b?AEPDYzFe zD(Q4>q-=Xq!vGl4sq~nA=w*#3k;3A9<Tw2jb#lBID6$%v8LYU9fFbpYY3_J{h zAfTjmoV>*t2xA~aU?IhgviSU#ZkP*u2>(V{UrHk(q^To={hVeswSPKENNyzB&{^iQ_V&3?@M22udPb880=_Ri0b4xhU#WtkCuE!|v{HZ8~@M*s>9vVpSrw_}m2S zJai}kfYa-Troj2LoRBgHdSF>SjjqDm8x4Ou4}t-!(I{p)w}DtU8e3)JMVTmzTFeju zs>V=W%m`#gLl&Q+bX`}kf)ZeE?I6gOEy#Huyw_f9#nSUp1q!o3WOf%x&PTqNnG&&9 zq97or67R$|h;`(qm?|@)FYKloLrAOYUB&u|w8nKeu@C095r0L?K&+)rvPhXVw5qj{ z0gN+t8kGJO2frg0-#3Eca=FL*0%Z~B-Per8s7w8uN=Kp29DA?zSPc6_{fboTT$TswXY;lNsi0|O|&y z)b3gFY0I}<sF2_G1^TwTOyU!vrM z@_pj!cHrmnykOt+W74fhvv$OFvn@`Nts`gRMbJCQvT<8=g%dGvbG@-)7NmC3OL2$e z?@?&dV&m-bb4(D!85)YXEc(25++(`eTZw&~=*5|_b?*f%MYP91aHL=!{I%k z=?bbEKZ$z%$p3rmZ-3Hj1>z{a8JH&WG4Zw+16^f|i$I9Jc{r+n5HdhYAQ&8-D8JFv z!l4j&+*}42a(vl^`uU=?i9_F|=ou zb_pl}c^iG9NE2{R+&Q$crO2tp36fB|xcMOe(d(A-&luFqTk|I87dia*Xtx<`YhmZV zS6qq8Y~o`auf@L$Awdx^2+D`y)_d0h+blD^64B8{?UI0pu98pc9D;s=MA+D(diXC= zpfeBgotX+$XySGWXF}Ky< zean72wKX6v@iUeK1O{5g4BV=yaYAf6&RLU|WuP+|Nqoj=3b zJJPLNA5tc&AdX@Dr8o-{(sCSsSTP9Vz4%$hnXQGdeDlm==Mp|+)Sm|}+85*QcWjhkL*QTzqOLA%)_ z&{1Va)~_6o{Lc2IiRu^!q>j>$B!&@qia^ABOFR{@Tl@SClkB93{;6GtyZM&~?mT1V zJB%HFqgP!LN{YJ~OQR6{Q46tx=Q&vbE!}`hVoEsVS6F{gAfMc|YdD zrs8XVK>&`~?;21)G0Arm*xE~ z+uSIbd7UN)hT1Q$1(@V7n-iCD4+u(56}gAIUpQekgioN~FCZ?&G$!P?wF@GW+}kBl 
zcFaO35|LM%G@HefEvd1O@F$0laT4aUea!ndR-K!F5??F9 za@2k&CR9LivG*FO=rZsu2biEg;c;uSyy96z;5qmNx*%Le#Ya=25E}2=t|nEo8a5#Z zKSb8g%XQXfm53kUGc3?t(tp4=RL)tFfrBkiE#@~h)*bSD?xJoPo(Z%&ES!DsXJq$o zQhRBQ0x0>&Prp&i5R-?0Ak;P}=Dv4H<@n+@VN@dghs$AGt2o)P^!uPG)nc+czU`C%=)aXFm9Iw1pvjhPGsG47&oNyU5rUI~udn}0M)GYXP(cE%@O@WAO@ zD$sWymuS(CFVgxCBrwZSvaNZmBA#%wkR^jrm<%bWh*&sK!RlQ6FHo7uU2X}!7lKMh zr|{SJ5i%ra9Z3YVdX|#smNSU59Uqj0+ThC?&X1o%;~xCm1W0fFB`ao?TbFm}q~da= zR-2^aYDjCSc+%G@w=8u?aHp8&$Y~0fNjfl}SOO*>etQE>ZsM17HFW*qe4;HWVWxN? zPO11?4Qa9Mbf2znLK9;sSo!oswR|G}a3&tOMF`~~l)zSz-7Xi(3X8(MJDQ;ExFqxT;nf%6lt808C4{B`;_27@x>Y4+n@ zZnNzn6M4VSpy*7|s+(EKn1h0mUnq9(fF|r>J)M{z`Tby$2QE7&v$BI0hPGC4qD*3} zeUp-H!)P(K(Vw6D++#jD+N#S)MF=ONT`jmnn_9Ew`^r_qztYe&6#QR0ZxiEyCuHi1 zF@6P%j;JtTwqJHO)dVyx@x#7cpPey*oYn&EsG*(TEP=-)AG_P2u@4Wf|5EAYLbQ(c zOy_(zkM%&-NWLybHe4R=4%SV+ctcyR94Y%)oF)BiBfQA5U^TMuL&Ba?;oZWX!sd?s zx@!yFPHg-@Z{SK*O%3#w1(dO8a^?tn8pz6oS-Us_&!rRE{Tndi7?6`E88Rmx4c78hyM18?|2i47$_(n@wdH?H6)xTGGuR(5gf&KlEJ|*A6 z*OKdYjD{u1vJ=SI-fQh1n1IC(+qJK~!a1JNXerz9fatI8C>p)*Gy=he;g(DdxvSUe zpxdVXWx|)$qMN=n7Z3Cf|4B!_9$DwXX0!@YwAykXSGj4C${r5-aHVUj_;5pSL}1u$RpU z9qO>|&!%`-f04JAVh($~oe)zgQ^9y5yyr^D3GApv6lZb^Q!Z=Mw$5B8=w9nsiDTIq z1qGi!#>jqCJlCSq$f=}@r8DaKSM;c%qEUv_e-McU>ERr6Lo(U-4q*}K5Bf7RefEh1 zavw$*A_syR95BC~v+fgt0;>>NBN=g-QUWb3rPKBJD>Rk&^6_EA%ppWdXQFaw%apTR z3pyrs+ydEhSIx?Wb5|r3km{lNi;yA@>}>u*s=vY<5zLx5A`aW$t<=rhZUqMF^Iq*5 z(}Dv&Jle`eT9QfZ?g%6y=g%^$l5y`8{{vy*Q4hO}I*bZm9<2l@r1!?yFFs0ogTYUJ zk9$*h7azVgTdJzTU7?`+xrf;TZ8_UWq6W&M)r9r1je}ux486E3nJcc99nlL@*ZX|EiVZ5hOe5HE?Vt11A^cw2w#VXWl}rtlGwYBz zpmizCv+$<_i%sWk*8bsLxBcpqY>~bLO+KrhFz3Efz2goc?N_VK;oZ;kn5ywPp;ObG zy*L4!2JtN0wk+*iE#)m(3P16^tYI=EKD*E5AP`$f|53RN$t*~;-YF#|PM@3{&5 z3}2OLZk~oJq*{to^3-Gan}?&DcKwGE8~9;dXQEXY8TD8z_h0r##Ccr$~4rL1=LY3(SvK!EAQk4w(> z8KfO_PzIC!#+6N4lovZ|{yZS*W+6K2P6r{x!;dq%Q7=iHZnNPZ;VQB(gjKB6+=$WR z&`{>hjXlL9PUAyyYoDUF1eHJa(oRxSrTr$fncfu`w|&Vb$ISf7|Dz$EfQqr2nC+df zz>pA}Q0#@tJN$E(m)@W%Bzwzx@g6p+RSu5$IIoc4m&eJ-Jr*@nMm;JsnDJF4WPZX~ 
zB9H4l2}uebCUT-GwLO0}IMba}c84Pqgj77)6KsN9GSat^!HcOq63BJ!IrM#E+_t76 zKQtC4B8WQl_g@u)T0~5=`)6*R;y4)tVT#(Xn-gs;629fw z+omO%;T#{Yw)W{_!S{*0v92GMflK#Z2D0MU%vD>4VUJC@aA3>kykh;2K{{2hi7r;$ zONC{eZj`VpQhFuYy!u}x7!MSkC1jM*!ofOlrmPaGi{~|5;uvfuVaTRu&vHg-iBuS2 zUI+!3nO)Z(B7fzsDMfmHoSzhqQfjOB>SK}bR?u$Aik|!%&Afkc#Xg!#1VS~AJ1f8S z{kYku6{v`}r;NmQscHBemRug48W9`aHGF-!v6^{}de8r--fip7;u|n^N+FILr5=iE z@tdJp(BQ+SsNbf{pn_~ORG|SQ%9Ht4CJ5ch{SjfqZf+V@(02` zqa%lNCpMsf=KcOP%^`VnReVf|?wWbL(Dn`zQz6@ib_*J860?_S zOz#&ea!Hu5)tS+iVqS|@{&5-R5)ctL6Q9b^YF72ZHq$r5;q!zcB@ZP2c%>S@zP*lz zL^Lk;aS{Dp+CE~v{vr9_)ls2oYVMDws%crV0gc+Q)K`>&2WGaoQPNtyia!@i3=7zd z&M6Ox_DFRO-)3&p!7NohYgOytM~un&q$(;TMh%>DjA@RDN^XpR{?L7)?cM9KyMhUR z3k?&0Q7R+E@pv_XII4@gghb_ai4KCrn4a81epIs?z~xEri5f?gUo$M z^L-Rai9vHYYO@Fa#4=W*s3`57KV^BfPZVKBxyfYEcq*`#lpMEa^zVg0S&sn@>E2rY zT|Bl!c;Y3kT%^Gd)s(($&FoC08Y?{by{)csD!MtEv|>=;Ip6!cE-%~+pAhh(Sb^DZ z87jLfnCy$mB#IuMz8(Uy!jwaXTg)V?5x4zl#TrVJx}iVdB(bNRvuOP+-wOS-EiD($ zt?GQ9MF}Knt2ndiocc;06~Cu7dTR&jH+!0N`5W%D;+04JYYan*W|2e@z-LKBo|N*; zTzW zqN0*3OI z=F?p)Mb+L|SjzEz(%>O*C_Pmpk0;jt5c!I4{hCFwU?A@U0htEuGH+JK($M|l!FGtJRtYSVj|dpSV-34H7Jw}pX`NKZ0xAeJ2P)tNQ{?L9D7E8Jr!_N zxY~1;N$t0giYyUs3o#`SoA_0z=;CAAz4QX*gX2S;%cy(ys+4mw_)#2x0$V!lUBBXI zy%qvgaRA(Y-MCo5B7gciOPzk5gcdnQ@M7tDXE9S$iWJCc|F9jIK0{h3a9AMuqT`=x zv^ji61ah{WmWF1@v9izd6-`=auJ^ba6)8f)@GXhUD!yV;9WhA ze$k6FT2_%CO&M|2{8y-0P|G2T%Z-!rg)os|yHB#W6~&C3i8=r1M>M5*+bPmA${^ zV$mRYsrh@1r23U&rluQGaVB0q-|(55D@a%O->GVqNT=Tu@48USp>18AKQbL%bfm-B zKfBrX+J@>l>MwEY^?mCC4@7JTo{T;AM**DfC%VZm=Ov~h6qV=ZIa|KN$tiiKKd&&t zRgzdI`qhMIWeQ0JQ9;s=W<5^6ZZ>a~d^5$Lv zpU5dLbz0T?_k~9t8uQc-GT)d>5{qByzz*m|dYu15KF;53)XdY>qwR!(h%o8Im+EOQ zIrkM5Nn<{&9Q_6*!Q?iyl*%1jEdw;54ecfh4O9cUBJ2a3+v%+)itV#!FUw?I9=Dqm zi9=POpP_CTQ^hXQQ)(p8LdB@c&YiQHV4?-53#vxt z+Ix|=G+s6Co6P15-)S-5C0=G5i(b=W;MzBdbuB9{7AU%in(j{}StETeZDD~1?Z(Bu zK?}3NKX)El=q7s|bVk2rc#sM^?&M1^-!|>gg=D_f;z9ayMJM>a>9PRf+!3UhJeo4#6P$n1T@1} zqpVXPeE{Y>2YeNKmAgNJKQP;e6LJ@Xc96?)u4T>dm#uOpF<>*YY4H-9wAdh`F-Vuh 
zd$YRtL0IEU^+tx*UG;nGVFb_60?z6NNCNafBy=Io$}9qX&AGq;3AI=~9EEP3GUScV zWnv*$+-Pa3gyV;z8Z{AMi}DmOE!6Gs7dAKSo`t-JqzB(8zUG}4DFUbJ#l3`6yiV{+ z_Fh(I9b@R-lYRRF1LRv76-T-C0wfhZ(OEQYGRIH;f3pCOJ6%ol0lJR2AMud|7I zyDNK$i7$WGA&JRFPJ>2ITtHlPr7s%}ucokWfOeSy)rwQV?0ZVhZE?w}KjsUs&C><7 zqBue*wHgFm0%C-+QwA+qBM42(lBo*s+)aiwBL$A z8yHCV(bS;+z5}GTc3ri_%qJrHl9u%=R@x}O z-HLh<*}v@^x~6djDq7IxN@Ck)JKE=s5;6N@<3zBwRQK)IS0kJh`riMb1X5BWGeau+!Sz+ZeUJ6Phiv`u1E^amN$1o&#QNHtz zG>Ci^`s_h_rW)Qq!H=IP&si|xTuAou+Hqsv@4r3zWwF+kK16Q}mfLS%zkv|#N2nhB zXur`fMM?4RZPpldK$VISQ8O+5_g2s_OWl*FQ1|T|kDw-sbM3`KLXK8j; zqAXON;2U91vEIGFqq{|;#denNv=OO)m1Pk(zKC87Tq>2aKsFu9pEr#}Y&ryL6?*%~ z*O5k9E9N(kAI@7Rf1_mnYUy=bNXoS{*dh&N_b#f>-2bgP!v$^XKJ&*75o_`HM{NFl z^FDqZ7h3djg*xyl$-sC{MQ1;v1@i8%973^Gv&|Y6#St_xRXZ^oDpj#qW_9CX<;A`GoXP5{p*h z#(;mo&(w`g!1;}Y@hu(>z*ikS!(FCVm;R&rHQV2ZR5c@sDoBb-3?hkuyRwa~K)9KM z+tdx1C!o`;yf(2Dt1VX9*?ajhEH|15p@Q zS_2ftHPtvY#}qQuH@mTWsx7{YAS0vm;JXrKU8y^@zS1n!YOVL=M(f8^%OC~8J-UZt z;-%MGYD=7v%}J6Wtwe0bBmhpx{f7h$wn|dhWF~-?QNrniU%-k~h<|Fy|2+V6Hn?~kInEiJ7GRxZm!t!io6PZj-VvA1%x!UY`fidy_sZXznF;F5DO zXR^}Hk=ozv_+}66X3PCHT?Gs>Tr&y*s*9Xz=-B)*?<>P|KL7aVmjyCyHWq7J_M6M6 z2G1v655rkg=vW)y5tJ(+JZHC#b?BvBRWW`^M0&B>M^W2Ia``Dzhjdc3AIvwoigC?P zwc+EaHlK;)O!?oLlcKHXp`}m#iEmU}ZFV=D#rim_vnAlJt?i+u{Y&ZL*Yh2F>H1tL zPAibKMakvSWiVo?!H_GZMfv28pvTY2e<@9{lk4(8_-e_N;M&~64l~#PgY``D#7ych~J5q|!bZj4V4oZCLSJ;}ZKXMtu-B@gY>DiOv^ln?)x}OAR_<-IE^6F{!F8w(&FfvXiE@~bf z3v$cmTj>(Otd=XWQOYbe9)EmM1xCY;S5KkOUR;MFfm;51-n{51`IPh=Uv{G2qxqb~ zxl~$#V#kJk5x;mtFzVfY1di=F>aJsbe@yPmzMka7Jc$Xs=G~e%ZR#tiO)2ZK9cU!O*rS(7iu!gNvzdUQQTKZvLq7}`;g^w@upFvttA z>K8iq9s|aF+nf=Lkh>|6v)!*LoW`(EgUeC`Ufq@fL0{V5zK6+v<8Rj`UvYu*No~So z+Wa4HhI>Kw{~|ENM@?M$qIQqd0iMUD%zyJ5J zkyJSABLFHtoci5xODu~r&rsP`%8cfI)w#a!rR$L9}@oUKj@2hz>E5t2FrJIJJe-P z{aPOHW)6-57X2j^@p}x1Yi7~{}W3|o6pnT_*q5Od%;RMV_)dn_uf#v zJ3%gq?Tn@U9?r%BvdBC%XPY4XM{AYWDR0%r48*+|mrXJ#g^rK%SRYp(YpmPrnP^^s z9FgwjNalVG01r?VH#HsP_zSp&i^E*+3Xa?)neK*^1$S%$um&xI*Q*>4h5Q`7598Xh zFU6=hrh~f_y5^4+dQct~tBj{V!C$-kY=~Vnah*+xJFMI9b6z#s(&R#3?HPdxj 
zIDsRgm)rU34HX^W)z0p%)6;k}F=I@6KLWU%taPCm?9qiWoWG=^L_RP5eDKV8GvYDt z_HR|M;L{f$2`tcoF~157&hJy)_bCe>%n-ib%2*lFv{GU3XWEDDcQ%-L5tHyh)N9cMiLs0 z^a>h5rh=nv`odi?HSXx0{}bEn{!_}`Sk}!&W~(P?E*gl3S@_RzF+HD-1ITtD;mJ=_ zx@P8;`d5`v1bsYxX}_Qz)(NBSj-G|JnGO;gQ<=vZ1}6P!yfT>I_XalODSWL!0@l2! zH#y;AO!Pi-7D6MBJxAcTqQHBlKV&y^2!9hYfGpxpkObC3Z^WNkN{ca~3YK59(<)K? z+q>X9RV`}Sa-JO=*c??i*?;?O!s)a&zsoxIS>CucU{`>kY}VL^q; zv%rJPOMkOp<}x{YD!~p%jK^SPVBx3Nq3CmY#j^(GL#IDE4WgnV(J%BX3b^)!sHXDH z9F`WI>UpJx|C{H~@W~jZ?!Cj>?x?+x?!PZ2RzI&mEY1eFulV~LKQDr~qaOs(LM-~^ zq-H-A8w?eN)V-6NS>AtFfWvqSn{~T^WNw&$a`b#zu`|`u7!#N3VhHE(DWzg-kCB3e zhu_78Q_fd^#{hvUM*!$o1* zH}0-~TWJKRhX_V)G<=HAwthqT*l8hTIJc7k7-h~u7m6{L$6!5{;nO5}9L?R=Zu7qI zF%G|msontc>eF4?^hXiKE)B{WiXI_%Lw|wzQHI@kSI)W-Z<%(-^zU+;ox|S&Ih8~4 zCrFN+jk76A9UY{t6=ug(#*m$*dq{s)c5?Zy%LMwIiwUC6$Z&c&ma7UfeMwtdDn_ZJ zgXs$bb#g{^Vl*OcEsGEJW{I1)jb=8ih4ziNn|G(t z($$EM?qdBkE@|;l*C$Hx2sLQ4rn!Bf@m{0y^rJod{r}u-6`fMp)5>l0Agv;#K$xcI ztw7380|SS~Cq^ysWF?W{PdY-EUFskBsr^~#EH?0jh5Q~gHo zhZ1ptOJ4}$EO%VS(Q<3zLXrybnoNa0jJf~7VD2X4qr|L~8so)pjdc#enOw}|v2~n; z6T%X|Yq&@0CJu`y@4}D3`Ode?TC`i4UDK~j%I^QrlpT!ti2&<> zv3GpM4BWr8oxj5(xM@}NbjWg>&U9nH!h;IvQd+ zno+piahGL#x)jER39JDd0FrveEkjM=Px)ztkmxG1l=|^^o>CeF6v;a^=B5xXW$b|q z`(&uw7rxnsP&b&DeAANl)<M?EGjj?JjV)Cv zERc!9+MO>Y?01cteQkVS4IAa-5Dqpkh>}Lfg!cy!$3v?dumBw?;{$erGZEZ;qJIt;j_;bV!NofxL*d#rZH@Cb>dCCKaCK@ zd(6F{#s$`-mQzAe*=vlrFgc^6Jl(^?Zl>jQ(EDXQ*@JL&QP+2~f>u#2OmI1Q#HcN3 z(V=U3hn$n!EZP@-%e{2hPEJX$_ALO?GMVh-^_cI~`YAdM2_Uqdb_2z}s+mI}`f9rL z!JAo4u<{?Y_P25a2WN8iF0E{~6Z~MVVz>2|P%{;a+vuceYWq=jF)q=@NiO}V3yZ7s zj5PpYb8M&*oy2zAaCJpJ;~)8!yJorcRjC~40yhG4j)#9POPBUUZdG8fxnS%s4NMwl zXYH~WR&y4jZmyI=>=u6cjQn}UU15vj>p|2Ivu(a<9aVZojhZ272~ zHNAMSCw_A{c_orDuJ>zceD{mTQ0XX>O}k>Y4N8uoi?L_k6722rlNSC>Fj>QM6dwY> z5BY3(*_TjlNUTJGH@EtmQ@(~QNockJMxGCLbnW`EK)xtYuSP*M_p=G1csK*uR`dSxim3bS8pAvU6)=N5Ule_0qdfAUY9A4Zldu~ zMwU6DiKlhn&Q%j-!xy;#+9lFUAqHk*tx)n2NJEx0n#+>ct7r0M`fG zV2H1t(LE(Es%Hp?^7!YSuIMzYV{d ziMGxAYBad1P4V7RgNM@gz`-H{%Tevw-u{m#BzP+iv(Ib6cwS3aTSc3P#1DBW1w4J1 
zc+pBEJo9yK4Zncy`{4M-z(2|{s_^T)sDi!tb4prXXzwbUL+t95mp-CBlf2LcG5%pT z_CLVpxb(+Jjz5+31OS`ij=qIygC{<-Fy6g2$SSCj^(c?kG);6f-W3+}&bSsCJ*x7e z6ZiZhy1^nC(|+_p`%2*T>nr!{=PY*#@Lk8`rl$AX+c8n#p(=R#1NoxfegU$`_Z;|Q z|HA&9lj^XSi?P~pN62Q0qkJWC2BVx?Jse^&X4jA}zA`aJwC8QioN%-64?q^kARI`V zN`@Lgj5H`o;K^XGv`fF{K{c-Xh&%g5Wa~@0F3N=DLU(9d7U56x$tBHlJVS5Qhculd zdQ@(@;~I1j=z!Iq4A@yUYoDct?@(x^#o{~lIx>hO>y-brL*+(&EuFMywu4TY^18IZ z=0HQU8iZ=6Ny2-m^riSkQ|pE`4qz1_c z(ohy;5i?}%-25qmE*iJaEdgzjx+{w&IP0E+0p=?@%H%rnRT5W@7qLVNC`jO0(ba=D z1q3z|)b`aPG|c5elMbZf(}!c*!!ttaETh2lafMA3eh>Wj5gT*+qmFqFk}1j^-W`;Y zW^8xkfKuwQg7BOZ-pcXXw$d!(cNHD~O*^EAIp;3Ee>X)OF969kiJG@IQ1$Fix?Hg{ zk_^+iOrH?0Z~)UTbBiF;0D;i`90;Ymc*HV#Tp}(x)+=*q;#Jt`is?pJ>~1b*DEYN= zbt7azq!uusyR%gBbJo`&%8@Nv8s_!h-Gb-deGy5%dXyeN>hSkd2lVsV?%5CZcm?kz z<@6+Ie!k>iB-U@JiBZ^T60BmoDD{{*G+ty;JBr_&tDoX*P+tiRcPuSSF{#_=^O)J= z6K-G~HEJ&Z4j8Ml_jzR_cPxfR0h8N46T?c7lL0~Ye`MqkwrV}%l=+n~=4PUDah(pe z8i^D4)}Im+0&jL=$c{K)$Y6Glh&^+FGU-*6C5x+U@iP{Hpcr`AXc&|Vyqb}Yy7ap* z=HnFEm_RfLXP=WUj&M@(wa;3*H}6Vc`Mdoxx5l0~)QH3X`}l9l(-`_cg5*eoQnI6<9I&sf>}Wt7son;B9xK07u0Z#D~YI69+rG)+9TOC|-o z2+rW&7#RN`s9-HY`HcJT0EUF_uBYR!SMDwVNwTa$QgXz}ZZ!H%YQq;&o9TYl>L_YA zJAk?ClepPNH6*}ac{fD#W_hJ~1<9wEkxXA*Onh1$S~9}~b|a--P9|v8d@=L!)(}TPtsv-#vazNk0tzHJ z=Vw1~ScQj0IXWU5!#Tw!`ONKyzEHIH^i5XPxiz4D^DBdn-?CxzLUrWq{}hZFKFtYG{`XRL@7C{k`O{y|OPWS&!dL>p=O>?a$G@AT z9DSwj`Fq@LB*QI6-QGGgjN)cq#HQO;JRR4-jhd}~at00`jp|Bt$omCxEOe;J6k?NR z&1dzW5x!YsrlA8gR{)TyMH?P5re*HP9x`G2X0kxSOi7!kkuCp;X*c@<=9;)a11J$* zE0^xdU=fl1MD%yNFX+T$9#yol(!tgdNhID(CGNAztd$& zct#8BOZdi4r&Pf7eCizE$abh(@HfYbw=n0;;nN)?n<-*6Z(YVgb(gc?byd#XmHE1$`$o(%S7(UQC0*r}>Um;r7iejt ztR(+wOV{RCL5{!5OrWQtdM#P){(=@YR}(n!bA2h(o#d32qvRB<=i^lUGsBoIlrfTA zEcNhIt`QTlCcN6owVzL8gy&CGH)%E07$ELzIi<_540bY-d}#RIskMer$(`J>gn>Jf z+Ju3K;*8Ix9Shq{J zp(QtLyEcUSt*{RKj(^0L?X-VD#_1_E;rsG(_Dwv1tB&{pZ6Db)c4}f9>h(32wbq!> zdv_QqH0dWYYi82mmE;&B-ynx+EA|kuaL9RhRN&LHHL7&P;_+jDRFGYoH3~nbx%hV1 z^^Km8A2et!j2a+(o$dhM*QaGeME7qWNX`=j+TO=H5X3>)CON}u=;UMeGg^$(AXz7q 
zds!qEHqdc!MDwg-OXp3q1dp!1cixwDyvY9XgTv>Dpatp+kfhhI$C5WrW@TNG6zrYx zJGSB1!(19euTnzYV>2D`2C85eKIdPhCRP=%e(ZDg7E4wb8M+sfSNPVF8$Cz?$KXb+ zXUEl;nh_h_uI)<_CSEzX!_H$^(xfVKs8ey&A-P0nHyI_=cKTUyX&-oe9iP6H8{!~kU znZsn4ri63Er;Iv}*cr}_eSwykPfpNTRq~z;;PD^+hXzblGT)QbkX5|W7dtnSln5imgC6Xpa)$zoD6Vx&gT=f(X=0FwLBak~Xo_o+qB%Ue0wzVv2C~1yKUXIFi_8{WS zc0$6gzAGq8%HnF?Cslii7>)+i0YL9#Vmg8MUI2Wrj@8Tk@NU=KG6g*xYH*OsSWgM~302rmuzs-?+Wo zplURYXX6KQfC0)0fhb4-oYhbxBckbNTnMs5+&@lx!hw;NDmg29hi#@T8VC_8pZxIO zIlGVpFDWY6asyZAq;gHc?7!>^@xSHjn~aJ^!~u>Ui8;Oi$*Ud}5EvY}w$$LO(Hgfc zo3mt==YcfohBZMd($iT05uaF7c_D5VjKaz3H0hZ%s`PX8Y1c1olTX-v#oI)a2ybSY z20+|E&UdRQ2frBLqxbTS-WO-}3JY2<91jfB1m0l|++ALrmvAwF?VgmXnM$eV(Uo)h z29mnxtL%ThGVwI9YnZj`6;{3V7QwVFwxv^Hc3S)w{EU+5fU|wqZd?93vCoZu5!Ion z|H0~vPvp4rXd|G;RqVD6aLS(3AC#>D0HgA; zGFoMKdbQyTOE2JN>arRE8|p$F4%hvgIe{5^Sd-Xj$Xgd;4bNf24V#uR_j2wWKFQmD z{-}IDan1xw+ojqT3IHc?fc)v98{qh;<>ZLK=EeRwdJ>_E=0<7(pP73LwhCpa3U%|z z!l^mm7f{@*ABgcZm0{N~-~3AjGm~0rz&V-eJNGB@I`0dL%mc6bz#y%-=Zr=Y)w@vwCS z+IlA;?)V#(S{m~`=a=o$OZ2e^)3^6Wn+l&#E7(AuYmw!nlz1)YGF~4Pql$c9Qh7Ou zPTXX#zBE~%?fB_3g2@eFL;Si*71jo&&5*lFUF*)yIUWysb5MTA)NQl^3KqL4(Cgi72 z>PFdi8LaDSVxT>58X`P527Sl@mVsM_D}SyDVeBqI{)z-K_? 
zOd{l{tURW=jVZ+ay56GF+PEdBjxtCIzJf90O0E!J&<_0UbY7C zetl|HTqsz$G@JBU2$bU{u(iGkVf3}FdEHcxRj;KwRZoiO0kIkLiL0CNwa1JQeY>Ho z?^jKh*7IjO_a+g%oO%w%vZY-L5I&B~*N^E|#WJqGFAqh&V<*??X#P;r*<cJAY@hxZscpyJs#1jrQ50fgURtCikMDhnIdQKS*$d;r|5Xu3o3y376 zy}604!hF`pS~eeep}|Dh6mLJ4QYS;^#<|bp%S9%r9qeAcMf_7^feHLdbexR3#p!~N zqYZ2+OJVnaBJ8W3R(gT%-q`Mi_j$2k^FgV>CiC_mQjeZ2TLF`VFd1(cRg0F5;qVj_ zyl>ijoxafHKDEhJNr-U*2S{L``sY4Fi{E&(yrl4@|5Ui$0cD1vRj<|8W*>9x-QE4K zS`v$dCWX_ZDE=F<_h5DfRz%TOF!BgAYxX-o`ZP~1#VKZ^tBb}d}_&N zIr6cI-2F^$+5N@pz>s-o3;ogSViRS{>vI^uOEjs9ZMaR|v=0?m)eviZv(w=@L`@68 z9{Y0;w***f!8+dD;+#D0t}dBze6b^W`f=o79#q?{KfJUVIc;`4fnz4?Cd6o$n^)sL z;U4PNjUwKmkWDq!zPc3ukAEd#Wiz zS+!b^)vVutZ7$-Ji{dN>!(e9ddLN`BMlXAYHON!lW-lY|TEHRpdP{XcnPb0fxd?of zG1u}WnK{?+p^e#cbeuI@H5U}BGarA zpLT}Ms*5|XU6^FP_zvB#HXPp4o)DPzh_wWBHlGj2B)5bajX;ZBTt7{0Io1&C8i|fx zu_Qc<-gU68Q}0tg?-dK!E9cIfC%1$#4mwRsv*(cuh2k(f=YDeOZXbHHlJvo})zH85 zb|G)YdNdb41k{)TUGLUA2T`bNc>nI?cHiwQz)lUmJ?{|>o<#(gFdjMb;<`4|=&6sx zwR?gF%G7XTnl~ck}vJ${&*&@P%V`J<9 zk&)45rfLk$^o1Jv1ouPlQn;U(B-1F9@Nw(!1a3#lh87pngl;z0U;H*K*>{tgY~0yB z_apzBKYNrsk5-iF#D4Vu?fAbw>wCJch5t_6H1?MM69J3`wt83AyhQ#zWqB`eWIz8` zw-|;cdU{BQu6%9jZ{C?o7wlvqcx6vziW6q+-k$6r^RKd3e{H?lw3LL z0{C%46iE`H@>IcM*@x!NMv|IUor_eXg0>2CM@4~C2k4Ywn9UN&cVaO`gN zw>PSSJ5`j5k}SSQ2Z_|j&8gn7?zKCx_uet@zwlhao75~ zR{vZgIcLipJ@fzS_a^UMy=c`tp@~1GE8T#3^qbo;_KI%HGT@2cf9~y_pY`nd{vgZ! z5_6d2~~nbo^}wjkEkuuL->pJ6Ov7w&=ZU)09$Mo!7JMft65$ z@8X?|AH=?YdU)9Wc2AgG{<^-pk^tXlrFL%m*0*w<5{zwkK4adwxRf*I)&+&Qp4GGdoIYz0>^&A(Kh|GyAUd`nUU=f;M^B0`z5aXk)b97Zj(t-fe{@{F zX`kVbNL$~(t8a(7mvz6ciPg{8Rmc3=Yumar9}YKYxJGk z=jY}KOcYj9TK>M~c9QN{?S*;wVtdrNAKkTmbf;0RY{CcavlTU0 zLa#E0U$5?7_VRbm%G*^p=l;1L`XzGbCNsf_QA<_=+X(FM_lLyo{l(cN2Mm(M!200! 
zjoS3{I$b9g-TQWE-u77FvaOa?Rx0;^hb78&6sK}H9@)7!Xxe?@J@Fh$pA}mcwiw7e zX~c3YZFvyO@pg|Q@RDwpP?s?ANZxa%k788Mg+97_!?OOM=~>qB>(5nN9v}Q|4s1>Q z-uLkoxA{*)U`*a-SenOR_GW5p>u#4j{o&{KTDKIeSoyj+D4cz1-m|5dzkpM#nNs>n z_gWw9F0KkYeREEB)Az>@YE@g~r^V%;{Qm2B_o`p@bw-unV)NsEo#Lyp0cM0D01ov1 Z{(suZc)xB=%^#3+Jzf1=);T3K0RW_Ar+xqc literal 0 HcmV?d00001 diff --git a/docs/release_notes.md b/docs/release_notes.md new file mode 100644 index 0000000000..dfb01a43f2 --- /dev/null +++ b/docs/release_notes.md @@ -0,0 +1,88 @@ +English | [简体中文](release_notes_cn.md) + +## Release Notes + +* 2020.12.18 + + **`v2.0.0-rc`** + * Newly release 2.0-rc version, fully upgraded to dynamic graph. It supports 15+ segmentation models, 4 backbone networks, 3 datasets, and 4 types of loss functions: + * Segmentation models: ANN, BiSeNetV2, DANet, DeeplabV3, DeeplabV3+, FCN, FastSCNN, Gated-scnn, GCNet, OCRNet, PSPNet, UNet, and U2-Net, Attention UNet. + * Backbone networks: ResNet, HRNet, MobileNetV3, and Xception. + * Datasets: Cityscapes, ADE20K, and Pascal VOC. + * Loss: CrossEntropy Loss, BootstrappedCrossEntropy Loss, Dice Loss, BCE Loss. + * Provide 40+ high quality pre-trained models based on Cityscapes and Pascal Voc datasets. + * Support multi-card GPU parallel evaluation. This provides the efficient index calculation function. Support multiple evaluation methods such as multi-scale evaluation/flip evaluation/sliding window evaluation. + +* 2020.12.02 + + **`v0.8.0`** + * Add multi-scale/flipping/sliding-window inference. + * Add the fast multi-GPUs evaluation, and high-efficient metric calculation. + * Add Pascal VOC 2012 dataset. + * Add high-accuracy pre-trained models on Pascal VOC 2012, see [detailed models](../dygraph/configs/). + * Support visualizing pseudo-color images in PNG format while predicting. 
+ +* 2020.10.28 + + **`v0.7.0`** + * 全面支持Paddle2.0-rc动态图模式,推出PaddleSeg[动态图体验版](../dygraph/) + * 发布大量动态图模型,支持11个分割模型,4个骨干网络,3个数据集: + * 分割模型:ANN, BiSeNetV2, DANet, DeeplabV3, DeeplabV3+, FCN, FastSCNN, GCNet, OCRNet, PSPNet, UNet + * 骨干网络:ResNet, HRNet, MobileNetV3, Xception + * 数据集:Cityscapes, ADE20K, Pascal VOC + + * 提供高精度骨干网络预训练模型以及基于Cityscapes数据集的语义分割[预训练模型](../dygraph/configs/)。Cityscapes精度超过**82%**。 + + +* 2020.08.31 + + **`v0.6.0`** + * 丰富Deeplabv3p网络结构,新增ResNet-vd、MobileNetv3两种backbone,满足高性能与高精度场景,并提供基于Cityscapes和ImageNet的[预训练模型](./model_zoo.md)4个。 + * 新增高精度分割模型OCRNet,支持以HRNet作为backbone,提供基于Cityscapes的[预训练模型](https://github.com/PaddlePaddle/PaddleSeg/blob/develop/docs/model_zoo.md#cityscapes%E9%A2%84%E8%AE%AD%E7%BB%83%E6%A8%A1%E5%9E%8B),mIoU超过80%。 + * 新增proposal free的实例分割模型[Spatial Embedding](https://github.com/PaddlePaddle/PaddleSeg/tree/develop/contrib/SpatialEmbeddings),性能与精度均超越MaskRCNN。提供了基于kitti的预训练模型。 + +* 2020.05.12 + + **`v0.5.0`** + * 全面升级[HumanSeg人像分割模型](../contrib/HumanSeg),新增超轻量级人像分割模型HumanSeg-lite支持移动端实时人像分割处理,并提供基于光流的视频分割后处理提升分割流畅性。 + * 新增[气象遥感分割方案](../contrib/RemoteSensing),支持积雪识别、云检测等气象遥感场景。 + * 新增[Lovasz Loss](lovasz_loss.md),解决数据类别不均衡问题。 + * 使用VisualDL 2.0作为训练可视化工具 + +* 2020.02.25 + + **`v0.4.0`** + * 新增适用于实时场景且不需要预训练模型的分割网络Fast-SCNN,提供基于Cityscapes的[预训练模型](./model_zoo.md)1个 + * 新增LaneNet车道线检测网络,提供[预训练模型](https://github.com/PaddlePaddle/PaddleSeg/tree/release/v0.4.0/contrib/LaneNet#%E4%B8%83-%E5%8F%AF%E8%A7%86%E5%8C%96)一个 + * 新增基于PaddleSlim的分割库压缩策略([量化](../slim/quantization/README.md), [蒸馏](../slim/distillation/README.md), [剪枝](../slim/prune/README.md), [搜索](../slim/nas/README.md)) + + +* 2019.12.15 + + **`v0.3.0`** + * 新增HRNet分割网络,提供基于cityscapes和ImageNet的[预训练模型](./model_zoo.md)8个 + * 支持使用[伪彩色标签](./data_prepare.md#%E7%81%B0%E5%BA%A6%E6%A0%87%E6%B3%A8vs%E4%BC%AA%E5%BD%A9%E8%89%B2%E6%A0%87%E6%B3%A8)进行训练/评估/预测,提升训练体验,并提供将灰度标注图转为伪彩色标注图的脚本 + * 新增[学习率warmup](./configs/solver_group.md#lr_warmup)功能,支持与不同的学习率Decay策略配合使用 + * 新增图像归一化操作的GPU化实现,进一步提升预测速度。 
+ * 新增Python部署方案,更低成本完成工业级部署。 + * 新增Paddle-Lite移动端部署方案,支持人像分割模型的移动端部署。 + * 新增不同分割模型的预测[性能数据Benchmark](../deploy/python/docs/PaddleSeg_Infer_Benchmark.md), 便于开发者提供模型选型性能参考。 + + +* 2019.11.04 + + **`v0.2.0`** + * 新增PSPNet分割网络,提供基于COCO和cityscapes数据集的[预训练模型](./model_zoo.md)4个。 + * 新增Dice Loss、BCE Loss以及组合Loss配置,支持样本不均衡场景下的[模型优化](./loss_select.md)。 + * 支持[FP16混合精度训练](./multiple_gpus_train_and_mixed_precision_train.md)以及动态Loss Scaling,在不损耗精度的情况下,训练速度提升30%+。 + * 支持[PaddlePaddle多卡多进程训练](./multiple_gpus_train_and_mixed_precision_train.md),多卡训练时训练速度提升15%+。 + * 发布基于UNet的[工业标记表盘分割模型](../contrib#%E5%B7%A5%E4%B8%9A%E7%94%A8%E8%A1%A8%E5%88%86%E5%89%B2)。 + +* 2019.09.10 + + **`v0.1.0`** + * PaddleSeg分割库初始版本发布,包含DeepLabv3+, U-Net, ICNet三类分割模型, 其中DeepLabv3+支持Xception, MobileNet v2两种可调节的骨干网络。 + * CVPR19 LIP人体部件分割比赛冠军预测模型发布[ACE2P](../contrib/ACE2P)。 + * 预置基于DeepLabv3+网络的[人像分割](../contrib/HumanSeg/)和[车道线分割](../contrib/RoadLine)预测模型发布。 + +
diff --git a/docs/release_notes_cn.md b/docs/release_notes_cn.md new file mode 100644 index 0000000000..f23e38d085 --- /dev/null +++ b/docs/release_notes_cn.md @@ -0,0 +1,88 @@ +简体中文 | [English](release_notes.md) + +## Release Notes + +* 2020.12.18 + + **`v2.0.0-rc`** + * 全新发布2.0-rc版本,全面升级至动态图,支持15+分割模型,4个骨干网络,3个数据集,4种Loss: + * 分割模型:ANN, BiSeNetV2, DANet, DeeplabV3, DeeplabV3+, FCN, FastSCNN, Gated-scnn, GCNet, HarDNet, OCRNet, PSPNet, UNet, UNet++, U2-Net, Attention UNet + * 骨干网络:ResNet, HRNet, MobileNetV3, Xception + * 数据集:Cityscapes, ADE20K, Pascal VOC + * Loss:CrossEntropy Loss、BootstrappedCrossEntropy Loss、Dice Loss、BCE Loss + * 提供基于Cityscapes和Pascal Voc数据集的高质量预训练模型 40+。 + * 支持多卡GPU并行评估,提供了高效的指标计算功能。支持多尺度评估/翻转评估/滑动窗口评估等多种评估方式。 + +* 2020.12.02 + + **`v0.8.0`** + * 增加多尺度评估/翻转评估/滑动窗口评估等功能。 + * 支持多卡GPU并行评估,提供了高效的指标计算功能。 + * 增加Pascal VOC 2012数据集。 + * 新增在Pascal VOC 2012数据集上的高精度预训练模型,详见[模型库](../configs/)。 + * 支持对PNG格式的伪彩色图片进行预测可视化。 + +* 2020.10.28 + + **`v0.7.0`** + * 全面支持Paddle2.0-rc动态图模式,推出PaddleSeg[动态图体验版](../dygraph/) + * 发布大量动态图模型,支持11个分割模型,4个骨干网络,3个数据集: + * 分割模型:ANN, BiSeNetV2, DANet, DeeplabV3, DeeplabV3+, FCN, FastSCNN, GCNet, OCRNet, PSPNet, UNet + * 骨干网络:ResNet, HRNet, MobileNetV3, Xception + * 数据集:Cityscapes, ADE20K, Pascal VOC + + * 提供高精度骨干网络预训练模型以及基于Cityscapes数据集的语义分割[预训练模型](../dygraph/configs/)。Cityscapes精度超过**82%**。 + + +* 2020.08.31 + + **`v0.6.0`** + * 丰富Deeplabv3p网络结构,新增ResNet-vd、MobileNetv3两种backbone,满足高性能与高精度场景,并提供基于Cityscapes和ImageNet的[预训练模型](./model_zoo.md)4个。 + * 新增高精度分割模型OCRNet,支持以HRNet作为backbone,提供基于Cityscapes的[预训练模型](https://github.com/PaddlePaddle/PaddleSeg/blob/develop/docs/model_zoo.md#cityscapes%E9%A2%84%E8%AE%AD%E7%BB%83%E6%A8%A1%E5%9E%8B),mIoU超过80%。 + * 新增proposal free的实例分割模型[Spatial Embedding](https://github.com/PaddlePaddle/PaddleSeg/tree/develop/contrib/SpatialEmbeddings),性能与精度均超越MaskRCNN。提供了基于kitti的预训练模型。 + +* 2020.05.12 + + **`v0.5.0`** + * 
全面升级[HumanSeg人像分割模型](../contrib/HumanSeg),新增超轻量级人像分割模型HumanSeg-lite支持移动端实时人像分割处理,并提供基于光流的视频分割后处理提升分割流畅性。 + * 新增[气象遥感分割方案](../contrib/RemoteSensing),支持积雪识别、云检测等气象遥感场景。 + * 新增[Lovasz Loss](lovasz_loss.md),解决数据类别不均衡问题。 + * 使用VisualDL 2.0作为训练可视化工具 + +* 2020.02.25 + + **`v0.4.0`** + * 新增适用于实时场景且不需要预训练模型的分割网络Fast-SCNN,提供基于Cityscapes的[预训练模型](./model_zoo.md)1个 + * 新增LaneNet车道线检测网络,提供[预训练模型](https://github.com/PaddlePaddle/PaddleSeg/tree/release/v0.4.0/contrib/LaneNet#%E4%B8%83-%E5%8F%AF%E8%A7%86%E5%8C%96)一个 + * 新增基于PaddleSlim的分割库压缩策略([量化](../slim/quantization/README.md), [蒸馏](../slim/distillation/README.md), [剪枝](../slim/prune/README.md), [搜索](../slim/nas/README.md)) + + +* 2019.12.15 + + **`v0.3.0`** + * 新增HRNet分割网络,提供基于cityscapes和ImageNet的[预训练模型](./model_zoo.md)8个 + * 支持使用[伪彩色标签](./data_prepare.md#%E7%81%B0%E5%BA%A6%E6%A0%87%E6%B3%A8vs%E4%BC%AA%E5%BD%A9%E8%89%B2%E6%A0%87%E6%B3%A8)进行训练/评估/预测,提升训练体验,并提供将灰度标注图转为伪彩色标注图的脚本 + * 新增[学习率warmup](./configs/solver_group.md#lr_warmup)功能,支持与不同的学习率Decay策略配合使用 + * 新增图像归一化操作的GPU化实现,进一步提升预测速度。 + * 新增Python部署方案,更低成本完成工业级部署。 + * 新增Paddle-Lite移动端部署方案,支持人像分割模型的移动端部署。 + * 新增不同分割模型的预测[性能数据Benchmark](../deploy/python/docs/PaddleSeg_Infer_Benchmark.md), 便于开发者提供模型选型性能参考。 + + +* 2019.11.04 + + **`v0.2.0`** + * 新增PSPNet分割网络,提供基于COCO和cityscapes数据集的[预训练模型](./model_zoo.md)4个。 + * 新增Dice Loss、BCE Loss以及组合Loss配置,支持样本不均衡场景下的[模型优化](./loss_select.md)。 + * 支持[FP16混合精度训练](./multiple_gpus_train_and_mixed_precision_train.md)以及动态Loss Scaling,在不损耗精度的情况下,训练速度提升30%+。 + * 支持[PaddlePaddle多卡多进程训练](./multiple_gpus_train_and_mixed_precision_train.md),多卡训练时训练速度提升15%+。 + * 发布基于UNet的[工业标记表盘分割模型](../contrib#%E5%B7%A5%E4%B8%9A%E7%94%A8%E8%A1%A8%E5%88%86%E5%89%B2)。 + +* 2019.09.10 + + **`v0.1.0`** + * PaddleSeg分割库初始版本发布,包含DeepLabv3+, U-Net, ICNet三类分割模型, 其中DeepLabv3+支持Xception, MobileNet v2两种可调节的骨干网络。 + * CVPR19 LIP人体部件分割比赛冠军预测模型发布[ACE2P](../contrib/ACE2P)。 + * 预置基于DeepLabv3+网络的[人像分割](../contrib/HumanSeg/)和[车道线分割](../contrib/RoadLine)预测模型发布。 + +
From e2b4cb00d19f1d5319e92938bf86fe3025341d78 Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Tue, 22 Dec 2020 10:57:21 +0800 Subject: [PATCH 06/52] Add no_grad when evaluating --- paddleseg/core/predict.py | 117 ++++++++++++++++++----------------- paddleseg/core/val.py | 127 +++++++++++++++++++------------------- 2 files changed, 123 insertions(+), 121 deletions(-) diff --git a/paddleseg/core/predict.py b/paddleseg/core/predict.py index c95c45c493..1f7a16a0a6 100644 --- a/paddleseg/core/predict.py +++ b/paddleseg/core/predict.py @@ -72,61 +72,62 @@ def predict(model, logger.info("Start to predict...") progbar_pred = progbar.Progbar(target=len(image_list), verbose=1) - for i, im_path in enumerate(image_list): - im = cv2.imread(im_path) - ori_shape = im.shape[:2] - im, _ = transforms(im) - im = im[np.newaxis, ...] - im = paddle.to_tensor(im) - - if aug_pred: - pred = infer.aug_inference( - model, - im, - ori_shape=ori_shape, - transforms=transforms.transforms, - scales=scales, - flip_horizontal=flip_horizontal, - flip_vertical=flip_vertical, - is_slide=is_slide, - stride=stride, - crop_size=crop_size) - else: - pred = infer.inference( - model, - im, - ori_shape=ori_shape, - transforms=transforms.transforms, - is_slide=is_slide, - stride=stride, - crop_size=crop_size) - pred = paddle.squeeze(pred) - pred = pred.numpy().astype('uint8') - - # get the saved name - if image_dir is not None: - im_file = im_path.replace(image_dir, '') - else: - im_file = os.path.basename(im_path) - if im_file[0] == '/': - im_file = im_file[1:] - - # save added image - added_image = utils.visualize.visualize(im_path, pred, weight=0.6) - added_image_path = os.path.join(added_saved_dir, im_file) - mkdir(added_image_path) - cv2.imwrite(added_image_path, added_image) - - # save pseudo color prediction - pred_mask = utils.visualize.get_pseudo_color_map(pred) - pred_saved_path = os.path.join(pred_saved_dir, - im_file.rsplit(".")[0] + ".png") - 
mkdir(pred_saved_path) - pred_mask.save(pred_saved_path) - - # pred_im = utils.visualize(im_path, pred, weight=0.0) - # pred_saved_path = os.path.join(pred_saved_dir, im_file) - # mkdir(pred_saved_path) - # cv2.imwrite(pred_saved_path, pred_im) - - progbar_pred.update(i + 1) + with paddle.no_grad(): + for i, im_path in enumerate(image_list): + im = cv2.imread(im_path) + ori_shape = im.shape[:2] + im, _ = transforms(im) + im = im[np.newaxis, ...] + im = paddle.to_tensor(im) + + if aug_pred: + pred = infer.aug_inference( + model, + im, + ori_shape=ori_shape, + transforms=transforms.transforms, + scales=scales, + flip_horizontal=flip_horizontal, + flip_vertical=flip_vertical, + is_slide=is_slide, + stride=stride, + crop_size=crop_size) + else: + pred = infer.inference( + model, + im, + ori_shape=ori_shape, + transforms=transforms.transforms, + is_slide=is_slide, + stride=stride, + crop_size=crop_size) + pred = paddle.squeeze(pred) + pred = pred.numpy().astype('uint8') + + # get the saved name + if image_dir is not None: + im_file = im_path.replace(image_dir, '') + else: + im_file = os.path.basename(im_path) + if im_file[0] == '/': + im_file = im_file[1:] + + # save added image + added_image = utils.visualize.visualize(im_path, pred, weight=0.6) + added_image_path = os.path.join(added_saved_dir, im_file) + mkdir(added_image_path) + cv2.imwrite(added_image_path, added_image) + + # save pseudo color prediction + pred_mask = utils.visualize.get_pseudo_color_map(pred) + pred_saved_path = os.path.join(pred_saved_dir, + im_file.rsplit(".")[0] + ".png") + mkdir(pred_saved_path) + pred_mask.save(pred_saved_path) + + # pred_im = utils.visualize(im_path, pred, weight=0.0) + # pred_saved_path = os.path.join(pred_saved_dir, im_file) + # mkdir(pred_saved_path) + # cv2.imwrite(pred_saved_path, pred_im) + + progbar_pred.update(i + 1) diff --git a/paddleseg/core/val.py b/paddleseg/core/val.py index c5447cbcb8..cdf0a348b9 100644 --- a/paddleseg/core/val.py +++ b/paddleseg/core/val.py 
@@ -81,69 +81,70 @@ def evaluate(model, len(eval_dataset), total_iters)) progbar_val = progbar.Progbar(target=total_iters, verbose=1) timer = Timer() - for iter, (im, label) in enumerate(loader): - reader_cost = timer.elapsed_time() - label = label.astype('int64') - - ori_shape = label.shape[-2:] - if aug_eval: - pred = infer.aug_inference( - model, - im, - ori_shape=ori_shape, - transforms=eval_dataset.transforms.transforms, - scales=scales, - flip_horizontal=flip_horizontal, - flip_vertical=flip_vertical, - is_slide=is_slide, - stride=stride, - crop_size=crop_size) - else: - pred = infer.inference( - model, - im, - ori_shape=ori_shape, - transforms=eval_dataset.transforms.transforms, - is_slide=is_slide, - stride=stride, - crop_size=crop_size) - - intersect_area, pred_area, label_area = metrics.calculate_area( - pred, - label, - eval_dataset.num_classes, - ignore_index=eval_dataset.ignore_index) - - # Gather from all ranks - if nranks > 1: - intersect_area_list = [] - pred_area_list = [] - label_area_list = [] - paddle.distributed.all_gather(intersect_area_list, intersect_area) - paddle.distributed.all_gather(pred_area_list, pred_area) - paddle.distributed.all_gather(label_area_list, label_area) - - # Some image has been evaluated and should be eliminated in last iter - if (iter + 1) * nranks > len(eval_dataset): - valid = len(eval_dataset) - iter * nranks - intersect_area_list = intersect_area_list[:valid] - pred_area_list = pred_area_list[:valid] - label_area_list = label_area_list[:valid] - - for i in range(len(intersect_area_list)): - intersect_area_all = intersect_area_all + intersect_area_list[i] - pred_area_all = pred_area_all + pred_area_list[i] - label_area_all = label_area_all + label_area_list[i] - else: - intersect_area_all = intersect_area_all + intersect_area - pred_area_all = pred_area_all + pred_area - label_area_all = label_area_all + label_area - batch_cost = timer.elapsed_time() - timer.restart() - - if local_rank == 0: - 
progbar_val.update(iter + 1, [('batch_cost', batch_cost), - ('reader cost', reader_cost)]) + with paddle.no_grad(): + for iter, (im, label) in enumerate(loader): + reader_cost = timer.elapsed_time() + label = label.astype('int64') + + ori_shape = label.shape[-2:] + if aug_eval: + pred = infer.aug_inference( + model, + im, + ori_shape=ori_shape, + transforms=eval_dataset.transforms.transforms, + scales=scales, + flip_horizontal=flip_horizontal, + flip_vertical=flip_vertical, + is_slide=is_slide, + stride=stride, + crop_size=crop_size) + else: + pred = infer.inference( + model, + im, + ori_shape=ori_shape, + transforms=eval_dataset.transforms.transforms, + is_slide=is_slide, + stride=stride, + crop_size=crop_size) + + intersect_area, pred_area, label_area = metrics.calculate_area( + pred, + label, + eval_dataset.num_classes, + ignore_index=eval_dataset.ignore_index) + + # Gather from all ranks + if nranks > 1: + intersect_area_list = [] + pred_area_list = [] + label_area_list = [] + paddle.distributed.all_gather(intersect_area_list, intersect_area) + paddle.distributed.all_gather(pred_area_list, pred_area) + paddle.distributed.all_gather(label_area_list, label_area) + + # Some image has been evaluated and should be eliminated in last iter + if (iter + 1) * nranks > len(eval_dataset): + valid = len(eval_dataset) - iter * nranks + intersect_area_list = intersect_area_list[:valid] + pred_area_list = pred_area_list[:valid] + label_area_list = label_area_list[:valid] + + for i in range(len(intersect_area_list)): + intersect_area_all = intersect_area_all + intersect_area_list[i] + pred_area_all = pred_area_all + pred_area_list[i] + label_area_all = label_area_all + label_area_list[i] + else: + intersect_area_all = intersect_area_all + intersect_area + pred_area_all = pred_area_all + pred_area + label_area_all = label_area_all + label_area + batch_cost = timer.elapsed_time() + timer.restart() + + if local_rank == 0: + progbar_val.update(iter + 1, [('batch_cost', 
batch_cost), + ('reader cost', reader_cost)]) class_iou, miou = metrics.mean_iou(intersect_area_all, pred_area_all, label_area_all) From c02e264bd95061ac2beb51eeffe9d358b75163a0 Mon Sep 17 00:00:00 2001 From: wuzewu Date: Wed, 23 Dec 2020 16:06:36 +0800 Subject: [PATCH 07/52] Fix icnet bug --- legacy/pdseg/models/modeling/icnet.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/legacy/pdseg/models/modeling/icnet.py b/legacy/pdseg/models/modeling/icnet.py index aee0461459..2f4a393e4d 100644 --- a/legacy/pdseg/models/modeling/icnet.py +++ b/legacy/pdseg/models/modeling/icnet.py @@ -116,9 +116,18 @@ def resnet(input): scale = cfg.MODEL.ICNET.DEPTH_MULTIPLIER layers = cfg.MODEL.ICNET.LAYERS model = resnet_backbone(scale=scale, layers=layers, stem='icnet') - end_points = 49 - decode_point = 13 - resize_point = 13 + if layers >= 50: + end_points = layers - 1 + decode_point = 13 + resize_point = 13 + elif layers == 18: + end_points = 13 + decode_point = 9 + resize_point = 9 + elif layers == 34: + end_points = 27 + decode_point = 15 + resize_point = 15 dilation_dict = {2: 2, 3: 4} data, decode_shortcuts = model.net( input, From de3b952562c5e9c2eff30065c321047c12466749 Mon Sep 17 00:00:00 2001 From: LutaoChu <30695251+LutaoChu@users.noreply.github.com> Date: Wed, 23 Dec 2020 16:09:19 +0800 Subject: [PATCH 08/52] fix "cross entropy" typo --- legacy/docs/configs/solver_group.md | 4 +- legacy/docs/dice_loss.md | 2 +- paddleseg/models/losses/__init__.py | 4 +- ...y_loss.py => binary_cross_entropy_loss.py} | 0 paddleseg/models/losses/cross_entropy_loss.py | 64 +++++++++ paddleseg/models/losses/cross_entroy_loss.py | 123 ------------------ 6 files changed, 69 insertions(+), 128 deletions(-) rename paddleseg/models/losses/{binary_cross_entroy_loss.py => binary_cross_entropy_loss.py} (100%) create mode 100644 paddleseg/models/losses/cross_entropy_loss.py delete mode 100644 paddleseg/models/losses/cross_entroy_loss.py diff --git 
a/legacy/docs/configs/solver_group.md b/legacy/docs/configs/solver_group.md index 8588ccacbd..f43b8a0397 100644 --- a/legacy/docs/configs/solver_group.md +++ b/legacy/docs/configs/solver_group.md @@ -153,8 +153,8 @@ L2正则化系数 ## `loss` -训练时选择的损失函数, 支持`softmax_loss(sotfmax with cross entroy loss)`, -`dice_loss(dice coefficient loss)`, `bce_loss(binary cross entroy loss)`三种损失函数。 +训练时选择的损失函数, 支持`softmax_loss(sotfmax with cross entropy loss)`, +`dice_loss(dice coefficient loss)`, `bce_loss(binary cross entropy loss)`三种损失函数。 其中`dice_loss`和`bce_loss`仅在两类分割问题中适用,`softmax_loss`不能与`dice_loss` 或`bce_loss`组合,`dice_loss`可以和`bce_loss`组合使用。使用示例如下: diff --git a/legacy/docs/dice_loss.md b/legacy/docs/dice_loss.md index 3a6e94b938..f173e1d934 100644 --- a/legacy/docs/dice_loss.md +++ b/legacy/docs/dice_loss.md @@ -37,7 +37,7 @@ Dice系数请参见[维基百科](https://zh.wikipedia.org/wiki/Dice%E7%B3%BB%E6 在图像分割中,`softmax loss`评估每一个像素点的类别预测,然后平均所有的像素点。这个本质上就是对图片上的每个像素进行平等的学习。这就造成了一个问题,如果在图像上的多种类别有不平衡的表征,那么训练会由最主流的类别主导。以上面DeepGlobe道路提取的数据为例子,网络将偏向于背景的学习,降低了网络对前景目标的提取能力。 而`dice loss(dice coefficient loss)`通过预测和标注的交集除以它们的总体像素进行计算,它将一个类别的所有像素作为一个整体作为考量,而且计算交集在总体中的占比,所以不受大量背景像素的影响,能够取得更好的效果。 -在实际应用中`dice loss`往往与`bce loss(binary cross entroy loss)`结合使用,提高模型训练的稳定性。 +在实际应用中`dice loss`往往与`bce loss(binary cross entropy loss)`结合使用,提高模型训练的稳定性。 ## PaddleSeg指定训练loss diff --git a/paddleseg/models/losses/__init__.py b/paddleseg/models/losses/__init__.py index c62b560770..6e8f5a67af 100644 --- a/paddleseg/models/losses/__init__.py +++ b/paddleseg/models/losses/__init__.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from .cross_entroy_loss import CrossEntropyLoss -from .binary_cross_entroy_loss import BCELoss +from .cross_entropy_loss import CrossEntropyLoss +from .binary_cross_entropy_loss import BCELoss from .gscnn_dual_task_loss import DualTaskLoss from .edge_attention_loss import EdgeAttentionLoss from .bootstrapped_cross_entropy import BootstrappedCrossEntropyLoss diff --git a/paddleseg/models/losses/binary_cross_entroy_loss.py b/paddleseg/models/losses/binary_cross_entropy_loss.py similarity index 100% rename from paddleseg/models/losses/binary_cross_entroy_loss.py rename to paddleseg/models/losses/binary_cross_entropy_loss.py diff --git a/paddleseg/models/losses/cross_entropy_loss.py b/paddleseg/models/losses/cross_entropy_loss.py new file mode 100644 index 0000000000..da71cb3461 --- /dev/null +++ b/paddleseg/models/losses/cross_entropy_loss.py @@ -0,0 +1,64 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +from paddle import nn +import paddle.nn.functional as F + +from paddleseg.cvlibs import manager + + +@manager.LOSSES.add_component +class CrossEntropyLoss(nn.Layer): + """ + Implements the cross entropy loss function. + + Args: + ignore_index (int64): Specifies a target value that is ignored + and does not contribute to the input gradient. Default ``255``. 
+ """ + + def __init__(self, ignore_index=255): + super(CrossEntropyLoss, self).__init__() + self.ignore_index = ignore_index + self.EPS = 1e-5 + + def forward(self, logit, label): + """ + Forward computation. + + Args: + logit (Tensor): Logit tensor, the data type is float32, float64. Shape is + (N, C), where C is number of classes, and if shape is more than 2D, this + is (N, C, D1, D2,..., Dk), k >= 1. + label (Tensor): Label tensor, the data type is int64. Shape is (N), where each + value is 0 <= label[i] <= C-1, and if shape is more than 2D, this is + (N, D1, D2,..., Dk), k >= 1. + """ + if len(label.shape) != len(logit.shape): + label = paddle.unsqueeze(label, 1) + + logit = paddle.transpose(logit, [0, 2, 3, 1]) + label = paddle.transpose(label, [0, 2, 3, 1]) + loss = F.softmax_with_cross_entropy( + logit, label, ignore_index=self.ignore_index, axis=-1) + + mask = label != self.ignore_index + mask = paddle.cast(mask, 'float32') + loss = loss * mask + avg_loss = paddle.mean(loss) / (paddle.mean(mask) + self.EPS) + + label.stop_gradient = True + mask.stop_gradient = True + return avg_loss diff --git a/paddleseg/models/losses/cross_entroy_loss.py b/paddleseg/models/losses/cross_entroy_loss.py deleted file mode 100644 index 51b4a3ba4a..0000000000 --- a/paddleseg/models/losses/cross_entroy_loss.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import paddle -from paddle import nn -import paddle.nn.functional as F - -from paddleseg.cvlibs import manager -''' -@manager.LOSSES.add_component -class CrossEntropyLoss(nn.CrossEntropyLoss): - """ - Implements the cross entropy loss function. - - Args: - weight (Tensor): Weight tensor, a manual rescaling weight given - to each class and the shape is (C). It has the same dimensions as class - number and the data type is float32, float64. Default ``'None'``. - ignore_index (int64): Specifies a target value that is ignored - and does not contribute to the input gradient. Default ``255``. - reduction (str): Indicate how to average the loss by batch_size, - the candicates are ``'none'`` | ``'mean'`` | ``'sum'``. - If :attr:`reduction` is ``'mean'``, the reduced mean loss is returned; - If :attr:`size_average` is ``'sum'``, the reduced sum loss is returned. - If :attr:`reduction` is ``'none'``, the unreduced loss is returned. - Default ``'mean'``. - - """ - - def __init__(self, weight=None, ignore_index=255, reduction='mean'): - self.weight = weight - self.ignore_index = ignore_index - self.reduction = reduction - self.EPS = 1e-5 - if self.reduction not in ['sum', 'mean', 'none']: - raise ValueError( - "The value of 'reduction' in cross_entropy_loss should be 'sum', 'mean' or" - " 'none', but received %s, which is not allowed." % - self.reduction) - - def forward(self, logit, label): - """ - Forward computation. - - Args: - logit (Tensor): Logit tensor, the data type is float32, float64. Shape is - (N, C), where C is number of classes, and if shape is more than 2D, this - is (N, C, D1, D2,..., Dk), k >= 1. - label (Tensor): Label tensor, the data type is int64. Shape is (N), where each - value is 0 <= label[i] <= C-1, and if shape is more than 2D, this is - (N, D1, D2,..., Dk), k >= 1. 
- """ - loss = paddle.nn.functional.cross_entropy( - logit, - label, - weight=self.weight, - ignore_index=self.ignore_index, - reduction=self.reduction) - - mask = label != self.ignore_index - mask = paddle.cast(mask, 'float32') - avg_loss = loss / (paddle.mean(mask) + self.EPS) - - label.stop_gradient = True - mask.stop_gradient = True - return avg_loss -''' - - -@manager.LOSSES.add_component -class CrossEntropyLoss(nn.Layer): - """ - Implements the cross entropy loss function. - - Args: - ignore_index (int64): Specifies a target value that is ignored - and does not contribute to the input gradient. Default ``255``. - """ - - def __init__(self, ignore_index=255): - super(CrossEntropyLoss, self).__init__() - self.ignore_index = ignore_index - self.EPS = 1e-5 - - def forward(self, logit, label): - """ - Forward computation. - - Args: - logit (Tensor): Logit tensor, the data type is float32, float64. Shape is - (N, C), where C is number of classes, and if shape is more than 2D, this - is (N, C, D1, D2,..., Dk), k >= 1. - label (Tensor): Label tensor, the data type is int64. Shape is (N), where each - value is 0 <= label[i] <= C-1, and if shape is more than 2D, this is - (N, D1, D2,..., Dk), k >= 1. 
- """ - if len(label.shape) != len(logit.shape): - label = paddle.unsqueeze(label, 1) - - logit = paddle.transpose(logit, [0, 2, 3, 1]) - label = paddle.transpose(label, [0, 2, 3, 1]) - loss = F.softmax_with_cross_entropy( - logit, label, ignore_index=self.ignore_index, axis=-1) - - mask = label != self.ignore_index - mask = paddle.cast(mask, 'float32') - loss = loss * mask - avg_loss = paddle.mean(loss) / (paddle.mean(mask) + self.EPS) - - label.stop_gradient = True - mask.stop_gradient = True - return avg_loss From 99f5fb11f72523e9eb7116d7044d30f0d3d1d040 Mon Sep 17 00:00:00 2001 From: LutaoChu <30695251+LutaoChu@users.noreply.github.com> Date: Fri, 25 Dec 2020 14:44:32 +0800 Subject: [PATCH 09/52] [cherry-pick] fix bce loss with edge and dual task loss bug --- paddleseg/core/train.py | 8 ++++---- paddleseg/models/losses/gscnn_dual_task_loss.py | 2 ++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/paddleseg/core/train.py b/paddleseg/core/train.py index 06d003caf9..32298b22e5 100644 --- a/paddleseg/core/train.py +++ b/paddleseg/core/train.py @@ -38,10 +38,10 @@ def loss_computation(logits_list, labels, losses, edges=None): logits = logits_list[i] loss_i = losses['types'][i] # Whether to use edges as labels According to loss type . 
- if loss_i.__class__.__name__ in ('BCELoss', ): - if loss_i.edge_label: - labels = edges - loss += losses['coef'][i] * loss_i(logits, labels) + if loss_i.__class__.__name__ in ('BCELoss', ) and loss_i.edge_label: + loss += losses['coef'][i] * loss_i(logits, edges) + else: + loss += losses['coef'][i] * loss_i(logits, labels) return loss diff --git a/paddleseg/models/losses/gscnn_dual_task_loss.py b/paddleseg/models/losses/gscnn_dual_task_loss.py index 964cecc82b..6a3d81b7c1 100644 --- a/paddleseg/models/losses/gscnn_dual_task_loss.py +++ b/paddleseg/models/losses/gscnn_dual_task_loss.py @@ -103,6 +103,8 @@ def forward(self, logit, labels): n, c, h, w = logit.shape th = 1e-8 eps = 1e-10 + if len(labels.shape) == 3: + labels = labels.unsqueeze(1) mask = (labels != self.ignore_index) mask.stop_gradient = True logit = logit * mask From 46e0323f93e34828e20574d0c4cd5a132b30b0dc Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Mon, 28 Dec 2020 10:20:52 +0800 Subject: [PATCH 10/52] fix bce loss bug --- .../losses/binary_cross_entropy_loss.py | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/paddleseg/models/losses/binary_cross_entropy_loss.py b/paddleseg/models/losses/binary_cross_entropy_loss.py index cb1f2ce8d8..f81bb77307 100644 --- a/paddleseg/models/losses/binary_cross_entropy_loss.py +++ b/paddleseg/models/losses/binary_cross_entropy_loss.py @@ -91,6 +91,7 @@ def __init__(self, self.pos_weight = pos_weight self.ignore_index = ignore_index self.edge_label = edge_label + self.EPS = 1e-10 if self.weight is not None: if isinstance(self.weight, str): @@ -101,7 +102,7 @@ def __init__(self, elif isinstance(self.weight, paddle.VarBase): raise TypeError( 'The type of `weight` is wrong, it should be Tensor or str, but it is {}' - .format(type(self.pos_weight))) + .format(type(self.weight))) if self.pos_weight is not None: if isinstance(self.pos_weight, str): @@ -129,41 +130,44 @@ def 
forward(self, logit, label): value is 0 or 1, and if shape is more than 2D, this is (N, C, D1, D2,..., Dk), k >= 1. """ - eps = 1e-6 if len(label.shape) != len(logit.shape): label = paddle.unsqueeze(label, 1) + mask = (label != self.ignore_index) + mask = paddle.cast(mask, 'float32') # label.shape should equal to the logit.shape if label.shape[1] != logit.shape[1]: label = label.squeeze(1) label = F.one_hot(label, logit.shape[1]) label = label.transpose((0, 3, 1, 2)) - mask = (label != self.ignore_index) - mask = paddle.cast(mask, 'float32') if isinstance(self.weight, str): pos_index = (label == 1) neg_index = (label == 0) pos_num = paddle.sum(pos_index.astype('float32')) neg_num = paddle.sum(neg_index.astype('float32')) sum_num = pos_num + neg_num - weight_pos = 2 * neg_num / (sum_num + eps) - weight_neg = 2 * pos_num / (sum_num + eps) - self.weight = weight_pos * label + weight_neg * (1 - label) + weight_pos = 2 * neg_num / (sum_num + self.EPS) + weight_neg = 2 * pos_num / (sum_num + self.EPS) + weight = weight_pos * label + weight_neg * (1 - label) + else: + weight = self.weight if isinstance(self.pos_weight, str): pos_index = (label == 1) neg_index = (label == 0) pos_num = paddle.sum(pos_index.astype('float32')) neg_num = paddle.sum(neg_index.astype('float32')) sum_num = pos_num + neg_num - self.pos_weight = 2 * neg_num / (sum_num + eps) + pos_weight = 2 * neg_num / (sum_num + self.EPS) + else: + pos_weight = self.pos_weight label = label.astype('float32') loss = paddle.nn.functional.binary_cross_entropy_with_logits( logit, label, - weight=self.weight, + weight=weight, reduction='none', - pos_weight=self.pos_weight) + pos_weight=pos_weight) loss = loss * mask - loss = paddle.mean(loss) / paddle.mean(mask + eps) + loss = paddle.mean(loss) / (paddle.mean(mask) + self.EPS) label.stop_gradient = True mask.stop_gradient = True From 0fa7727ed919f5e6c70001fa3b2729e4f7f63e83 Mon Sep 17 00:00:00 2001 From: LutaoChu <30695251+LutaoChu@users.noreply.github.com> Date: Mon, 
28 Dec 2020 10:52:22 +0800 Subject: [PATCH 11/52] [cherry-pick] Remote sensing fitting paddle version 2.0 --- legacy/contrib/RemoteSensing/models/base.py | 2 +- legacy/contrib/RemoteSensing/predict_demo.py | 2 ++ legacy/contrib/RemoteSensing/train_demo.py | 3 ++ .../RemoteSensing/utils/paddle_utils.py | 28 +++++++++++++++++++ 4 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 legacy/contrib/RemoteSensing/utils/paddle_utils.py diff --git a/legacy/contrib/RemoteSensing/models/base.py b/legacy/contrib/RemoteSensing/models/base.py index 0b5c858f17..0a0b73ce26 100644 --- a/legacy/contrib/RemoteSensing/models/base.py +++ b/legacy/contrib/RemoteSensing/models/base.py @@ -37,7 +37,7 @@ def save_infer_program(test_program, ckpt_dir): _test_program = test_program.clone() _test_program.desc.flush() _test_program.desc._set_version() - paddle.fluid.core.save_op_compatible_info(_test_program.desc) + utils.paddle_utils.save_op_version_info(_test_program.desc) with open(os.path.join(ckpt_dir, 'model') + ".pdmodel", "wb") as f: f.write(_test_program.desc.serialize_to_string()) diff --git a/legacy/contrib/RemoteSensing/predict_demo.py b/legacy/contrib/RemoteSensing/predict_demo.py index 5da7f0c55b..bd80f57ed5 100644 --- a/legacy/contrib/RemoteSensing/predict_demo.py +++ b/legacy/contrib/RemoteSensing/predict_demo.py @@ -21,6 +21,7 @@ import argparse from models import load_model from models.utils.visualize import get_color_map_list +from utils import paddle_utils def parse_args(): @@ -68,6 +69,7 @@ def parse_args(): return parser.parse_args() +paddle_utils.enable_static() args = parse_args() data_dir = args.data_dir file_list = args.file_list diff --git a/legacy/contrib/RemoteSensing/train_demo.py b/legacy/contrib/RemoteSensing/train_demo.py index a5721f2417..91842138a5 100644 --- a/legacy/contrib/RemoteSensing/train_demo.py +++ b/legacy/contrib/RemoteSensing/train_demo.py @@ -15,9 +15,11 @@ import os.path as osp import argparse + import transforms.transforms as T 
from readers.reader import Reader from models import UNet, HRNet +from utils import paddle_utils def parse_args(): @@ -97,6 +99,7 @@ def parse_args(): return parser.parse_args() +paddle_utils.enable_static() args = parse_args() data_dir = args.data_dir save_dir = args.save_dir diff --git a/legacy/contrib/RemoteSensing/utils/paddle_utils.py b/legacy/contrib/RemoteSensing/utils/paddle_utils.py new file mode 100644 index 0000000000..b30d46ffe7 --- /dev/null +++ b/legacy/contrib/RemoteSensing/utils/paddle_utils.py @@ -0,0 +1,28 @@ +# coding: utf8 +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle + + +def enable_static(): + if hasattr(paddle, 'enable_static'): + paddle.enable_static() + + +def save_op_version_info(program_desc): + if hasattr(paddle.fluid.core, 'save_op_version_info'): + paddle.fluid.core.save_op_version_info(program_desc) + else: + paddle.fluid.core.save_op_compatible_info(program_desc) From ed8afc8f37670b990dab2a9bb7281ebed6aa049c Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Thu, 31 Dec 2020 12:13:43 +0800 Subject: [PATCH 12/52] Update README.md --- configs/danet/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/danet/README.md b/configs/danet/README.md index 9cc775ea36..af47879402 100644 --- a/configs/danet/README.md +++ b/configs/danet/README.md @@ -2,7 +2,7 @@ ## Reference -> Fu J, Liu J, Tian H, et al. 
Dual attention network for scene segmentation[C]//Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition. 2019: 3146-3154. +> Fu, Jun, Jing Liu, Haijie Tian, Yong Li, Yongjun Bao, Zhiwei Fang, and Hanqing Lu. "Dual attention network for scene segmentation." In Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pp. 3146-3154. 2019. ## Performance From aa81b8fdb480586b834b7136ff0e6e4a1b82709b Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Thu, 31 Dec 2020 12:14:31 +0800 Subject: [PATCH 13/52] Update README.md --- configs/bisenet/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/bisenet/README.md b/configs/bisenet/README.md index 55b7eeac72..147de8e12c 100644 --- a/configs/bisenet/README.md +++ b/configs/bisenet/README.md @@ -2,7 +2,7 @@ ## Reference -> Yu C, Gao C, Wang J, et al. BiSeNet V2: Bilateral Network with Guided Aggregation for Real-time Semantic Segmentation[J]. arXiv preprint arXiv:2004.02147, 2020. +> Yu, Changqian, Changxin Gao, Jingbo Wang, Gang Yu, Chunhua Shen, and Nong Sang. "BiSeNet V2: Bilateral Network with Guided Aggregation for Real-time Semantic Segmentation." arXiv preprint arXiv:2004.02147 (2020). ## Performance From 24b3700fe14d8a0ed9f0bd8c3a8fc81674098f99 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Thu, 31 Dec 2020 12:15:10 +0800 Subject: [PATCH 14/52] Update README.md --- configs/fcn/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/fcn/README.md b/configs/fcn/README.md index 8bb9786c53..87fe6e4ee0 100644 --- a/configs/fcn/README.md +++ b/configs/fcn/README.md @@ -1,7 +1,7 @@ # Deep High-Resolution Representation Learning for Visual Recognition ## Reference -> Wang J, Sun K, Cheng T, et al. Deep high-resolution representation learning for visual recognition[J]. IEEE transactions on pattern analysis and machine intelligence, 2020. +> Wang, Jingdong, Ke Sun, Tianheng Cheng, Borui Jiang, Chaorui Deng, Yang Zhao, Dong Liu et al. 
"Deep high-resolution representation learning for visual recognition." IEEE transactions on pattern analysis and machine intelligence (2020). ## Performance From 8289b80c6f40f1f01090ca40ab9f354a005693b9 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Thu, 31 Dec 2020 12:15:48 +0800 Subject: [PATCH 15/52] Update README.md --- configs/gscnn/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/gscnn/README.md b/configs/gscnn/README.md index 81e99c7c96..bb144b95b6 100644 --- a/configs/gscnn/README.md +++ b/configs/gscnn/README.md @@ -2,7 +2,7 @@ ## Reference -> Takikawa T, Acuna D, Jampani V, et al. Gated-scnn: Gated shape cnns for semantic segmentation[C]//Proceedings of the IEEE International Conference on Computer Vision. 2019: 5229-5238. +> Takikawa, Towaki, David Acuna, Varun Jampani, and Sanja Fidler. "Gated-scnn: Gated shape cnns for semantic segmentation." In Proceedings of the IEEE International Conference on Computer Vision, pp. 5229-5238. 2019. ## Performance From c7e1b390b28746c77708183d2ff52dbf684331d5 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Thu, 31 Dec 2020 12:16:26 +0800 Subject: [PATCH 16/52] Update README.md --- configs/ocrnet/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/ocrnet/README.md b/configs/ocrnet/README.md index 9f8fd6b597..6a89caaa6e 100644 --- a/configs/ocrnet/README.md +++ b/configs/ocrnet/README.md @@ -2,7 +2,7 @@ ## Reference -> Yuan Y, Chen X, Wang J. Object-contextual representations for semantic segmentation[J]. arXiv preprint arXiv:1909.11065, 2019. +> Yuan, Yuhui, Xilin Chen, and Jingdong Wang. "Object-contextual representations for semantic segmentation." arXiv preprint arXiv:1909.11065 (2019). 
## Performance From b79884917b15a145ebc0aee86f67288129ccf00b Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 4 Jan 2021 10:05:45 +0800 Subject: [PATCH 17/52] Add License --- paddleseg/models/u2net.py | 495 +++++++++++++++++++------------------- 1 file changed, 254 insertions(+), 241 deletions(-) diff --git a/paddleseg/models/u2net.py b/paddleseg/models/u2net.py index eaa956f022..2511e5b656 100644 --- a/paddleseg/models/u2net.py +++ b/paddleseg/models/u2net.py @@ -1,11 +1,264 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import paddle import paddle.nn as nn import paddle.nn.functional as F -from paddleseg import utils + from paddleseg.cvlibs import manager +from paddleseg.models import layers +from paddleseg.utils import utils __all__ = ['U2Net', 'U2Netp'] +@manager.MODELS.add_component +class U2Net(nn.Layer): + """ + The U^2-Net implementation based on PaddlePaddle. + + The original article refers to + Xuebin Qin, et, al. "U^2-Net: Going Deeper with Nested U-Structure for Salient Object Detection" + (https://arxiv.org/abs/2005.09007). + + Args: + num_classes (int): The unique number of target classes. + in_ch (int, optional): Input channels. Default: 3. + pretrained (str, optional): The path or url of pretrained model for fine tuning. Default: None. 
+ + """ + + def __init__(self, num_classes, in_ch=3, pretrained=None): + super(U2Net, self).__init__() + + self.stage1 = RSU7(in_ch, 32, 64) + self.pool12 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage2 = RSU6(64, 32, 128) + self.pool23 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage3 = RSU5(128, 64, 256) + self.pool34 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage4 = RSU4(256, 128, 512) + self.pool45 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage5 = RSU4F(512, 256, 512) + self.pool56 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage6 = RSU4F(512, 256, 512) + + # decoder + self.stage5d = RSU4F(1024, 256, 512) + self.stage4d = RSU4(1024, 128, 256) + self.stage3d = RSU5(512, 64, 128) + self.stage2d = RSU6(256, 32, 64) + self.stage1d = RSU7(128, 16, 64) + + self.side1 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side2 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side3 = nn.Conv2D(128, num_classes, 3, padding=1) + self.side4 = nn.Conv2D(256, num_classes, 3, padding=1) + self.side5 = nn.Conv2D(512, num_classes, 3, padding=1) + self.side6 = nn.Conv2D(512, num_classes, 3, padding=1) + + self.outconv = nn.Conv2D(6 * num_classes, num_classes, 1) + + self.pretrained = pretrained + self.init_weight() + + def forward(self, x): + + hx = x + + #stage 1 + hx1 = self.stage1(hx) + hx = self.pool12(hx1) + + #stage 2 + hx2 = self.stage2(hx) + hx = self.pool23(hx2) + + #stage 3 + hx3 = self.stage3(hx) + hx = self.pool34(hx3) + + #stage 4 + hx4 = self.stage4(hx) + hx = self.pool45(hx4) + + #stage 5 + hx5 = self.stage5(hx) + hx = self.pool56(hx5) + + #stage 6 + hx6 = self.stage6(hx) + hx6up = _upsample_like(hx6, hx5) + + #-------------------- decoder -------------------- + hx5d = self.stage5d(paddle.concat((hx6up, hx5), 1)) + hx5dup = _upsample_like(hx5d, hx4) + + hx4d = self.stage4d(paddle.concat((hx5dup, hx4), 1)) + hx4dup = _upsample_like(hx4d, hx3) + + hx3d = self.stage3d(paddle.concat((hx4dup, hx3), 1)) + hx3dup = 
_upsample_like(hx3d, hx2) + + hx2d = self.stage2d(paddle.concat((hx3dup, hx2), 1)) + hx2dup = _upsample_like(hx2d, hx1) + + hx1d = self.stage1d(paddle.concat((hx2dup, hx1), 1)) + + #side output + d1 = self.side1(hx1d) + + d2 = self.side2(hx2d) + d2 = _upsample_like(d2, d1) + + d3 = self.side3(hx3d) + d3 = _upsample_like(d3, d1) + + d4 = self.side4(hx4d) + d4 = _upsample_like(d4, d1) + + d5 = self.side5(hx5d) + d5 = _upsample_like(d5, d1) + + d6 = self.side6(hx6) + d6 = _upsample_like(d6, d1) + + d0 = self.outconv(paddle.concat((d1, d2, d3, d4, d5, d6), 1)) + + return [d0, d1, d2, d3, d4, d5, d6] + + def init_weight(self): + if self.pretrained is not None: + utils.load_entire_model(self, self.pretrained) + + +### U^2-Net small ### +@manager.MODELS.add_component +class U2Netp(nn.Layer): + """Please Refer to U2Net above.""" + + def __init__(self, num_classes, in_ch=3, pretrained=None): + super(U2Netp, self).__init__() + + self.stage1 = RSU7(in_ch, 16, 64) + self.pool12 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage2 = RSU6(64, 16, 64) + self.pool23 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage3 = RSU5(64, 16, 64) + self.pool34 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage4 = RSU4(64, 16, 64) + self.pool45 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage5 = RSU4F(64, 16, 64) + self.pool56 = nn.MaxPool2D(2, stride=2, ceil_mode=True) + + self.stage6 = RSU4F(64, 16, 64) + + # decoder + self.stage5d = RSU4F(128, 16, 64) + self.stage4d = RSU4(128, 16, 64) + self.stage3d = RSU5(128, 16, 64) + self.stage2d = RSU6(128, 16, 64) + self.stage1d = RSU7(128, 16, 64) + + self.side1 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side2 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side3 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side4 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side5 = nn.Conv2D(64, num_classes, 3, padding=1) + self.side6 = nn.Conv2D(64, num_classes, 3, padding=1) + + self.outconv = nn.Conv2D(6 * 
num_classes, num_classes, 1) + + self.pretrained = pretrained + self.init_weight() + + def forward(self, x): + + hx = x + + #stage 1 + hx1 = self.stage1(hx) + hx = self.pool12(hx1) + + #stage 2 + hx2 = self.stage2(hx) + hx = self.pool23(hx2) + + #stage 3 + hx3 = self.stage3(hx) + hx = self.pool34(hx3) + + #stage 4 + hx4 = self.stage4(hx) + hx = self.pool45(hx4) + + #stage 5 + hx5 = self.stage5(hx) + hx = self.pool56(hx5) + + #stage 6 + hx6 = self.stage6(hx) + hx6up = _upsample_like(hx6, hx5) + + #decoder + hx5d = self.stage5d(paddle.concat((hx6up, hx5), 1)) + hx5dup = _upsample_like(hx5d, hx4) + + hx4d = self.stage4d(paddle.concat((hx5dup, hx4), 1)) + hx4dup = _upsample_like(hx4d, hx3) + + hx3d = self.stage3d(paddle.concat((hx4dup, hx3), 1)) + hx3dup = _upsample_like(hx3d, hx2) + + hx2d = self.stage2d(paddle.concat((hx3dup, hx2), 1)) + hx2dup = _upsample_like(hx2d, hx1) + + hx1d = self.stage1d(paddle.concat((hx2dup, hx1), 1)) + + #side output + d1 = self.side1(hx1d) + + d2 = self.side2(hx2d) + d2 = _upsample_like(d2, d1) + + d3 = self.side3(hx3d) + d3 = _upsample_like(d3, d1) + + d4 = self.side4(hx4d) + d4 = _upsample_like(d4, d1) + + d5 = self.side5(hx5d) + d5 = _upsample_like(d5, d1) + + d6 = self.side6(hx6) + d6 = _upsample_like(d6, d1) + + d0 = self.outconv(paddle.concat((d1, d2, d3, d4, d5, d6), 1)) + + return [d0, d1, d2, d3, d4, d5, d6] + + def init_weight(self): + if self.pretrained is not None: + utils.load_entire_model(self, self.pretrained) class REBNCONV(nn.Layer): def __init__(self, in_ch=3, out_ch=3, dirate=1): @@ -317,243 +570,3 @@ def forward(self, x): hx1d = self.rebnconv1d(paddle.concat((hx2d, hx1), 1)) return hx1d + hxin - - -##### U^2-Net #### -@manager.MODELS.add_component -class U2Net(nn.Layer): - """ - The U^2-Net implementation based on PaddlePaddle. - - The original article refers to - Xuebin Qin, et, al. "U^2-Net: Going Deeper with Nested U-Structure for Salient Object Detection" - (https://arxiv.org/abs/2005.09007). 
- - Args: - num_classes (int): The unique number of target classes. - in_ch (int, optional): Input channels. Default: 3. - pretrained (str, optional): The path or url of pretrained model for fine tuning. Default: None. - - """ - - def __init__(self, num_classes, in_ch=3, pretrained=None): - super(U2Net, self).__init__() - - self.stage1 = RSU7(in_ch, 32, 64) - self.pool12 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage2 = RSU6(64, 32, 128) - self.pool23 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage3 = RSU5(128, 64, 256) - self.pool34 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage4 = RSU4(256, 128, 512) - self.pool45 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage5 = RSU4F(512, 256, 512) - self.pool56 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage6 = RSU4F(512, 256, 512) - - # decoder - self.stage5d = RSU4F(1024, 256, 512) - self.stage4d = RSU4(1024, 128, 256) - self.stage3d = RSU5(512, 64, 128) - self.stage2d = RSU6(256, 32, 64) - self.stage1d = RSU7(128, 16, 64) - - self.side1 = nn.Conv2D(64, num_classes, 3, padding=1) - self.side2 = nn.Conv2D(64, num_classes, 3, padding=1) - self.side3 = nn.Conv2D(128, num_classes, 3, padding=1) - self.side4 = nn.Conv2D(256, num_classes, 3, padding=1) - self.side5 = nn.Conv2D(512, num_classes, 3, padding=1) - self.side6 = nn.Conv2D(512, num_classes, 3, padding=1) - - self.outconv = nn.Conv2D(6 * num_classes, num_classes, 1) - - self.pretrained = pretrained - self.init_weight() - - def forward(self, x): - - hx = x - - #stage 1 - hx1 = self.stage1(hx) - hx = self.pool12(hx1) - - #stage 2 - hx2 = self.stage2(hx) - hx = self.pool23(hx2) - - #stage 3 - hx3 = self.stage3(hx) - hx = self.pool34(hx3) - - #stage 4 - hx4 = self.stage4(hx) - hx = self.pool45(hx4) - - #stage 5 - hx5 = self.stage5(hx) - hx = self.pool56(hx5) - - #stage 6 - hx6 = self.stage6(hx) - hx6up = _upsample_like(hx6, hx5) - - #-------------------- decoder -------------------- - hx5d = 
self.stage5d(paddle.concat((hx6up, hx5), 1)) - hx5dup = _upsample_like(hx5d, hx4) - - hx4d = self.stage4d(paddle.concat((hx5dup, hx4), 1)) - hx4dup = _upsample_like(hx4d, hx3) - - hx3d = self.stage3d(paddle.concat((hx4dup, hx3), 1)) - hx3dup = _upsample_like(hx3d, hx2) - - hx2d = self.stage2d(paddle.concat((hx3dup, hx2), 1)) - hx2dup = _upsample_like(hx2d, hx1) - - hx1d = self.stage1d(paddle.concat((hx2dup, hx1), 1)) - - #side output - d1 = self.side1(hx1d) - - d2 = self.side2(hx2d) - d2 = _upsample_like(d2, d1) - - d3 = self.side3(hx3d) - d3 = _upsample_like(d3, d1) - - d4 = self.side4(hx4d) - d4 = _upsample_like(d4, d1) - - d5 = self.side5(hx5d) - d5 = _upsample_like(d5, d1) - - d6 = self.side6(hx6) - d6 = _upsample_like(d6, d1) - - d0 = self.outconv(paddle.concat((d1, d2, d3, d4, d5, d6), 1)) - - return [d0, d1, d2, d3, d4, d5, d6] - - def init_weight(self): - if self.pretrained is not None: - utils.load_entire_model(self, self.pretrained) - - -### U^2-Net small ### -@manager.MODELS.add_component -class U2Netp(nn.Layer): - """Please Refer to U2Net above.""" - - def __init__(self, num_classes, in_ch=3, pretrained=None): - super(U2Netp, self).__init__() - - self.stage1 = RSU7(in_ch, 16, 64) - self.pool12 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage2 = RSU6(64, 16, 64) - self.pool23 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage3 = RSU5(64, 16, 64) - self.pool34 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage4 = RSU4(64, 16, 64) - self.pool45 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage5 = RSU4F(64, 16, 64) - self.pool56 = nn.MaxPool2D(2, stride=2, ceil_mode=True) - - self.stage6 = RSU4F(64, 16, 64) - - # decoder - self.stage5d = RSU4F(128, 16, 64) - self.stage4d = RSU4(128, 16, 64) - self.stage3d = RSU5(128, 16, 64) - self.stage2d = RSU6(128, 16, 64) - self.stage1d = RSU7(128, 16, 64) - - self.side1 = nn.Conv2D(64, num_classes, 3, padding=1) - self.side2 = nn.Conv2D(64, num_classes, 3, padding=1) - self.side3 = 
nn.Conv2D(64, num_classes, 3, padding=1) - self.side4 = nn.Conv2D(64, num_classes, 3, padding=1) - self.side5 = nn.Conv2D(64, num_classes, 3, padding=1) - self.side6 = nn.Conv2D(64, num_classes, 3, padding=1) - - self.outconv = nn.Conv2D(6 * num_classes, num_classes, 1) - - self.pretrained = pretrained - self.init_weight() - - def forward(self, x): - - hx = x - - #stage 1 - hx1 = self.stage1(hx) - hx = self.pool12(hx1) - - #stage 2 - hx2 = self.stage2(hx) - hx = self.pool23(hx2) - - #stage 3 - hx3 = self.stage3(hx) - hx = self.pool34(hx3) - - #stage 4 - hx4 = self.stage4(hx) - hx = self.pool45(hx4) - - #stage 5 - hx5 = self.stage5(hx) - hx = self.pool56(hx5) - - #stage 6 - hx6 = self.stage6(hx) - hx6up = _upsample_like(hx6, hx5) - - #decoder - hx5d = self.stage5d(paddle.concat((hx6up, hx5), 1)) - hx5dup = _upsample_like(hx5d, hx4) - - hx4d = self.stage4d(paddle.concat((hx5dup, hx4), 1)) - hx4dup = _upsample_like(hx4d, hx3) - - hx3d = self.stage3d(paddle.concat((hx4dup, hx3), 1)) - hx3dup = _upsample_like(hx3d, hx2) - - hx2d = self.stage2d(paddle.concat((hx3dup, hx2), 1)) - hx2dup = _upsample_like(hx2d, hx1) - - hx1d = self.stage1d(paddle.concat((hx2dup, hx1), 1)) - - #side output - d1 = self.side1(hx1d) - - d2 = self.side2(hx2d) - d2 = _upsample_like(d2, d1) - - d3 = self.side3(hx3d) - d3 = _upsample_like(d3, d1) - - d4 = self.side4(hx4d) - d4 = _upsample_like(d4, d1) - - d5 = self.side5(hx5d) - d5 = _upsample_like(d5, d1) - - d6 = self.side6(hx6) - d6 = _upsample_like(d6, d1) - - d0 = self.outconv(paddle.concat((d1, d2, d3, d4, d5, d6), 1)) - - return [d0, d1, d2, d3, d4, d5, d6] - - def init_weight(self): - if self.pretrained is not None: - utils.load_entire_model(self, self.pretrained) From 0b0ab8cfb7a7b9dbd90bc50f2383eba2b9bac352 Mon Sep 17 00:00:00 2001 From: chliang Date: Mon, 4 Jan 2021 10:59:16 +0800 Subject: [PATCH 18/52] Fix the bug of feature fusion(FeatureFusionModule) module in FastSCNN(fast_scnn.py). 
(#756) --- paddleseg/models/fast_scnn.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/paddleseg/models/fast_scnn.py b/paddleseg/models/fast_scnn.py index 10fda4c96f..8ebf36668e 100644 --- a/paddleseg/models/fast_scnn.py +++ b/paddleseg/models/fast_scnn.py @@ -265,9 +265,10 @@ def __init__(self, high_in_channels, low_in_channels, out_channels, self.align_corners = align_corners def forward(self, high_res_input, low_res_input): + h, w = high_res_input.shape[2:] low_res_input = F.interpolate( low_res_input, - scale_factor=4, + [h, w], mode='bilinear', align_corners=self.align_corners) low_res_input = self.dwconv(low_res_input) From fa785736e53a49b94216fc662acf944974037e60 Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Mon, 4 Jan 2021 11:25:21 +0800 Subject: [PATCH 19/52] update unet.py (#758) --- paddleseg/models/unet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/paddleseg/models/unet.py b/paddleseg/models/unet.py index 2c9eaa9e50..0dc8cf75e8 100644 --- a/paddleseg/models/unet.py +++ b/paddleseg/models/unet.py @@ -127,7 +127,7 @@ def __init__(self, self.use_deconv = use_deconv if self.use_deconv: - self.deconv = nn.ConvTranspose2D( + self.deconv = nn.Conv2DTranspose( in_channels, out_channels // 2, kernel_size=2, From 7a54ed56e01d66995cd640b885eb324bb82522f6 Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> Date: Wed, 6 Jan 2021 10:30:24 +0800 Subject: [PATCH 20/52] add isanet and emanet --- README.md | 3 +- README_CN.md | 2 + configs/emanet/README.md | 23 ++ ..._resnet101_os8_cityscapes_1024x512_80k.yml | 31 +++ ...net_resnet101_os8_voc12aug_512x512_40k.yml | 28 +++ ...t_resnet50_os8_cityscapes_1024x512_80k.yml | 32 +++ ...anet_resnet50_os8_voc12aug_512x512_40k.yml | 30 +++ configs/isanet/README.md | 21 ++ ...t_resnet101_os8_cityscapes_769x769_80k.yml | 30 +++ ...net_resnet101_os8_voc12aug_512x512_40k.yml | 28 +++ 
...et_resnet50_os8_cityscapes_769x769_80k.yml | 31 +++ ...anet_resnet50_os8_voc12aug_512x512_40k.yml | 28 +++ docs/apis/models.md | 47 +++++ paddleseg/models/__init__.py | 2 + paddleseg/models/emanet.py | 199 ++++++++++++++++++ paddleseg/models/isanet.py | 178 ++++++++++++++++ 16 files changed, 712 insertions(+), 1 deletion(-) create mode 100644 configs/emanet/README.md create mode 100644 configs/emanet/emanet_resnet101_os8_cityscapes_1024x512_80k.yml create mode 100644 configs/emanet/emanet_resnet101_os8_voc12aug_512x512_40k.yml create mode 100644 configs/emanet/emanet_resnet50_os8_cityscapes_1024x512_80k.yml create mode 100644 configs/emanet/emanet_resnet50_os8_voc12aug_512x512_40k.yml create mode 100644 configs/isanet/README.md create mode 100644 configs/isanet/isanet_resnet101_os8_cityscapes_769x769_80k.yml create mode 100644 configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml create mode 100644 configs/isanet/isanet_resnet50_os8_cityscapes_769x769_80k.yml create mode 100644 configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml create mode 100644 paddleseg/models/emanet.py create mode 100644 paddleseg/models/isanet.py diff --git a/README.md b/README.md index 4a77d36e9e..d8fc04fe15 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,8 @@ Welcome to PaddleSeg! 
PaddleSeg is an end-to-end image segmentation development |[U2-Net](./configs/u2net)|-|-|-|-| |[Att U-Net](./configs/attention_unet)|-|-|-|-| |[U-Net++](./configs/unet_plusplus)|-|-|-|-| - +|[EMANet](./configs/emanet)|✔|✔|-|-| +|[ISANet](./configs/isanet)|✔|✔|-|-| ## Dataset - [x] Cityscapes diff --git a/README_CN.md b/README_CN.md index 558a6fbddd..745a89be48 100644 --- a/README_CN.md +++ b/README_CN.md @@ -42,6 +42,8 @@ PaddleSeg是基于飞桨[PaddlePaddle](https://www.paddlepaddle.org.cn)开发的 |[U2-Net](./configs/u2net)|-|-|-|-| |[Att U-Net](./configs/attention_unet)|-|-|-|-| |[U-Net++](./configs/unet_plusplus)|-|-|-|-| +|[EMANet](./configs/emanet)|✔|✔|-|-| +|[ISANet](./configs/isanet)|✔|✔|-|-| ## 数据集 diff --git a/configs/emanet/README.md b/configs/emanet/README.md new file mode 100644 index 0000000000..3dd4d61f1b --- /dev/null +++ b/configs/emanet/README.md @@ -0,0 +1,23 @@ +# Expectation-Maximization Attention Networks for Semantic Segmentation + +## Reference + +> Xia Li, Zhisheng Zhong, Jianlong Wu, Yibo Yang, Zhouchen Lin, Hong Liu: +Expectation-Maximization Attention Networks for Semantic Segmentation. ICCV 2019: 9166-9175. 
+ +## Performance + +### Cityscapes + +| Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | +|-|-|-|-|-|-|-|-| +|EMANet|ResNet50_OS8|1024x512|80000|77.58%|77.98%|78.23%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e053a214d60822d6e65445b8614d052)| +|EMANet|ResNet101_OS8|769x769|80000|79.42%|79.83%|80.33%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| + +### Pascal VOC 2012 + Aug + +| Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | +|-|-|-|-|-|-|-|-| +|EMANet|ResNet50_OS8|512x512|40000|78.79%|78.90%|79.17%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e60b80b984a71f3d2b83b8a746a819c)| +|EMANet|ResNet101_OS8|512x512|40000|79.73%|79.97%| 80.67%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=f33479772409766dbc40b5f031cbdb1a)| + diff --git a/configs/emanet/emanet_resnet101_os8_cityscapes_1024x512_80k.yml 
b/configs/emanet/emanet_resnet101_os8_cityscapes_1024x512_80k.yml new file mode 100644 index 0000000000..c357b6dbcd --- /dev/null +++ b/configs/emanet/emanet_resnet101_os8_cityscapes_1024x512_80k.yml @@ -0,0 +1,31 @@ +_base_: '../_base_/cityscapes.yml' + +batch_size: 2 +iters: 80000 + +model: + type: EMANet + backbone: + type: ResNet101_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz + num_classes: 19 + ema_channels: 512 + gc_channels: 256 + num_bases: 64 + stage_num: 3 + momentum: 0.1 + concat_input: True + enable_auxiliary_loss: True + align_corners: False + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.0005 + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/configs/emanet/emanet_resnet101_os8_voc12aug_512x512_40k.yml b/configs/emanet/emanet_resnet101_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..a14f63962d --- /dev/null +++ b/configs/emanet/emanet_resnet101_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,28 @@ +_base_: '../_base_/pascal_voc12aug.yml' + +model: + type: EMANet + backbone: + type: ResNet101_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz + ema_channels: 512 + gc_channels: 256 + num_bases: 64 + stage_num: 3 + momentum: 0.1 + concat_input: True + enable_auxiliary_loss: True + align_corners: True + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.0005 + + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/configs/emanet/emanet_resnet50_os8_cityscapes_1024x512_80k.yml b/configs/emanet/emanet_resnet50_os8_cityscapes_1024x512_80k.yml new file mode 100644 index 0000000000..0230ab44f1 --- /dev/null +++ b/configs/emanet/emanet_resnet50_os8_cityscapes_1024x512_80k.yml @@ -0,0 +1,32 @@ +_base_: '../_base_/cityscapes.yml' + +batch_size: 2 +iters: 80000 + +model: + 
type: EMANet + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + num_classes: 19 + ema_channels: 512 + gc_channels: 256 + num_bases: 64 + stage_num: 3 + momentum: 0.1 + concat_input: True + enable_auxiliary_loss: True + align_corners: False + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.0005 + + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/configs/emanet/emanet_resnet50_os8_voc12aug_512x512_40k.yml b/configs/emanet/emanet_resnet50_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..9644881dcd --- /dev/null +++ b/configs/emanet/emanet_resnet50_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,30 @@ +_base_: '../_base_/pascal_voc12aug.yml' + + +model: + type: EMANet + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + ema_channels: 512 + gc_channels: 256 + num_bases: 64 + stage_num: 3 + momentum: 0.1 + concat_input: True + enable_auxiliary_loss: True + align_corners: True + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.0005 + + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] + \ No newline at end of file diff --git a/configs/isanet/README.md b/configs/isanet/README.md new file mode 100644 index 0000000000..4c55fb1239 --- /dev/null +++ b/configs/isanet/README.md @@ -0,0 +1,21 @@ +# Interlaced Sparse Self-Attention for Semantic Segmentation + +## Reference + +> Lang Huang, Yuhui Yuan, Jianyuan Guo, Chao Zhang, Xilin Chen, Jingdong Wang: Interlaced Sparse Self-Attention for Semantic Segmentation. CoRR abs/1907.12273 (2019). 
+ +## Performance + +### Cityscapes + +| Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | +|-|-|-|-|-|-|-|-| +|ISANet|ResNet50_OS8|769x769|80000|79.03%|79.43%|79.52%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet50_os8_cityscapes_769x769_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet50_os8_cityscapes_769x769_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=ab7cc0627fdbf1e210557c33d94d2e8c)| +|ISANet|ResNet101_OS8|769x769|80000|80.10%|80.30%|80.26%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet101_os8_cityscapes_769x769_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet101_os8_cityscapes_769x769_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=76366b80293c3ac2374d981b4573eb52)| + +### Pascal VOC 2012 + Aug + +| Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | +|-|-|-|-|-|-|-|-| +|ISANet|ResNet50_OS8|512x512|40000|79.69%|79.93%|80.53%|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/isanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/isanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=84af8df983e48f1a0c89154a26f55032)| +|ISANet|ResNet101_OS8|512x512|40000|79.57%|79.69%|80.01%|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6874531f0adbfc72f22fb816bb231a46)| \ No newline at end of file diff --git a/configs/isanet/isanet_resnet101_os8_cityscapes_769x769_80k.yml 
b/configs/isanet/isanet_resnet101_os8_cityscapes_769x769_80k.yml new file mode 100644 index 0000000000..0c135845cb --- /dev/null +++ b/configs/isanet/isanet_resnet101_os8_cityscapes_769x769_80k.yml @@ -0,0 +1,30 @@ +_base_: '../_base_/cityscapes_769x769.yml' + +batch_size: 2 +iters: 80000 + +model: + type: ISANet + isa_channels: 256 + backbone: + type: ResNet101_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz + num_classes: 19 + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.00001 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml b/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..9272069d0f --- /dev/null +++ b/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,28 @@ +_base_: '../_base_/pascal_voc12aug.yml' + +model: + type: ISANet + isa_channels: 256 + backbone: + type: ResNet101_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz + num_classes: 19 + align_corners: True + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 4.0e-05 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/configs/isanet/isanet_resnet50_os8_cityscapes_769x769_80k.yml b/configs/isanet/isanet_resnet50_os8_cityscapes_769x769_80k.yml new file mode 100644 index 0000000000..dbb0eba71e --- /dev/null +++ b/configs/isanet/isanet_resnet50_os8_cityscapes_769x769_80k.yml @@ -0,0 +1,31 @@ +_base_: '../_base_/cityscapes_769x769.yml' + +batch_size: 2 +iters: 80000 + +model: + type: ISANet + isa_channels: 256 + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: 
https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + num_classes: 19 + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.00001 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml b/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..2da023a696 --- /dev/null +++ b/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,28 @@ +_base_: '../_base_/pascal_voc12aug.yml' + +model: + type: ISANet + isa_channels: 256 + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + num_classes: 19 + align_corners: True + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.00001 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] \ No newline at end of file diff --git a/docs/apis/models.md b/docs/apis/models.md index 5672c11621..026b8ac89a 100644 --- a/docs/apis/models.md +++ b/docs/apis/models.md @@ -18,6 +18,9 @@ The models subpackage contains the following model for image sementic segmentaio - [U2Net+](#U2Net-1) - [AttentionUNet](#AttentionUNet) - [UNet++](#UNet-1) +- [ISANet](#ISANet) +- [EMANet](#EMANet) + ## [DeepLabV3+](../../paddleseg/models/deeplab.py) > CLASS paddleseg.models.DeepLabV3P(num_classes, backbone, backbone_indices=(0, 3), aspp_ratios=(1, 6, 12, 18), aspp_out_channels=256, align_corners=False, pretrained=None) @@ -402,3 +405,47 @@ The models subpackage contains the following model for image sementic segmentaio is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. > > > - **pretrained** (str, optional): The path or url of pretrained model for fine tuning. Default: None. 
> > > - **is_ds** (bool): use deep supervision or not. Default: True + + +## [ISANet](../../paddleseg/models/isanet.py) +> CLASS paddleseg.models.ISANet(num_classes, backbone, backbone_indices=(2, 3), isa_channels=256, down_factor=(8, 8), enable_auxiliary_loss=True, align_corners=False, pretrained=None) + + The ISANet implementation based on PaddlePaddle. + + The original article refers to Lang Huang, et al. "Interlaced Sparse Self-Attention for Semantic Segmentation" + (https://arxiv.org/abs/1907.12273). + +> > Args +> > > - **num_classes** (int): The unique number of target classes. +> > > - **backbone** (Paddle.nn.Layer): A backbone network. +> > > - **backbone_indices** (tuple): The values in the tuple indicate the indices of output of backbone. +> > > - **isa_channels** (int): The channels of ISA Module. +> > > - **down_factor** (tuple): Divide the height and width dimension to (Ph, PW) groups. +> > > - **enable_auxiliary_loss** (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. +> > > - **align_corners** (bool): An argument of F.interpolate. It should be set to False when the output size of feature + is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. +> > > - **pretrained** (str, optional): The path or url of pretrained model. Default: None. + +## [EMANet](../../paddleseg/models/emanet.py) +> CLASS paddleseg.models.EMANet(num_classes, backbone, backbone_indices=(2, 3), ema_channels=512, gc_channels=256, num_bases=64, stage_num=3, momentum=0.1, concat_input=True, enable_auxiliary_loss=True, align_corners=False, pretrained=None) + + The EMANet implementation based on PaddlePaddle. + + The original article refers to + Xia Li, et al. "Expectation-Maximization Attention Networks for Semantic Segmentation" + (https://arxiv.org/abs/1907.13426) + +> > Args +> > > - **num_classes** (int): The unique number of target classes. +> > > - **backbone** (Paddle.nn.Layer): A backbone network. 
+> > > - **backbone_indices** (tuple): The values in the tuple indicate the indices of output of backbone. +> > > - **ema_channels** (int): EMA module channels. +> > > - **gc_channels** (int): The input channels to Global Context Block. +> > > - **num_bases** (int): Number of bases. +> > > - **stage_num** (int): The iteration number for EM. +> > > - **momentum** (float): The parameter for updating bases. +> > > - **concat_input** (bool): Whether concat the input and output of convs before classification layer. Default: True +> > > - **enable_auxiliary_loss** (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. +> > > - **align_corners** (bool): An argument of F.interpolate. It should be set to False when the output size of feature + is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. +> > > - **pretrained** (str, optional): The path or url of pretrained model. Default: None. \ No newline at end of file diff --git a/paddleseg/models/__init__.py b/paddleseg/models/__init__.py index 031eb78559..296c00a900 100644 --- a/paddleseg/models/__init__.py +++ b/paddleseg/models/__init__.py @@ -30,3 +30,5 @@ from .u2net import U2Net, U2Netp from .attention_unet import AttentionUNet from .unet_plusplus import UNetPlusPlus +from .emanet import * +from .isanet import * diff --git a/paddleseg/models/emanet.py b/paddleseg/models/emanet.py new file mode 100644 index 0000000000..a567e433a8 --- /dev/null +++ b/paddleseg/models/emanet.py @@ -0,0 +1,199 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +import paddle.nn as nn +import paddle.nn.functional as F + +from paddleseg.models import layers +from paddleseg.cvlibs import manager +from paddleseg.utils import utils + + +@manager.MODELS.add_component +class EMANet(nn.Layer): + """ + Expectation Maximization Attention Networks for Semantic Segmentation based on PaddlePaddle. + + The original article refers to + Xia Li, et al. "Expectation-Maximization Attention Networks for Semantic Segmentation" + (https://arxiv.org/abs/1907.13426) + + Args: + num_classes (int): The unique number of target classes. + backbone (Paddle.nn.Layer): A backbone network. + backbone_indices (tuple): The values in the tuple indicate the indices of output of backbone. + ema_channels (int): EMA module channels. + gc_channels (int): The input channels to Global Context Block. + num_bases (int): Number of bases. + stage_num (int): The iteration number for EM. + momentum (float): The parameter for updating bases. + concat_input (bool): Whether concat the input and output of convs before classification layer. Default: True + enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. + align_corners (bool): An argument of F.interpolate. It should be set to False when the output size of feature + is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. + pretrained (str, optional): The path or url of pretrained model. Default: None. 
+ """ + + def __init__(self, + num_classes, + backbone, + backbone_indices=(2, 3), + ema_channels=512, + gc_channels=256, + num_bases=64, + stage_num=3, + momentum=0.1, + concat_input=True, + enable_auxiliary_loss=True, + align_corners=False, + pretrained=None): + super().__init__() + + self.backbone = backbone + self.backbone_indices = backbone_indices + in_channels = [self.backbone.feat_channels[i] for i in backbone_indices] + self.head = EMAHead(num_classes, in_channels, ema_channels, gc_channels, + num_bases, stage_num, momentum, concat_input, enable_auxiliary_loss) + self.align_corners = align_corners + self.pretrained = pretrained + self.init_weight() + + def forward(self, x): + feats = self.backbone(x) + feats = [feats[i] for i in self.backbone_indices] + logit_list = self.head(feats) + logit_list = [F.interpolate( + logit, + x.shape[2:], + mode='bilinear', + align_corners=self.align_corners) for logit in logit_list] + + return logit_list + + def init_weight(self): + if self.pretrained is not None: + utils.load_entire_model(self, self.pretrained) + + +class EMAHead(nn.Layer): + """ + The EMANet head. + + Args: + num_classes (int): The unique number of target classes. + in_channels (tuple): The number of input channels. + ema_channels (int): EMA module channels. + gc_channels (int): The input channels to Global Context Block. + num_bases (int): Number of bases. + stage_num (int): The iteration number for EM. + momentum (float): The parameter for updating bases. + concat_input (bool): Whether concat the input and output of convs before classification layer. Default: True + enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. 
+ """ + + def __init__(self, + num_classes, + in_channels, + ema_channels, + gc_channels, + num_bases, + stage_num, + momentum, + concat_input=True, + enable_auxiliary_loss=True): + super(EMAHead, self).__init__() + + self.in_channels = in_channels[-1] + self.concat_input = concat_input + self.enable_auxiliary_loss = enable_auxiliary_loss + + self.emau = EMAU(ema_channels, num_bases, stage_num, momentum=momentum) + self.ema_in_conv = layers.ConvBNReLU(in_channels=self.in_channels, out_channels=ema_channels, kernel_size=3) + self.ema_mid_conv = nn.Conv2D(ema_channels, ema_channels, kernel_size=1) + for param in self.ema_mid_conv.parameters(): + param.stop_gradient = True + self.ema_out_conv = layers.ConvBNReLU(in_channels=ema_channels, out_channels=ema_channels, kernel_size=1) + self.bottleneck = layers.ConvBNReLU(in_channels=ema_channels, out_channels=gc_channels, kernel_size=3) + self.cls = nn.Sequential(nn.Dropout2D(p=0.1),nn.Conv2D(gc_channels, num_classes, 1)) + self.aux = nn.Sequential(layers.ConvBNReLU(in_channels=1024, out_channels=256, kernel_size=3), + nn.Dropout2D(p=0.1), + nn.Conv2D(256, num_classes, 1)) + if self.concat_input: + self.conv_cat = layers.ConvBNReLU(self.in_channels+gc_channels, gc_channels, kernel_size=3) + + def forward(self, feat_list): + C3, C4 = feat_list + feats = self.ema_in_conv(C4) + identity = feats + feats = self.ema_mid_conv(feats) + recon = self.emau(feats) + recon = F.relu(recon) + recon = self.ema_out_conv(recon) + output = F.relu(identity + recon) + output = self.bottleneck(output) + if self.concat_input: + output = self.conv_cat(paddle.concat([C4, output], axis=1)) + output = self.cls(output) + if self.enable_auxiliary_loss: + auxout = self.aux(C3) + return [output, auxout] + else: + return [output] + + +class EMAU(nn.Layer): + '''The Expectation-Maximization Attention Unit (EMAU). + + Arguments: + c (int): The input and output channel number. + k (int): The number of the bases. 
+ stage_num (int): The iteration number for EM. + momentum (float): The parameter for updating bases. + ''' + def __init__(self, c, k, stage_num=3, momentum=0.1): + super(EMAU, self).__init__() + assert stage_num >= 1 + self.stage_num = stage_num + self.momentum = momentum + + tmp_mu = self.create_parameter(shape=[1, c, k], default_initializer=paddle.nn.initializer.KaimingNormal(k)) + self.mu = F.normalize(paddle.to_tensor(tmp_mu), axis=1, p=2) + self.register_buffer('bases', self.mu) + + def forward(self, x): + b, c, h, w = x.shape + x = paddle.reshape(x, [b, c, h*w]) + mu = paddle.tile(self.mu, [b, 1, 1]) + + with paddle.no_grad(): + for i in range(self.stage_num): + x_t = paddle.transpose(x, [0, 2, 1]) + z = paddle.bmm(x_t, mu) + z = F.softmax(z, axis=2) + z_ = F.normalize(z, axis=1, p=1) + mu = paddle.bmm(x, z_) + mu = F.normalize(mu, axis=1, p=2) + + z_t = paddle.transpose(z, [0, 2, 1]) + x = paddle.matmul(mu, z_t) + x = paddle.reshape(x, [b, c, h, w]) + + if self.training: + mu = paddle.mean(mu, 0, keepdim=True) + if paddle.distributed.get_world_size() >1: + paddle.distributed.reduce(mu/paddle.distributed.get_world_size(), 0) + mu = F.normalize(mu, axis=1, p=2) + self.mu = self.mu * (1 - self.momentum) + mu * self.momentum + return x diff --git a/paddleseg/models/isanet.py b/paddleseg/models/isanet.py new file mode 100644 index 0000000000..4a083d9bd1 --- /dev/null +++ b/paddleseg/models/isanet.py @@ -0,0 +1,178 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import math + +import paddle +import paddle.nn as nn +import paddle.nn.functional as F + +from paddleseg.models import layers +from paddleseg.cvlibs import manager +from paddleseg.utils import utils + + +@manager.MODELS.add_component +class ISANet(nn.Layer): + """Interlaced Sparse Self-Attention for Semantic Segmentation. + + The original article refers to Lang Huang, et al. "Interlaced Sparse Self-Attention for Semantic Segmentation" + (https://arxiv.org/abs/1907.12273). + + Args: + num_classes (int): The unique number of target classes. + backbone (Paddle.nn.Layer): A backbone network. + backbone_indices (tuple): The values in the tuple indicate the indices of output of backbone. + isa_channels (int): The channels of ISA Module. + down_factor (tuple): Divide the height and width dimension to (Ph, PW) groups. + enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. + align_corners (bool): An argument of F.interpolate. It should be set to False when the output size of feature + is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. + pretrained (str, optional): The path or url of pretrained model. Default: None. 
+ + """ + + def __init__(self, + num_classes, + backbone, + backbone_indices=(2, 3), + isa_channels=256, + down_factor=(8, 8), + enable_auxiliary_loss=True, + align_corners=False, + pretrained=None): + super().__init__() + + self.backbone = backbone + self.backbone_indices = backbone_indices + in_channels = [self.backbone.feat_channels[i] for i in backbone_indices] + self.head = ISAHead(num_classes, in_channels, isa_channels, down_factor, enable_auxiliary_loss) + self.align_corners = align_corners + self.pretrained = pretrained + self.init_weight() + + def forward(self, x): + feats = self.backbone(x) + feats = [feats[i] for i in self.backbone_indices] + logit_list = self.head(feats) + logit_list = [F.interpolate( + logit, + x.shape[2:], + mode='bilinear', + align_corners=self.align_corners, + align_mode=1) for logit in logit_list] + + return logit_list + + def init_weight(self): + if self.pretrained is not None: + utils.load_entire_model(self, self.pretrained) + + +class ISAHead(nn.Layer): + """ + The ISAHead. + + Args: + num_classes (int): The unique number of target classes. + in_channels (tuple): The number of input channels. + isa_channels (int): The channels of ISA Module. + down_factor (tuple): Divide the height and width dimension to (Ph, PW) groups. + enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. 
+ """ + def __init__(self, num_classes, in_channels, isa_channels, down_factor, enable_auxiliary_loss): + super(ISAHead, self).__init__() + self.in_channels = in_channels[-1] + inter_channels = self.in_channels // 4 + self.down_factor = down_factor + self.enable_auxiliary_loss = enable_auxiliary_loss + self.in_conv = layers.ConvBNReLU(self.in_channels, inter_channels, 3, bias_attr=False) + self.global_relation = SelfAttentionBlock(inter_channels, isa_channels) + self.local_relation = SelfAttentionBlock(inter_channels, isa_channels) + self.out_conv = layers.ConvBNReLU(inter_channels * 2, inter_channels, 1, bias_attr=False) + self.cls = nn.Sequential(nn.Dropout2D(p=0.1), nn.Conv2D(inter_channels, num_classes, 1)) + self.aux = nn.Sequential( + layers.ConvBNReLU(in_channels=1024, out_channels=256, kernel_size=3, bias_attr=False), + nn.Dropout2D(p=0.1), + nn.Conv2D(256, num_classes, 1)) + + def forward(self, feat_list): + C3, C4 = feat_list + x = self.in_conv(C4) + n, c, h, w = x.shape + P_h, P_w = self.down_factor + Q_h, Q_w = math.ceil(h / P_h), math.ceil(w / P_w) + pad_h, pad_w = Q_h * P_h - h, Q_w * P_w - w + if pad_h > 0 or pad_w > 0: + padding = [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2] + feat = F.pad(x, padding) + else: + feat = x + + feat = feat.reshape([n, c, Q_h, P_h, Q_w, P_w]) + feat = feat.transpose([0, 3, 5, 1, 2, 4]).reshape([-1, c, Q_h, Q_w]) + feat = self.global_relation(feat) + + feat = feat.reshape([n, P_h, P_w, c, Q_h, Q_w]) + feat = feat.transpose([0, 4, 5, 3, 1, 2]).reshape([-1, c, P_h, P_w]) + feat = self.local_relation(feat) + + feat = feat.reshape([n, Q_h, Q_w, c, P_h, P_w]) + feat = feat.transpose([0, 3, 1, 4, 2, 5]).reshape([n, c, P_h * Q_h, P_w * Q_w]) + if pad_h > 0 or pad_w > 0: + feat = feat[:, :, pad_h // 2:pad_h // 2 + h, pad_w // 2:pad_w // 2 + w] + + feat = self.out_conv(paddle.concat([feat, x], axis=1)) + output = self.cls(feat) + + if self.enable_auxiliary_loss: + auxout = self.aux(C3) + return [output, auxout] 
+ else: + return [output] + + +class SelfAttentionBlock(layers.AttentionBlock): + """General self-attention block/non-local block. + + Args: + in_channels (int): Input channels of key/query feature. + channels (int): Output channels of key/query transform. + """ + def __init__(self, in_channels, channels): + super(SelfAttentionBlock, self).__init__( + key_in_channels=in_channels, + query_in_channels=in_channels, + channels=channels, + out_channels=in_channels, + share_key_query=False, + query_downsample=None, + key_downsample=None, + key_query_num_convs=2, + key_query_norm=True, + value_out_num_convs=1, + value_out_norm=False, + matmul_norm=True, + with_out=False) + + self.output_project = self.build_project( + in_channels, + in_channels, + num_convs=1, + use_conv_module=True) + + def forward(self, x): + context = super(SelfAttentionBlock, self).forward(x, x) + return self.output_project(context) + From 03ae0ae5ead7da86b67a82dd35718b3d9cf8adfb Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> Date: Wed, 6 Jan 2021 21:10:00 +0800 Subject: [PATCH 21/52] add attention block --- paddleseg/models/layers/__init__.py | 1 + paddleseg/models/layers/attention.py | 131 +++++++++++++++++++++++++++ 2 files changed, 132 insertions(+) create mode 100644 paddleseg/models/layers/attention.py diff --git a/paddleseg/models/layers/__init__.py b/paddleseg/models/layers/__init__.py index 27fbbba370..b334c88476 100644 --- a/paddleseg/models/layers/__init__.py +++ b/paddleseg/models/layers/__init__.py @@ -15,3 +15,4 @@ from .layer_libs import ConvBNReLU, ConvBN, SeparableConvBNReLU, DepthwiseConvBN, AuxLayer, SyncBatchNorm from .activation import Activation from .pyramid_pool import ASPPModule, PPModule +from .attention import AttentionBlock diff --git a/paddleseg/models/layers/attention.py b/paddleseg/models/layers/attention.py new file mode 100644 index 0000000000..dabcdd358c --- /dev/null +++ b/paddleseg/models/layers/attention.py @@ -0,0 +1,131 @@ 
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +import paddle.nn as nn +import paddle.nn.functional as F + +from paddleseg.models import layers + + +class AttentionBlock(nn.Layer): + """General self-attention block/non-local block. + + The original article refers to https://arxiv.org/abs/1706.03762. + Args: + key_in_channels (int): Input channels of key feature. + query_in_channels (int): Input channels of query feature. + channels (int): Output channels of key/query transform. + out_channels (int): Output channels. + share_key_query (bool): Whether to share projection weight between key + and query projection. + query_downsample (nn.Module): Query downsample module. + key_downsample (nn.Module): Key downsample module. + key_query_num_convs (int): Number of convs for key/query projection. + value_out_num_convs (int): Number of convs for value projection. + key_query_norm (bool): Whether to use BN for key/query projection. + value_out_norm (bool): Whether to use BN for value projection. + matmul_norm (bool): Whether to normalize attention map with sqrt of + channels. + with_out (bool): Whether to use out projection.
+ """ + def __init__(self, key_in_channels, query_in_channels, channels, + out_channels, share_key_query, query_downsample, + key_downsample, key_query_num_convs, value_out_num_convs, + key_query_norm, value_out_norm, matmul_norm, with_out): + super(AttentionBlock, self).__init__() + if share_key_query: + assert key_in_channels == query_in_channels + self.key_in_channels = key_in_channels + self.query_in_channels = query_in_channels + self.out_channels = out_channels + self.channels = channels + self.share_key_query = share_key_query + self.key_project = self.build_project(key_in_channels, + channels, + num_convs=key_query_num_convs, + use_conv_module=key_query_norm) + if share_key_query: + self.query_project = self.key_project + else: + self.query_project = self.build_project( + query_in_channels, + channels, + num_convs=key_query_num_convs, + use_conv_module=key_query_norm) + + self.value_project = self.build_project( + key_in_channels, + channels if with_out else out_channels, + num_convs=value_out_num_convs, + use_conv_module=value_out_norm) + + if with_out: + self.out_project = self.build_project( + channels, + out_channels, + num_convs=value_out_num_convs, + use_conv_module=value_out_norm) + else: + self.out_project = None + + self.query_downsample = query_downsample + self.key_downsample = key_downsample + self.matmul_norm = matmul_norm + + def build_project(self, in_channels, channels, num_convs, use_conv_module): + if use_conv_module: + convs = [layers.ConvBNReLU(in_channels=in_channels, out_channels=channels, kernel_size=1, bias_attr=False)] + for _ in range(num_convs - 1): + convs.append( + layers.ConvBNReLU(in_channels=channels, out_channels=channels, kernel_size=1, bias_attr=False)) + else: + convs = [nn.Conv2D(in_channels, channels, 1)] + for _ in range(num_convs - 1): + convs.append(nn.Conv2D(channels, channels, 1)) + + if len(convs) > 1: + convs = nn.Sequential(*convs) + else: + convs = convs[0] + return convs + + def forward(self, query_feats, 
key_feats): + b, c, h, w = query_feats.shape + query = self.query_project(query_feats) + if self.query_downsample is not None: + query = self.query_downsample(query) + query = query.reshape([*query.shape[:2], -1]).transpose([0, 2, 1]) + + key = self.key_project(key_feats) + value = self.value_project(key_feats) + + if self.key_downsample is not None: + key = self.key_downsample(key) + value = self.key_downsample(value) + + key = key.reshape([*key.shape[:2], -1]) + value = value.reshape([*value.shape[:2], -1]).transpose([0, 2, 1]) + sim_map = paddle.matmul(query, key) + if self.matmul_norm: + sim_map = (self.channels ** -0.5) * sim_map + sim_map = F.softmax(sim_map, axis=-1) + + context = paddle.matmul(sim_map, value) + context = paddle.transpose(context, [0, 2, 1]) + context = paddle.reshape(context, [b, -1, *query_feats.shape[2:]]) + + if self.out_project is not None: + context = self.out_project(context) + return context \ No newline at end of file From 497ccb98c011ffe643663a8549cc68996134d01f Mon Sep 17 00:00:00 2001 From: wuzewu Date: Mon, 11 Jan 2021 21:22:08 +0800 Subject: [PATCH 22/52] Update model links --- configs/bisenet/README.md | 2 +- configs/danet/README.md | 4 ++-- configs/emanet/README.md | 9 ++++----- configs/isanet/README.md | 8 ++++---- configs/ocrnet/README.md | 8 ++++---- 5 files changed, 15 insertions(+), 16 deletions(-) diff --git a/configs/bisenet/README.md b/configs/bisenet/README.md index 147de8e12c..e5640308d6 100644 --- a/configs/bisenet/README.md +++ b/configs/bisenet/README.md @@ -10,4 +10,4 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|BiSeNetv2|-|1024x1024|160000|73.19%|74.19%|74.43%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/bisenet_cityscapes_1024x1024_160k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/bisenet_cityscapes_1024x1024_160k/train.log) \| 
[vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3ccfaff613de769eadb76f8379afffa5)| +|BiSeNetv2|-|1024x1024|160000|73.19%|74.19%|74.43%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/bisenet_cityscapes_1024x1024_160k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/bisenet_cityscapes_1024x1024_160k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3ccfaff613de769eadb76f8379afffa5)| diff --git a/configs/danet/README.md b/configs/danet/README.md index af47879402..f9474a70e6 100644 --- a/configs/danet/README.md +++ b/configs/danet/README.md @@ -10,10 +10,10 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|DANet|ResNet50_OS8|1024x512|80000|80.27%|-|-|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6caecf1222a0cc9124a376284a402cbe)| +|DANet|ResNet50_OS8|1024x512|80000|80.27%|-|-|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6caecf1222a0cc9124a376284a402cbe)| ### Pascal VOC 2012 + Aug | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|DANet|ResNet50_OS8|1024x512|40000|78.55%|-|-|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/train.log) \| 
[vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=51a403a54302bc81dd5ec0310a6d50ba)| +|DANet|ResNet50_OS8|1024x512|40000|78.55%|-|-|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=51a403a54302bc81dd5ec0310a6d50ba)| diff --git a/configs/emanet/README.md b/configs/emanet/README.md index 3dd4d61f1b..cfe34bf2f3 100644 --- a/configs/emanet/README.md +++ b/configs/emanet/README.md @@ -11,13 +11,12 @@ Expectation-Maximization Attention Networks for Semantic Segmentation. ICCV 2019 | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | |-|-|-|-|-|-|-|-| -|EMANet|ResNet50_OS8|1024x512|80000|77.58%|77.98%|78.23%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e053a214d60822d6e65445b8614d052)| -|EMANet|ResNet101_OS8|769x769|80000|79.42%|79.83%|80.33%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| +|EMANet|ResNet50_OS8|1024x512|80000|77.58%|77.98%|78.23%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| 
[vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e053a214d60822d6e65445b8614d052)| +|EMANet|ResNet101_OS8|769x769|80000|79.42%|79.83%|80.33%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| ### Pascal VOC 2012 + Aug | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|EMANet|ResNet50_OS8|512x512|40000|78.79%|78.90%|79.17%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e60b80b984a71f3d2b83b8a746a819c)| -|EMANet|ResNet101_OS8|512x512|40000|79.73%|79.97%| 80.67%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=f33479772409766dbc40b5f031cbdb1a)| - +|EMANet|ResNet50_OS8|512x512|40000|78.79%|78.90%|79.17%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e60b80b984a71f3d2b83b8a746a819c)| +|EMANet|ResNet101_OS8|512x512|40000|79.73%|79.97%| 80.67%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) 
\| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=f33479772409766dbc40b5f031cbdb1a)| diff --git a/configs/isanet/README.md b/configs/isanet/README.md index 4c55fb1239..3e72f66d64 100644 --- a/configs/isanet/README.md +++ b/configs/isanet/README.md @@ -10,12 +10,12 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|ISANet|ResNet50_OS8|769x769|80000|79.03%|79.43%|79.52%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet50_os8_cityscapes_769x769_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet50_os8_cityscapes_769x769_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=ab7cc0627fdbf1e210557c33d94d2e8c)| -|ISANet|ResNet101_OS8|769x769|80000|80.10%|80.30%|80.26%|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet101_os8_cityscapes_769x769_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/isanet_resnet101_os8_cityscapes_769x769_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=76366b80293c3ac2374d981b4573eb52)| +|ISANet|ResNet50_OS8|769x769|80000|79.03%|79.43%|79.52%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/isanet_resnet50_os8_cityscapes_769x769_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/isanet_resnet50_os8_cityscapes_769x769_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=ab7cc0627fdbf1e210557c33d94d2e8c)| +|ISANet|ResNet101_OS8|769x769|80000|80.10%|80.30%|80.26%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/isanet_resnet101_os8_cityscapes_769x769_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/isanet_resnet101_os8_cityscapes_769x769_80k/train.log) \| 
[vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=76366b80293c3ac2374d981b4573eb52)| ### Pascal VOC 2012 + Aug | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | |-|-|-|-|-|-|-|-| -|ISANet|ResNet50_OS8|512x512|40000|79.69%|79.93%|80.53%|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/isanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/isanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=84af8df983e48f1a0c89154a26f55032)| -|ISANet|ResNet101_OS8|512x512|40000|79.57%|79.69%|80.01%|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6874531f0adbfc72f22fb816bb231a46)| \ No newline at end of file +|ISANet|ResNet50_OS8|512x512|40000|79.69%|79.93%|80.53%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/isanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/isanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=84af8df983e48f1a0c89154a26f55032)| +|ISANet|ResNet101_OS8|512x512|40000|79.57%|79.69%|80.01%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6874531f0adbfc72f22fb816bb231a46)| diff --git a/configs/ocrnet/README.md b/configs/ocrnet/README.md index 6a89caaa6e..b82820c68b 100644 --- a/configs/ocrnet/README.md +++ 
b/configs/ocrnet/README.md @@ -10,12 +10,12 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| -|OCRNet|HRNet_w18|1024x512|160000|80.67%|81.21%|81.30%|[model](https://paddleseg.bj.bcebos.com/dygraph/ocrnet_hrnetw18_cityscapes_1024x512_160k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/ocrnet_hrnetw18_cityscapes_1024x512_160k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=901a5d0a78b71ca56f06002f05547837)| -|OCRNet|HRNet_w48|1024x512|160000|82.15%|82.59%|82.85%|[model](https://paddleseg.bj.bcebos.com/dygraph/ocrnet_hrnetw48_cityscapes_1024x512_160k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/ocrnet_hrnetw48_cityscapes_1024x512_160k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=176bf6ca4d89957ffe62ac7c30fcd039) | +|OCRNet|HRNet_w18|1024x512|160000|80.67%|81.21%|81.30%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/ocrnet_hrnetw18_cityscapes_1024x512_160k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/ocrnet_hrnetw18_cityscapes_1024x512_160k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=901a5d0a78b71ca56f06002f05547837)| +|OCRNet|HRNet_w48|1024x512|160000|82.15%|82.59%|82.85%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/ocrnet_hrnetw48_cityscapes_1024x512_160k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/ocrnet_hrnetw48_cityscapes_1024x512_160k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=176bf6ca4d89957ffe62ac7c30fcd039) | ### Pascal VOC 2012 + Aug | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| 
-|OCRNet|HRNet_w18|1024x512|40000|75.76%|76.39%|77.95%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw18_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw18_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=74707b83bc14b7d236146ac4ceaf6c9c)| -|OCRNet|HRNet_w48|1024x512|40000|79.98%|80.47%|81.02%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw48_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw48_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=8f695743c799f8966a72973f3259fad4) | +|OCRNet|HRNet_w18|512x512|40000|75.76%|76.39%|77.95%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw18_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw18_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=74707b83bc14b7d236146ac4ceaf6c9c)| +|OCRNet|HRNet_w48|512x512|40000|79.98%|80.47%|81.02%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw48_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/ocrnet_hrnetw48_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=8f695743c799f8966a72973f3259fad4) | From caee81484a5011169015ec2c094792b4914803a0 Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Thu, 14 Jan 2021 19:22:35 +0800 Subject: [PATCH 23/52] rm prepare_context (#784) --- paddleseg/core/train.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/paddleseg/core/train.py b/paddleseg/core/train.py index 32298b22e5..3c372ee4f2 100644 --- a/paddleseg/core/train.py +++ b/paddleseg/core/train.py @@ 
-92,8 +92,7 @@ def train(model, if nranks > 1: # Initialize parallel training environment. paddle.distributed.init_parallel_env() - strategy = paddle.distributed.prepare_context() - ddp_model = paddle.DataParallel(model, strategy) + ddp_model = paddle.DataParallel(model) batch_sampler = paddle.io.DistributedBatchSampler( train_dataset, batch_size=batch_size, shuffle=True, drop_last=True) From 866254ad28d47b2d91b726eb0e46a732c70a299a Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 09:54:50 +0800 Subject: [PATCH 24/52] Update README.md --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index d8fc04fe15..304fe3b759 100644 --- a/README.md +++ b/README.md @@ -99,3 +99,17 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml * Thanks [jm12138](https://github.com/jm12138) for contributing U2-Net. * Thanks [zjhellofss](https://github.com/zjhellofss) (Fu Shenshen) for contributing Attention U-Net, and Dice Loss. * Thanks [liuguoyu666](https://github.com/liguoyu666) for contributing U-Net++. 
+ +## Citation +If you find our project useful in your research, please consider citing: + +```latex +@misc{liu2021paddleseg, + title={PaddleSeg: A High-Efficient Development Toolkit for Image Segmentation}, + author={Yi Liu and Lutao Chu and Guowei Chen and Zewu Wu and Zeyu Chen and Baohua Lai and Yuying Hao}, + year={2021}, + eprint={2101.06175}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` From 736a56c9aaaad430506ce0a4ae91ccf0c2db3630 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 09:59:18 +0800 Subject: [PATCH 25/52] Update README.md --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 304fe3b759..33302c952b 100644 --- a/README.md +++ b/README.md @@ -112,4 +112,11 @@ If you find our project useful in your research, please consider citing: archivePrefix={arXiv}, primaryClass={cs.CV} } + +@misc{paddleseg2019, + title={PaddleSeg, End-to-end image segmentation kit based on PaddlePaddle}, + author={PaddlePaddle Authors}, + howpublished = {\url{https://github.com/PaddlePaddle/PaddleSeg}}, + year={2019} +} ``` From b27ba24a8d80e841222abeceed8a230cfd2de603 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 10:01:47 +0800 Subject: [PATCH 26/52] Update README_CN.md --- README_CN.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/README_CN.md b/README_CN.md index 745a89be48..4da1e9c1af 100644 --- a/README_CN.md +++ b/README_CN.md @@ -100,3 +100,25 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml * 非常感谢[jm12138](https://github.com/jm12138)贡献U2-Net模型。 * 非常感谢[zjhellofss](https://github.com/zjhellofss)(傅莘莘)贡献Attention U-Net模型,和Dice loss损失函数。 * 非常感谢[liuguoyu666](https://github.com/liguoyu666)贡献U-Net++模型。 + +## 学术引用 + +假如我们的项目对你的学术有帮助,请考虑引用: + +```latex +@misc{liu2021paddleseg, + title={PaddleSeg: A High-Efficient Development Toolkit for Image Segmentation}, + author={Yi Liu and Lutao Chu and Guowei Chen and Zewu Wu and Zeyu Chen and Baohua 
Lai and Yuying Hao}, + year={2021}, + eprint={2101.06175}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} + +@misc{paddleseg2019, + title={PaddleSeg, End-to-end image segmentation kit based on PaddlePaddle}, + author={PaddlePaddle Authors}, + howpublished = {\url{https://github.com/PaddlePaddle/PaddleSeg}}, + year={2019} +} +``` From 4e673fdfd31ba3d535e7fb8015dc43ce82bd55a8 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 10:03:26 +0800 Subject: [PATCH 27/52] Add Citation --- README.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/README.md b/README.md index 1ef242ddf3..72966c3fdb 100644 --- a/README.md +++ b/README.md @@ -101,3 +101,24 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml * Thanks [jm12138](https://github.com/jm12138) for contributing U2-Net. * Thanks [zjhellofss](https://github.com/zjhellofss) (Fu Shenshen) for contributing Attention U-Net, and Dice Loss. * Thanks [liuguoyu666](https://github.com/liguoyu666) for contributing U-Net++. 
+ +## Citation +If you find our project useful in your research, please consider citing: + +```latex +@misc{liu2021paddleseg, + title={PaddleSeg: A High-Efficient Development Toolkit for Image Segmentation}, + author={Yi Liu and Lutao Chu and Guowei Chen and Zewu Wu and Zeyu Chen and Baohua Lai and Yuying Hao}, + year={2021}, + eprint={2101.06175}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} + +@misc{paddleseg2019, + title={PaddleSeg, End-to-end image segmentation kit based on PaddlePaddle}, + author={PaddlePaddle Authors}, + howpublished = {\url{https://github.com/PaddlePaddle/PaddleSeg}}, + year={2019} +} +``` From c5f76e7d80632a5e4d97f8579cc48a4b9e2dd342 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 10:03:59 +0800 Subject: [PATCH 28/52] Update README_CN.md --- README_CN.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/README_CN.md b/README_CN.md index c040acfb8f..3e9c0e1a57 100644 --- a/README_CN.md +++ b/README_CN.md @@ -98,3 +98,25 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml * 非常感谢[jm12138](https://github.com/jm12138)贡献U2-Net模型。 * 非常感谢[zjhellofss](https://github.com/zjhellofss)(傅莘莘)贡献Attention U-Net模型,和Dice loss损失函数。 * 非常感谢[liuguoyu666](https://github.com/liguoyu666)贡献U-Net++模型。 + +## 学术引用 + +假如我们的项目对你的学术有帮助,请考虑引用: + +```latex +@misc{liu2021paddleseg, + title={PaddleSeg: A High-Efficient Development Toolkit for Image Segmentation}, + author={Yi Liu and Lutao Chu and Guowei Chen and Zewu Wu and Zeyu Chen and Baohua Lai and Yuying Hao}, + year={2021}, + eprint={2101.06175}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} + +@misc{paddleseg2019, + title={PaddleSeg, End-to-end image segmentation kit based on PaddlePaddle}, + author={PaddlePaddle Authors}, + howpublished = {\url{https://github.com/PaddlePaddle/PaddleSeg}}, + year={2019} +} +``` From f400b02e45e80940374dae3f2ba11b9080ded109 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 10:05:43 +0800 Subject: 
[PATCH 29/52] Update README_CN.md --- README_CN.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_CN.md b/README_CN.md index 3e9c0e1a57..9e8a273834 100644 --- a/README_CN.md +++ b/README_CN.md @@ -101,7 +101,7 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml ## 学术引用 -假如我们的项目对你的学术有帮助,请考虑引用: +如果我们的项目在学术上帮助到你,请考虑以下引用: ```latex @misc{liu2021paddleseg, From 1c22bb5e94cfffe49b0a8f83f1410c436dadf7ab Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 18 Jan 2021 10:06:12 +0800 Subject: [PATCH 30/52] Update README_CN.md --- README_CN.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README_CN.md b/README_CN.md index 4da1e9c1af..16e7519482 100644 --- a/README_CN.md +++ b/README_CN.md @@ -103,7 +103,7 @@ python train.py --config configs/quick_start/bisenet_optic_disc_512x512_1k.yml ## 学术引用 -假如我们的项目对你的学术有帮助,请考虑引用: +如果我们的项目在学术上帮助到你,请考虑以下引用: ```latex @misc{liu2021paddleseg, From 1229e0a6e3742e4a34a8e0da102ba48cc30dc057 Mon Sep 17 00:00:00 2001 From: "shaohua.zhang" Date: Mon, 18 Jan 2021 10:43:46 +0800 Subject: [PATCH 31/52] remove the deprecated param (#785) From a7bd77eb8d4aed33004fd84afde7123f40c30027 Mon Sep 17 00:00:00 2001 From: wuzewu Date: Mon, 18 Jan 2021 14:26:36 +0800 Subject: [PATCH 32/52] Add fcn config --- legacy/configs/fcn.yaml | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 legacy/configs/fcn.yaml diff --git a/legacy/configs/fcn.yaml b/legacy/configs/fcn.yaml new file mode 100644 index 0000000000..726350b734 --- /dev/null +++ b/legacy/configs/fcn.yaml @@ -0,0 +1,39 @@ +# 数据集配置 +DATASET: + DATA_DIR: "./dataset/optic_disc_seg/" + NUM_CLASSES: 2 + TEST_FILE_LIST: "./dataset/optic_disc_seg/test_list.txt" + TRAIN_FILE_LIST: "./dataset/optic_disc_seg/train_list.txt" + VAL_FILE_LIST: "./dataset/optic_disc_seg/val_list.txt" + VIS_FILE_LIST: "./dataset/optic_disc_seg/test_list.txt" + +# 预训练模型配置 +MODEL: + MODEL_NAME: "hrnet" + DEFAULT_NORM_TYPE: 
"bn" + HRNET: + STAGE2: + NUM_CHANNELS: [18, 36] + STAGE3: + NUM_CHANNELS: [18, 36, 72] + STAGE4: + NUM_CHANNELS: [18, 36, 72, 144] + +# 其他配置 +TRAIN_CROP_SIZE: (512, 512) +EVAL_CROP_SIZE: (512, 512) +AUG: + AUG_METHOD: "unpadding" + FIX_RESIZE_SIZE: (512, 512) +BATCH_SIZE: 1 +TRAIN: + PRETRAINED_MODEL_DIR: "./pretrained_model/hrnet_w18_bn_cityscapes/" + MODEL_SAVE_DIR: "./saved_model/hrnet_optic/" + SNAPSHOT_EPOCH: 1 +TEST: + TEST_MODEL: "./saved_model/hrnet_optic/final" +SOLVER: + NUM_EPOCHS: 10 + LR: 0.001 + LR_POLICY: "poly" + OPTIMIZER: "adam" From 3d11f6dd89677f85e910365dab46b0a55db0d338 Mon Sep 17 00:00:00 2001 From: taixiurong Date: Mon, 18 Jan 2021 15:34:40 +0800 Subject: [PATCH 33/52] add fcn doc with xpu --- legacy/docs/train_on_xpu.md | 38 +++++++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/legacy/docs/train_on_xpu.md b/legacy/docs/train_on_xpu.md index d7b830b73f..310b7fe814 100644 --- a/legacy/docs/train_on_xpu.md +++ b/legacy/docs/train_on_xpu.md @@ -10,16 +10,15 @@ * 数据准备(在legacy目录下): ```shell -python pretrained_model/download_model.py deeplabv3p_xception65_bn_coco +python dataset/download_optic.py ``` * 预训练模型准备(在legacy目录下): ```shell -python dataset/download_optic.py +python pretrained_model/download_model.py deeplabv3p_xception65_bn_coco ``` - * 执行训练(在legacy目录下): ```shell @@ -30,16 +29,15 @@ python pdseg/train.py --cfg configs/deeplabv3p_xception65_optic_kunlun.yaml --us * 数据准备(在legacy目录下): ```shell -python pretrained_model/download_model.py unet_bn_coco +python dataset/download_optic.py ``` * 预训练模型准备(在legacy目录下): ```shell -python dataset/download_optic.py +python pretrained_model/download_model.py unet_bn_coco ``` - * 执行训练(在legacy目录下): 因为昆仑1的内存不够,在用昆仑1训练的时候,需要把./configs/unet_optic.yaml 里面的 BATCH_SIZE @@ -54,3 +52,31 @@ export XPUSIM_DEVICE_MODEL=KUNLUN1 python pdseg/train.py --use_xpu --cfg configs/unet_optic.yaml --use_mpio --log_steps 1 --do_eval ``` +### FCN +* 数据准备(在legacy目录下): + +```shell +python 
dataset/download_optic.py +``` + +* 预训练模型准备(在legacy目录下): + +```shell +python pretrained_model/download_model.py hrnet_w18_bn_cityscapes +``` + +* 执行训练(在legacy目录下): + +因为昆仑1的内存不够,在用昆仑1训练的时候,需要把./configs/fcn.yaml 里面的 BATCH_SIZE +修改为 1 + +```shell +# 指定xpu的卡号 (以0号卡为例) +export FLAGS_selected_xpus=0 +# 执行xpu产品名称 这里指定昆仑1 +export XPUSIM_DEVICE_MODEL=KUNLUN1 +# 训练 +export PYTHONPATH=`pwd` +python3 pdseg/train.py --cfg configs/fcn.yaml --use_mpio --log_steps 1 --do_eval +``` + From 75d7a50bdffa822502b943a826e99e78828ff45f Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> Date: Tue, 19 Jan 2021 19:57:25 +0800 Subject: [PATCH 34/52] add voc_context (#718) --- README.md | 2 +- README_CN.md | 2 +- configs/_base_/pascal_context.yml | 50 ++++++++++ docs/data_prepare.md | 24 +++++ paddleseg/datasets/__init__.py | 1 + paddleseg/datasets/pascal_context.py | 77 +++++++++++++++ tools/convert_voc2010.py | 135 +++++++++++++++++++++++++++ 7 files changed, 289 insertions(+), 2 deletions(-) create mode 100644 configs/_base_/pascal_context.yml create mode 100644 paddleseg/datasets/pascal_context.py create mode 100644 tools/convert_voc2010.py diff --git a/README.md b/README.md index 72966c3fdb..09f88e7e7a 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ Welcome to PaddleSeg! 
PaddleSeg is an end-to-end image segmentation development - [x] Cityscapes - [x] Pascal VOC - [x] ADE20K -- [ ] Pascal Context +- [x] Pascal Context - [ ] COCO stuff ## Installation diff --git a/README_CN.md b/README_CN.md index 9e8a273834..8b95895b4e 100644 --- a/README_CN.md +++ b/README_CN.md @@ -47,7 +47,7 @@ PaddleSeg是基于飞桨[PaddlePaddle](https://www.paddlepaddle.org.cn)开发的 - [x] Cityscapes - [x] Pascal VOC - [x] ADE20K -- [ ] Pascal Context +- [x] Pascal Context - [ ] COCO stuff ## 安装 diff --git a/configs/_base_/pascal_context.yml b/configs/_base_/pascal_context.yml new file mode 100644 index 0000000000..85f70387b3 --- /dev/null +++ b/configs/_base_/pascal_context.yml @@ -0,0 +1,50 @@ +batch_size: 4 +iters: 40000 + +train_dataset: + type: PascalContext + dataset_root: data/VOC2010/ + transforms: + - type: ResizeStepScaling + min_scale_factor: 0.5 + max_scale_factor: 2.0 + scale_step_size: 0.25 + - type: RandomPaddingCrop + crop_size: [520, 520] + - type: RandomHorizontalFlip + - type: RandomDistort + brightness_range: 0.4 + contrast_range: 0.4 + saturation_range: 0.4 + - type: Normalize + mode: train + +val_dataset: + type: PascalContext + dataset_root: data/VOC2010/ + transforms: + - type: Padding + target_size: [520, 520] + - type: Normalize + mode: val + + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 4.0e-5 + +learning_rate: + value: 0.001 + decay: + type: poly + power: 0.9 + end_lr: 0.0 + +loss: + types: + - type: CrossEntropyLoss + coef: [1] + + + diff --git a/docs/data_prepare.md b/docs/data_prepare.md index cfba4e3481..c641a59e74 100644 --- a/docs/data_prepare.md +++ b/docs/data_prepare.md @@ -48,6 +48,30 @@ export PYTHONPATH=`pwd` [ADE20K](http://sceneparsing.csail.mit.edu/)由MIT发布的可用于场景感知、分割和多物体识别等多种任务的数据集。 其涵盖了150个语义类别,包括训练集20210张,验证集2000张。 +## 关于Pascal Context数据集 +Pascal Context是基于PASCAL VOC 2010数据集额外标注的像素级别的语义分割数据集。我们提供的转换脚本支持59个类别,其中训练集4996, 验证集5104张. 
+ + +在使用Pascal Context数据集前, 请先下载[VOC2010](http://host.robots.ox.ac.uk/pascal/VOC/voc2010/VOCtrainval_03-May-2010.tar),随后自行前往[Pascal-Context主页](https://www.cs.stanford.edu/~roozbeh/pascal-context/)下载数据集及[标注](https://codalabuser.blob.core.windows.net/public/trainval_merged.json) +我们建议您将数据集存放于`PaddleSeg/data`中,以便与我们配置文件完全兼容。数据集下载后请组织成如下结构: + + VOC2010 + | + |--Annotations + | + |--ImageSets + | + |--SegmentationClass + | + |--JPEGImages + | + |--SegmentationObject + | + |--trainval_merged.json + +其中,标注图像的标签从1,2依次取值,不可间隔。若有需要忽略的像素,则按0进行标注。在使用Pascal Context数据集时,需要安装[Detail](https://github.com/zhanghang1989/detail-api). + + ## 自定义数据集 如果您需要使用自定义数据集进行训练,请按照以下步骤准备数据. diff --git a/paddleseg/datasets/__init__.py b/paddleseg/datasets/__init__.py index c37e82ddaf..31dc494fbd 100644 --- a/paddleseg/datasets/__init__.py +++ b/paddleseg/datasets/__init__.py @@ -17,3 +17,4 @@ from .voc import PascalVOC from .ade import ADE20K from .optic_disc_seg import OpticDiscSeg +from .pascal_context import PascalContext diff --git a/paddleseg/datasets/pascal_context.py b/paddleseg/datasets/pascal_context.py new file mode 100644 index 0000000000..2361507b0b --- /dev/null +++ b/paddleseg/datasets/pascal_context.py @@ -0,0 +1,77 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +from PIL import Image +from paddleseg.datasets import Dataset +from paddleseg.cvlibs import manager +from paddleseg.transforms import Compose + + +@manager.DATASETS.add_component +class PascalContext(Dataset): + """ + PascalVOC2010 dataset `http://host.robots.ox.ac.uk/pascal/VOC/`. + If you want to use pascal context dataset, please run the convert_voc2010.py in tools firstly. + + Args: + transforms (list): Transforms for image. + dataset_root (str): The dataset directory. Default: None + mode (str): Which part of dataset to use. it is one of ('train', 'trainval', 'context', 'val'). + If you want to set mode to 'context', please make sure the dataset have been augmented. Default: 'train'. + """ + + def __init__(self, transforms=None, dataset_root=None, mode='train'): + self.dataset_root = dataset_root + self.transforms = Compose(transforms) + mode = mode.lower() + self.mode = mode + self.file_list = list() + self.num_classes = 59 + self.ignore_index = 255 + + if mode not in ['train', 'trainval', 'val']: + raise ValueError( + "`mode` should be one of ('train', 'trainval', 'val') in PascalContext dataset, but got {}." 
+ .format(mode)) + + if self.transforms is None: + raise ValueError("`transforms` is necessary, but it is None.") + if self.dataset_root is None: + raise ValueError( + "The dataset is not Found or the folder structure is nonconfoumance.") + + image_set_dir = os.path.join(self.dataset_root, 'ImageSets','Segmentation') + + if mode == 'train': + file_path = os.path.join(image_set_dir, 'train_context.txt') + elif mode == 'val': + file_path = os.path.join(image_set_dir, 'val_context.txt') + elif mode == 'trainval': + file_path = os.path.join(image_set_dir, 'trainval_context.txt') + if not os.path.exists(file_path): + raise RuntimeError( + "PASCAL-Context annotations are not ready, " + "Please make sure voc_context.py has been properly run.") + + img_dir = os.path.join(self.dataset_root, 'JPEGImages') + label_dir = os.path.join(self.dataset_root, 'Context') + + with open(file_path, 'r') as f: + for line in f: + line = line.strip() + image_path = os.path.join(img_dir, ''.join([line, '.jpg'])) + label_path = os.path.join(label_dir, ''.join([line, '.png'])) + self.file_list.append([image_path, label_path]) diff --git a/tools/convert_voc2010.py b/tools/convert_voc2010.py new file mode 100644 index 0000000000..779536f03c --- /dev/null +++ b/tools/convert_voc2010.py @@ -0,0 +1,135 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +File: convert_voc2010.py +This file is based on https://www.cs.stanford.edu/~roozbeh/pascal-context/ to generate PASCAL-Context Dataset. +Before running, you should download the PASCAL VOC2010 from http://host.robots.ox.ac.uk/pascal/VOC/voc2010/VOCtrainval_03-May-2010.tar, PASCAL-Context Dataset from https://www.cs.stanford.edu/~roozbeh/pascal-context/ and annotation file from https://codalabuser.blob.core.windows.net/public/trainval_merged.json. Then, make the folder +structure as follow: +VOC2010 +| +|--Annotations +| +|--ImageSets +| +|--SegmentationClass +| +|--JPEGImages +| +|--SegmentationObject +| +|--trainval_merged.json +""" + +import os + +import argparse +import tqdm +import numpy as np +from detail import Detail +from PIL import Image + + +def parse_args(): + parser = argparse.ArgumentParser( + description= + 'Generate PASCAL-Context dataset' + ) + parser.add_argument( + '--voc_path', + dest='voc_path', + help='pascal voc path', + type=str) + parser.add_argument( + '--annotation_path', + dest='annotation_path', + help='pascal context annotation path', + type=str) + + return parser.parse_args() + + +class PascalContextGenerator(object): + def __init__(self, voc_path, annotation_path): + self.voc_path = voc_path + self.annotation_path = annotation_path + self.label_dir = os.path.join(self.voc_path, 'Context') + self._image_dir = os.path.join(self.voc_path, 'JPEGImages') + self.annFile = os.path.join(self.annotation_path, 'trainval_merged.json') + + if not os.path.exists(self.annFile): + _download_file(url=JSON_URL, savepath=self.annotation_path, print_progress=True) + + self._mapping = np.sort(np.array([ + 0, 2, 259, 260, 415, 324, 9, 258, 144, 18, 19, 22, + 23, 397, 25, 284, 158, 159, 416, 33, 162, 420, 454, 295, 296, + 427, 44, 45, 46, 308, 59, 440, 445, 31, 232, 65, 354, 424, + 68, 326, 72, 458, 34, 207, 80, 355, 85, 347, 220, 349, 360, + 98, 187, 104, 105, 366, 189, 368, 113, 115])) + self._key = 
np.array(range(len(self._mapping))).astype('uint8') - 1 + + self.train_detail = Detail(self.annFile, self._image_dir, 'train') + self.train_ids = self.train_detail.getImgs() + self.val_detail = Detail(self.annFile, self._image_dir, 'val') + self.val_ids = self.val_detail.getImgs() + + if not os.path.exists(self.label_dir): + os.makedirs(self.label_dir) + + def _class_to_index(self, mask, _mapping, _key): + # assert the values + values = np.unique(mask) + for i in range(len(values)): + assert (values[i] in _mapping) + index = np.digitize(mask.ravel(), _mapping, right=True) + return _key[index].reshape(mask.shape) + + def save_mask(self, img_id, mode): + if mode == 'train': + mask = Image.fromarray(self._class_to_index(self.train_detail.getMask(img_id), _mapping=self._mapping, _key=self._key)) + elif mode == 'val': + mask = Image.fromarray(self._class_to_index(self.val_detail.getMask(img_id), _mapping=self._mapping, _key=self._key)) + filename = img_id['file_name'] + basename, _ = os.path.splitext(filename) + if filename.endswith(".jpg"): + mask_png_name = basename + '.png' + mask.save(os.path.join(self.label_dir, mask_png_name)) + return basename + + def generate_label(self): + + with open(os.path.join(self.voc_path, 'ImageSets/Segmentation/train_context.txt'), 'w') as f: + for img_id in tqdm.tqdm(self.train_ids, desc='train'): + basename = self.save_mask(img_id, 'train') + f.writelines(''.join([basename, '\n'])) + + with open(os.path.join(self.voc_path, 'ImageSets/Segmentation/val_context.txt'), 'w') as f: + for img_id in tqdm.tqdm(self.val_ids, desc='val'): + basename = self.save_mask(img_id, 'val') + f.writelines(''.join([basename, '\n'])) + + with open(os.path.join(self.voc_path, 'ImageSets/Segmentation/trainval_context.txt'), 'w') as f: + for img in tqdm.tqdm(os.listdir(self.label_dir), desc='trainval'): + if img.endswith('.png'): + basename = img.split('.', 1)[0] + f.writelines(''.join([basename, '\n'])) + + +def main(): + args = parse_args() + generator = 
PascalContextGenerator(voc_path=args.voc_path, annotation_path=args.annotation_path) + generator.generate_label() + +if __name__ == '__main__': + main() From a86330dc281e1dd7576472c6943cb9f0c3a1b2f4 Mon Sep 17 00:00:00 2001 From: Zhang Ting Date: Wed, 20 Jan 2021 15:33:14 +0800 Subject: [PATCH 35/52] Calculate and print the average time --- paddleseg/core/train.py | 29 ++++++++++---------- paddleseg/core/val.py | 19 +++++++++---- paddleseg/utils/__init__.py | 2 +- paddleseg/utils/timer.py | 53 ++++++++++++++++--------------------- 4 files changed, 53 insertions(+), 50 deletions(-) diff --git a/paddleseg/core/train.py b/paddleseg/core/train.py index edcdc8afc8..31ea673e86 100644 --- a/paddleseg/core/train.py +++ b/paddleseg/core/train.py @@ -20,7 +20,7 @@ import paddle import paddle.nn.functional as F -from paddleseg.utils import Timer, calculate_eta, resume, logger +from paddleseg.utils import TimeAverager, calculate_eta, resume, logger from paddleseg.core.val import evaluate @@ -112,16 +112,15 @@ def train(model, from visualdl import LogWriter log_writer = LogWriter(save_dir) - timer = Timer() avg_loss = 0.0 avg_loss_list = [] iters_per_epoch = len(batch_sampler) best_mean_iou = -1.0 best_model_iter = -1 - train_reader_cost = 0.0 - train_batch_cost = 0.0 + reader_cost_averager = TimeAverager() + batch_cost_averager = TimeAverager() save_models = deque() - timer.start() + batch_start = time.time() iter = start_iter while iter < iters: @@ -129,7 +128,7 @@ def train(model, iter += 1 if iter > iters: break - train_reader_cost += timer.elapsed_time() + reader_cost_averager.record(time.time() - batch_start) images = data[0] labels = data[1].astype('int64') edges = None @@ -160,24 +159,24 @@ def train(model, else: for i in range(len(loss_list)): avg_loss_list[i] += loss_list[i] - train_batch_cost += timer.elapsed_time() + batch_cost_averager.record( + time.time() - batch_start, + num_samples=batch_size) if (iter) % log_iters == 0 and local_rank == 0: avg_loss /= 
log_iters avg_loss_list = [ l.numpy()[0] / log_iters for l in avg_loss_list ] - avg_train_reader_cost = train_reader_cost / log_iters - avg_train_batch_cost = train_batch_cost / log_iters - train_reader_cost = 0.0 - train_batch_cost = 0.0 remain_iters = iters - iter + avg_train_batch_cost = batch_cost_averager.get_average() + avg_train_reader_cost = reader_cost_averager.get_average() eta = calculate_eta(remain_iters, avg_train_batch_cost) logger.info( - "[TRAIN] epoch={}, iter={}/{}, loss={:.4f}, lr={:.6f}, batch_cost={:.4f}, reader_cost={:.4f} | ETA {}" + "[TRAIN] epoch={}, iter={}/{}, loss={:.4f}, lr={:.6f}, batch_cost={:.4f}, reader_cost={:.5f}, ips={:.4f} samples/sec | ETA {}" .format((iter - 1) // iters_per_epoch + 1, iter, iters, avg_loss, lr, avg_train_batch_cost, - avg_train_reader_cost, eta)) + avg_train_reader_cost, batch_cost_averager.get_ips_average(), eta)) if use_vdl: log_writer.add_scalar('Train/loss', avg_loss, iter) # Record all losses if there are more than 2 losses. @@ -196,6 +195,8 @@ def train(model, avg_train_reader_cost, iter) avg_loss = 0.0 avg_loss_list = [] + reader_cost_averager.reset() + batch_cost_averager.reset() if (iter % save_interval == 0 or iter == iters) and (val_dataset is not None): @@ -233,7 +234,7 @@ def train(model, if use_vdl: log_writer.add_scalar('Evaluate/mIoU', mean_iou, iter) log_writer.add_scalar('Evaluate/Acc', acc, iter) - timer.restart() + batch_start = time.time() # Calculate flops. 
if local_rank == 0: diff --git a/paddleseg/core/val.py b/paddleseg/core/val.py index cdf0a348b9..f6371a1c17 100644 --- a/paddleseg/core/val.py +++ b/paddleseg/core/val.py @@ -15,10 +15,11 @@ import os import numpy as np +import time import paddle import paddle.nn.functional as F -from paddleseg.utils import metrics, Timer, calculate_eta, logger, progbar +from paddleseg.utils import metrics, TimeAverager, calculate_eta, logger, progbar from paddleseg.core import infer np.set_printoptions(suppress=True) @@ -80,10 +81,12 @@ def evaluate(model, logger.info("Start evaluating (total_samples={}, total_iters={})...".format( len(eval_dataset), total_iters)) progbar_val = progbar.Progbar(target=total_iters, verbose=1) - timer = Timer() + reader_cost_averager = TimeAverager() + batch_cost_averager = TimeAverager() + batch_start = time.time() with paddle.no_grad(): for iter, (im, label) in enumerate(loader): - reader_cost = timer.elapsed_time() + reader_cost_averager.record(time.time() - batch_start) label = label.astype('int64') ori_shape = label.shape[-2:] @@ -139,12 +142,18 @@ def evaluate(model, intersect_area_all = intersect_area_all + intersect_area pred_area_all = pred_area_all + pred_area label_area_all = label_area_all + label_area - batch_cost = timer.elapsed_time() - timer.restart() + batch_cost_averager.record( + time.time() - batch_start, + num_samples=len(label)) + batch_cost = batch_cost_averager.get_average() + reader_cost = reader_cost_averager.get_average() if local_rank == 0: progbar_val.update(iter + 1, [('batch_cost', batch_cost), ('reader cost', reader_cost)]) + reader_cost_averager.reset() + batch_cost_averager.reset() + batch_start = time.time() class_iou, miou = metrics.mean_iou(intersect_area_all, pred_area_all, label_area_all) diff --git a/paddleseg/utils/__init__.py b/paddleseg/utils/__init__.py index 4c5dc6d806..d621193545 100644 --- a/paddleseg/utils/__init__.py +++ b/paddleseg/utils/__init__.py @@ -17,5 +17,5 @@ from . 
import metrics from .env import seg_env, get_sys_env from .utils import * -from .timer import Timer, calculate_eta +from .timer import TimeAverager, calculate_eta from . import visualize diff --git a/paddleseg/utils/timer.py b/paddleseg/utils/timer.py index 4478af62c9..f1fcbfa96b 100644 --- a/paddleseg/utils/timer.py +++ b/paddleseg/utils/timer.py @@ -15,37 +15,30 @@ import time -class Timer(object): - """ Simple timer class for measuring time consuming """ - +class TimeAverager(object): def __init__(self): - self._start_time = 0.0 - self._end_time = 0.0 - self._elapsed_time = 0.0 - self._is_running = False - - def start(self): - self._is_running = True - self._start_time = time.time() - - def restart(self): - self.start() - - def stop(self): - self._is_running = False - self._end_time = time.time() - - def elapsed_time(self): - self._end_time = time.time() - self._elapsed_time = self._end_time - self._start_time - if not self.is_running: - return 0.0 - - return self._elapsed_time - - @property - def is_running(self): - return self._is_running + self.reset() + + def reset(self): + self._cnt = 0 + self._total_time = 0 + self._total_samples = 0 + + def record(self, usetime, num_samples=None): + self._cnt += 1 + self._total_time += usetime + if num_samples: + self._total_samples += num_samples + + def get_average(self): + if self._cnt == 0: + return 0 + return self._total_time / float(self._cnt) + + def get_ips_average(self): + if not self._total_samples or self._cnt == 0: + return 0 + return float(self._total_samples) / self._total_time def calculate_eta(remaining_step, speed): From 1febd11df4586aed0bdbadb99b612c9143ce9515 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Mon, 25 Jan 2021 11:46:52 +0800 Subject: [PATCH 36/52] Update README.md --- configs/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/README.md b/configs/README.md index 2ae9a8f2f1..144caebbbf 100644 --- a/configs/README.md +++ b/configs/README.md @@ -47,7 +47,7 @@ > 损失函数 
> * 参数 > * types : 损失函数列表 -> * type : 损失函数类型,目前只支持CrossEntropyLoss +> * type : 损失函数类型,所支持值请参考损失函数库 > * coef : 对应损失函数列表的系数列表 ---- From c53926a1d74847fce91297731be1233bec32f73e Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Tue, 26 Jan 2021 20:24:23 +0800 Subject: [PATCH 37/52] Update requirements.txt --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 37707980fb..f1c765b4b5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ opencv-python tqdm filelock scipy +paddleseg From 80b000200ff68f49cf7c58e0ed0bdb97bb075b77 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Tue, 26 Jan 2021 20:24:44 +0800 Subject: [PATCH 38/52] Update requirements.txt --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 37707980fb..f1c765b4b5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ opencv-python tqdm filelock scipy +paddleseg From dd3e5ee207e637359c9afac309ebdf4e9fe3f32e Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Tue, 26 Jan 2021 20:25:34 +0800 Subject: [PATCH 39/52] Update requirements.txt --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f1c765b4b5..37707980fb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,4 +7,3 @@ opencv-python tqdm filelock scipy -paddleseg From 6af56259480bfc3f3b268044e77738fb90894622 Mon Sep 17 00:00:00 2001 From: Liu Yi Date: Tue, 26 Jan 2021 20:25:48 +0800 Subject: [PATCH 40/52] Update requirements.txt --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f1c765b4b5..37707980fb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,4 +7,3 @@ opencv-python tqdm filelock scipy -paddleseg From 0150eeeeb1cc27be6a9e444aaf07f67021a3201f Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> Date: Wed, 27 Jan 2021 11:06:11 +0800 Subject: [PATCH 41/52] 
add dnlnet --- README.md | 1 + README_CN.md | 1 + configs/danet/README.md | 4 +- configs/dnlnet/README.md | 23 ++ ...t_resnet101_os8_cityscape_1024x512_80k.yml | 31 +++ ...net_resnet101_os8_voc12aug_512x512_40k.yml | 25 ++ ...et_resnet50_os8_cityscape_1024x512_80k.yml | 30 +++ ...lnet_resnet50_os8_voc12aug_512x512_40k.yml | 25 ++ configs/emanet/README.md | 2 +- docs/apis/models.md | 25 ++ paddleseg/models/__init__.py | 1 + paddleseg/models/dnlnet.py | 226 ++++++++++++++++++ paddleseg/models/layers/__init__.py | 1 + paddleseg/models/layers/nonlocal2d.py | 154 ++++++++++++ 14 files changed, 546 insertions(+), 3 deletions(-) create mode 100644 configs/dnlnet/README.md create mode 100644 configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml create mode 100644 configs/dnlnet/dnlnet_resnet101_os8_voc12aug_512x512_40k.yml create mode 100644 configs/dnlnet/dnlnet_resnet50_os8_cityscape_1024x512_80k.yml create mode 100644 configs/dnlnet/dnlnet_resnet50_os8_voc12aug_512x512_40k.yml create mode 100644 paddleseg/models/dnlnet.py create mode 100644 paddleseg/models/layers/nonlocal2d.py diff --git a/README.md b/README.md index 33302c952b..9f74d5cfda 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ Welcome to PaddleSeg! 
PaddleSeg is an end-to-end image segmentation development |[U-Net++](./configs/unet_plusplus)|-|-|-|-| |[EMANet](./configs/emanet)|✔|✔|-|-| |[ISANet](./configs/isanet)|✔|✔|-|-| +|[DNLNet](./configs/dnlnet)|✔|✔|-|-| ## Dataset - [x] Cityscapes diff --git a/README_CN.md b/README_CN.md index 16e7519482..2cc80439bf 100644 --- a/README_CN.md +++ b/README_CN.md @@ -44,6 +44,7 @@ PaddleSeg是基于飞桨[PaddlePaddle](https://www.paddlepaddle.org.cn)开发的 |[U-Net++](./configs/unet_plusplus)|-|-|-|-| |[EMANet](./configs/emanet)|✔|✔|-|-| |[ISANet](./configs/isanet)|✔|✔|-|-| +|[DNLNet](./configs/dnlnet)|✔|✔|-|-| ## 数据集 diff --git a/configs/danet/README.md b/configs/danet/README.md index f9474a70e6..8472d40217 100644 --- a/configs/danet/README.md +++ b/configs/danet/README.md @@ -10,10 +10,10 @@ | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|DANet|ResNet50_OS8|1024x512|80000|80.27%|-|-|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6caecf1222a0cc9124a376284a402cbe)| +|DANet|ResNet50_OS8|1024x512|80000|80.27%|80.53%|-|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/danet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=6caecf1222a0cc9124a376284a402cbe)| ### Pascal VOC 2012 + Aug | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|DANet|ResNet50_OS8|1024x512|40000|78.55%|-|-|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| 
[log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=51a403a54302bc81dd5ec0310a6d50ba)| +|DANet|ResNet50_OS8|512x512|40000|78.55%|78.93%|79.68%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/danet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=51a403a54302bc81dd5ec0310a6d50ba)| diff --git a/configs/dnlnet/README.md b/configs/dnlnet/README.md new file mode 100644 index 0000000000..162e4dfc08 --- /dev/null +++ b/configs/dnlnet/README.md @@ -0,0 +1,23 @@ +# Disentangled Non-Local Neural Networks + +## Reference + +> Minghao Yin, Zhuliang Yao, Yue Cao, Xiu Li, Zheng Zhang, Stephen Lin, Han Hu: +Disentangled Non-local Neural Networks. ECCV (15) 2020: 191-207. + +## Performance + +### Cityscapes + +| Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | +|-|-|-|-|-|-|-|-| +|DNLNet|ResNet50_OS8|1024x512|80000|79.95%|80.43%|-|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/dnlnet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/dnlnet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=922cf0682c5e684507ab54a14ef12847)| +|DNLNet|ResNet101_OS8|1024x512|80000|81.03%|81.38%|-|[model](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/dnlnet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/cityscapes/dnlnet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e0d13c4d9dbf4115bbba2abdc88122c)| + +### Pascal VOC 2012 + Aug + +| Model | Backbone | Resolution | Training 
Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | +|-|-|-|-|-|-|-|-| +|DNLNet|ResNet50_OS8|512x512|40000|80.89%|81.31%|81.56%|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/dnlnet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/dnlnet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=8877c77bef8b227af22c5eb3017138ce)| +|DNLNet|ResNet101_OS8|512x512|40000|80.49%|80.83%| 81.33%|[model](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/dnlnet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://paddleseg.bj.bcebos.com/dygraph/pascal_voc12/dnlnet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=1d42c22da1c465d9a38e4204bebeeb54)| + diff --git a/configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml b/configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml new file mode 100644 index 0000000000..ba562b0591 --- /dev/null +++ b/configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml @@ -0,0 +1,31 @@ +_base_: '../_base_/cityscapes.yml' + +batch_size: 2 +iters: 80000 + +model: + type: DNLNet + backbone: + type: ResNet101_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz + num_classes: 19 + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.00004 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] + diff --git a/configs/dnlnet/dnlnet_resnet101_os8_voc12aug_512x512_40k.yml b/configs/dnlnet/dnlnet_resnet101_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..b0b11b7260 --- /dev/null +++ b/configs/dnlnet/dnlnet_resnet101_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,25 @@ +_base_: '../_base_/pascal_voc12aug.yml' + +model: + type: DNLNet + backbone: + type: 
ResNet101_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 4.0e-05 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] diff --git a/configs/dnlnet/dnlnet_resnet50_os8_cityscape_1024x512_80k.yml b/configs/dnlnet/dnlnet_resnet50_os8_cityscape_1024x512_80k.yml new file mode 100644 index 0000000000..ae6bd0f4b3 --- /dev/null +++ b/configs/dnlnet/dnlnet_resnet50_os8_cityscape_1024x512_80k.yml @@ -0,0 +1,30 @@ +_base_: '../_base_/cityscapes.yml' + +batch_size: 2 +iters: 80000 + +model: + type: DNLNet + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + num_classes: 19 + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 0.00004 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] diff --git a/configs/dnlnet/dnlnet_resnet50_os8_voc12aug_512x512_40k.yml b/configs/dnlnet/dnlnet_resnet50_os8_voc12aug_512x512_40k.yml new file mode 100644 index 0000000000..ee8e802d17 --- /dev/null +++ b/configs/dnlnet/dnlnet_resnet50_os8_voc12aug_512x512_40k.yml @@ -0,0 +1,25 @@ +_base_: '../_base_/pascal_voc12aug.yml' + +model: + type: DNLNet + backbone: + type: ResNet50_vd + output_stride: 8 + pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 4.0e-05 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + +loss: + types: + - type: CrossEntropyLoss + - type: CrossEntropyLoss + coef: [1, 0.4] diff --git a/configs/emanet/README.md b/configs/emanet/README.md index cfe34bf2f3..2a5109bf5c 100644 --- a/configs/emanet/README.md +++ b/configs/emanet/README.md @@ -12,7 +12,7 @@ Expectation-Maximization 
Attention Networks for Semantic Segmentation. ICCV 2019 | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | |-|-|-|-|-|-|-|-| |EMANet|ResNet50_OS8|1024x512|80000|77.58%|77.98%|78.23%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e053a214d60822d6e65445b8614d052)| -|EMANet|ResNet101_OS8|769x769|80000|79.42%|79.83%|80.33%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| +|EMANet|ResNet101_OS8|1024x512|80000|79.42%|79.83%|80.33%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| ### Pascal VOC 2012 + Aug diff --git a/docs/apis/models.md b/docs/apis/models.md index 026b8ac89a..17af634234 100644 --- a/docs/apis/models.md +++ b/docs/apis/models.md @@ -20,6 +20,7 @@ The models subpackage contains the following model for image sementic segmentaio - [UNet++](#UNet-1) - [ISANet](#ISANet) - [EMANet](#EMANet) +- [DNLNet](#DNLNet) ## [DeepLabV3+](../../paddleseg/models/deeplab.py) @@ -446,6 +447,30 @@ The models subpackage contains the following model for image sementic segmentaio > > > - **momentum** (float): The parameter for updating bases. 
> > > - **concat_input** (bool): Whether concat the input and output of convs before classification layer. Default: True > > > - **enable_auxiliary_loss** (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. +> > > - **align_corners** (bool): An argument of F.interpolate. It should be set to False when the output size of feature + is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. +> > > - **pretrained** (str, optional): The path or url of pretrained model. Default: None. + +## [DNLNet](../../paddleseg/models/dnlnet.py) +> CLASS paddleseg.models.DNLNet(num_classes, backbone, backbone_indices=(2, 3), reduction=2, use_scale=True, mode='embedded_gaussian', temperature=0.05, concat_input=True, enable_auxiliary_loss=True, align_corners=False, pretrained=None) + + The DNLNet implementation based on PaddlePaddle. + + The original article refers to + Minghao Yin, et al. "Disentangled Non-Local Neural Networks" + (https://arxiv.org/abs/2006.06668) + +> > Args +> > > - **num_classes** (int): The unique number of target classes. +> > > - **backbone** (Paddle.nn.Layer): A backbone network. +> > > - **backbone_indices** (tuple): The values in the tuple indicate the indices of output of backbone. +> > > - **reduction** (int): Reduction factor of projection transform. Default: 2. +> > > - **use_scale** (bool): Whether to scale pairwise_weight by sqrt(1/inter_channels). Default: False. +> > > - **mode** (str): The nonlocal mode. Options are 'embedded_gaussian', + 'dot_product'. Default: 'embedded_gaussian'. +> > > - **temperature** (float): Temperature to adjust attention. Default: 0.05. +> > > - **concat_input** (bool): Whether concat the input and output of convs before classification layer. Default: True +> > > - **enable_auxiliary_loss** (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. > > > - **align_corners** (bool): An argument of F.interpolate. 
It should be set to False when the output size of feature is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. > > > - **pretrained** (str, optional): The path or url of pretrained model. Default: None. \ No newline at end of file diff --git a/paddleseg/models/__init__.py b/paddleseg/models/__init__.py index 296c00a900..848eec9f9f 100644 --- a/paddleseg/models/__init__.py +++ b/paddleseg/models/__init__.py @@ -32,3 +32,4 @@ from .unet_plusplus import UNetPlusPlus from .emanet import * from .isanet import * +from .dnlnet import * diff --git a/paddleseg/models/dnlnet.py b/paddleseg/models/dnlnet.py new file mode 100644 index 0000000000..4b0913b6a5 --- /dev/null +++ b/paddleseg/models/dnlnet.py @@ -0,0 +1,226 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +import paddle.nn as nn +import paddle.nn.functional as F + +from paddleseg.models import layers +from paddleseg.cvlibs import manager +from paddleseg.utils import utils + + +@manager.MODELS.add_component +class DNLNet(nn.Layer): + """Disentangled Non-Local Neural Networks. + + The original article refers to + Minghao Yin, et al. "Disentangled Non-Local Neural Networks" + (https://arxiv.org/abs/2006.06668) + Args: + num_classes (int): The unique number of target classes. + backbone (Paddle.nn.Layer): A backbone network. + backbone_indices (tuple): The values in the tuple indicate the indices of output of backbone. 
+ reduction (int): Reduction factor of projection transform. Default: 2. + use_scale (bool): Whether to scale pairwise_weight by + sqrt(1/inter_channels). Default: False. + mode (str): The nonlocal mode. Options are 'embedded_gaussian', + 'dot_product'. Default: 'embedded_gaussian'. + temperature (float): Temperature to adjust attention. Default: 0.05. + concat_input (bool): Whether concat the input and output of convs before classification layer. Default: True + enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. + align_corners (bool): An argument of F.interpolate. It should be set to False when the output size of feature + is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False. + pretrained (str, optional): The path or url of pretrained model. Default: None. + """ + + def __init__(self, + num_classes, + backbone, + backbone_indices=(2, 3), + reduction=2, + use_scale=True, + mode='embedded_gaussian', + temperature=0.05, + concat_input=True, + enable_auxiliary_loss=True, + align_corners=False, + pretrained=None): + super().__init__() + self.backbone = backbone + self.backbone_indices = backbone_indices + in_channels = [self.backbone.feat_channels[i] for i in backbone_indices] + self.head = DNLHead(num_classes, in_channels, reduction, use_scale, + mode, temperature, concat_input, + enable_auxiliary_loss) + self.align_corners = align_corners + self.pretrained = pretrained + self.init_weight() + + def forward(self, x): + feats = self.backbone(x) + feats = [feats[i] for i in self.backbone_indices] + logit_list = self.head(feats) + logit_list = [ + F.interpolate( + logit, + x.shape[2:], + mode='bilinear', + align_corners=self.align_corners, + align_mode=1) for logit in logit_list + ] + return logit_list + + def init_weight(self): + if self.pretrained is not None: + utils.load_entire_model(self, self.pretrained) + + +class DNLHead(nn.Layer): + """ + The DNLNet head. 
+ + Args: + num_classes (int): The unique number of target classes. + in_channels (tuple): The number of input channels. + reduction (int): Reduction factor of projection transform. Default: 2. + use_scale (bool): Whether to scale pairwise_weight by + sqrt(1/inter_channels). Default: False. + mode (str): The nonlocal mode. Options are 'embedded_gaussian', + 'dot_product'. Default: 'embedded_gaussian.'. + temperature (float): Temperature to adjust attention. Default: 0.05 + concat_input (bool): Whether concat the input and output of convs before classification layer. Default: True + enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True. + """ + + def __init__(self, + num_classes, + in_channels, + reduction, + use_scale, + mode, + temperature, + concat_input=True, + enable_auxiliary_loss=True, + **kwargs): + super(DNLHead, self).__init__() + self.in_channels = in_channels[-1] + self.concat_input = concat_input + self.enable_auxiliary_loss = enable_auxiliary_loss + inter_channels = self.in_channels // 4 + + self.dnl_block = DisentangledNonLocal2D( + in_channels=inter_channels, + reduction=reduction, + use_scale=use_scale, + temperature=temperature, + mode=mode) + self.conv0 = layers.ConvBNReLU( + in_channels=self.in_channels, + out_channels=inter_channels, + kernel_size=3, + bias_attr=False) + self.conv1 = layers.ConvBNReLU( + in_channels=inter_channels, + out_channels=inter_channels, + kernel_size=3, + bias_attr=False) + self.cls = nn.Sequential( + nn.Dropout2D(p=0.1), nn.Conv2D(inter_channels, num_classes, 1)) + self.aux = nn.Sequential( + layers.ConvBNReLU( + in_channels=1024, + out_channels=256, + kernel_size=3, + bias_attr=False), nn.Dropout2D(p=0.1), + nn.Conv2D(256, num_classes, 1)) + if self.concat_input: + self.conv_cat = layers.ConvBNReLU( + self.in_channels + inter_channels, + inter_channels, + kernel_size=3, + bias_attr=False) + + def forward(self, feat_list): + C3, C4 = feat_list + output = 
self.conv0(C4) + output = self.dnl_block(output) + output = self.conv1(output) + if self.concat_input: + output = self.conv_cat(paddle.concat([C4, output], axis=1)) + output = self.cls(output) + if self.enable_auxiliary_loss: + auxout = self.aux(C3) + return [output, auxout] + else: + return [output] + + +class DisentangledNonLocal2D(layers.NonLocal2D): + """Disentangled Non-Local Blocks. + + Args: + temperature (float): Temperature to adjust attention. + """ + + def __init__(self, temperature, *arg, **kwargs): + super().__init__(*arg, **kwargs) + self.temperature = temperature + self.conv_mask = nn.Conv2D(self.in_channels, 1, kernel_size=1) + + def embedded_gaussian(self, theta_x, phi_x): + pairwise_weight = paddle.matmul(theta_x, phi_x) + if self.use_scale: + pairwise_weight /= theta_x.shape[-1]**0.5 + pairwise_weight /= self.temperature + pairwise_weight = F.softmax(pairwise_weight, -1) + return pairwise_weight + + def forward(self, x): + n, c, h, w = x.shape + g_x = self.g(x).reshape([n, self.inter_channels, + -1]).transpose([0, 2, 1]) + + if self.mode == "gaussian": + theta_x = paddle.transpose( + x.reshape([n, self.in_channels, -1]), [0, 2, 1]) + if self.sub_sample: + phi_x = paddle.transpose(self.phi(x), [n, self.in_channels, -1]) + else: + phi_x = paddle.transpose(x, [n, self.in_channels, -1]) + + elif self.mode == "concatenation": + theta_x = paddle.reshape( + self.theta(x), [n, self.inter_channels, -1, 1]) + phi_x = paddle.reshape(self.phi(x), [n, self.inter_channels, 1, -1]) + + else: + theta_x = self.theta(x).reshape([n, self.inter_channels, + -1]).transpose([0, 2, 1]) + phi_x = paddle.reshape(self.phi(x), [n, self.inter_channels, -1]) + + theta_x -= paddle.mean(theta_x, axis=-2, keepdim=True) + phi_x -= paddle.mean(phi_x, axis=-1, keepdim=True) + + pairwise_func = getattr(self, self.mode) + pairwise_weight = pairwise_func(theta_x, phi_x) + + y = paddle.matmul(pairwise_weight, g_x).transpose([0, 2, 1]).reshape( + [n, self.inter_channels, h, w]) + 
unary_mask = F.softmax( + paddle.reshape(self.conv_mask(x), [n, 1, -1]), -1) + unary_x = paddle.matmul(unary_mask, g_x).transpose([0, 2, 1]).reshape( + [n, self.inter_channels, 1, 1]) + output = x + self.conv_out(y + unary_x) + return output diff --git a/paddleseg/models/layers/__init__.py b/paddleseg/models/layers/__init__.py index b334c88476..86ec36c08d 100644 --- a/paddleseg/models/layers/__init__.py +++ b/paddleseg/models/layers/__init__.py @@ -16,3 +16,4 @@ from .activation import Activation from .pyramid_pool import ASPPModule, PPModule from .attention import AttentionBlock +from .nonlocal2d import NonLocal2D diff --git a/paddleseg/models/layers/nonlocal2d.py b/paddleseg/models/layers/nonlocal2d.py new file mode 100644 index 0000000000..bd577c1a16 --- /dev/null +++ b/paddleseg/models/layers/nonlocal2d.py @@ -0,0 +1,154 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +import paddle.nn as nn +import paddle.nn.functional as F + +from paddleseg.models import layers + + +class NonLocal2D(nn.Layer): + """Basic Non-local module. + This model is the implementation of "Non-local Neural Networks" + (https://arxiv.org/abs/1711.07971) + + Args: + in_channels (int): Channels of the input feature map. + reduction (int): Channel reduction ratio. Default: 2. + use_scale (bool): Whether to scale pairwise_weight by `1/sqrt(inter_channels)` when the mode is `embedded_gaussian`. Default: True. 
+ sub_sample (bool): Whether to utilize max pooling after pairwise function. Default: False. + mode (str): Options are `gaussian`, `concatenation`, `embedded_gaussian` and `dot_product`. Default: embedded_gaussian. + """ + + def __init__(self, + in_channels, + reduction=2, + use_scale=True, + sub_sample=False, + mode='embedded_gaussian'): + super(NonLocal2D, self).__init__() + self.in_channels = in_channels + self.reduction = reduction + self.use_scale = use_scale + self.sub_sample = sub_sample + self.mode = mode + if mode not in [ + 'gaussian', 'embedded_gaussian', 'dot_product', 'concatenation' + ]: + raise ValueError( + "Mode should be in 'gaussian', 'concatenation','embedded_gaussian' or 'dot_product'." + ) + + self.inter_channels = max(in_channels // reduction, 1) + + self.g = nn.Conv2D( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1) + self.conv_out = layers.ConvBNReLU( + in_channels=self.inter_channels, + out_channels=self.in_channels, + kernel_size=1, + bias_attr=False) + + if self.mode != "gaussian": + self.theta = nn.Conv2D( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1) + self.phi = nn.Conv2D( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1) + + if self.mode == "concatenation": + self.concat_project = layers.ConvBNReLU( + in_channels=self.inter_channels * 2, + out_channels=1, + kernel_size=1, + bias_attr=False) + + if self.sub_sample: + max_pool_layer = nn.MaxPool2D(kernel_size=(2, 2)) + self.g = nn.Sequential(self.g, max_pool_layer) + if self.mode != 'gaussian': + self.phi = nn.Sequential(self.phi, max_pool_layer) + else: + self.phi = max_pool_layer + + def gaussian(self, theta_x, phi_x): + pairwise_weight = paddle.matmul(theta_x, phi_x) + pairwise_weight = F.softmax(pairwise_weight, axis=-1) + return pairwise_weight + + def embedded_gaussian(self, theta_x, phi_x): + pairwise_weight = paddle.matmul(theta_x, phi_x) + if self.use_scale: + 
pairwise_weight /= theta_x.shape[-1]**0.5 + pairwise_weight = F.softmax(pairwise_weight, -1) + return pairwise_weight + + def dot_product(self, theta_x, phi_x): + pairwise_weight = paddle.matmul(theta_x, phi_x) + pairwise_weight /= pairwise_weight.shape[-1] + return pairwise_weight + + def concatenation(self, theta_x, phi_x): + h = theta_x.shape[2] + w = phi_x.shape[3] + theta_x = paddle.tile(theta_x, [1, 1, 1, w]) + phi_x = paddle.tile(phi_x, [1, 1, h, 1]) + + concat_feature = paddle.concat([theta_x, phi_x], axis=1) + pairwise_weight = self.concat_project(concat_feature) + n, _, h, w = pairwise_weight.shape + pairwise_weight = paddle.reshape(pairwise_weight, [n, h, w]) + pairwise_weight /= pairwise_weight.shape[-1] + return pairwise_weight + + def forward(self, x): + n, c, h, w = x.shape + g_x = paddle.reshape(self.g(x), [n, self.inter_channels, -1]) + g_x = paddle.transpose(g_x, [0, 2, 1]) + + if self.mode == 'gaussian': + theta_x = paddle.reshape(x, [n, self.inter_channels, -1]) + theta_x = paddle.transpose(theta_x, [0, 2, 1]) + if self.sub_sample: + phi_x = paddle.reshape( + self.phi(x), [n, self.inter_channels, -1]) + else: + phi_x = paddle.reshape(x, [n, self.in_channels, -1]) + + elif self.mode == 'concatenation': + theta_x = paddle.reshape( + self.theta(x), [n, self.inter_channels, -1, 1]) + phi_x = self.phi(x).view(n, self.inter_channels, 1, -1) + + else: + theta_x = paddle.reshape( + self.theta(x), [n, self.inter_channels, -1, 1]) + theta_x = paddle.transpose(theta_x, [0, 2, 1]) + phi_x = paddle.reshape(self.phi(x), [n, self.inter_channels, -1]) + + pairwise_func = getattr(self, self.mode) + pairwise_weight = pairwise_func(theta_x, phi_x) + y = paddle.matmul(pairwise_weight, g_x) + y = paddle.transpose(y, [0, 2, 1]) + y = paddle.reshape(y, [n, self.inter_channels, h, w]) + + output = x + self.conv_out(y) + + return output From 6973146c1f2c5dde5545e8f4ef6f0b459360141a Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> 
Date: Wed, 27 Jan 2021 11:21:49 +0800 Subject: [PATCH 42/52] Coco stuff (#749) --- README.md | 2 +- README_CN.md | 2 +- configs/_base_/coco_stuff.yml | 45 ++++++ docs/data_prepare.md | 266 +++++++++++++++++--------------- paddleseg/datasets/cocostuff.py | 82 ++++++++++ 5 files changed, 271 insertions(+), 126 deletions(-) create mode 100644 configs/_base_/coco_stuff.yml create mode 100644 paddleseg/datasets/cocostuff.py diff --git a/README.md b/README.md index 09f88e7e7a..629db7428e 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ Welcome to PaddleSeg! PaddleSeg is an end-to-end image segmentation development - [x] Pascal VOC - [x] ADE20K - [x] Pascal Context -- [ ] COCO stuff +- [x] COCO stuff ## Installation diff --git a/README_CN.md b/README_CN.md index 8b95895b4e..60eecba13c 100644 --- a/README_CN.md +++ b/README_CN.md @@ -48,7 +48,7 @@ PaddleSeg是基于飞桨[PaddlePaddle](https://www.paddlepaddle.org.cn)开发的 - [x] Pascal VOC - [x] ADE20K - [x] Pascal Context -- [ ] COCO stuff +- [x] COCO stuff ## 安装 diff --git a/configs/_base_/coco_stuff.yml b/configs/_base_/coco_stuff.yml new file mode 100644 index 0000000000..d57fbbf22e --- /dev/null +++ b/configs/_base_/coco_stuff.yml @@ -0,0 +1,45 @@ +batch_size: 4 +iters: 80000 + +train_dataset: + type: CocoStuff + dataset_root: data/cocostuff/ + transforms: + - type: ResizeStepScaling + min_scale_factor: 0.5 + max_scale_factor: 2.0 + scale_step_size: 0.25 + - type: RandomPaddingCrop + crop_size: [520, 520] + - type: RandomHorizontalFlip + - type: RandomDistort + brightness_range: 0.4 + contrast_range: 0.4 + saturation_range: 0.4 + - type: Normalize + mode: train + +val_dataset: + type: CocoStuff + dataset_root: data/cocostuff/ + transforms: + - type: Normalize + mode: val + + +optimizer: + type: sgd + momentum: 0.9 + weight_decay: 4.0e-5 + +learning_rate: + value: 0.01 + decay: + type: poly + power: 0.9 + end_lr: 0.0 + +loss: + types: + - type: CrossEntropyLoss + coef: [1] diff --git a/docs/data_prepare.md 
b/docs/data_prepare.md index c641a59e74..b07a548194 100644 --- a/docs/data_prepare.md +++ b/docs/data_prepare.md @@ -1,124 +1,142 @@ -# 数据集准备 - -PaddleSeg目前支持CityScapes、ADE20K、Pascal VOC等数据集的加载,在加载数据集时,如若本地不存在对应数据,则会自动触发下载(除Cityscapes数据集). - -## 关于CityScapes数据集 -Cityscapes是关于城市街道场景的语义理解图片数据集。它主要包含来自50个不同城市的街道场景, -拥有5000张(2048 x 1024)城市驾驶场景的高质量像素级注释图像,包含19个类别。其中训练集2975张, 验证集500张和测试集1525张。 - -由于协议限制,请自行前往[CityScapes官网](https://www.cityscapes-dataset.com/)下载数据集, -我们建议您将数据集存放于`PaddleSeg/data`中,以便与我们配置文件完全兼容。数据集下载后请组织成如下结构: - - cityscapes - | - |--leftImg8bit - | |--train - | |--val - | |--test - | - |--gtFine - | |--train - | |--val - | |--test - -运行下列命令进行标签转换: -```shell -pip install cityscapesscripts -python tools/convert_cityscapes.py --cityscapes_path data/cityscapes --num_workers 8 -``` -其中`cityscapes_path`应根据实际数据集路径进行调整。 `num_workers`决定启动的进程数,可根据实际情况进行调整大小。 - -## 关于Pascal VOC 2012数据集 -[Pascal VOC 2012](http://host.robots.ox.ac.uk/pascal/VOC/)数据集以对象分割为主,包含20个类别和背景类,其中训练集1464张,验证集1449张。 -通常情况下会利用[SBD(Semantic Boundaries Dataset)](http://home.bharathh.info/pubs/codes/SBD/download.html)进行扩充,扩充后训练集10582张。 -运行下列命令进行SBD数据集下载并进行扩充: -```shell -python tools/voc_augment.py --voc_path data/VOCdevkit --num_workers 8 -``` -其中`voc_path`应根据实际数据集路径进行调整。 - -**注意** 运行前请确保在PaddleSeg目录下执行过下列命令: -```shell -export PYTHONPATH=`pwd` -# windows下请执行相面的命令 -# set PYTHONPATH=%cd% -``` - -## 关于ADE20K数据集 -[ADE20K](http://sceneparsing.csail.mit.edu/)由MIT发布的可用于场景感知、分割和多物体识别等多种任务的数据集。 -其涵盖了150个语义类别,包括训练集20210张,验证集2000张。 - -## 关于Pascal Context数据集 -Pascal Context是基于PASCAL VOC 2010数据集额外标注的像素级别的语义分割数据集。我们提供的转换脚本支持59个类别,其中训练集4996, 验证集5104张. 
- - -在使用Pascal Context数据集前, 请先下载[VOC2010](http://host.robots.ox.ac.uk/pascal/VOC/voc2010/VOCtrainval_03-May-2010.tar),随后自行前往[Pascal-Context主页](https://www.cs.stanford.edu/~roozbeh/pascal-context/)下载数据集及[标注](https://codalabuser.blob.core.windows.net/public/trainval_merged.json) -我们建议您将数据集存放于`PaddleSeg/data`中,以便与我们配置文件完全兼容。数据集下载后请组织成如下结构: - - VOC2010 - | - |--Annotations - | - |--ImageSets - | - |--SegmentationClass - | - |--JPEGImages - | - |--SegmentationObject - | - |--trainval_merged.json - -其中,标注图像的标签从1,2依次取值,不可间隔。若有需要忽略的像素,则按0进行标注。在使用Pascal Context数据集时,需要安装[Detail](https://github.com/zhanghang1989/detail-api). - - -## 自定义数据集 - -如果您需要使用自定义数据集进行训练,请按照以下步骤准备数据. - -1.推荐整理成如下结构 - - custom_dataset - | - |--images - | |--image1.jpg - | |--image2.jpg - | |--... - | - |--labels - | |--label1.jpg - | |--label2.png - | |--... - | - |--train.txt - | - |--val.txt - | - |--test.txt - -其中train.txt和val.txt的内容如下所示: - - images/image1.jpg labels/label1.png - images/image2.jpg labels/label2.png - ... - -2.标注图像的标签从0,1依次取值,不可间隔。若有需要忽略的像素,则按255进行标注。 - -可按如下方式对自定义数据集进行配置: -```yaml -train_dataset: - type: Dataset - dataset_root: custom_dataset - train_path: custom_dataset/train.txt - num_classes: 2 - transforms: - - type: ResizeStepScaling - min_scale_factor: 0.5 - max_scale_factor: 2.0 - scale_step_size: 0.25 - - type: RandomPaddingCrop - crop_size: [512, 512] - - type: RandomHorizontalFlip - - type: Normalize - mode: train -``` +# 数据集准备 + +PaddleSeg目前支持CityScapes、ADE20K、Pascal VOC等数据集的加载,在加载数据集时,如若本地不存在对应数据,则会自动触发下载(除Cityscapes数据集). 
+ +## 关于CityScapes数据集 +Cityscapes是关于城市街道场景的语义理解图片数据集。它主要包含来自50个不同城市的街道场景, +拥有5000张(2048 x 1024)城市驾驶场景的高质量像素级注释图像,包含19个类别。其中训练集2975张, 验证集500张和测试集1525张。 + +由于协议限制,请自行前往[CityScapes官网](https://www.cityscapes-dataset.com/)下载数据集, +我们建议您将数据集存放于`PaddleSeg/data`中,以便与我们配置文件完全兼容。数据集下载后请组织成如下结构: + + cityscapes + | + |--leftImg8bit + | |--train + | |--val + | |--test + | + |--gtFine + | |--train + | |--val + | |--test + +运行下列命令进行标签转换: +```shell +pip install cityscapesscripts +python tools/convert_cityscapes.py --cityscapes_path data/cityscapes --num_workers 8 +``` +其中`cityscapes_path`应根据实际数据集路径进行调整。 `num_workers`决定启动的进程数,可根据实际情况进行调整大小。 + +## 关于Pascal VOC 2012数据集 +[Pascal VOC 2012](http://host.robots.ox.ac.uk/pascal/VOC/)数据集以对象分割为主,包含20个类别和背景类,其中训练集1464张,验证集1449张。 +通常情况下会利用[SBD(Semantic Boundaries Dataset)](http://home.bharathh.info/pubs/codes/SBD/download.html)进行扩充,扩充后训练集10582张。 +运行下列命令进行SBD数据集下载并进行扩充: +```shell +python tools/voc_augment.py --voc_path data/VOCdevkit --num_workers 8 +``` +其中`voc_path`应根据实际数据集路径进行调整。 + +**注意** 运行前请确保在PaddleSeg目录下执行过下列命令: +```shell +export PYTHONPATH=`pwd` +# windows下请执行相面的命令 +# set PYTHONPATH=%cd% +``` + +## 关于ADE20K数据集 +[ADE20K](http://sceneparsing.csail.mit.edu/)由MIT发布的可用于场景感知、分割和多物体识别等多种任务的数据集。 +其涵盖了150个语义类别,包括训练集20210张,验证集2000张。 + +## 关于Coco Stuff数据集 +Coco Stuff是基于Coco数据集的像素级别语义分割数据集。它主要覆盖172个类别,包含80个'thing',91个'stuff'和1个'unlabeled', +其中训练集118k, 验证集5k. 
+ +在使用Coco Stuff数据集前, 请自行前往[COCO-Stuff主页](https://github.com/nightrome/cocostuff)下载数据集,或者下载[coco2017训练集原图](http://images.cocodataset.org/zips/train2017.zip), [coco2017验证集原图](http://images.cocodataset.org/zips/val2017.zip)及[标注图](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) +我们建议您将数据集存放于`PaddleSeg/data`中,以便与我们配置文件完全兼容。数据集下载后请组织成如下结构: + + cocostuff + | + |--images + | |--train2017 + | |--val2017 + | + |--annotations + | |--train2017 + | |--val2017 + +其中,标注图像的标签从0,1依次取值,不可间隔。若有需要忽略的像素,则按255进行标注。 + +## 关于Pascal Context数据集 +Pascal Context是基于PASCAL VOC 2010数据集额外标注的像素级别的语义分割数据集。我们提供的转换脚本支持59个类别,其中训练集4996, 验证集5104张. + + +在使用Pascal Context数据集前, 请先下载[VOC2010](http://host.robots.ox.ac.uk/pascal/VOC/voc2010/VOCtrainval_03-May-2010.tar),随后自行前往[Pascal-Context主页](https://www.cs.stanford.edu/~roozbeh/pascal-context/)下载数据集及[标注](https://codalabuser.blob.core.windows.net/public/trainval_merged.json) +我们建议您将数据集存放于`PaddleSeg/data`中,以便与我们配置文件完全兼容。数据集下载后请组织成如下结构: + + VOC2010 + | + |--Annotations + | + |--ImageSets + | + |--SegmentationClass + | + |--JPEGImages + | + |--SegmentationObject + | + |--trainval_merged.json + +其中,标注图像的标签从1,2依次取值,不可间隔。若有需要忽略的像素,则按0进行标注。在使用Pascal Context数据集时,需要安装[Detail](https://github.com/zhanghang1989/detail-api). + +## 自定义数据集 + +如果您需要使用自定义数据集进行训练,请按照以下步骤准备数据. + +1.推荐整理成如下结构 + + custom_dataset + | + |--images + | |--image1.jpg + | |--image2.jpg + | |--... + | + |--labels + | |--label1.jpg + | |--label2.png + | |--... + | + |--train.txt + | + |--val.txt + | + |--test.txt + +其中train.txt和val.txt的内容如下所示: + + images/image1.jpg labels/label1.png + images/image2.jpg labels/label2.png + ... 
+ +2.标注图像的标签从0,1依次取值,不可间隔。若有需要忽略的像素,则按255进行标注。 + +可按如下方式对自定义数据集进行配置: +```yaml +train_dataset: + type: Dataset + dataset_root: custom_dataset + train_path: custom_dataset/train.txt + num_classes: 2 + transforms: + - type: ResizeStepScaling + min_scale_factor: 0.5 + max_scale_factor: 2.0 + scale_step_size: 0.25 + - type: RandomPaddingCrop + crop_size: [512, 512] + - type: RandomHorizontalFlip + - type: Normalize + mode: train +``` diff --git a/paddleseg/datasets/cocostuff.py b/paddleseg/datasets/cocostuff.py new file mode 100644 index 0000000000..88a8c8a903 --- /dev/null +++ b/paddleseg/datasets/cocostuff.py @@ -0,0 +1,82 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import glob + +from paddleseg.datasets import Dataset +from paddleseg.cvlibs import manager +from paddleseg.transforms import Compose + +@manager.DATASETS.add_component +class CocoStuff(Dataset): + """ + COCO-Stuff dataset `https://github.com/nightrome/cocostuff`. + The folder structure is as follow: + + cocostuff + | + |--images + | |--train2017 + | |--val2017 + | + |--annotations + | |--train2017 + | |--val2017 + + + Args: + transforms (list): Transforms for image. + dataset_root (str): Cityscapes dataset directory. + mode (str): Which part of dataset to use. it is one of ('train', 'val'). Default: 'train'. 
+ """ + + def __init__(self, transforms, dataset_root, mode='train'): + self.dataset_root = dataset_root + self.transforms = Compose(transforms) + self.file_list = list() + mode = mode.lower() + self.mode = mode + self.num_classes = 172 + self.ignore_index = 255 + + if mode not in ['train', 'val']: + raise ValueError( + "mode should be 'train', 'val', but got {}.".format(mode)) + + if self.transforms is None: + raise ValueError("`transforms` is necessary, but it is None.") + + img_dir = os.path.join(self.dataset_root, 'images') + label_dir = os.path.join(self.dataset_root, 'annotations') + if self.dataset_root is None or not os.path.isdir( + self.dataset_root) or not os.path.isdir( + img_dir) or not os.path.isdir(label_dir): + raise ValueError( + "The dataset is not Found or the folder structure is nonconfoumance." + ) + + label_files = sorted( + glob.glob( + os.path.join(label_dir, mode+'2017', '*.png'))) + + img_files = sorted( + glob.glob(os.path.join(img_dir, mode+'2017', '*.jpg'))) + + self.file_list = [[ + img_path, label_path + ] for img_path, label_path in zip(img_files, label_files)] + + + From 840b9fd73f97e74b48d2b6a958ec0a4fdb5de6a4 Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> Date: Wed, 27 Jan 2021 14:42:40 +0800 Subject: [PATCH 43/52] revise emanet readme --- configs/emanet/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/emanet/README.md b/configs/emanet/README.md index 2a5109bf5c..6c16ac2a8a 100644 --- a/configs/emanet/README.md +++ b/configs/emanet/README.md @@ -11,8 +11,8 @@ Expectation-Maximization Attention Networks for Semantic Segmentation. 
ICCV 2019 | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) |Links | |-|-|-|-|-|-|-|-| -|EMANet|ResNet50_OS8|1024x512|80000|77.58%|77.98%|78.23%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e053a214d60822d6e65445b8614d052)| -|EMANet|ResNet101_OS8|1024x512|80000|79.42%|79.83%|80.33%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| +|EMANet|ResNet50_OS8|1024x512|80000|77.64%|77.98%|78.23%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet50_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e053a214d60822d6e65445b8614d052)| +|EMANet|ResNet101_OS8|1024x512|80000|79.41%|79.83%|80.33%|[model](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/cityscapes/emanet_resnet101_os8_cityscapes_1024x512_80k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=87be6389cdada711f5c6ada21d9ef6cd)| ### Pascal VOC 2012 + Aug From f8509542f54b790cc35a91ca09d444554ad2b0ea Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Wed, 27 Jan 2021 17:01:40 +0800 Subject: [PATCH 44/52] Add LimitLong Transform (#803) --- paddleseg/core/infer.py | 17 
++++++++ paddleseg/core/val.py | 6 ++- paddleseg/transforms/transforms.py | 65 ++++++++++++++++++++++++++++++ 3 files changed, 86 insertions(+), 2 deletions(-) diff --git a/paddleseg/core/infer.py b/paddleseg/core/infer.py index bfb3888909..9d6df78b8a 100644 --- a/paddleseg/core/infer.py +++ b/paddleseg/core/infer.py @@ -42,6 +42,23 @@ def get_reverse_list(ori_shape, transforms): if op.__class__.__name__ in ['Padding']: reverse_list.append(('padding', (h, w))) w, h = op.target_size[0], op.target_size[1] + if op.__class__.__name__ in ['LimitLong']: + long_edge = max(h, w) + short_edge = min(h, w) + if ((op.max_long is not None) and (long_edge > op.max_long)): + reverse_list.append(('resize', (h, w))) + long_edge = op.max_long + short_edge = int(round(short_edge * op.max_long / long_edge)) + elif ((op.min_long is not None) and (long_edge < op.min_long)): + reverse_list.append(('resize', (h, w))) + long_edge = op.min_long + short_edge = int(round(short_edge * op.min_long / long_edge)) + if h > w: + h = long_edge + w = short_edge + else: + w = long_edge + h = short_edge return reverse_list diff --git a/paddleseg/core/val.py b/paddleseg/core/val.py index f6371a1c17..003273e01f 100644 --- a/paddleseg/core/val.py +++ b/paddleseg/core/val.py @@ -123,7 +123,8 @@ def evaluate(model, intersect_area_list = [] pred_area_list = [] label_area_list = [] - paddle.distributed.all_gather(intersect_area_list, intersect_area) + paddle.distributed.all_gather(intersect_area_list, + intersect_area) paddle.distributed.all_gather(pred_area_list, pred_area) paddle.distributed.all_gather(label_area_list, label_area) @@ -135,7 +136,8 @@ def evaluate(model, label_area_list = label_area_list[:valid] for i in range(len(intersect_area_list)): - intersect_area_all = intersect_area_all + intersect_area_list[i] + intersect_area_all = intersect_area_all + intersect_area_list[ + i] pred_area_all = pred_area_all + pred_area_list[i] label_area_all = label_area_all + label_area_list[i] else: diff --git 
a/paddleseg/transforms/transforms.py b/paddleseg/transforms/transforms.py index 7f285ed340..52ba7a29f7 100644 --- a/paddleseg/transforms/transforms.py +++ b/paddleseg/transforms/transforms.py @@ -228,6 +228,71 @@ def __call__(self, im, label=None): return (im, label) +@manager.TRANSFORMS.add_component +class LimitLong: + """ + Limit the long edge of image. + + If the long edge is larger than max_long, resize the long edge + to max_long, while scale the short edge proportionally. + + If the long edge is smaller than min_long, resize the long edge + to min_long, while scale the short edge proportionally. + + Args: + max_long (int, optional): If the long edge of image is larger than max_long, + it will be resize to max_long. Default: None. + min_long (int, optional): If the long edge of image is smaller than min_long, + it will be resize to min_long. Default: None. + """ + + def __init__(self, max_long=None, min_long=None): + if max_long is not None: + if not isinstance(max_long, int): + raise TypeError( + "Type of `max_long` is invalid. It should be int, but it is {}" + .format(type(max_long))) + if min_long is not None: + if not isinstance(min_long, int): + raise TypeError( + "Type of `min_long` is invalid. It should be int, but it is {}" + .format(type(min_long))) + if (max_long is not None) and (min_long is not None): + if min_long > max_long: + raise ValueError( + '`max_long should not smaller than min_long, but they are {} and {}' + .format(max_long, min_long)) + self.max_long = max_long + self.min_long = min_long + + def __call__(self, im, label=None): + """ + Args: + im (np.ndarray): The Image data. + label (np.ndarray, optional): The label data. Default: None. + + Returns: + (tuple). When label is None, it returns (im, ), otherwise it returns (im, label). 
+ """ + h, w = im.shape[0], im.shape[1] + long_edge = max(h, w) + target = long_edge + if (self.max_long is not None) and (long_edge > self.max_long): + target = self.max_long + elif (self.min_long is not None) and (long_edge < self.min_long): + target = self.min_long + + if target != long_edge: + im = functional.resize_long(im, target) + if label is not None: + label = functional.resize_long(label, target, cv2.INTER_NEAREST) + + if label is None: + return (im, ) + else: + return (im, label) + + @manager.TRANSFORMS.add_component class ResizeRangeScaling: """ From d7bfdfacb2f63a262cc0db40ba00712e9e22a4a0 Mon Sep 17 00:00:00 2001 From: Liu Wenlong <51303942+wen-flow@users.noreply.github.com> Date: Wed, 27 Jan 2021 20:03:26 +0800 Subject: [PATCH 45/52] Remove redundant code, Solve the data type inconsistency bug (#805) --- paddleseg/models/losses/bootstrapped_cross_entropy.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/paddleseg/models/losses/bootstrapped_cross_entropy.py b/paddleseg/models/losses/bootstrapped_cross_entropy.py index 5ca95feb69..6443ccffec 100644 --- a/paddleseg/models/losses/bootstrapped_cross_entropy.py +++ b/paddleseg/models/losses/bootstrapped_cross_entropy.py @@ -38,9 +38,10 @@ def __init__(self, min_K, loss_th, weight=None, ignore_index=255): self.ignore_index = ignore_index self.K = min_K self.threshold = loss_th + if weight is not None: + weight = paddle.to_tensor(weight, dtype='float32') self.weight = weight - self.ignore_index = ignore_index - + def forward(self, logit, label): n, c, h, w = logit.shape @@ -55,7 +56,6 @@ def forward(self, logit, label): y = paddle.transpose(y, (0, 2, 3, 1)) x = paddle.reshape(x, shape=(-1, c)) y = paddle.reshape(y, shape=(-1, )) - loss = F.cross_entropy( x, y, From a4f76450a2e5d44d8877898c1dc8c1591ce8d815 Mon Sep 17 00:00:00 2001 From: LutaoChu <30695251+LutaoChu@users.noreply.github.com> Date: Wed, 27 Jan 2021 20:06:53 +0800 Subject: [PATCH 46/52] fix bootstrapped cross entropy bug 
(#806) --- paddleseg/models/losses/bootstrapped_cross_entropy.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/paddleseg/models/losses/bootstrapped_cross_entropy.py b/paddleseg/models/losses/bootstrapped_cross_entropy.py index 5ca95feb69..a4909fa672 100644 --- a/paddleseg/models/losses/bootstrapped_cross_entropy.py +++ b/paddleseg/models/losses/bootstrapped_cross_entropy.py @@ -38,8 +38,9 @@ def __init__(self, min_K, loss_th, weight=None, ignore_index=255): self.ignore_index = ignore_index self.K = min_K self.threshold = loss_th + if weight is not None: + weight = paddle.to_tensor(weight, dtype='float32') self.weight = weight - self.ignore_index = ignore_index def forward(self, logit, label): From d621dd4fe1f248dfdbdb735c19d0740a7c1848a8 Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Thu, 28 Jan 2021 16:24:08 +0800 Subject: [PATCH 47/52] Update Dataset Raise Information (#808) --- paddleseg/cvlibs/config.py | 6 +++--- train.py | 6 +++++- val.py | 6 +++++- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/paddleseg/cvlibs/config.py b/paddleseg/cvlibs/config.py index 892e601dfe..f2224f09fa 100644 --- a/paddleseg/cvlibs/config.py +++ b/paddleseg/cvlibs/config.py @@ -157,7 +157,7 @@ def optimizer(self) -> paddle.optimizer.Optimizer: lr = self.learning_rate args = self.optimizer_args optimizer_type = args.pop('type') - + if optimizer_type == 'sgd': return paddle.optimizer.Momentum( lr, parameters=self.model.parameters(), **args) @@ -235,14 +235,14 @@ def model(self) -> paddle.nn.Layer: @property def train_dataset(self) -> paddle.io.Dataset: - _train_dataset = self.dic.get('train_dataset').copy() + _train_dataset = self.dic.get('train_dataset', {}).copy() if not _train_dataset: return None return self._load_object(_train_dataset) @property def val_dataset(self) -> paddle.io.Dataset: - _val_dataset = self.dic.get('val_dataset').copy() + _val_dataset = self.dic.get('val_dataset', {}).copy() 
if not _val_dataset: return None return self._load_object(_val_dataset) diff --git a/train.py b/train.py index f9f4465d96..950a6f8fe9 100644 --- a/train.py +++ b/train.py @@ -115,9 +115,13 @@ def main(args): batch_size=args.batch_size) train_dataset = cfg.train_dataset - if not train_dataset: + if train_dataset is None: raise RuntimeError( 'The training dataset is not specified in the configuration file.') + elif len(train_dataset) == 0: + raise ValueError( + 'The length of train_dataset is 0. Please check if your dataset is valid' + ) val_dataset = cfg.val_dataset if args.do_eval else None losses = cfg.loss diff --git a/val.py b/val.py index cbc49d63cb..39826ffc6a 100644 --- a/val.py +++ b/val.py @@ -102,10 +102,14 @@ def main(args): cfg = Config(args.cfg) val_dataset = cfg.val_dataset - if not val_dataset: + if val_dataset is None: raise RuntimeError( 'The verification dataset is not specified in the configuration file.' ) + elif len(val_dataset) == 0: + raise ValueError( + 'The length of val_dataset is 0. 
Please check if your dataset is valid' + ) msg = '\n---------------Config Information---------------\n' msg += str(cfg) From 4cec69612493390662d16fbf6feef0bf91382e64 Mon Sep 17 00:00:00 2001 From: LutaoChu <30695251+LutaoChu@users.noreply.github.com> Date: Fri, 29 Jan 2021 14:51:18 +0800 Subject: [PATCH 48/52] add dependency prettytable for paddle.flops (#811) --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 37707980fb..3abfb8979e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ opencv-python tqdm filelock scipy +prettytable From 708461aa89e2ebe5918ae348867a175f5c9fdd82 Mon Sep 17 00:00:00 2001 From: wuzewu Date: Mon, 1 Feb 2021 11:31:55 +0800 Subject: [PATCH 49/52] Fix code conflicts --- paddleseg/core/val.py | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/paddleseg/core/val.py b/paddleseg/core/val.py index 7b05761a1a..3516de9364 100644 --- a/paddleseg/core/val.py +++ b/paddleseg/core/val.py @@ -136,38 +136,25 @@ def evaluate(model, label_area_list = label_area_list[:valid] for i in range(len(intersect_area_list)): -<<<<<<< HEAD intersect_area_all = intersect_area_all + intersect_area_list[ i] -======= - intersect_area_all = intersect_area_all + intersect_area_list[i] ->>>>>>> release/v2.0.0-rc pred_area_all = pred_area_all + pred_area_list[i] label_area_all = label_area_all + label_area_list[i] else: intersect_area_all = intersect_area_all + intersect_area pred_area_all = pred_area_all + pred_area label_area_all = label_area_all + label_area -<<<<<<< HEAD batch_cost_averager.record( - time.time() - batch_start, - num_samples=len(label)) + time.time() - batch_start, num_samples=len(label)) batch_cost = batch_cost_averager.get_average() reader_cost = reader_cost_averager.get_average() -======= - batch_cost = timer.elapsed_time() - timer.restart() ->>>>>>> release/v2.0.0-rc if local_rank == 0: progbar_val.update(iter + 1, [('batch_cost', batch_cost), 
('reader cost', reader_cost)]) -<<<<<<< HEAD reader_cost_averager.reset() batch_cost_averager.reset() batch_start = time.time() -======= ->>>>>>> release/v2.0.0-rc class_iou, miou = metrics.mean_iou(intersect_area_all, pred_area_all, label_area_all) From 3bacc868e36a522bded39bc59fe44598e433e326 Mon Sep 17 00:00:00 2001 From: haoyuying <35907364+haoyuying@users.noreply.github.com> Date: Tue, 2 Feb 2021 10:11:24 +0800 Subject: [PATCH 50/52] - (#818) --- ..._80k.yml => dnlnet_resnet101_os8_cityscapes_1024x512_80k.yml} | 1 - ...2_80k.yml => dnlnet_resnet50_os8_cityscapes_1024x512_80k.yml} | 0 2 files changed, 1 deletion(-) rename configs/dnlnet/{dnlnet_resnet101_os8_cityscape_1024x512_80k.yml => dnlnet_resnet101_os8_cityscapes_1024x512_80k.yml} (99%) rename configs/dnlnet/{dnlnet_resnet50_os8_cityscape_1024x512_80k.yml => dnlnet_resnet50_os8_cityscapes_1024x512_80k.yml} (100%) diff --git a/configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml b/configs/dnlnet/dnlnet_resnet101_os8_cityscapes_1024x512_80k.yml similarity index 99% rename from configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml rename to configs/dnlnet/dnlnet_resnet101_os8_cityscapes_1024x512_80k.yml index ba562b0591..b6fe983785 100644 --- a/configs/dnlnet/dnlnet_resnet101_os8_cityscape_1024x512_80k.yml +++ b/configs/dnlnet/dnlnet_resnet101_os8_cityscapes_1024x512_80k.yml @@ -28,4 +28,3 @@ loss: - type: CrossEntropyLoss - type: CrossEntropyLoss coef: [1, 0.4] - diff --git a/configs/dnlnet/dnlnet_resnet50_os8_cityscape_1024x512_80k.yml b/configs/dnlnet/dnlnet_resnet50_os8_cityscapes_1024x512_80k.yml similarity index 100% rename from configs/dnlnet/dnlnet_resnet50_os8_cityscape_1024x512_80k.yml rename to configs/dnlnet/dnlnet_resnet50_os8_cityscapes_1024x512_80k.yml From a3c9a02ad0de94542dcbcce9f44b2aff71806772 Mon Sep 17 00:00:00 2001 From: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com> Date: Tue, 2 Feb 2021 12:06:14 +0800 Subject: [PATCH 51/52] Add Config Check Module (#817) 
--- .../bisenet_optic_disc_512x512_1k.yml | 1 - paddleseg/core/train.py | 7 +-- paddleseg/cvlibs/config.py | 13 +++- paddleseg/utils/__init__.py | 1 + paddleseg/utils/config_check.py | 59 +++++++++++++++++++ predict.py | 4 +- train.py | 4 +- val.py | 4 +- 8 files changed, 83 insertions(+), 10 deletions(-) create mode 100644 paddleseg/utils/config_check.py diff --git a/configs/quick_start/bisenet_optic_disc_512x512_1k.yml b/configs/quick_start/bisenet_optic_disc_512x512_1k.yml index d04b1056e1..181bdf0941 100644 --- a/configs/quick_start/bisenet_optic_disc_512x512_1k.yml +++ b/configs/quick_start/bisenet_optic_disc_512x512_1k.yml @@ -39,5 +39,4 @@ loss: model: type: BiSeNetV2 - num_classes: 2 pretrained: Null diff --git a/paddleseg/core/train.py b/paddleseg/core/train.py index 31ea673e86..942e4aa970 100644 --- a/paddleseg/core/train.py +++ b/paddleseg/core/train.py @@ -160,8 +160,7 @@ def train(model, for i in range(len(loss_list)): avg_loss_list[i] += loss_list[i] batch_cost_averager.record( - time.time() - batch_start, - num_samples=batch_size) + time.time() - batch_start, num_samples=batch_size) if (iter) % log_iters == 0 and local_rank == 0: avg_loss /= log_iters @@ -176,7 +175,8 @@ def train(model, "[TRAIN] epoch={}, iter={}/{}, loss={:.4f}, lr={:.6f}, batch_cost={:.4f}, reader_cost={:.5f}, ips={:.4f} samples/sec | ETA {}" .format((iter - 1) // iters_per_epoch + 1, iter, iters, avg_loss, lr, avg_train_batch_cost, - avg_train_reader_cost, batch_cost_averager.get_ips_average(), eta)) + avg_train_reader_cost, + batch_cost_averager.get_ips_average(), eta)) if use_vdl: log_writer.add_scalar('Train/loss', avg_loss, iter) # Record all losses if there are more than 2 losses. @@ -248,7 +248,6 @@ def count_syncbn(m, x, y): flops = paddle.flops( model, [1, c, h, w], custom_ops={paddle.nn.SyncBatchNorm: count_syncbn}) - logger.info(flops) # Sleep for half a second to let dataloader release resources. 
time.sleep(0.5) diff --git a/paddleseg/cvlibs/config.py b/paddleseg/cvlibs/config.py index f2224f09fa..5ab1d29872 100644 --- a/paddleseg/cvlibs/config.py +++ b/paddleseg/cvlibs/config.py @@ -225,10 +225,19 @@ def loss(self) -> dict: @property def model(self) -> paddle.nn.Layer: model_cfg = self.dic.get('model').copy() - model_cfg['num_classes'] = self.train_dataset.num_classes - if not model_cfg: raise RuntimeError('No model specified in the configuration file.') + if not 'num_classes' in model_cfg: + if self.train_dataset and hasattr(self.train_dataset, + 'num_classes'): + model_cfg['num_classes'] = self.train_dataset.num_classes + elif self.val_dataset and hasattr(self.val_dataset, 'num_classes'): + model_cfg['num_classes'] = self.val_dataset.num_classes + else: + raise ValueError( + '`num_classes` is not found. Please set it in model, train_dataset or val_dataset' + ) + if not self._model: self._model = self._load_object(model_cfg) return self._model diff --git a/paddleseg/utils/__init__.py b/paddleseg/utils/__init__.py index d621193545..b11c17d4d8 100644 --- a/paddleseg/utils/__init__.py +++ b/paddleseg/utils/__init__.py @@ -19,3 +19,4 @@ from .utils import * from .timer import TimeAverager, calculate_eta from . import visualize +from .config_check import config_check diff --git a/paddleseg/utils/config_check.py b/paddleseg/utils/config_check.py new file mode 100644 index 0000000000..47a7049823 --- /dev/null +++ b/paddleseg/utils/config_check.py @@ -0,0 +1,59 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np + + +def config_check(cfg, train_dataset=None, val_dataset=None): + """ + To check config。 + + Args: + cfg (paddleseg.cvlibs.Config): An object of paddleseg.cvlibs.Config. + train_dataset (paddle.io.Dataset): Used to read and process training datasets. + val_dataset (paddle.io.Dataset, optional): Used to read and process validation datasets. + """ + + num_classes_check(cfg, train_dataset, val_dataset) + + +def num_classes_check(cfg, train_dataset, val_dataset): + """" + Check that the num_classes in model, train_dataset and val_dataset is consistent. + """ + num_classes_set = set() + if train_dataset and hasattr(train_dataset, 'num_classes'): + num_classes_set.add(train_dataset.num_classes) + if val_dataset and hasattr(val_dataset, 'num_classes'): + num_classes_set.add(val_dataset.num_classes) + if cfg.dic.get('model', None) and cfg.dic['model'].get('num_classes', None): + num_classes_set.add(cfg.dic['model'].get('num_classes')) + if (not cfg.train_dataset) and (not cfg.val_dataset): + raise ValueError( + 'One of `train_dataset` or `val_dataset should be given, but there are none.' + ) + if len(num_classes_set) == 0: + raise ValueError( + '`num_classes` is not found. Please set it in model, train_dataset or val_dataset' + ) + elif len(num_classes_set) > 1: + raise ValueError( + '`num_classes` is not consistent: {}. 
Please set it consistently in model or train_dataset or val_dataset' + .format(num_classes_set)) + else: + num_classes = num_classes_set.pop() + if train_dataset: + train_dataset.num_classes = num_classes + if val_dataset: + val_dataset.num_classes = num_classes diff --git a/predict.py b/predict.py index d262f04ad2..8ac2bb3b6d 100644 --- a/predict.py +++ b/predict.py @@ -18,7 +18,7 @@ import paddle from paddleseg.cvlibs import manager, Config -from paddleseg.utils import get_sys_env, logger +from paddleseg.utils import get_sys_env, logger, config_check from paddleseg.core import predict @@ -150,6 +150,8 @@ def main(args): transforms = val_dataset.transforms image_list, image_dir = get_image_list(args.image_path) + config_check(cfg, val_dataset=val_dataset) + predict( model, model_path=args.model_path, diff --git a/train.py b/train.py index 950a6f8fe9..76be634c7c 100644 --- a/train.py +++ b/train.py @@ -17,7 +17,7 @@ import paddle from paddleseg.cvlibs import manager, Config -from paddleseg.utils import get_sys_env, logger +from paddleseg.utils import get_sys_env, logger, config_check from paddleseg.core import train @@ -130,6 +130,8 @@ def main(args): msg += '------------------------------------------------' logger.info(msg) + config_check(cfg, train_dataset=train_dataset, val_dataset=val_dataset) + train( cfg.model, train_dataset, diff --git a/val.py b/val.py index 39826ffc6a..8a3f9c328b 100644 --- a/val.py +++ b/val.py @@ -19,7 +19,7 @@ from paddleseg.cvlibs import manager, Config from paddleseg.core import evaluate -from paddleseg.utils import get_sys_env, logger +from paddleseg.utils import get_sys_env, logger, config_check def parse_args(): @@ -122,6 +122,8 @@ def main(args): model.set_dict(para_state_dict) logger.info('Loaded trained params of model successfully') + config_check(cfg, val_dataset=val_dataset) + evaluate( model, val_dataset, From 450194b22d0ee2702938b012244c8ff78c52f5bd Mon Sep 17 00:00:00 2001 From: haoyuying 
<35907364+haoyuying@users.noreply.github.com> Date: Wed, 3 Feb 2021 10:08:32 +0800 Subject: [PATCH 52/52] revise qa question (#822) --- configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml | 1 - configs/emanet/README.md | 4 ++-- configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml | 3 +-- configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml | 3 +-- configs/ocrnet/ocrnet_hrnetw18_voc12aug_512x512_40k.yml | 1 - 5 files changed, 4 insertions(+), 8 deletions(-) diff --git a/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml b/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml index 3e1a140236..05a119dc86 100644 --- a/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml +++ b/configs/danet/danet_resnet50_os8_voc12aug_512x512_40k.yml @@ -6,7 +6,6 @@ model: type: ResNet50_vd output_stride: 8 pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz - num_classes: 19 backbone_indices: [2, 3] loss: diff --git a/configs/emanet/README.md b/configs/emanet/README.md index 6c16ac2a8a..47c469a94a 100644 --- a/configs/emanet/README.md +++ b/configs/emanet/README.md @@ -18,5 +18,5 @@ Expectation-Maximization Attention Networks for Semantic Segmentation. 
ICCV 2019 | Model | Backbone | Resolution | Training Iters | mIoU | mIoU (flip) | mIoU (ms+flip) | Links | |-|-|-|-|-|-|-|-| -|EMANet|ResNet50_OS8|512x512|40000|78.79%|78.90%|79.17%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e60b80b984a71f3d2b83b8a746a819c)| -|EMANet|ResNet101_OS8|512x512|40000|79.73%|79.97%| 80.67%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=f33479772409766dbc40b5f031cbdb1a)| +|EMANet|ResNet50_OS8|512x512|40000|78.60%|78.90%|79.17%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet50_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=3e60b80b984a71f3d2b83b8a746a819c)| +|EMANet|ResNet101_OS8|512x512|40000|79.47%|79.97%| 80.67%|[model](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/model.pdparams) \| [log](https://bj.bcebos.com/paddleseg/dygraph/pascal_voc12/emanet_resnet101_os8_voc12aug_512x512_40k/train.log) \| [vdl](https://paddlepaddle.org.cn/paddle/visualdl/service/app?id=f33479772409766dbc40b5f031cbdb1a)| diff --git a/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml b/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml index 9272069d0f..35b6ca8def 100644 --- a/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml +++ 
b/configs/isanet/isanet_resnet101_os8_voc12aug_512x512_40k.yml @@ -7,7 +7,6 @@ model: type: ResNet101_vd output_stride: 8 pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet101_vd_ssld.tar.gz - num_classes: 19 align_corners: True optimizer: @@ -25,4 +24,4 @@ loss: types: - type: CrossEntropyLoss - type: CrossEntropyLoss - coef: [1, 0.4] \ No newline at end of file + coef: [1, 0.4] diff --git a/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml b/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml index 2da023a696..d0d0672ec4 100644 --- a/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml +++ b/configs/isanet/isanet_resnet50_os8_voc12aug_512x512_40k.yml @@ -7,7 +7,6 @@ model: type: ResNet50_vd output_stride: 8 pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz - num_classes: 19 align_corners: True optimizer: @@ -25,4 +24,4 @@ loss: types: - type: CrossEntropyLoss - type: CrossEntropyLoss - coef: [1, 0.4] \ No newline at end of file + coef: [1, 0.4] diff --git a/configs/ocrnet/ocrnet_hrnetw18_voc12aug_512x512_40k.yml b/configs/ocrnet/ocrnet_hrnetw18_voc12aug_512x512_40k.yml index f625bcc028..3e6739c839 100644 --- a/configs/ocrnet/ocrnet_hrnetw18_voc12aug_512x512_40k.yml +++ b/configs/ocrnet/ocrnet_hrnetw18_voc12aug_512x512_40k.yml @@ -5,7 +5,6 @@ model: backbone: type: HRNet_W18 pretrained: https://bj.bcebos.com/paddleseg/dygraph/hrnet_w18_ssld.tar.gz - num_classes: 19 backbone_indices: [0] optimizer: