From 82e86b3c981b8f5f7cab4fd22641efa484f543ca Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 11:53:19 -0800 Subject: [PATCH 01/31] add capsule gans imagedetection --- CapsuleNetworks/.DS_Store | Bin 0 -> 8196 bytes CapsuleNetworks/matrix_capsule/.DS_Store | Bin 0 -> 8196 bytes .../matrix_capsule/Audio/.DS_Store | Bin 0 -> 8196 bytes .../matrix_capsule/Audio/MusicGenre/README.md | 38 +++ .../Audio/MusicGenre/build_csv.py | 84 ++++++ .../Audio/MusicGenre/music_capsule.nml | 44 ++++ .../Audio/MusicGenre/music_capsule_auto.nml | 40 +++ .../matrix_capsule/Image/.DS_Store | Bin 0 -> 6148 bytes .../matrix_capsule/Image/MNIST/README.md | 37 +++ .../matrix_capsule/Image/MNIST/build_csv.py | 77 ++++++ .../Image/MNIST/mnist_capsule.nml | 42 +++ .../Image/MNIST/mnist_capsule_auto.nml | 38 +++ CapsuleNetworks/vector_capsule/.DS_Store | Bin 0 -> 6148 bytes .../vector_capsule/Audio/.DS_Store | Bin 0 -> 6148 bytes .../vector_capsule/Audio/MusicGenre/README.md | 38 +++ .../Audio/MusicGenre/build_csv.py | 84 ++++++ .../Audio/MusicGenre/music_capsule.nml | 43 +++ .../Audio/MusicGenre/music_capsule_auto.nml | 39 +++ .../vector_capsule/Image/.DS_Store | Bin 0 -> 6148 bytes .../vector_capsule/Image/MNIST/README.md | 37 +++ .../vector_capsule/Image/MNIST/build_csv.py | 77 ++++++ .../Image/MNIST/mnist_capsule.nml | 42 +++ .../Image/MNIST/mnist_capsule_auto.nml | 39 +++ GANs/began/MNIST/README.md | 41 +++ GANs/began/MNIST/build_csv.py | 87 +++++++ GANs/began/MNIST/mnist_began.nml | 96 +++++++ GANs/began/MNIST/mnist_began_auto.nml | 42 +++ GANs/cgan/MNIST/README.md | 39 +++ GANs/cgan/MNIST/build_csv.py | 87 +++++++ GANs/cgan/MNIST/mnist_cgan.nml | 75 ++++++ GANs/dcgan/MNIST/README.md | 41 +++ GANs/dcgan/MNIST/build_csv.py | 87 +++++++ GANs/dcgan/MNIST/mnist_dcgan.nml | 74 ++++++ GANs/dcgan/MNIST/mnist_dcgan_auto.nml | 43 +++ GANs/gan/MNIST/README.md | 41 +++ GANs/gan/MNIST/build_csv.py | 87 +++++++ GANs/gan/MNIST/mnist_gan.nml | 57 ++++ GANs/gan/MNIST/mnist_gan_auto.nml | 
44 ++++ GANs/lsgan/MNIST/README.md | 41 +++ GANs/lsgan/MNIST/build_csv.py | 87 +++++++ GANs/lsgan/MNIST/mnist_lsgan.nml | 58 +++++ GANs/lsgan/MNIST/mnist_lsgan_auto.nml | 39 +++ GANs/wgan/MNIST/README.md | 41 +++ GANs/wgan/MNIST/build_csv.py | 87 +++++++ GANs/wgan/MNIST/mnist_wgan.nml | 67 +++++ GANs/wgan/MNIST/mnist_wgan_auto.nml | 41 +++ GANs/wganGP/MNIST/README.md | 41 +++ GANs/wganGP/MNIST/build_csv.py | 87 +++++++ GANs/wganGP/MNIST/mnist_wganGP.nml | 69 +++++ GANs/wganGP/MNIST/mnist_wganGP_auto.nml | 39 +++ ImageDetection/.DS_Store | Bin 0 -> 8196 bytes ImageDetection/ssd/.DS_Store | Bin 0 -> 8196 bytes ImageDetection/ssd/VOC2012/README.md | 127 +++++++++ ImageDetection/ssd/VOC2012/build_csv.py | 99 +++++++ ImageDetection/ssd/VOC2012/ssd300.nml | 244 ++++++++++++++++++ assets/Picture1.png | Bin 0 -> 70821 bytes assets/Picture2.png | Bin 0 -> 78978 bytes assets/Picture3.png | Bin 0 -> 36469 bytes 58 files changed, 2907 insertions(+) create mode 100644 CapsuleNetworks/.DS_Store create mode 100644 CapsuleNetworks/matrix_capsule/.DS_Store create mode 100644 CapsuleNetworks/matrix_capsule/Audio/.DS_Store create mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md create mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py create mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml create mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml create mode 100644 CapsuleNetworks/matrix_capsule/Image/.DS_Store create mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/README.md create mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py create mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml create mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml create mode 100644 CapsuleNetworks/vector_capsule/.DS_Store create mode 100644 CapsuleNetworks/vector_capsule/Audio/.DS_Store create mode 100644 
CapsuleNetworks/vector_capsule/Audio/MusicGenre/README.md create mode 100644 CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py create mode 100644 CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml create mode 100644 CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml create mode 100644 CapsuleNetworks/vector_capsule/Image/.DS_Store create mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/README.md create mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py create mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml create mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml create mode 100644 GANs/began/MNIST/README.md create mode 100644 GANs/began/MNIST/build_csv.py create mode 100644 GANs/began/MNIST/mnist_began.nml create mode 100644 GANs/began/MNIST/mnist_began_auto.nml create mode 100644 GANs/cgan/MNIST/README.md create mode 100644 GANs/cgan/MNIST/build_csv.py create mode 100644 GANs/cgan/MNIST/mnist_cgan.nml create mode 100644 GANs/dcgan/MNIST/README.md create mode 100644 GANs/dcgan/MNIST/build_csv.py create mode 100644 GANs/dcgan/MNIST/mnist_dcgan.nml create mode 100644 GANs/dcgan/MNIST/mnist_dcgan_auto.nml create mode 100644 GANs/gan/MNIST/README.md create mode 100644 GANs/gan/MNIST/build_csv.py create mode 100644 GANs/gan/MNIST/mnist_gan.nml create mode 100644 GANs/gan/MNIST/mnist_gan_auto.nml create mode 100644 GANs/lsgan/MNIST/README.md create mode 100644 GANs/lsgan/MNIST/build_csv.py create mode 100644 GANs/lsgan/MNIST/mnist_lsgan.nml create mode 100644 GANs/lsgan/MNIST/mnist_lsgan_auto.nml create mode 100644 GANs/wgan/MNIST/README.md create mode 100644 GANs/wgan/MNIST/build_csv.py create mode 100644 GANs/wgan/MNIST/mnist_wgan.nml create mode 100644 GANs/wgan/MNIST/mnist_wgan_auto.nml create mode 100644 GANs/wganGP/MNIST/README.md create mode 100644 GANs/wganGP/MNIST/build_csv.py create mode 100644 GANs/wganGP/MNIST/mnist_wganGP.nml create mode 
100644 GANs/wganGP/MNIST/mnist_wganGP_auto.nml create mode 100644 ImageDetection/.DS_Store create mode 100644 ImageDetection/ssd/.DS_Store create mode 100644 ImageDetection/ssd/VOC2012/README.md create mode 100644 ImageDetection/ssd/VOC2012/build_csv.py create mode 100644 ImageDetection/ssd/VOC2012/ssd300.nml create mode 100644 assets/Picture1.png create mode 100644 assets/Picture2.png create mode 100644 assets/Picture3.png diff --git a/CapsuleNetworks/.DS_Store b/CapsuleNetworks/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..d886c72cf90085464fa5367d3330d0e0a49e4f14 GIT binary patch literal 8196 zcmeHMO>h)N6n<|LU?x)}lZa*l3$6x8kSH4y2w(x5O|VD^2HAup{3WwHBTP9v6Lw}d zfe?%*O0hioQ}|Q<9JCY<9t>4l>cN9bd9cKkmX;j6aIwmZ7Fu5Pb@yzFY=GP>3Z|>N zUr)dHy7ztc_4K@60KjD4Xa>*#K%&Z|s!(&C!t|`pE0JJiF-atU01L)p7&4&4=sfFi zAVeTUAVeTUAVeTU;8s9@&TLks8=U(>8@3?=Ap&st7_ge_nEftTLrUI_f2nDt?~$}u8iyK&6x#)Wgz3)`K&{l>Wpq@@?>nb z+jh!2DXcR&*S7b%p5*WA933cFo|U%E zK1a`*)sb;oo~co*tF$_8g{0O?gZZ>|Ja73Y_*^SjY04pkqS)gaY&JtRq`34HQI3 z>h550>m!Obm~pdN3dO6&reJtSLQ{^gReN;L?;SNA`c+@kQY`k)dQBT*x^}}b4M~#f zi_ED{G>R^dyFL*YPbxdNRze(-&<#iE*6ReEg_qzB_z0%q0(=4Azz^^XT!G)=54Z|{ z!QY4&!E#)Ll~{|baV<7rBR1p1*oqsm9lNm?AIE)|!b5l%hj9c=bkN5<7Vs%NgU{fz zcotv4*YQn!3*W}`_z8ZBpW)~D6@G`8@OzxW%XkHU!#~9@j1)J)o?<9M;#HChaXZ-g zkr%Je?I`E=x?7gpKUE4s-+fPQY|R53S~qV?&Jv=80&`e;Eh#w{1=%RF8%(kZr3zGPz6ut2kykj0^0TwX^#+TbWbIY!7+F38-MOPx9?M#2u)-8|IQoV|KGjc_~G`6L24B_xed`QI-Bf)B#* Q{Ga`Z4bOit@3`~-KOKxh)N6n<}$z)YqflZa-31y=(kNR*A_2e5$6X0ZrAf^0$({=)3em`pi4v+T@n zfDnu)3b8!-Q;3#72Q7tz2cb$!J$O(l4;Gm6ujJr`i&b8<(DI_MyJx9n`SE5^FkRLC zdiuTBJ@1>Zr}xbQ049q@8-NA?5>+Nug_;`_rf2oK5(!3XNh0|JxS268pRwJ{b++L^ zh(L%yh(L%yh(L(I?STND*{n!6IroJ&Y(oS>1nx=%*!v+ymB~aPCxzs%4r+J`K(d;K zd7(b#0o*4R$V4C~h2*X@r^p^KaK#|SK;cgM7&9lC2;`)Y!W~e!0|qN&kf9)7o%~|l z955kd*oFv%2waZ<%iU|B8;W3nH6wq&kjdMY=f~sUW2CCOX4Y&;s+DTz)b2}Ic9-7TZTy{dj~AXaL1Bv(J?$e zPg*D>NtWbcRU04gjIU{pt?KBSXpN0`b;Vm_?W;N`CS<8`Y1_Kq1G&RT#*a=MJ0+sR z;LQMa6;ZxAGepD7+8Me@W;HExd!#L$b#wV?&eOGCsYiU*h7w(KhvN 
z+Pr1!u&Ne4bEj$RzEv_)x^H^JYMnD>Q=pVBTe{@LqvZiBL&d5N?@~n()_Bnda ztdAU$<*5d>zD}F3EtJ$IX|R~F4i+u{FrRDDVolj=P!!u-gUx2Bh7{O0X-g&boV15& zdkQAC26t(0(Ud-qqEel}RC!ogDXYijzNd*7+@@`@qV?N)#5jZY{w4l#?81LpTwP*#=W=?hw%WK=%9~9Ea5YF98chLcoJX4 z*YQn!3*W{w_%VKhpWHf zDfgH6K;_=(CxV)XB*ry$Hn&zDj4qLhUaBN6o1@ER+5;+1Eu6nVThSVgsu9`+fwnEW zN+#N?Rbr)PrAw048g9B~t)i&av)-GXQM48th@>$h=p(oq*W(6EVh?V_Cjyf85k>bCMRm-gO*GBpK`h`n9>rrs)@M0Y zpT`&Q6+DHf@iijtJNPcXhwtNAJcsA;qkz0$;5Rr`LD#JnbiF*2t~ocAGKzi{;D^(h zbr;W8YK>cH^P{(VrI*qL$9A0&nvrdt8t8=_=r982^b>+KcWkK=k!dXd-+1%y|95Qs zVPGKwAp-RJt4^g;NkXFh6QU9nmTSkT9-zt!vzru>yHLZ^agsb8Cwc7;L+Z!Kl=&nA ZIVmKKQ2F0K1n{4r;rSn)|9AKK|1T{>V%Gow literal 0 HcmV?d00001 diff --git a/CapsuleNetworks/matrix_capsule/Audio/.DS_Store b/CapsuleNetworks/matrix_capsule/Audio/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..668fdfc3bf80f1979386f30c6c8ca8dde30c15c9 GIT binary patch literal 8196 zcmeHMU2GIp6h3ELV5Sq=X+=AQ1y>6cq{>zbr5IqlU6j8{Wm{VQT4r~~cH(qq*_qt} zrC6T`VvG;|1kw2OAW?kqK}a;}gAXe4!HA82kq2LRG4Vx%#uq*J-dO^ri@azWotw=4 zX6`xX{(SS@xqDgwz+}N_0nh+IqKl!bP&G|qc6QxRB0-Bxh!hWyg@?cd2l7;hC1fCE zAY>q9AY>q9U?ws^XErNRnR8!Q!#ZRjWZ-|v0Q)|~=wg@%a6%yebWp`p0K!UY=7sJl z58yGe022XD2;`yEr^p^K2*n`9KoL&%7;`5~1UMm3gae9jz+hwyG8E*aQ(TO@111E9 zb;v--z>N&B+`S6Apa33N*XHk;Q7@OV-3-|;WOBCU`SJKS7^$e7HG7UERY_HItM;dd z-O;qK`votl=l8OwZrvZw1ou6zYhS(BGrKHvbWokw={mk{IcA;*TZTz4`}!=$a7UAF z!7)7EPg)oxNtWb6RT~>y6K`*htyT34+ZAD5;2B`xc>92hx#WbEkp zu~TAw7%LVqQ@U?@gKCXO6mH47PIulc5?y^+*Dj1WWU0;ScD6vy zW`}L3ts}xan|E!y+x0Bpa-H4%zIn)JHrCN~rv5#ioA*0;XZ;rSFTCp57&~$-*xP01 zJwjPNTU{T$Yw3!3Yx{=I$r`n`?zVY~vWH%W<6DnfX1;gSGW_A)x|cN_!*YiDik4?( zY_rGFN6gyDAz7ZRQ)_Fq25o_)HcI`4jCHVJ`G(=tuz@n}`56Syk(Dl296xZLv?sfPQsEK;;yThExbEb)fbYZR@o zD3+vd3mVrwplJQ{5=Uq$UM)5U%^MS%GQdXNu6zEDoaxZ4KBJ-7+Eb01wwu{H48t@e zNop(!pfRyb-12odCgS2rW#`yJh(i*#!T{vpFr0+v;T3on&cS*37(R!u;Rm=3Kf^C@ z1%89y5ix?5I18(>9v96551@$-`dGjsK90xn z1U`u;@mYKsU&Yt(bv%pj;|KU5euSUmmv|As!b!Y@m+>e3Rm?C_+8kY_r3l0;gyl3H zd+MFaG`~ 
zwTAf%q6?KfOS@s}F6kwynujE&I&}{BR_=~2mPu%;Bp#ch%VpXPDo!qFn6Is9jz-l8 z?T5hH5?v+J22d-+NKHzcB&)NyYx_DyQ7dUb$ilW!>5xeC>TJ<(N_kLb+fs$U74J=@ zdjT%Ok0jea;7<~371ohlVTy1UMm( bTB!W%e+UTAe<<_gH9Y@s{yX76b^iYa1xkYo literal 0 HcmV?d00001 diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md new file mode 100644 index 0000000..9308533 --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md @@ -0,0 +1,38 @@ +# Introduction +These sample .nml files are for training a Capsule Network classification model using audio data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +Data for this example is from the [Music Genres Dataset](http://opihi.cs.uvic.ca/sound/genres.tar.gz). The dataset features 100 audio samples from 10 music genres. +To run this example, first you will need to download and pre-process the raw data for the music classification task using the included ```build_genres.py``` script: + +```bash +$ python build_genres.py +``` + +If the script failes, make sure that you have installed all the package dependencies of this script which are listed at the top of the script: +`tarfile, shutil, pathlib, requests, natsort, and random`. Missing packages can be installed using pip: + +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/music_capsule.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. 
If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/training_data.csv" ; +``` + +NOTE: Audio files are big! Be careful with your batch size, or you may get out of memory (OOM) errors. If that happens, reduce the batch size. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) +For more information on using the AudioDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. + +You are welcome to modify these tutorial files. If citing please link to this repository. diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py new file mode 100644 index 0000000..c079336 --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py @@ -0,0 +1,84 @@ +import shutil +import tarfile +from pathlib import Path +from random import shuffle + +import requests +from natsort import humansorted + + +def download_data(): + ''' + Check if raw music genre data is present. If not, download data from the + official site. 
+ ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://opihi.cs.uvic.ca/sound/' + f = 'genres.tar.gz' + if not Path('raw_data/' + f).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + tarfile.open('raw_data/' + f).extractall() + + +def flatten(l): + return [item for sublist in l for item in sublist] + + +def write_file(validation_split): + ''' + Iterate through genres and write csv file using the supplied validation_split. + + 1. Data from each genre is shuffled and then sampled into the training + and validation sets respectively. + 2. Both the training and validation sets are then re-shuffled to intermix + the different genres. + 3. The resulting + ''' + train = [] + valid = [] + + # Sort the genres alphabetically. + genres = humansorted([str(p) for p in Path('genres').iterdir()]) + cwd = Path.cwd() + with open('label_names.txt', 'w') as of: + of.write('Class,Label\n') + for index, d in enumerate(genres): + of.write(str(index) + ',' + d.split('/')[-1] + '\n') + # Construct lines for the csv file in the form: + # /path/to/audio/file.au,class_number + # where class_number is the index of each genre class. + csv_lines = humansorted([str(cwd) + "/" + str(p) + ',' + str(index) + '\n' for p in Path(d).iterdir()]) + # shuffle the list: + shuffle(csv_lines) + # calculate the index on which to split the list into training/validation + # and then add to the respective lists. + split_index = int(validation_split * len(csv_lines)) + train.append(csv_lines[:-split_index]) + valid.append(csv_lines[-split_index:]) + + # Flatten and shuffle the resulting lists. + train = flatten(train) + valid = flatten(valid) + shuffle(train) + shuffle(valid) + + # Write the CSV file. 
+ with open('training_data.csv', 'w') as of: + of.write('Audio,Label\n') + for l in train: + of.write(l) + for l in valid: + of.write(l) + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write files with 20% validation split + write_file(0.2) diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml new file mode 100644 index 0000000..27b26f9 --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml @@ -0,0 +1,44 @@ +architecture: + input: + audio ~ audio: [maxlen = 1536, nbands = 24]; + output: + label ~ flat: [10]; + + audio + -> Reshape: [[1536, 24, 1]] + -> Conv2D: [filters = 32, kernel_size = 5, strides = 2, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Matrix: [] + -> ConvCaps:[channels = 32, kernel_size = 3, strides = 2, routings = 3] + -> ConvCaps:[channels = 32, kernel_size = 3, strides = 1, routings = 3] + -> ClassCaps:[num_capsule = 10, routings = 3] + -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/training_data.csv"; + input: + img ~ from "Audio" + -> audio: [maxlen = 1536, nbands = 24] + -> AudioDataGenerator: []; + output: + label ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 32, + shuffle = True, + shuffle_init = True; + +train : + compile: + optimizer = Adam:[lr = 0.001], + loss = "spreadloss", + metrics = ['accuracy']; + run: + nb_epoch = 2; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml new file mode 100644 index 0000000..f223bd0 --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml @@ -0,0 +1,40 @@ +oracle("mode") = "matrix_capsule" +architecture: + input: + audio ~ audio: [maxlen = 1536, nbands = 24]; + output: 
+ label ~ flat: [10]; + + audio + -> auto + -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/training_data.csv"; + input: + img ~ from "Audio" + -> audio: [maxlen = 1536, nbands = 24] + -> AudioDataGenerator: []; + output: + label ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 32, + shuffle = True, + shuffle_init = True; + +train : + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy']; + run: + nb_epoch = 2; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/matrix_capsule/Image/.DS_Store b/CapsuleNetworks/matrix_capsule/Image/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..ff4ef2ae3bc32e8234afc999197fb9c8838ecd67 GIT binary patch literal 6148 zcmeH~JqiLr422VaK(Mj2oW=uqgF&>g&_)j+YAINV{T$tw9|Tux5qW{+&14c~zhY-2 zBD%Pr=OPmk8Np3uW}#z>{ZKZtm9^|-f4-eA$LsatRlTgW8aRIA^*oO$BtQZrKmsH{ z0zX8+?rqp;9Lh)nBtQaB0``6=aMPOFLjBc&;3EKZfV3OdK1)E0C7?C6g(3sfXoW_r z`WRw)Z-cu-q*dc&NNv-yYBMhs@f#!J%G`@bVFWja|i4xEs!kEub~Eg(3svhk#>XAc3zE FcmQ?q65{{> literal 0 HcmV?d00001 diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/README.md b/CapsuleNetworks/matrix_capsule/Image/MNIST/README.md new file mode 100644 index 0000000..da1bb84 --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/README.md @@ -0,0 +1,37 @@ +# Introduction +These sample .nml files are for training a Matrix Capsule Network classification model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). 
+ +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_matrix_capsule.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py b/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py new file mode 100644 index 0000000..b933aae --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py @@ -0,0 +1,77 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,label\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml new file mode 100644 index 0000000..b41e4bb --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml @@ -0,0 +1,42 @@ +architecture: + input: + img ~ image: [shape = [28,28], channels = 1]; + output: + label ~ flat: [10]; + + img + -> Conv2D: [filters = 32, kernel_size = 5, strides = 2, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Matrix: [] + -> ConvCaps:[channels = 32, kernel_size = 3, strides = 2, routings = 3] + -> ConvCaps:[channels = 32, kernel_size = 3, strides = 1, routings = 3] + -> ClassCaps:[num_capsule = 10, routings = 3] + -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/training_data.csv"; + input: + img ~ from "image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098]; + output: + label ~ from "label" + -> flat: [10] + -> 
FlatDataGenerator:[]; + params: + batch_size = 64, + validation_split = 0.2 ; + +train : + compile: + optimizer = Adam:[lr = 0.001], + loss = "spreadloss", + metrics = ['accuracy']; + run: + nb_epoch = 5; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml new file mode 100644 index 0000000..1b8c366 --- /dev/null +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml @@ -0,0 +1,38 @@ +oracle("mode") = "matrix_capsule" + +architecture: + input: + img ~ image: [shape = [28,28], channels = 1]; + output: + label ~ flat: [10]; + + img -> auto -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/training_data.csv"; + input: + img ~ from "image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098]; + output: + label ~ from "label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 64, + validation_split = 0.2 ; + +train : + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy']; + run: + nb_epoch = 5; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/vector_capsule/.DS_Store b/CapsuleNetworks/vector_capsule/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..ae3a2e9dff69884e3f1d52357082c973a4cf7113 GIT binary patch literal 6148 zcmeHKyH3ME5S)cXibRu=@_vCoFi|L|`2ikA2r0+~O7AM)#iud*ps*}UX=q?p+MBz* zojctWUM~RK9s3(#3t&Tc#L0)b`M&$iE-K<^amLVpz8s#1;lSm2=G--IaYT=IO!#Zu z;{#84!|Ttn=bfo6DIf);fE17dQeatuD$<*)QF3hXS_u^tV`4= z1*E`Kfy-QOz5d_P7yADxNh>KJ1^$!*w%R}JcYIRS*4g8{);9Vh-E+R^Zkz{&LzH7; jlw&Tu9A8IL<~5&lzbhOQgU)!+iTW9EU1U<=w-xvX%I6u@ literal 0 HcmV?d00001 diff --git a/CapsuleNetworks/vector_capsule/Audio/.DS_Store b/CapsuleNetworks/vector_capsule/Audio/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..0132d794c71906dab1bc6450b02dc03d813d7938 GIT binary patch 
literal 6148 zcmeHKyJ`bL3>+mcOwzbaxxbJ9Qa}paiUR(9XmrP3I3&iWgCRx$;)Lli zu49%UHct?H;gHA-&5}w?s?~^LNoT%QT`wFGlMbul!|KUa6N<&td4G#?SWi@x0#e{q zf&1Lfy#FuhXXgJ?l6F!+3fz +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/music_capsule.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/training_data.csv" ; +``` + +NOTE: Audio files are big! Be careful with your batch size, or you may get out of memory (OOM) errors. If that happens, reduce the batch size. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) +For more information on using the AudioDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. + +You are welcome to modify these tutorial files. If citing please link to this repository. 
diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py new file mode 100644 index 0000000..c079336 --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py @@ -0,0 +1,84 @@ +import shutil +import tarfile +from pathlib import Path +from random import shuffle + +import requests +from natsort import humansorted + + +def download_data(): + ''' + Check if raw music genre data is present. If not, download data from the + official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://opihi.cs.uvic.ca/sound/' + f = 'genres.tar.gz' + if not Path('raw_data/' + f).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + tarfile.open('raw_data/' + f).extractall() + + +def flatten(l): + return [item for sublist in l for item in sublist] + + +def write_file(validation_split): + ''' + Iterate through genres and write csv file using the supplied validation_split. + + 1. Data from each genre is shuffled and then sampled into the training + and validation sets respectively. + 2. Both the training and validation sets are then re-shuffled to intermix + the different genres. + 3. The resulting + ''' + train = [] + valid = [] + + # Sort the genres alphabetically. + genres = humansorted([str(p) for p in Path('genres').iterdir()]) + cwd = Path.cwd() + with open('label_names.txt', 'w') as of: + of.write('Class,Label\n') + for index, d in enumerate(genres): + of.write(str(index) + ',' + d.split('/')[-1] + '\n') + # Construct lines for the csv file in the form: + # /path/to/audio/file.au,class_number + # where class_number is the index of each genre class. 
+ csv_lines = humansorted([str(cwd) + "/" + str(p) + ',' + str(index) + '\n' for p in Path(d).iterdir()]) + # shuffle the list: + shuffle(csv_lines) + # calculate the index on which to split the list into training/validation + # and then add to the respective lists. + split_index = int(validation_split * len(csv_lines)) + train.append(csv_lines[:-split_index]) + valid.append(csv_lines[-split_index:]) + + # Flatten and shuffle the resulting lists. + train = flatten(train) + valid = flatten(valid) + shuffle(train) + shuffle(valid) + + # Write the CSV file. + with open('training_data.csv', 'w') as of: + of.write('Audio,Label\n') + for l in train: + of.write(l) + for l in valid: + of.write(l) + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write files with 20% validation split + write_file(0.2) diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml new file mode 100644 index 0000000..5ad61a4 --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml @@ -0,0 +1,43 @@ +architecture: + input: + audio ~ audio: [maxlen = 1536, nbands = 24]; + output: + label ~ flat: [10]; + + audio + -> Reshape: [[1536, 24, 1]] + -> Conv2D:[filters = 128, kernel_size = 9, strides = 1, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Vector:[capsule_dim = 8, channels = 32, kernel_size = [9,9],strides = [2,2], padding = 'valid', name = 'primarycap_conv2D'] + -> DigitCaps: [num_capsule = 10, capsule_dim = 16, routings = 3, name = 'digitcaps'] + -> ClassCaps:[num_capsule = 10] + -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/training_data.csv"; + input: + img ~ from "Audio" + -> audio: [maxlen = 1536, nbands = 24] + -> AudioDataGenerator: []; + output: + label ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 32, + shuffle = True, + 
shuffle_init = True; + +train: + compile: + optimizer = Adam:[lr = 0.0001], + loss = margin_loss, + metrics = ['accuracy']; + run: + nb_epoch = 2; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml new file mode 100644 index 0000000..db11587 --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml @@ -0,0 +1,39 @@ +oracle("mode") = "vector_capsule" + +architecture: + input: + audio ~ audio: [maxlen = 1536, nbands = 24]; + output: + label ~ flat: [10]; + + audio -> auto -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/training_data.csv"; + input: + img ~ from "Audio" + -> audio: [maxlen = 1536, nbands = 24] + -> AudioDataGenerator: []; + output: + label ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 32, + shuffle = True, + shuffle_init = True; + +train: + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy']; + run: + nb_epoch = 2; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/vector_capsule/Image/.DS_Store b/CapsuleNetworks/vector_capsule/Image/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..e565933bb7f86cf910bdc95bfa695dd96bf4576b GIT binary patch literal 6148 zcmeH~J&pn~427Q;kXG7;k}}O6fEz@JEhwOY1E5_>Bnm{Iqx0;zVb~gt(6i*c*oo)o zD<)$Awz{2`z#PCQ-4)+H49pnMu*D7=>~T0<|IWXc%h}WT$h{uWd5y>QT$YG{2#A0P zh=2%;h(H|TJpaE&^h|mb5fFiK5b*Cqp}W@9)-^sI9HIrFPMHqlJbDRg@dUM|wyw<3 zEV~EGQj0dk^HEMMxv!?SuDu+V&4=Zk&AS+y^>SEYK(iX6K?Fo#Kw#42xu5?(^#A(* zL5V^U5P>%%VAJ(}z2Zyd+4}T)ork>r3CJ!R1)O? 
literal 0 HcmV?d00001 diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/README.md b/CapsuleNetworks/vector_capsule/Image/MNIST/README.md new file mode 100644 index 0000000..6b6ad80 --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/README.md @@ -0,0 +1,37 @@ +# Introduction +These sample .nml files are for training a Capsule Network classification model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. 
+ +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py b/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py new file mode 100644 index 0000000..b933aae --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py @@ -0,0 +1,77 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. 
+ ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. + ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,label\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml new file mode 100644 index 0000000..b8aab34 --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml @@ -0,0 +1,42 @@ +architecture: + input: + img ~ image: [shape = [28,28], channels = 1]; + output: + label ~ flat: [10]; + + img + -> Conv2D:[filters = 256, kernel_size = 9, strides = 1, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Vector:[capsule_dim = 8, channels = 32, kernel_size = [9,9],strides = [2,2], padding = 'valid', name = 
'primarycap_conv2D'] + -> DigitCaps: [num_capsule = 10, capsule_dim = 16, routings = 3, name = 'digitcaps'] + -> ClassCaps:[num_capsule = 10] + -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/training_data.csv"; + input: + img ~ from "image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098]; + output: + label ~ from "label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 64, + shuffle = True, + shuffle_init = True; + +train : + compile: + optimizer = Adam:[lr = 0.001], + loss = margin_loss, + metrics = ['accuracy']; + run: + nb_epoch = 5; + dashboard: ; + + + + + + diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml new file mode 100644 index 0000000..243859f --- /dev/null +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml @@ -0,0 +1,39 @@ +oracle("mode") = "vector_capsule" + +architecture: + input: + img ~ image: [shape = [28,28], channels = 1]; + output: + label ~ flat: [10]; + + img -> auto -> label; + +source: + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/training_data.csv"; + input: + img ~ from "image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098]; + output: + label ~ from "label" + -> flat: [10] + -> FlatDataGenerator:[]; + params: + batch_size = 64, + shuffle = True, + shuffle_init = True; + +train : + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy']; + run: + nb_epoch = 5; + dashboard: ; + + + + + + diff --git a/GANs/began/MNIST/README.md b/GANs/began/MNIST/README.md new file mode 100644 index 0000000..5667ae3 --- /dev/null +++ b/GANs/began/MNIST/README.md @@ -0,0 +1,41 @@ +# Introduction +These sample .nml files are for training a Boundary Equilibrium gan model using image data in [NeoPulse™ AI 
Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/began/MNIST/mnist_began.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/began/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and a vector of noise to a training CSV file. + +**mnist_began.nml:** Full self-defined architecture without any automation. + +**mnist_began_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture. 
+ +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/GANs/began/MNIST/build_csv.py b/GANs/began/MNIST/build_csv.py new file mode 100644 index 0000000..285b90c --- /dev/null +++ b/GANs/began/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). 
+ Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. + ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,noise\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (64)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (64)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/began/MNIST/mnist_began.nml b/GANs/began/MNIST/mnist_began.nml new file mode 100644 index 0000000..d2f32c6 --- /dev/null +++ b/GANs/began/MNIST/mnist_began.nml @@ -0,0 +1,96 @@ +oracle("mode") = "BEGAN" + +gamma = 0.5 + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/began/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [64] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + 
shuffle_init = True; + +architecture name:generator: + input: + z ~ flat: [64]; + output: + g ~ image: [shape = [28, 28], channels = 1]; + + z -> Dense: [6272] + -> Activation:['elu'] + -> Reshape: [[7,7,128]] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> UpSampling2D: [[2,2]] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> UpSampling2D: [[2,2]] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [1, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> g; + +architecture name: discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ image: [shape = [28, 28], channels = 1]; + + img -> Conv2D: [1, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, strides = 2, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [256, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [256, kernel_size = 3, strides = 2, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [384, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [384, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Flatten: [] + -> Dense: [128] + -> Dense: [6272] + -> Activation: ['elu'] + -> Reshape: [[7,7,128]] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> UpSampling2D: [[2,2]] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: 
['elu'] + -> UpSampling2D: [[2,2]] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [128, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> Conv2D: [1, kernel_size = 3, padding = "same"] + -> Activation: ['elu'] + -> d; + +train : + compile: + optimizer = Adam: [0.00005], + loss = l1loss; + run: + nb_epoch = 2; + dashboard: ; + diff --git a/GANs/began/MNIST/mnist_began_auto.nml b/GANs/began/MNIST/mnist_began_auto.nml new file mode 100644 index 0000000..c453002 --- /dev/null +++ b/GANs/began/MNIST/mnist_began_auto.nml @@ -0,0 +1,42 @@ +oracle("mode") = "BEGAN" + +gamma = 0.5 + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/began/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [64] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + +architecture name:generator: + input: + z ~ flat: [64]; + output: + g ~ image: [shape = [28, 28], channels = 1]; + + z -> auto -> g; + +architecture name: discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ image: [shape = [28, 28], channels = 1]; + + img -> auto -> d; + +train : + compile: + optimizer = auto, + loss = auto; + run: + nb_epoch = 2; + dashboard: ; + diff --git a/GANs/cgan/MNIST/README.md b/GANs/cgan/MNIST/README.md new file mode 100644 index 0000000..a8c2905 --- /dev/null +++ b/GANs/cgan/MNIST/README.md @@ -0,0 +1,39 @@ +# Introduction +These sample .nml files are for training a Conditional gan model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). 
+ +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/cgan/MNIST/mnist_cgan.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/cgan/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths, a vector of noise and corresponding labels to a training CSV file. + +**mnist_cgan.nml:** Full self-defined architecture without any automation. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
diff --git a/GANs/cgan/MNIST/build_csv.py b/GANs/cgan/MNIST/build_csv.py new file mode 100644 index 0000000..3fb7064 --- /dev/null +++ b/GANs/cgan/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,noise,label\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (64)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + ',' + str(train_labels[index]) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (64)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + ',' + str(train_labels[index]) + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/cgan/MNIST/mnist_cgan.nml b/GANs/cgan/MNIST/mnist_cgan.nml new file mode 100644 index 0000000..3923d23 --- /dev/null +++ b/GANs/cgan/MNIST/mnist_cgan.nml @@ -0,0 +1,75 @@ +oracle("mode") = "CGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/began/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [64] + -> FlatDataGenerator: [], + label ~ from "label" + -> flat: [10] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + + +architecture name:generator : + input: + z ~ flat: [64], + label ~ flat: [10]; + output: + g ~ image:[shape = [28,28], 
channels = 1]; + + label -> Dense:[64] + -> imd + + [z, imd] -> Multiply:[] + -> Dense: [256] + -> LeakyReLU: [alpha = 0.2] + -> BatchNormalization: [momentum = 0.8] + -> Dense: [512] + -> LeakyReLU: [alpha = 0.2] + -> BatchNormalization: [momentum = 0.8] + -> Dense: [1024] + -> LeakyReLU: [alpha = 0.2] + -> BatchNormalization: [momentum = 0.8] + -> Dense: [784, activation = 'tanh'] + -> Reshape: [[28,28,1]] + -> g; + +architecture name:discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1], + label ~ flat: [10]; + output: + d; + + img -> Flatten: [] + -> img_f + + label -> Dense:[784] + -> label_f + + [img_f, label_f] -> Multiply:[] + -> Dense: [512] + -> LeakyReLU: [alpha = 0.2] + -> Dense: [512] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.4] + -> Dense: [512] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.4] + -> Dense: [1, activation = 'sigmoid'] + -> d; + +train : + compile: + optimizer = Adam: [lr = 0.0002, beta_1 = 0.5], + loss = 'binary_crossentropy'; + run: + nb_epoch = 2; \ No newline at end of file diff --git a/GANs/dcgan/MNIST/README.md b/GANs/dcgan/MNIST/README.md new file mode 100644 index 0000000..8ae4b00 --- /dev/null +++ b/GANs/dcgan/MNIST/README.md @@ -0,0 +1,41 @@ +# Introduction +These sample .nml files are for training a Deep Convolutional gan model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. 
+ +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/dcgan/MNIST/mnist_dcgan.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/dcgan/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and a vector of noise to a training CSV file. + +**mnist_dcgan.nml:** Full self-defined architecture without any automation. + +**mnist_dcgan_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/GANs/dcgan/MNIST/build_csv.py b/GANs/dcgan/MNIST/build_csv.py new file mode 100644 index 0000000..8ac5f29 --- /dev/null +++ b/GANs/dcgan/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. 
+ ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,noise\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/dcgan/MNIST/mnist_dcgan.nml b/GANs/dcgan/MNIST/mnist_dcgan.nml new file mode 100644 index 0000000..4fcc913 --- /dev/null +++ b/GANs/dcgan/MNIST/mnist_dcgan.nml @@ -0,0 +1,74 @@ +oracle("mode") = "DCGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/dcgan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + + + + +architecture name:generator: + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28, 28], channels = 1]; + + z -> Dense: [6272] + -> Activation: ['relu'] + -> Reshape: [[7,7,128]] + -> BatchNormalization: [momentum = 0.8] + -> 
UpSampling2D: [[2,2]] + -> Conv2D:[128, kernel_size = 3, padding = 'same'] + -> Activation: ['relu'] + -> BatchNormalization: [momentum = 0.8] + -> UpSampling2D:[[2,2]] + -> Conv2D: [64, kernel_size = 3, padding = 'same'] + -> Activation: ['relu'] + -> BatchNormalization: [momentum = 0.8] + -> Conv2D: [1, kernel_size = 3, padding = 'same'] + -> Activation: ['tanh'] + -> g; + +architecture name: discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> Conv2D:[32, kernel_size = 3, strides = 2, input_shape = [28, 28, 1], padding = 'same'] + -> LeakyReLU:[alpha = 0.2] + -> Dropout:[0.25] + -> Conv2D: [64, kernel_size = 3, strides = 2, padding = 'same'] + -> ZeroPadding2D: [] + -> LeakyReLU:[alpha = 0.2] + -> Dropout: [0.25] + -> BatchNormalization: [momentum = 0.8] + -> Conv2D: [128, kernel_size = 3, strides = 2, padding = 'same'] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.25] + -> BatchNormalization : [momentum = 0.8] + -> Conv2D: [256, kernel_size= 3, strides = 1, padding = 'same'] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.25] + -> Flatten:[] + -> Dense:[1, activation = 'sigmoid'] + -> d; + + +train: + compile: + optimizer = Adam: [0.0002, 0.5], + loss = 'binary_crossentropy'; + run: + nb_epoch = 2; + diff --git a/GANs/dcgan/MNIST/mnist_dcgan_auto.nml b/GANs/dcgan/MNIST/mnist_dcgan_auto.nml new file mode 100644 index 0000000..578a49c --- /dev/null +++ b/GANs/dcgan/MNIST/mnist_dcgan_auto.nml @@ -0,0 +1,43 @@ +oracle("mode") = "DCGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/dcgan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + + + + +architecture name:generator: + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28, 28], channels = 1]; + + z -> auto -> g; + +architecture 
name: discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> auto -> d; + + +train: + compile: + optimizer = auto, + loss = auto; + run: + nb_epoch = 2; + diff --git a/GANs/gan/MNIST/README.md b/GANs/gan/MNIST/README.md new file mode 100644 index 0000000..2d27d70 --- /dev/null +++ b/GANs/gan/MNIST/README.md @@ -0,0 +1,41 @@ +# Introduction +These sample .nml files are for training a gan model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/gan/MNIST/mnist_gan.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/gan/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and a vector of noise to a training CSV file. + +**mnist_gan.nml:** Full self-defined architecture without any automation. 
+ +**mnist_gan_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture and set hyperparameters as default values. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/GANs/gan/MNIST/build_csv.py b/GANs/gan/MNIST/build_csv.py new file mode 100644 index 0000000..8ac5f29 --- /dev/null +++ b/GANs/gan/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. 
+ ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. + ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,noise\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/gan/MNIST/mnist_gan.nml b/GANs/gan/MNIST/mnist_gan.nml new file mode 100644 index 0000000..9a0d33f --- /dev/null +++ b/GANs/gan/MNIST/mnist_gan.nml @@ -0,0 +1,57 @@ +oracle("mode") = "GAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/gan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: 
[], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 256, + shuffle = True, + shuffle_init = True; + + +architecture name:generator : + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + z -> Dense:[128] + -> LeakyReLU: [alpha = 0.2] + -> Dense:[128] + -> LeakyReLU: [alpha = 0.2] + -> Dense: [784, activation = 'tanh'] + -> Reshape: [[28,28,1]] + -> g; + + + +architecture name:discriminator : + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat:[1]; + + img -> Flatten:[] + -> Dense:[128] + -> LeakyReLU: [alpha = 0.2] + -> Dense: [128] + -> LeakyReLU: [alpha = 0.2] + -> Dense: [1] + -> Activation: ['sigmoid'] + -> d; + + + +train : + + compile: + optimizer = Adam: [lr = 0.0005], + loss = 'binary_crossentropy'; + run: + nb_epoch = 10; \ No newline at end of file diff --git a/GANs/gan/MNIST/mnist_gan_auto.nml b/GANs/gan/MNIST/mnist_gan_auto.nml new file mode 100644 index 0000000..ec16742 --- /dev/null +++ b/GANs/gan/MNIST/mnist_gan_auto.nml @@ -0,0 +1,44 @@ +oracle("mode") = "GAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/gan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 256, + shuffle = True, + shuffle_init = True; + + +architecture name:generator : + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + z -> auto -> g; + + + +architecture name:discriminator : + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat:[1]; + + img -> auto -> d; + + + +train : + + compile: + optimizer = auto, + loss = auto; + run: + nb_epoch = 10; \ No newline at end of file diff --git a/GANs/lsgan/MNIST/README.md b/GANs/lsgan/MNIST/README.md new file mode 100644 index 0000000..1564f98 --- /dev/null +++ b/GANs/lsgan/MNIST/README.md @@ -0,0 +1,41 @@ +# 
Introduction +These sample .nml files are for training a Least Square gan model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/mnist_lsgan.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and a vector of noise to a training CSV file. + +**mnist_lsgan.nml:** Full self-defined architecture without any automation. + +**mnist_lsgan_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture and hyperparameters values. 
+ +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/GANs/lsgan/MNIST/build_csv.py b/GANs/lsgan/MNIST/build_csv.py new file mode 100644 index 0000000..8ac5f29 --- /dev/null +++ b/GANs/lsgan/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). 
+ Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. + ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,noise\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/lsgan/MNIST/mnist_lsgan.nml b/GANs/lsgan/MNIST/mnist_lsgan.nml new file mode 100644 index 0000000..8685323 --- /dev/null +++ b/GANs/lsgan/MNIST/mnist_lsgan.nml @@ -0,0 +1,58 @@ +oracle("mode") = "LSGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = 
True; + +architecture name:generator: + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + z -> Dense: [256] + -> LeakyReLU: [alpha = 0.2] + -> BatchNormalization: [momentum = 0.8] + -> Dense: [512] + -> LeakyReLU: [alpha = 0.2] + -> BatchNormalization: [momentum = 0.8] + -> Dense: [1024] + -> LeakyReLU: [alpha=0.2] + -> BatchNormalization: [momentum = 0.8] + -> Dense: [784, activation = 'tanh'] + -> Reshape: [[28, 28, 1]] + -> g; + +architecture name: discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> Flatten:[] + -> Dense:[512] + -> LeakyReLU: [alpha = 0.2] + -> Dense: [256] + -> LeakyReLU: [alpha = 0.2] + -> Dense: [1] + -> d; + +train: + compile: + optimizer = Adam: [0.0002, beta_1 = 0.5], + loss_generator = 'binary_crossentropy', + loss_discriminator = 'mse', + loss = 'mse'; + run: + nb_epoch = 2; + diff --git a/GANs/lsgan/MNIST/mnist_lsgan_auto.nml b/GANs/lsgan/MNIST/mnist_lsgan_auto.nml new file mode 100644 index 0000000..cb908aa --- /dev/null +++ b/GANs/lsgan/MNIST/mnist_lsgan_auto.nml @@ -0,0 +1,39 @@ +oracle("mode") = "LSGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: [];; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + +architecture name:generator: + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + z -> auto -> g; + +architecture name: discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> auto -> d; + +train: + compile: + optimizer = auto, + loss = auto; + run: + nb_epoch = 2; + diff --git a/GANs/wgan/MNIST/README.md b/GANs/wgan/MNIST/README.md new file mode 100644 index 0000000..558b971 --- /dev/null +++ b/GANs/wgan/MNIST/README.md @@ -0,0 
+1,41 @@ +# Introduction +These sample .nml files are for training a wasserstein gan model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/wgan/MNIST/mnist_wgan.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/wgan/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and a vector of noise to a training CSV file. + +**mnist_wgan.nml:** Full self-defined architecture without any automation. + +**mnist_wgan_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture. 
+ +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/GANs/wgan/MNIST/build_csv.py b/GANs/wgan/MNIST/build_csv.py new file mode 100644 index 0000000..8ac5f29 --- /dev/null +++ b/GANs/wgan/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). 
+ Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. + ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('image,noise\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/wgan/MNIST/mnist_wgan.nml b/GANs/wgan/MNIST/mnist_wgan.nml new file mode 100644 index 0000000..99cb63a --- /dev/null +++ b/GANs/wgan/MNIST/mnist_wgan.nml @@ -0,0 +1,67 @@ +oracle("mode") = "WGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + 
+architecture name:generator : + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + + z -> Dense: [6272, activation='relu'] + -> Reshape: [[7, 7, 128]] + -> BatchNormalization: [momentum = 0.8] + -> UpSampling2D: [] + -> Conv2D: [128, kernel_size = 4, padding = "same"] + -> Activation: ['relu'] + -> BatchNormalization: [momentum = 0.8] + -> UpSampling2D: [] + -> Conv2D: [64, kernel_size = 4, padding = 'same'] + -> Activation: ['relu'] + -> BatchNormalization: [momentum = 0.8] + -> Conv2D: [1, kernel_size = 4, padding = 'same'] + -> Activation: ['tanh'] + -> g; + + +architecture name:discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> Conv2D: [16, kernel_size = 3, strides = 2, padding = "same"] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.25] + -> Conv2D: [32, kernel_size = 3, strides = 2, padding = "same"] + -> ZeroPadding2D:[] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.25] + -> BatchNormalization: [momentum = 0.8] + -> Conv2D: [64, kernel_size = 3, strides = 1, padding = "same"] + -> LeakyReLU: [alpha = 0.2] + -> Dropout: [0.25] + -> Flatten:[] + -> Dense: [1, activation = "linear"] + -> d; + + +train : + compile: + optimizer = RMSprop:[lr = 0.00005], + loss = wasserstein_loss; + run: + nb_epoch = 2; \ No newline at end of file diff --git a/GANs/wgan/MNIST/mnist_wgan_auto.nml b/GANs/wgan/MNIST/mnist_wgan_auto.nml new file mode 100644 index 0000000..807406c --- /dev/null +++ b/GANs/wgan/MNIST/mnist_wgan_auto.nml @@ -0,0 +1,41 @@ +oracle("mode") = "WGAN" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv"; + input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + +architecture name:generator : + input: + z ~ flat: [100]; + output: + g ~ image: [shape = 
[28,28], channels = 1]; + + + z -> auto -> g; + + +architecture name:discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> auto -> d; + + +train : + compile: + optimizer = auto, + loss = auto; + run: + nb_epoch = 2; \ No newline at end of file diff --git a/GANs/wganGP/MNIST/README.md b/GANs/wganGP/MNIST/README.md new file mode 100644 index 0000000..1a189d7 --- /dev/null +++ b/GANs/wganGP/MNIST/README.md @@ -0,0 +1,41 @@ +# Introduction +These sample .nml files are for training a wasserstein gan with gradient penalty model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/GANs/wganGP/MNIST/mnist_wganGP.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/GANs/wganGP/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and a vector of noise to a training CSV file. 
+ +**mnist_wganGP.nml:** Full self-defined architecture without any automation. + +**mnist_wganGP_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/GANs/wganGP/MNIST/build_csv.py b/GANs/wganGP/MNIST/build_csv.py new file mode 100644 index 0000000..0b454ab --- /dev/null +++ b/GANs/wganGP/MNIST/build_csv.py @@ -0,0 +1,87 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. 
+ There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. + ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + with open('training_data.csv', 'w') as of: + of.write('Image,Noise\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + noise_num = np.random.normal(0, 1, (100)) + noise = "" + for noise_ele in noise_num: + noise += str(noise_ele) + "|" + noise = noise[:-1] + of.write(str(Path(img_file).resolve()) + ',' + noise + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/GANs/wganGP/MNIST/mnist_wganGP.nml b/GANs/wganGP/MNIST/mnist_wganGP.nml new file mode 100644 index 0000000..285d4cf --- /dev/null +++ b/GANs/wganGP/MNIST/mnist_wganGP.nml @@ -0,0 +1,69 @@ +oracle("mode") = "WGAN_GP" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/wganGP/MNIST/training_data.csv"; + 
input: + x ~ from "image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + +architecture name:generator : + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + + z -> Dense: [1024, activation='relu'] + -> LeakyReLU: [] + -> Dense: [6272] + -> BatchNormalization: [] + -> LeakyReLU: [] + -> Reshape: [[7, 7, 128]] + -> Conv2DTranspose: [128, [5,5], strides= 2, padding = 'same'] + -> BatchNormalization: [] + -> LeakyReLU: [] + -> Conv2DTranspose: [64, [5,5], padding = 'same'] + -> BatchNormalization: [] + -> LeakyReLU: [] + -> Conv2D: [64, [5,5], padding = "same"] + -> BatchNormalization: [] + -> LeakyReLU: [] + -> Conv2DTranspose: [64, [5,5], strides = 2, padding = 'same'] + -> BatchNormalization: [] + -> LeakyReLU: [] + -> Conv2D: [1, [5,5], padding = 'same'] + -> Activation: ['tanh'] + -> g; + + +architecture name:discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> Conv2D: [64, [5,5], padding = "same"] + -> LeakyReLU: [] + -> Conv2D: [128, [5,5], kernel_initializer = 'he_normal', strides = 2] + -> LeakyReLU: [] + -> Conv2D: [128, [5,5], kernel_initializer = 'he_normal', strides = 2] + -> LeakyReLU: [] + -> Flatten:[] + -> Dense: [1024, kernel_initializer = 'he_normal'] + -> LeakyReLU: [] + -> Dense: [1, kernel_initializer = 'he_normal'] + -> d; + + +train: + compile: optimizer = Adam: [0.0001, beta_1 = 0.5, beta_2 = 0.9]; + run: + nb_epoch = 2; \ No newline at end of file diff --git a/GANs/wganGP/MNIST/mnist_wganGP_auto.nml b/GANs/wganGP/MNIST/mnist_wganGP_auto.nml new file mode 100644 index 0000000..00ad5a8 --- /dev/null +++ b/GANs/wganGP/MNIST/mnist_wganGP_auto.nml @@ -0,0 +1,39 @@ +oracle("mode") = "WGAN_GP" + +source: + bind = "/DM-Dash/NeoPulse_Examples/GANs/wganGP/MNIST/training_data.csv"; + input: + x ~ from 
"image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator: [], + z ~ from "noise" + -> flat: [100] + -> FlatDataGenerator: []; + params: + batch_size = 128, + shuffle = True, + shuffle_init = True; + +architecture name:generator : + input: + z ~ flat: [100]; + output: + g ~ image: [shape = [28,28], channels = 1]; + + + z -> auto -> g; + + +architecture name:discriminator: + input: + img ~ image: [shape = [28, 28], channels = 1]; + output: + d ~ flat: [1]; + + img -> auto -> d; + + +train: + compile: optimizer = auto; + run: + nb_epoch = 2; \ No newline at end of file diff --git a/ImageDetection/.DS_Store b/ImageDetection/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..c8a836c870f19a653ddf1583f4068362f9bb8752 GIT binary patch literal 8196 zcmeHLUu+ab7@yy@z;3(HDIhHe~6al6+- zOR)wMV-(-~1JNgA4Aw{RPegq%CTNT%QUjXkgD>clF)>l&i{I?-L1}41d@vYxHZ$MM z_s#ds%x`}?n;FI!S_|qr#;O@(5@jKIITg1_!Y=Z(6!JB~2=ZrkE^Sz8YJHy0P3s*f z0t5mC0t5mC0t5mC?gj+tpDmI!!++ni24#RifWSS8fY=|Rl!c7>a?F$N)k)bAJb<(MZucLx9P0e@upClt7&Q~apg zogwDQpbQWQ5SWgD_-sm;O^=!$ntXmYblYiYxQb9o>8#mvB&kd)n_G4uF<=cPoRm|r zTT}UcqSc*p2GV}L$FhtY^;CMdt_>ym!ZyovQo5<-U1wd@sLTF7-BhiiR;ysDwi_qm z86-)TndolZriV32R$kqf&JZ z%F6I+nVV9NP4jbu)UQ>Po}={E`Li^b>%2^9l;mC`mDUVTu~B(M;+=Bepogrd+^lSo zDp4=@)62`!dTck`wpds>C`Ro{+0M?KX40%K)2zhW^l3E{m%Yif3m+2LOJGQ z9x70cI;_A-tipOUVgoj!6`kn9lh}zK96%Z>G#rM79FAcKV|W^;@eE$TOL!S)aSm_b z9ejw7@G(BYWqgAxn83IA5!dmvH@i@g*j>dXcoM&nyh&gG-YxXv?dcoY(Y|Am^7!3L z-|yymoV{;RRkUu^LygUkw%mB#Q{M6|M4Ze!mueoHT%HwKMOea)0CjnK;G$=88zUOvHC1tzHv}@DLH-Yd+i%QFsZ_U3RTaE0hSI zRSdI9*(?+1d8upLvXyMJiR6CNHf6gkzCX_XJxjk}SJ+R)(7(Yjo7lO4n7N#oxfadX zh8D!J9gh(wyRi!i^x`0rND(`aBI|Q=7$erLkF!pL7>FzR4vZ`+h@N2zmrfwp8$aXf&VE2D2*rLtu*`WQxv?2_!z-k4GRN2yURb**bq+9}FOD}+y-JPMGFrBG8v)e)` zsnJMGObp%!|F z2t)`(2t)`(2t)|n2nf)b%`at=b6;wsK0+Wu;Fd&ye;*RmxJ-p|R7&ybpiYzkBr9lI zTr{RSK=>p=nF{5ol){x_%IX23E5Z^3(w+1PZcZ{4%26q$JA-s*2vXA&J zH<$^sLD6r?7WVSHo!Ou-7mnLK&$&F#<~nR^ut%TS>bXJIcCCVNwoQvn_I28>=?%7c 
zMc4F2oQzZ`imIqRx-mSwa%Hk1(bU{D(vTQly_&|&O{+#mRAu&}#`SIc`wtx+J~DFj zWtkbmuLG+dm+31L({y?pGkLFn;0nueKsbJ>HRo_3Vjq@)I9CQjt(vAyH#WCz-lA>m zvh$vsu@754dbLkjw9)O(f>oltJM*4X?05Yhy(XV^@8ZY^rFib?I>YGZx{ao3nTn#+ zl_^k{s+U8Nx2p3)X+B%b*3co;&JHk-J^F9iU1EQ+zu7-v zP*DX9(=iJR(SZA~97(LjIy`{&Xu~daq8od#7bYz9q7VHj;Rr@>6vyx^j^jBzk5};) z-o`t47oXsBe1R|V6~4z=oWsvJkKb_-f8bB~oUtC$WHbFOC)oVrwb>jO zY+ic9v-#`{N#Q$Y%}y-3XL<9Q^(~hvFz%&a#mg&cDA*_IB`8-WmU#1H#+zSBnQJU-h{yF9?SxR<7;jRE z`uY^P(h_a8qUuwH>DqOgrdQBTP^E5zwoxIr>$l0cX>F6rH>A!crR{%`_6K&3U0|0G zLnYC)7ITTH^+eKDNMREm#ujYFc65Y9-A6P%Ks2>+5CKtj2#0VOPvA*BMTC7?(Denp zgcEojZ{STL?)&%vAK_!1#3`J{XCZ;V!x@|(N7-%TDErGq%JzHdv{?-DjQ#Z5h5GFA zYgw4ru^8yP-e Conv2D:[126, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv6_2_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv6_2 + + + conv6_2 + -> Conv2D:[24, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv6_2_mbox_loc"] + -> boxes_conv6_2 + + + boxes_conv6_2 + -> AnchorBoxes: [300, 300, this_scale = 0.37, next_scale = 0.54, aspect_ratios = [1.0, 2.0, 0.5, 3.0, 0.33], this_steps = 32, this_offsets = 0.5, name = "conv6_2_mbox_priorbox"] + -> anchors_conv6_2 + + + boxes_conv6_2 + -> Reshape: [[-1,4]] + -> boxes_conv6_2 + +``` + +In details, Confidence RPN is implemented by 3 * 3 convolution layer. 6 default boxes are defined around each anchor point, and each box is classified into 21 classes (20 onject classes + background), so convolutional layer has 6 * 21 filters. After convolutional layer, prediction results are reshaped as (21,). + +``` + conv6_2 + -> Conv2D:[126, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv6_2_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv6_2 + +``` + +Box RPN is also implemented by 3 * 3 convolution layer, which has 24 filters. (24 = 4 * 6, 4 location coordinates and 6 default boxes each anchor point). 
Then, box predictions are reshaped as (4,).
+
+```
+    conv6_2
+    -> Conv2D:[24, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv6_2_mbox_loc"]
+    -> boxes_conv6_2
+
+    boxes_conv6_2
+    -> Reshape: [[-1,4]]
+    -> boxes_conv6_2
+
+```
+
+Anchor box labels for training are created by the "AnchorBoxes" layer, which uses the intermediate results between the box convolution and reshape layers as input.
+
+```
+    boxes_conv6_2
+    -> AnchorBoxes: [300, 300, this_scale = 0.37, next_scale = 0.54, aspect_ratios = [1.0, 2.0, 0.5, 3.0, 0.33], this_steps = 32, this_offsets = 0.5, name = "conv6_2_mbox_priorbox"]
+    -> anchors_conv6_2
+
+```
+
+In the end, all class and box prediction results across all feature layers need to be concatenated into final output.
+
+```
+
+    [classes_conv4_3, classes_fc7, classes_conv6_2, classes_conv7_2, classes_conv8_2, classes_conv9_2]
+    -> Concatenate:[axis = 1]
+    -> classes_concat
+
+    [boxes_conv4_3, boxes_fc7, boxes_conv6_2, boxes_conv7_2, boxes_conv8_2, boxes_conv9_2]
+    -> Concatenate: [axis = 1]
+    -> boxes_concat
+
+    [boxes_concat, classes_concat]
+    -> Concatenate: [axis = 2]
+    -> prediction
+
+```
+
+# Data
+The data for this task can be found at: http://host.robots.ox.ac.uk/pascal/VOC/voc2012/
+To run this example, first you will need to download the raw data and pretrained vgg16 model for the VOC2012 task using the included ```build_csv.py``` script:
+
+```bash
+$ python build_csv.py
+```
+
+If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, tarfile, numpy, requests, and imageio`.
+
+Missing packages can be installed using pip:
+```bash
+$ pip install <package_name>
+```
+
+Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided.
To begin training:
+```bash
+$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/ImageDetection/ssd/VOC2012/ssd300.nml
+```
+The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line:
+```bash
+bind = "/DM-Dash/NeoPulse_Examples/ImageDetection/ssd/VOC2012/training_data.csv" ;
+```
+
+# Tutorial Files
+**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file.
+
+**ssd300.nml:** VGG Based detector runs on 300 * 300 Image.
+
+# Tutorial Videos and Guides
+Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal)
+
+For more information on using the ImageDataGenerator visit the [Data section](https://docs.neopulse.ai/NML-source/#data) of the NeoPulse™ AI Studio Documentation
+
+# License
+Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use.
diff --git a/ImageDetection/ssd/VOC2012/build_csv.py b/ImageDetection/ssd/VOC2012/build_csv.py
new file mode 100644
index 0000000..0352477
--- /dev/null
+++ b/ImageDetection/ssd/VOC2012/build_csv.py
@@ -0,0 +1,99 @@
+import gzip
+import shutil
+from pathlib import Path
+import tarfile
+import os
+
+import numpy as np
+import requests
+from imageio import imwrite
+from xml.etree import ElementTree
+import json
+
+
+def download_data():
+    '''
+    Check if raw VOC2012 data and VGG pre_trained model are present. If not, download VOC2012 data from the official site.
+    '''
+
+    Path('raw_data').mkdir(parents=True, exist_ok=True)
+
+    URL_voc = 'http://host.robots.ox.ac.uk/pascal/VOC/voc2012/'
+    f = 'VOCtrainval_11-May-2012.tar'
+    if not Path('raw_data/' + f).is_file():
+        r = requests.get(URL_voc + f, stream=True)
+        with open('raw_data/' + f, 'wb') as f_z:
+            shutil.copyfileobj(r.raw, f_z)
+    tarfile.open('raw_data/' + f).extractall()
+
+    Path('pre_trained_model').mkdir(parents=True, exist_ok=True)
+    URL_vgg = 'https://drive.google.com/uc?authuser=0&id=1sBmajn6vOE7qJ8GnxUJt4fGPuffVUZox&export=download'
+    f_vgg = 'vgg_16.h5'
+    if not Path('pre_trained_model/' + f_vgg).is_file():  # check the VGG weights file, not the VOC tarball (was: f)
+        r_vgg = requests.get(URL_vgg, stream=True)  # URL_vgg is already the complete download URL; appending f_vgg made it malformed
+        with open('pre_trained_model/' + f_vgg, 'wb') as f_k:
+            shutil.copyfileobj(r_vgg.raw, f_k)
+
+def write_csv_file():
+    '''
+    Write absolute path to image files and bounding box labels to training_data.csv.
+    '''
+
+    xml_path = Path("VOCdevkit/VOC2012/Annotations/")
+    image_folder = Path("VOCdevkit/VOC2012/JPEGImages/")
+
+    label2id = {"aeroplane" : 0,
+                "bicycle" : 1,
+                "bird" : 2,
+                "boat" : 3,
+                "bottle" : 4,
+                "bus" : 5,
+                "car" : 6,
+                "cat" : 7,
+                "chair" : 8,
+                "cow" : 9,
+                "diningtable" :10,
+                "dog" : 11,
+                "horse" : 12,
+                "motorbike" : 13,
+                "person" : 14,
+                "pottedplant" : 15,
+                "sheep" : 16,
+                "sofa" : 17,
+                "train" : 18,
+                "tvmonitor" : 19}
+
+    with open('training_data.csv', 'w') as of:
+        of.write('image,label\n')
+        filenames = os.listdir(str(xml_path))
+        for index, filename in enumerate(filenames):
+            tree = ElementTree.parse(str(xml_path / filename))
+            root = tree.getroot()
+            bounding_boxes = []
+            size_tree = root.find('size')
+            width = float(size_tree.find('width').text)
+            height = float(size_tree.find('height').text)
+            for object_tree in root.findall('object'):
+                for bounding_box in object_tree.iter('bndbox'):
+                    xmin = float(bounding_box.find('xmin').text)/width
+                    ymin = float(bounding_box.find('ymin').text)/height
+                    xmax = float(bounding_box.find('xmax').text)/width
+                    ymax =
float(bounding_box.find('ymax').text)/height + + class_name = object_tree.find('name').text + class_id = label2id[class_name] + bounding_box = [xmin, ymin, xmax, ymax, class_id] + bounding_boxes.append(bounding_box) + image_name = root.find('filename').text + image_path = str((image_folder / image_name).resolve()) + jstring = json.dumps(bounding_boxes) + of.write(image_path + ",\"" + jstring + "\"\n") + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/ImageDetection/ssd/VOC2012/ssd300.nml b/ImageDetection/ssd/VOC2012/ssd300.nml new file mode 100644 index 0000000..ae039d5 --- /dev/null +++ b/ImageDetection/ssd/VOC2012/ssd300.nml @@ -0,0 +1,244 @@ +oracle("mode") = "SSD" + +num_classes = 20 +class_names = ["aeroplane", + "bicycle", + "bird", + "boat", + "bottle", + "bus", + "car", + "cat", + "chair", + "cow", + "diningtable", + "dog", + "horse", + "motorbike", + "person", + "pottedplant", + "sheep", + "sofa", + "train", + "tvmonitor"] + + +architecture from "/DM-Dash/Neopulse_Examples/ImageDetection/ssd/VOC2012/pre_trained_model/vgg_16.h5": + input: + x ~ image: [shape = [300, 300], channels = 3]; + output: + prediction ~ bbox: [num_classes = 20]; + + + x -> Conv2D:[64, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv1_1'] + -> Conv2D:[64, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv1_2'] + -> MaxPooling2D:[pool_size = [2,2], strides = [2,2], padding = "same", name = 'pool1'] + + + -> Conv2D:[128, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv2_1'] + -> Conv2D:[128, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', 
kernel_regularizer = l2:[0.0005], name = 'conv2_2'] + -> MaxPooling2D:[pool_size = [2,2], strides = [2,2], padding = "same", name = 'pool2'] + + + -> Conv2D:[256, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv3_1'] + -> Conv2D:[256, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv3_2'] + -> Conv2D:[256, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv3_3'] + -> MaxPooling2D:[pool_size = [2,2], strides = [2,2], padding = "same", name = 'pool3'] + + + -> Conv2D:[512, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv4_1'] + -> Conv2D:[512, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv4_2'] + -> Conv2D:[512, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv4_3'] + -> conv4_3 + + + conv4_3 + -> MaxPooling2D:[pool_size = [2,2], strides = [2,2], padding = "same", name = 'pool4'] + + + -> Conv2D:[512, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv5_1'] + -> Conv2D:[512, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv5_2'] + -> Conv2D:[512, [3,3], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv5_3'] + -> MaxPooling2D:[pool_size = [3,3], strides = [1,1], padding = "same", name = 'pool5'] + + + -> Conv2D:[1024, [3,3], dilation_rate= 
[6,6], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'fc6'] + + + -> Conv2D:[1024, [1,1], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'fc7'] + -> fc7 + + + fc7 + -> Conv2D:[256, [1,1], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv6_1'] + -> ZeroPadding2D:[padding = [[1,1],[1,1]], name = 'conv6_padding'] + -> Conv2D:[512, [3,3], strides = [2,2], activation = 'relu', padding = "valid", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv6_2'] + -> conv6_2 + + + conv6_2 + -> Conv2D:[128, [1,1], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv7_1'] + -> ZeroPadding2D:[padding = [[1,1],[1,1]], name = 'conv7_padding'] + -> Conv2D:[256, [3,3], strides = [2,2], activation = 'relu', padding = "valid", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv7_2'] + -> conv7_2 + + + conv7_2 + -> Conv2D:[128, [1,1], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv8_1'] + -> Conv2D:[256, [3,3], strides = [1,1], activation = 'relu', padding = "valid", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv8_2'] + -> conv8_2 + + + conv8_2 + -> Conv2D:[128, [1,1], strides = [1,1], activation = 'relu', padding = "same", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv9_1'] + -> Conv2D:[256, [3,3], strides = [1,1], activation = 'relu', padding = "valid", kernel_initializer = 'he_normal', kernel_regularizer = l2:[0.0005], name = 'conv9_2'] + -> conv9_2 + + + conv4_3 + -> L2Normalization: [gamma_init = 20, name = 'conv4_3_norm'] + -> conv4_3_norm 
+ + + conv4_3_norm + -> Conv2D:[84, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name ="conv4_3_norm_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv4_3 + + fc7 + -> Conv2D:[126, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "fc7_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_fc7 + + conv6_2 + -> Conv2D:[126, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv6_2_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv6_2 + + conv7_2 + -> Conv2D:[126, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv7_2_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv7_2 + + conv8_2 + -> Conv2D:[84, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv8_2_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv8_2 + + conv9_2 + -> Conv2D:[84, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv9_2_mbox_conf"] + -> Reshape: [[-1, 21]] + -> classes_conv9_2 + + [classes_conv4_3, classes_fc7, classes_conv6_2, classes_conv7_2, classes_conv8_2, classes_conv9_2] + -> Concatenate:[axis = 1] + -> classes_concat + + + + conv4_3_norm + -> Conv2D:[16, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name ="conv4_3_norm_mbox_loc"] + -> boxes_conv4_3 + + fc7 + -> Conv2D:[24, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "fc7_mbox_loc"] + -> boxes_fc7 + + conv6_2 + -> Conv2D:[24, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv6_2_mbox_loc"] + -> boxes_conv6_2 + + conv7_2 + -> Conv2D:[24, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv7_2_mbox_loc"] + -> 
boxes_conv7_2 + + conv8_2 + -> Conv2D:[16, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv8_2_mbox_loc"] + -> boxes_conv8_2 + + conv9_2 + -> Conv2D:[16, [3,3], padding = "same", kernel_initializer = "he_normal", kernel_regularizer = l2:[0.0005], name = "conv9_2_mbox_loc"] + -> boxes_conv9_2 + + + boxes_conv4_3 + -> AnchorBoxes: [300, 300, this_scale = 0.1, next_scale = 0.2, aspect_ratios = [1.0, 2.0, 0.5], this_steps = 8, this_offsets = 0.5, name = "conv4_3_mbox_priorbox"] + -> Reshape: [[-1,8]] + -> anchors_conv4_3 + + boxes_fc7 + -> AnchorBoxes: [300, 300, this_scale = 0.2, next_scale = 0.37, aspect_ratios = [1.0, 2.0, 0.5, 3.0, 0.33], this_steps = 16, this_offsets = 0.5, name = "fc7_mbox_priorbox"] + -> anchors_fc7 + + boxes_conv6_2 + -> AnchorBoxes: [300, 300, this_scale = 0.37, next_scale = 0.54, aspect_ratios = [1.0, 2.0, 0.5, 3.0, 0.33], this_steps = 32, this_offsets = 0.5, name = "conv6_2_mbox_priorbox"] + -> anchors_conv6_2 + + boxes_conv7_2 + -> AnchorBoxes: [300, 300, this_scale = 0.54, next_scale = 0.71, aspect_ratios = [1.0, 2.0, 0.5, 3.0, 0.33], this_steps = 64, this_offsets = 0.5, name = "conv7_2_mbox_priorbox"] + -> anchors_conv7_2 + + boxes_conv8_2 + -> AnchorBoxes: [300, 300, this_scale = 0.71, next_scale = 0.88, aspect_ratios = [1.0, 2.0, 0.5], this_steps = 100, this_offsets = 0.5, name = "conv8_2_mbox_priorbox"] + -> anchors_conv8_2 + + boxes_conv9_2 + -> AnchorBoxes: [300, 300, this_scale = 0.88, next_scale = 1.05, aspect_ratios = [1.0, 2.0, 0.5], this_steps = 300, this_offsets = 0.5, name = "conv9_2_mbox_priorbox"] + -> anchors_conv9_2 + + + boxes_conv4_3 + -> Reshape: [[-1,4]] + -> boxes_conv4_3 + + boxes_fc7 + -> Reshape: [[-1,4]] + -> boxes_fc7 + + boxes_conv6_2 + -> Reshape: [[-1,4]] + -> boxes_conv6_2 + + boxes_conv7_2 + -> Reshape: [[-1,4]] + -> boxes_conv7_2 + + boxes_conv8_2 + -> Reshape: [[-1,4]] + -> boxes_conv8_2 + + boxes_conv9_2 + -> Reshape: [[-1,4]] + -> boxes_conv9_2 + + 
[boxes_conv4_3, boxes_fc7, boxes_conv6_2, boxes_conv7_2, boxes_conv8_2, boxes_conv9_2] + -> Concatenate: [axis = 1] + -> boxes_concat + + + [boxes_concat, classes_concat] + -> Concatenate: [axis = 2] + -> prediction; + +source: + bind = "/DM-Dash/Neopulse_Examples/ImageDetection/ssd/VOC2012/training_data.csv"; + input: + img ~ from "image" + -> image: [shape = [300, 300], channels = 3] + -> ImageDataGenerator:[]; + output: + label ~ from "label" + -> bbox: [] + -> BBoxDataGenerator:[num_classes = 20]; + params: + batch_size = 16, + shuffle = True, + shuffle_init = True; + +train: + compile: + optimizer = Adam:[lr = 0.001, beta_1 = 0.9, beta_2 = 0.999, epsilon = 0.0001, decay = 0.0]; + run: + nb_epoch = 2; + dashboard: ; + diff --git a/assets/Picture1.png b/assets/Picture1.png new file mode 100644 index 0000000000000000000000000000000000000000..c7f7ce03c1415d3a2097eed6e4455c3a421e7e41 GIT binary patch literal 70821 zcmeFZg;!K>+cr!|2ucZpAYIZSJ%k7X(o)hLN`t@*F-S=%sDN~cfk+J9-5t_HH`2@i zBMbxIp5N_#zu$VF=lu`vwKj`2d)8ii@9R3R^E{5@Jg$k+)mFWA^WIGy9GqL~YRb=W zaPWt5aBx2o69Q*MyHhd1KU|+@s!BK|!;HUy40AWFBJlUJ2dPK_2?XrI$yWjPQzJg<6=I;gZeombxsGr4|aZ985>U{n>`Gd z_CmOyCEmefrT8DmXiDo4`YN6IF$;mx|2S@j5^p_>pdb$YzYjV^mUakErt8iB@eMI> zN#*}?mg2vc{CC9u8!-RN%J}bk`CkU*zv<$?3HHDB<-g_ie?X7_XSQ7agkSBTPaL86 z_jr3Jl)=6Bv-EaucsldtGLa*9;5V;2R?0&K$i&gj+1{_!RLrC`&^FUL8z8i7m44UWEtScFB36AI3} z+p3~T^A~Pemr<}h(~;gt+B_N8@i_2KX?W=0f?gPiQfu0^IE2q%Ow}+&hw)%8w-lB# zeWy(DQD$RL*v`2X&eeSoecX~Y4Ga69QXWH+)@W;LyJE{zU!iG?&Bqx1W^9HWQY<8X z%UiffAqT}WV8Qz?LpfI``6cC=@>nET%0kk)*2p5{!b|fP7Lum4UZX>L-iS6|RmGhRPiVVtY{Xs5+fvV&qS%ik8a_p$z1RLd)Kv zWRJ`N@cb6+OHrV8){Q?_m#cS&tgbJnH?{ri^(*ZAK5oYQH`+;mzgm+zFRq5a9BR2n zw@9LDd*s{=Zf|@fe&$ejb+H?l3q1;FZZw}qYRisH8e3V!H%&JkO#Zptb5d{-Y&>db zJ75N`5wB^%?8ikn9|j-J&7TkZ3tH|E2(>hV>zI#T6D7%r@cgLpE^b8hZ_MxX=(bYB zZe!lw+<8q;h@m2HL0oT2w!CnM446rPWws81sr9xDvE#s;)wH;k)NQ7hTQre`_%;Hw zy2R7qGqVMEfeCCFNc0R$H-p7MJcTYF{sGiqlz~j0RPv;s>fhM9R0*&1v#!f(>~>*{ 
zFgrm^BdT#rP9>sg?f#+juzR?;mUA**4~E+zC7-c%t6m@^-E_)ZC%_D0zV=d=i*4VYdW3c+X`OT zwVZAQ?&Y2hghh(XUd4#K!?hf0zG61y8l$l-Qzobx)UaR8x3YcONPzsF;SL8~pN|Oj zB(-a;Ft4WgTT02`YNk>T_lHfJEh0WWTNJ37FOOcei^s2-oXpLb=<*tPRv<204 zxUkoV9KOZ=ngWdt?=D4_ge@R|99n#arSpZ`U+osxxMu?cwXhA>{#-lDiCrs>$%+xk zqE)TU|L_(IY8G7*VZ4%912+F;L!$?Hs8I4CYf4Mz^Ip}by%nXdOe;f@OO4SJ`Z{t| zCV~4^8Sf+)Yo#x?zho4a)c=fDTwGg=mn|VaG;T&Dt3=#PJDW?)xmu8$%sOT`z7wC~3BwM#H?sY1b19>aCRJYJa6-Vum#58q#g)$}e-U8N zi>T(!xuq*VBoLep>n_a_W$Ff0WsUvf$HSsbYLkUp$}k|FhR8$ybaX?kPV(+1L*|vr zNKT04wjT}Y=Uhyrcin7(bi^FygEk%vUF^cF-jei3_wZRB#}Yc(AQ@asANB3_C|r3R zBdFnx+kty!s>km`5Q#bbtI$b9V}UUAYMhWD;p=x2@E_9Xy$TJcw|FHv($mm6&yHK{ zna1GA^2fvxt>#qGp4l33Y2*JSNp6DZCyNd_={rlx0>=;5(?-oiDK7?){N{i9`C$Aj z3retHFcg~@B}W3LDq^>N`9~_^Z2CjOSw&GD23mX@Ixr7;oESATeXSE8(~F`NT4jV@=!*5 zGJPxjQ2w7HQS=^X1(n(Z+T`-dj4{g7XWPp@`N}QcI2>`jQwXz!a?kHX5x*kyGEw;< z-PD;Y-Ho77a=TtM2w4+>Vs@>p5btw>qUG~4h~@Vy$Dc-5EyZ~HB$7N?w2ONky#1J& zw>?vhR&+SdY`}PAEbw6Qdadn?_E)OB;Q@5RoO=V!2lEk)2~)$y;g*A;eAG3<{d-WOQy5ZsN}V?Igb|gt_nMWcoqAaM8x?!Zg9_ifBvv+Fh_i3 zM25fC1QLFewT4&f5Nsh;_^E7D{Rh>i_;FaxjZ$x^ZU~9?=T)KL-x>q*`^f68i0MY& z@Qdq0b)%_Go^(ld(DTaAQUezBooSbK9t^*p~xixfnm7j z=RnrTdY7LYEUQ_@@=zbkC2pNYi8!7Ve~)q@KY8$l(Gz}fbc`L7Tj2+!B436*^!(R1 zjSQa=xtgRL%GX4TL}Gjq)W0^Lh8zYi@i7YPGA(_i*q3ZRe&-M>To}sNus@vf??S`0 zrB{FNfyTerZ|hAG3d4w0-F04q7y)GRZo9csq(DShqIz%BSryDJ+6fsOBmKFpJ#T0s znNerkgkV4dJ)kpFMq$si_nV`Aw61s=1Av`8{%22edE+JV4Ek3j z^9<<3JW3GooUJK5b5$mYeVT%%8lTJDn)&W>Bkcl$mv_-e2@Zh-#%x1laKpeG;#>%l zRkxaj>WC!j>cGP$|Fkax%-LRxb{ltSg$SU15ynf0S4Mt(?+<}>-6dQ6Hoii%ju_%&c=-G_24tp z2R{CgH!h+N1~NGbiFBu)YwD9Gc3PPir(Eh+bfsQC)3a*KG=`8^HiG-< z>7y4?8rL~FNzn*%GcWVznZAV4%`QjHqYz};{;s4iOE09_g55#;ADpkp1}xmC#_SFx z|A!BYuqgg8dpNjTMf{sQXc@l?2_jU{p~Px+)=vmTdmv9{ymkK_4T{A4mUxW|zPBrn z{A~`^|3P@~u?X)y%zcOU-{6D-uR>WdwL#ZqWyQA)W+G!)ehYg$2scFnve7E+&i_vX zyr8C_3Z8XHNxo5iaVQefLN&MvQhhkGB;K0_Ji%r>P@Lz>hSUDftXP{8xYw?{!2(&K zxL~c_);-35Zaew_5CyfpxGMg$R4KOOUmF%cGGQ%O7YCwR+W#u><6Q4iQaTJKw^Lp4 z20jV6JfCMqypMW_#hssW7WrgL03?T~Cp(Nesi%O|9^F>EM%Bk-tFI&pYsFI6T`O}! 
z*6=ekXx=rtqwrD0VBdHi&&SZW({7jNBO!u?G1XRvh>)lazQ0PRqoyL=2^J6|(s_|F z)UEye(=8E%x6eDb#|%wcEHvqHfLq*Z7n43mNXn7Faf(N4_g2j$YXn% z3=49g+$8uDO2jmCxvi?e+XErrZo$m8B%_?qm~yV8{>5WjHHj|)NGCAzaOs~JrNBd~ zvV4Xkd*q$6*+KzKaNHFavbc!J+nDQ_9(kjpV*tCEQU3A>oSRFR zonN^<)JDWSZ|*y3xFNMV4V|{|w-9q~+2Bt56YhJR=`;M|Y%%bVZ6d4YyS}#cxAQAh z%hKg;%asdq*uS}=ZeibjIx`5-=j+q_S1Y!5#>+MV4YOr}5LBq%nIj1bb z&}3uFx`-FoM^Or{*@&e4BpQ$q0NXc#vg+9>|7{LmK`XyG%aa#vspX7P{~8dpBQ#)NYIBxqquC;);h^^C~u z7Ey6EpTS*HmPocEMyq~r9R4fPY)Dc+ousr}ezUp6pqH-YU)WESZuK4iE84cw z6N}jum#b@<+$igN7=1L64NI7kI)G{zwtqSF$w5Cv0ViUoQ2?h2tl0Tk_~}>quod)t zLuGIgnT_IC%1F+R=6cONH{v25kT~t?3WPuBL-b?j3*EJ>$wc;xLI5#Kc>b#5e3JAV>>D6fV{#A#DgL;=) zp=%)X@@wN=M7hA4s`>BLN|Ah>vYg>8(d3{(_KnTHfhWV2J@_xMQsL=E;royoH8<8 zt-{~(#unGRwE(m>-{i%&_lMY&K4xN|Git zZL0|Je)eerh>pg2#DpPb$QOJh7WdrHX>E_7C9|Jr&rMXaro+p&$oBmQr*}?Z(Z*)& zTKqs|u1)UdjP3eAe59ymF`x^Vk^_haeRg2Mi)+uiWRp+~BtQo3?ybPvzmdFHeID?q zgV_>VqdN`Eb|ZpfIcC$w-E3oD_0F6}#gK$qvsJJpS6RcxCo_p%SPQhzn5m|5L(7A* z{Txej(^TEsXqmj2ZQqLnUm6-qUqjTaN{?XY=F_7td9_28fW0KsH zh}V&=Q>HG(2ziYqaTD>ttt^0|Z)1t!MzSub%hIr`LX$;M)J1*LGv45N~5-LO~*Jw=sL zexx^P))~uo%~6*Ds_=ZuW6Q7Mh|TcWHJ6{mJUar?f}C7K-UnW(lw_M-9`jr0E_wWo zTP_M^r7l@;AmHD!<9ghn6hWj0-;&_!iF|Lw94CY@CQ{|6 zvI<`lxXWLwsFD1wveuU*bgyIOe>>STT*JQ+VuvB-buVZvPy`|HwJ^PYPy#5mvs{^% z|H{#{H*m)#G-=fT1u(c3g|D&21hzU@G^E#J`e5k?h1!p^ALcPhb}YDslZbWVcW1n} z?>D1FN4|YCpog9oTG5d18yJf>0YR1l)Uc9?p;cQYsRr$S-~FoTPsyE_cQHqNaRs+l z!?%EhKMTHp_md^09dAKf`87>{vMjP@&K}`UJfHM>vzjjK*+1OcZ_dq__Vy0Vk8p(Pm;|5g zK@)26-So>ek9}4kQ8P$1?3HWy6q+skJX|3->*LOuX9O>Ysz?PoOvlR}%z!S&5}L>G zPXKc54yYS7Nda~_Ky(XB#Tnm`;IRvNfrQ&AYy~H3qX>Yz$OES`Q>*U zVz(9?g&#gaW0fe^#^dfGpg_wru{`UiNM#GHGi1`|H}E43mB2kTzxoFdtln+mIkcV< zL8HT&FRhO^W6vS*M(ZMnD7Q z<Yf6!wibbezhQ>I-{930SD*Ft-xLY{_H^fJZ)Z z=0&^g9o)spgJ9X65a^}M6WK>B1>|BpwDe{VdgrXS&+D$hODzQn>1b! 
zfDd_B49S)Y8}l>0pF%vu+24S$a^q%E#W1hyMr1KD?(_yWw?BSw1?YB>L?57lB-~<7 zj;27(xB;pfXJgPWJSRYI`HSC*C@3D-1>Ci(f8o&mKuo>csRa-eGqBK+-OjTGNT?a4 zg0ojEEh9& zu8T4wkYlRM76d|VHP=^YNvrdIRL<(qHgrP1O7cRVEWCx8De$8h-4#ExV`N2;a0Nf4a#} zU8s!E3Aq@5^I7qH(Dy)#DdIWRkaAO0;VvuLf(QS2e^}uOpgPpHz0VK8!WM_XnGU9@ zOF-rpvUCqRT6D?@zDNGhAtt3HCE$wrFMaV*ZLOG}39|`FO#nIWlx4qz|3d;rp0RHw zxp6T&tSK(ruVkv5W)iG=S!PZL7rkui=PkTb*7v19j;}1X8PKVPl#u%#A%T=`o%EFb zh3blk2t+(pG-!afJph@pWDYfRDXzMgm&B6vy7=y ziqjt&7+8h9^0(X}MCZ6vew8wT=lqXqG*#$OhXoSG5UB`p7@BTAtFk2)b~0tlBXK${ zC-*q`Wad7b31F@v%fNj@&@NEZ6Z=7qc*(7t@spE}`|Fraf2byVEV^%jI~W#qcML`r z&{{?1?N>b^m#RKpG{z4#xHsJkomzmx)Ec}(YV{m+w&{j!;_q>${ipf~Zlep%H2pfx6Jxek$PHIatrT9_KdnB^$pZ;)` zM6#I;!E3ImyBjBI3t6a)feM`wZF-LQ*zbdtP9Y?dTo+*V4I$>RgxU?QGcAsq#HASJ zEq_g#ksTKMw5S;f6CmRYge(=7GPjQ_S#?Su3V9VIqVyE9Eq%U3W|2}plek7T=(-o$ z$vjEBE&peks=IhNoGKJOmH@4kKAW-ApsNcI<2~UzrET7r2OaD@VX+H62N;1q*=?o8 z;`XNB-!o_(H=i*FY=7Wb*5$itT5R`IE;s<(=+xv*7y zi)6`2aJzBf3s5+SG^8I=SG=diLcO$OCy$g}JdURlz5Yxd{Da@Rn}=nVX>8ud2C?*) za}b9PlS~{>lrCl6<)@FQ0eVqAJKh4r?M9!Uf)S2X5pHAfs}QtDwj=A%CK+{wQe335 z8AaT35i&XQjv4V=<5{RuGbZP#*k%&+vwv)OX>uFWvK0L&&e25i?eogo6o?22ru6d9f*yekXPIqI}TTphq6rV zoylKvqD)Parkg?Y5n>YZ?Ngkq=r(ewC*`mZxQ+CtD(g&t)VEzvMCUGy7&`r}JhLe1 zyy=142yZ?cr8z^~xQH(Co}cAs8WZ~hHZHuhF?9ZK>MW0ih&d-d2M;KhvuI*nSzN9}MqGj=S#TY#dm1#A ziRY?Y>^%p?UQ*QdFa;(>7E8?9CyE|5jRYU)ZxWdu&w?Cmh6x+us}R+p&2{6Q-$tGA z9k*78Hp)H`+h5ldOH#tI&C7w@S3qXwhvjLREOeRVW=$P~7VVJ!5 zNI<+P_vg(`QR;5rWi3U@QrfwI@r*?Sq)_HEGY8$OF{%`oAMEf4oyT}i-;ytzq zx)ik9cz>j;fUe8g^{N)gpnw`V`&-SBb8i$--bZ*AMQ-nLzwCh0-gMv5)HVbjun&WK$seC0Gu*D0 zfJUdS2~1CX`IN~;;hZ;tC(+M)pME|%5c0r|<`)@0JS|{A_Q7AJNy&QEur`ze?~8UT zOZ;7=$w+_^rDMN~qG_aP=*!m{l}$i@OklNcDrTAM>8gxkKSD-`C!u(TX#(jEOciZKZ4W;y&1ghy7g}L3#227w%UOCY&n$)*_sScw}@l)rQ7D+0p4x?G5 zg+W+nVnH$G=Ln%=)sugeETwo6hr;4&UQ5eGOB6o-I~N2XzcnkHPb^jPWgGKl#Xa{7 zsr{sUA9v$&vvk^T5P5-ZTGHi2yP^m?5;}7?g2;ILRes$3?XLr9$$lG>XsS;H>DxE; zh=;eXW^CgG`p=`Kw%F}4B^ZhVHa_r+m}i*R82BNPFS99+Ze3Qmt^pvFhXf@#{6o=m z0vQ!h8-3J^FT+x8qK(t^NCMGoL!>UJ;L8P!Q9cZ7h2^Tjg@y 
zlBMu@TZUoK$ZPvr`H>qn{Pm;iwn{g`ijSCm{c8_l%j-irwp_`AfsZke$$e!CN@8*8 zs00u=s_ZBJ?|Pj>N!AI+pN&m1UTrR|MR%HqBGFpO}=>!T7&pBD5GcYy-#qT>GR zqXKne0WY<^r=7ITWDh$Z+GwyG-=UJr<1J%cNuCBu?fsP8K}r)yhV#nr{^9Lb2Y z_7PMT+*W2MFLS9zA2cj&Lq&5IYy+!=*yraZ;a|?!Ur5S#O0jC-h_T)G?qyY}tgp#3 zI|;9+@aU}Q+5~G%#wboWHUH@nY95nQ+)K-A9d`{u{X|gwq;2*|5G|=wt$c*0TmjcJ zPXREB?P>|tjG`H74pi!`S`(fsiFxsaBUqNUaqheF%y4b!{m`LW4$6*;PjDxoo2Wb? zPOt|{c%QPgp#qkTLR;R6Deqg7+iSRTl(oANPo_JiIcg<6C8i36$Fx9d_oKKMpFd#l z2-_V2chBVBuY9&Ca~R?J&Z71{hAjTp)GgsT$n4Vft@n{U z3Z8&gR)}g4Hqp@+Z7E6I(&VDZ?%%J;pIrN;+Qk{K_Ftufxmx!8;CS$x!9};p<*#Op^h-s zB6#Pw1%EkEC1*DRwz0B}ib{9Bb6b;+`YDv!Z?}e9{y65_tvU(#2bZ})_KlOHnxeHOgLl~&AQ^GCXy zM<)Oldc8G!zMyg1e5U0&wSL9~TFO<587GLgnosK5_%rQQjB?>v8*i~ER{tsN{ zFbFh)zix{SjSc-mA;;>!kDJ(X3k=31V{>B$RG*Fv8w$sx7o~=~FL4|_)xN3|CMYKb z@8&KYHlZnr#9vLErSCZ@nvUC}rSsctviJ@0ddkTUza_eixEpxAN1c=VLu<3uCP z)=MuG6@F&)&3$hyQDoC;@Rf@xNaSvR3t@`^*)&|x?1x5kRDwlQG{I1kHlbI$eI z%*_SAx#BxY4L(EWh3U~I`|SpV`>F4z1UXt{zc0AYM;*A~Y1d_H$Wm`=Ovtu$ZGqJV zJiiORb%uu{>J?rtHg6n$(T10LIzoE&rK?nZJ5`^_XKEUDtMcI(;t}ZPc|dGultOlWXe0-Ax|F6RYEYe-l8&;nbo_bzkKgQ3C@*8%zlKihRaoHbPpMlo}T5n zD$MOdn)ZsZN)Z0dyo$T$JN}1I z!ZLzb+?t?UAFKErDqx)r&$le6f_D6i!>^G~R`T7O3M_@%vshB!TF~f6+=;vf@4D-rVdjv_(nTPU& z(A|tF<())v~wIWTxi(~xTMHwOdgt7`NyZv^GEE8i? 
zqg@~Oqi6i2N$lNy14H+?jZ&3S545(U_?)V#Ge1IntRM(9);RtVGIk(~dL$n9o0|T{ zL?T_+5bq*rU%5Uou5Due$Ga1S|-%FFAL)dBH=viItg2#aO3H*mcS= z>ng$;2Xabvi?HE$Ii0y(t;_dnN9c*@)=zLF>UO6jls<(cfEjMhA$ ze3ZalVr={R_si#_hFJ~$lphE=2t7f!3E4L(&TcD`;SJ?fEad()HXpHwae{u|DSqY@ zHmsQCX*Sr0Gx0i2P1sr7X`sjv01o zIfLxxs^4P#B2;08_CKex>i<$yT^O}QqT;J7pZ7~%xd3x{aLD_W#1kz{P+Jr6p8D05 zmHxX*U7;;K==qFJT~=c0X-&W+W`g|jCEbXpw-t&1S%X?^08@{xz12oV%LQ*vBZafg5nwYY-q|Y5!;5sfu!8q@cFB9{0pU94o)mCMW%*2wXfssR!vBPdq6WZ_z zrrGN!xAx!W7#4Qn%J&K>y$dwSs-vRiSYC;w4C z-pe5AT>;nI5M&lz?&)qf)5WFf)bCFk3nvXbj-Z;Ep8D~P8r`#m9>}MXuMFcXTMu*~ zPf7GakoJbX@26&3GMV3~k@Arui0C6cI;nX5XFB(J58vQ^z1{U%w0)b|cr4Qi-*+XG zaO9NcS%_bI@>5t>`4Uk9vn4zVr`^b2t=rl2vZE94l ztJN{be{pmn+^tEzLf<1*ROE0x{3x_p z)lBQn#^3$G2Z}w-7EFJDWKrHi%iIcXgAq zSTU0a?eU;)8~*o&w*jIo+=B4^kcojz#NU$Ls>G_h^r@}4M`{}aN>e-?b)c_sJ35g&Pf1z=&bqV1UJq-&>HXRKdcsZ$=xB8uXCTIm z9&+&taC}+&eJgaWfHOOWg;mJiAk1Y#-y@1I=)GFfD;b$@(Ol(7KppLQkp26sbUkZ~ z12UC_F-s49f4G`@I)dD5|INOwNP#-(!#IYC$Qu>+NV=<-EZ9Z;fAK{YS-NakzWG=k z>BF@(eh#>a;3^U6KK3QjLI*z-dXq*NAuxGS@=HKdI@0`^WB0eG#zOF2u@&4_`RIuY zX)rw>ct4x?(dYiAH9~EQHQK`p1;9Zbs|Q8z*9gUNdgDY3b7ao34(s!D_wI#7UBujU zAloh`5|meG)dkp{F_q7Wz$M(A0>*Hh&c@Vp*8kJp_7JCPqQ%ex@;ybf5pF+EwZcyj zEv~cGuQuutIqOwue0QU26`YkZD3vF7RDU}$+Jmna`H=M6YUF{%J4ouK)9!O}cL!AK zSwNH*57MwxEpfc8;hxzV{`tMOlrZMidbk1+sZD*2-5>n>P!P<)`)> zI=-;wy_D%6nD2vm3llwj%0o^@Dd`hU_wII=L93X~4?eYeemQj?D0lW|zcWZKa(6P* z@o>nMlyI&|V`Tp3>a2@yME6_UUn>_U8njY&@75o|;%uv|Fux8{yAT;__}$ce#nYvi zeNx)V>K2(=f07dXCZWWd;yHEmIt(kJsF!gvsKAKI!{m)!rnHYYB9;?;h9XfdY`3ht~`~X`~xK9`sb(58gwCfZj#KOh|5zacw&bE)1&8V zRDPos;=tAjbb12k$Z~J6+wmW?{i7A-b|ASH{oXClIQ6PDX1kNlBO3}!q4#4tEp;#* zJBB~DFkIZpgkz-m-`3;>I0XL1fp5idM{GZYw^!1mo0`_J7GaYYb4#5M538o9-_4mX z@5qHXG?>}F5r#s9#!@d4R#c?-7ieZ6J^Pkl+ z#&L)ry?+|l=HyRW^#-6PJRsIv4>F>x37#jzL(bXKUIA~mb$*Y5<)&`|TR!UkaN}UL zndZ~yLgcGFw<<|q(~vkt53O-+4$fAG{8AvU{Y2Z1u(o}^jkrIel&c~pTuqE&=6nP{ zyqNw?3{QGN2Ja6_IR{+tZa$rqs!4`w#+Hx`QRW8H&%Hkd;EJ`G=Z-rv+I_2UzNgi~ zw8Vag#avLYzx8aqYeH?4X{y|R571n6NhUeY#C_ 
zE-H_qdv?rxPjGGvyzzb#)E$a-FDi$}+hI08ZrI8gM<9vK-3reQ6N*Yr3)yOJgtt8y z5j%EmeYtp48mI1S=vK%^9a30S-v6$DKBMLK&)X3(c{?UM3mzg@OXB_*3oDNJDA|StPcL{klalg2Xm!!C6~t zc<;8Q*)vtw+bOd7&4F$-xrVdap=>4~6)T4+ZAZtu+);i%`TkZeWbJ!8)`n+WWslH&TCmy;JKUWK-iU0QUCq98>X=o+U>hJsaT-&LH>%;cFKv-FFZ&UTlna~86 zRU2T>Pr#@zr>Em?dO-#1LAt(gA7{#)+sMf$#E-jK^;pQ1$;>Q<+lp<5;c-O1=1^2=HOQ$;+?YG`4}USHNW|!=D6&maxHF0#bUE}#M68TJ7js7&a(to*Wj0go`h%5~ znSlK(4yMsic++S9C&NlD~Th zVWBU4iOSCCWpZDQeQdj=^2L$&m6T(UI@v={z$ zT}w`{W=^`lXq_ne^ox08cLj~Sm)Ww+QBMkJ=BdDi*~L6O7werp2!h(tc`|h~ z2fw`-SgNQxYl2r#h93*xc(YvP`CgnNN&Wt<%B(mqw=CbO2jM4+n9OM?UF<_Wf4xT$ zE#)RbBnszhoiSUSl!IXBknkW>R(UXB!4rJLNjAZNucXqH9K*U2Mk>hT>(f-&>m@ki zj?~{xB7L)Wk>T{dyk5S4Hvq0v2S-DwqoFwZBeZt3*C~v!U`pmime7>ob=6V3qIY#Q za~^jd5N+j1a|@?=P7PPy-#2xx7FXbZ&d0mWXLbJ6#KtU;R^(goOo(}eSTEc~{7yad z{BE`^yyS)52(Y_ABjlTc%d|i{WH@8HH&ggCoZaWal^zATdpIfieNtJI7Y%~x_T=Qk zaq4N|ZFugSw&Cx+G$NuRN5Rsduea28@crj^e)&R39!{t9bhnP@+RfN6|FB3?T{t{- z>ey6Z=n14F#UFnHHX+;FdJHPeh;l?Pc9a$d>NNNZRUV$}mHyJ%NWaH(toyum)c4mg zzu%k5(!SSl^npOE+Wa<(%=}|Lt935_;G&%4L31VMZ0@cIr|r{*ht^z2AQxgS`~j>< znb;&;!&dv+!1$Ouq7pYawR)v_rHl1X=19Y{4ZDUH50q1VQe!!ovpwbnZgc-ql=Zfv zh`?^Aq4eliSED}kRXB%eQ@x3(Mvq7R-~<(472YQJ;^*Mm_$p7qh5-3fwZ(+7SJn*D z?`kD#Q&hOGWo7??CGQZ}70G+*$F%4krfU*CkF_=3qLiJ5A~r0beHxa}rubOXlgWwP zGE3KK67z37jsdgj-$^g0EJv^`THGCp(UO?``W6>_O}05%cl(IM-tCx)5g%TUji;?b7)15xFj)nzeBtdgWdH;NfpoemNAacgK=d=o-=!*M8uimf-d&x!%ei9G$SfH> zEr$B}#~@!@Z%i8uZOZ zMgyNZ+%;SP|IODB3gK;$=!YJ?%e{|(#(iy~TfXO-1)}8Y34FK{)1z$G)4A-?tlxy9 z3!Kj&yko&}9uQI$xb%8SF(~t+r$q3lP>DyAlZqXMyWm4jot0Z!JMlP_uDY6cChc`K ze~9S*o{KTX5O&<|U?4A~2vCYq6az~*a}nOk-Svq0VHcyGmuZvKE3U>VRh3As11ig$ zs*C0y!C8%+PEIZCzS~3B+F!wi%JQ!2cnq;gwvy(rcY2f~$L*L`kaIQclXM&jPRcJo z2YMSlOPw`W-E+4}3)HXIAVuVCOiANSFH5-Xz9I{9mUf_LhLSD6{r9&_oW5DkC8^%` zEE#_W?%S_%Ra(y2)!|?UjCS8Nce-&tAGXos6FTD=J$n+?aBsn!dio9V8R!G|m)Fga zxtwItrl)?{7a1=7y=@dmT~!w%qRwKVuK22)07h){h|EmwH`XdALM-em)t&df|A&Cu zj>fBjmU8c=owIWNEBXg`aGH@qmqU4bMsG|F&^_F=I1Dg;VOH`P(=G6z!YLWOE)Iie 
zA+d%D2w=FifFA=ccnh-ji%^^Hy0vbO4GWRb5P5o|Mvt@9$9PQfOqomvgugnqWg**O zmimHQ%Uc=*^oo^4bo-*(8y>XI_;y+4xP}0@rF&SuE|exLSldjfX~BX2IGmn|^RqF0 z`Um$S4ZeT)x*qs-9SZ!M6CY6mWco%IyVl)o$>;R>_MRm}rqS^@+q}p9^vk5l`~A@q zhO^z$%!?+${&Po9k0>fTlie=t!{VNErA8~UHbk&|p=ITHHRJO-x~N6u52Xddb7V?q zL*+HKE2yhPU`(Bk6z$Jknb=kw*mCp5+=*TP^PD)smbbl~_aE&9wZBO=R{n(S3y&_$ zOIcoDn5;<;EM#o&h_QH6;}RkU3}jRmKpROfn>)Z=L^1vxTnW~%gsM)>yoPf@HthdytaaKVo~wJL8F zd@A63Y=%zXI6NSB4lK!7XPSg+xp@u+*Wb%|WxU!Z{ab%NrQ4`m=^(^ri%oVy<4%O` zD;J{Fo!l!dKKm+eK*j!N-$or&F?>cXT~yOqK~#eq6ihedU`Y1q78g7`{dcBSRAa4n2cNI+*|y- zwV76`S!0|G)38pq6t{~Wp`aB~2WF+T?T5r%e@Ilf#uFwkNpzV%cl8(SDfP?sB&jHj zYj-}tClkpO11hIufU=O>XTuWOoTPr={b? z^yH?^W_6($rBIz8g4T3L>iojY>MPTZA`V5B)LOm*tDl@kk}8xS0Hc!Ex{TiNNj0!o3x-y*4|}6zn+{>Ad(~ZAw;z z9nyBnn3h52N_@9QU7}YF5_0;Wgaw4TN*@*gn7Pn^GIv2`yNowW&Y`upJhqP0_Of%d zEi0;Eq1DxW9Ww_W6FJe=y27~#f1#SRB=zP4{9rzjb&tzLN9|UOHknxC&4J0ge_HA~ zFYIhTuqt~CL^?4j#^H<}ip3L;*+;N{{YlwZF*5%O^#cPIAmO*2_MJFP&0|0QLzjwp ziTK4Ehsn(6T8-u5g36I{qM8GrS?NB`fHbOnmzsh^qQr=QB$m3Jvi@Ai@89>($KB~4 zwnS~$t`R(jlC zke_YZsi0F4bYch9H};V~HneKttLYW)4m&HWUCm{l5HvR%JmRhmetADDg|b6#aGPg> zQLOtegkwku_YF1k{-Cc!yjM$o_7zOXNyNaf0Mhek(Er!L zTjmp$Y~!mbq3xOzq5~&A8sKLE#O&U0!?pl#=El`49gY3^72&-| zCNC=z`n|A@Tp45~9YNsx0Npc<11+1l1G#WTR7YvSBJ$t`TX^X<+mg+IVFI^NDfjz} zh5SY{LL^9&klpZTV$JEIxP=wL5q?&J$a(>xZTGt1n^w>85rAA@?xSRS?JJWj+g|b< zXJDdTTjp9Y85p;kmgeu(OBWf30hSOjl)~i9LqTH)+HZt>U$Y*Bs17+@y|NonF#2l5 zb^8DDbd~{8wc*wty1PZXk?t5mq#LBA8|m)umhKP`QIKwiknRDIPHE|ouCwPo=llM` zADG$udG1*2TIVS(PutTEEfya;xA*cpkJ~f*S}qj%gM;5FT><-4{>32h1K1e(D5COm z4*b#g4Mz9FhnJzT7E7rJ{GuyJC+2T8&AIl8^MjKuL8{vle4f z;xEGpba(T2Q-!hjd{DnaNB6D{Ql=~6z@S`YI170yDEFU*>H^nj`|W!L%>JecuHRge zb{*ArVuM|nrnLIfuY9~Fyz66%ybg0Tmc1OI2P39Eq-=hgqD0LHlXp1!=QA<3=A@%zm+4Jx_D7$s=eQq!Vsyy9=b*ALH@>WtKg6p` zx%!$fXGy~UE6_}T1fla#SZ(cEK6#L$KPMSj8~x$gZm$5 z1gA}h(3rA`3m%>&hKc+TxU%b6)g_Z-6Ngh0xZe-UmJcH>*kFI?blwomeX`#WaQ}tX z>9E;=>ZRz&dHFu$=V|u8%ogk}N9QaH)(77xUPSG!qRVFJ>T}2LG1)gLU77vCP_a?B zm-toon)_YnO-s<#af``B!@73|#a-T4qZnZ)#H?<*+=rCBmvuwBz!u-GCakt7{5t(* 
z@wfLSB%KuzI}iSb_~A(+ttdG8dLhX!Yk;gY-fZqSj>Tt~ZgQt>z)U+)V@tC;-TfLB z&@WLgtGauI`|pz$V7V>UcT9`+#f+Mo7&CQ9 zfz3nX<*o1ja3n20=7NWr9#`u>^^K7Yeg+qxCOP5r#|ypeo3F=ppXN~>0y366hhA|8 zBu)<%yDV%MdMB=4Rww>!t!;Hx0OKZz?IzF8$6lew<@}a;AcQk@>7zS6HBRA63vgFP z9cO^MY1yy+%LpBIRmL>DKgbnANR`bB;a-Iq&=Ni_eb*ZDMQmJuEGqV3N1Jw4I_PO| z5DBYwfHJ+OQm(}1z6TZqwaO^xvOw4G+R@&iy9Rh^ES7yI*~XkqjhR2wO26}eMm_Hd zm-m_T*30eY&Qm}!`0>doUrvs(G5XVcIa>P5qGyH7tT0Iny{QpzJmfDFcvMpMzKxye z$Tu3Q$jK)NIKuAr<4pz7?KM7T%TBM;^9a^?*h$dOn=&kFRlV6t|3p9xCZR=C{<+^f z{Mm#+YA9xS#@Rr6c}##KKywl{sU?+xkEq>ep&1!`E$zl1uw zqU5ty1_V=NkS8^6EUZT{)jQ+biXD>m(IEL2C227s%Is#A{+_?qmWltjYuDJ}8i5pN zRblbr!5V?=8*o}^c-Cr&RX#qaaEd*Vcji+ym>@x(maB@%ET!TAi;PKt#U3^_ALoZN zcVP`vH*w^6h7J0S7yDT(azipII{WVEsP5$Txf~21jCXeFaVf4~n!$)K@kb4lNljX8 zE{tyOhKGXl#~v7+S)`4F2G?XN6J($i$$^%baQpYRX7P7?4 zbxe$i7;K}3<_x0_c}mLNFriNpJmK0{eb-!-$!M=x&N>Fyj8C-nFxD%8vW8s%7 z`;C-LwzHbW#2XR+`zpnl@WeMi)W~`CQ2Mgv%SSd^O_FKusX4guC>vF#r2*oQJ(6a*$9a7@z{(V|C^NH7xHIxJ9^P1zIfWMuTFx_7~O&;yk zxapqv{AlfHbFK0QB2SYvjgk1T*JhAeV|qHOMP80K&R zJZE~gi2nv?i8{$bxtdcOyB1E5W%j@~SF@O@0L8}LR}{Xq^Ssb?vC{bJdwu3mC;^7O ze!ACIq9CQOh5u=j134VHsOn~Ou@11kX3Ur4{W4N!KM-?%mRJLIS(Bkv<@F` z8F4-%r7a=Y{b59GknzwX@pb7qL5#U-HZ8)+7jcJ=hFFa3G;y=(%GKuR_k1 z)rc`YJO-r=1ol8g7)L@7x3`^G@GB@Mv5QV=X+~-H9ifqK*~??!PuC7hly_Ce<_$gW zcnCcL)#gP!o_;vfy2%W4?VeAnUB}h@&HwN;p+GnKqgf@rL(qDxSg@ep>OZ|aDFZxW zeb=vasuZQQFUPAJh5Ch@P3w*8U$Tr1=6+K%U#-txDjMEPY;~STKMit|DdlQp0w=3!h93auwL97nAJvyPYTENV`N@wiX0QK5HfHByuX$23 z+ELAl4qvW(@pj2}!xG5^ZL!!$3IUcYQRltzNwGx%977!QLiJ4h&!a{wOeell5Dr{s zt+p|LzxsQRbAO}KP;ThZ=oex$G6B2NT~TsOzfgh!Tn#P!-?ZcJbte^1McpUv`g zGLG*N8GS#H_uE({KvgpNOrfgY;ce?dB>i{rc$5NV3?v+8p~7%K{vNMg63sFbmt;jx zOG}nQO6YeQPDCL=z8v{8HXGMqqHk%Sb8n5`Mo%lGt+Tghh=#TJVitw_`b>+971jQf zR#S?ygkuGTxX~9FY>PGGeajSH|4*i?j!N3%2p9yd*ar5G?o_#+@8`RX#Rcf2$n3~$ z9WNhmj_l&MVceoN;A6?X0@i7ou3`t{zlTU?+<()i4gMO$8+^>0Ds>Bg1FU(`+;{5e#3Q<| zIbsCz_9&wvVjD3wVL#vGwr6iC1aKCdt(>D?Y}bz``E9b?m`S@)jeB(%;^Evd_GLUk zJz}eM-e%Q!gXjH&R-|@F1*GMCuA(cVs)@aLyYv6V$jNj;SpU!X@@@+Pw 
zW|eIQc|W{gb<&6dHcvKIZBHDKf&P%6oRweP$F1>o`a3?>peeOS@X4*x`i+ zRO3(gD1LORM=2FnqKxUI-z0OV{rz#77$>h%KrGFzvc?v%&~1=thoao*E==SR^^Hm8RUpya+Rl`sS{4DZ20C$m*j@GnOKv+ zj1GdGWwz9#e31E{GQcz+)g|n9pEHpP)h>`lV}$GQN_p*8pTI(fW_;UAlIiC^@fk@-rR6)c z)oT?zv*#8dsOo6>Y0M%xeYu|$xqkx9viJh&UFjg{so*nELQKCjjG5 z%4sdU)%U%*&*X$3#(P_!6<_Y~Cq+CdggNt~U)4}@i#c4F()7Y?c;?w;+Tk)kY&vmb zc#e^>a>_X(8r=QQ@Z08%{T=@-40JNB8_fSlNo&9Y{q}8sPtvPT)}#TSi_+)$OTv@J zO-5m(Cx-qHkFqFp&Nj?ikE>UV=oi`Gnvl#r0GPS8OUh$xWi1PBFGB-udfDS`yNQ_TxY0YA_{s1(TIGW1`lyhkkZryBr^CXd&4W*6 ztAAqpnV$xO)N#lD`-p|M=QWCBaV}0se8B%#uxQXo68e|M4hYJe-EuAPxZHQJ=gNH< zpJ5*Y7ZohNNS@dG9V1U&7+I*d z5E@xkfNF(t@dy|V-(X0PH7331x&6_rPcdY6a%>3Py(-|2Nux;>Vn7fo0fzQw5B>=q zu~05EkRE#%)J5>w0q)IB*{7EO#)6@V51~FMoqKy7YvGfpK>{fGri^YdlE&K(HQ&5Y z3NvBA)kJJ>G*ppjj_^?8`rV^w48@a_# z7+j%HfnUpC-FUIt2dL>er|c)jVq?%S6olL_gIh;%dBYe?! zh1mEiQ;7&XJ*3`EC=enqsvZPg&FYfi;HZ%%Cns5ojPUPoiYV{pdz$R*A??8JXj`&7 zM1ProB0g-no49I{=xvVuz9zs$AP`ZZOQZkUsP&P#TlWL`p9v2t+;6>2k;qVQ10HTp z<%MRiLmHJ8&*_)!;6ds^yyyxg1C( zOr&?ngd9Ao8ml90o2{FHXX+C+U7H;JBT16UIserLyympb^Vy}KS8lA+P+4l6Z$gW= zmc;E9M}HG&@Mp$8o0OXCU-bn?Y8&L>Ac$Iab$%nQp^3{Dh6`M zWX69)Dwq}CLKglP;VV^iD2eRawO;D}QSX_kFN?Je#3}DtQ+cpv1Q;t%g`T@CdplV8vO}dsHzHqO`{7yg ztA*uzf5NlSO1t=2iD}iF?Moqc)bxyot_}B7|NBG(x7u_7>1= zn6ZQo6Tt~WJUx8WTf!gq750YO=hWyI1ZMbHjs1wO)O>mgMD%6WP31+cEVus7 z2ieCMJCj*o1P8LHMP{sYyp}$gAuW&C#3ASAN%Pyu2A<#nm5}L9)ffAkx1Wo6C!7t_ zY-;v;$=A-JetGA0>3uEE*&3HDeo;XWRWxObZ0NNg?n4###T9I$`IPMofF^^qkqaS9 zPttee?uBb9lK@#zqCH(^$VR*pyQav8A7o|q`wxMdvBVT--S>+*eTYKglMbuEzJOdI zjT}9qMmZhhIOKyaX@}R6bNAYHCHrW!t@<_ViQjU?OT)8)rJ^}ale6>3)9B=AgcRHF z;V-&BO(lH)lpIz4c+&A7)HK>LtHb^CMCw<=yOut)M-%R#~{ycKzrsbwW7GZ#Ej5ai$}`ueA5GS z&ELaCbyQz?zU-SwW6j3o0&)Ww^Qp%H?+lY)z~^Ck-MFx=oWxbYH|T*>&WG6FEBV7O zptz*~Sa<`)tgJth#tv>vo`Y8oGA|}6L+gkp8=~oj)uKZ(;$p?-hFSZ|-2<{7L?q4w zp6~Xu?g{{;tIm3;fU`YOtj-Rj%L7BW7J@wv`lUZ=ofqwM3IG#^A7`l2>>qcw4!z50~bhk?|K$ve6fubu0_?rx83s+|ux9dw*KJTjlf zDwrw)P)A218lGbPJ~tl73lX4?N=}W=zZ>>%)-SGWC~`NO`JlVQ6cw7`79ze)H;&y~i=BrV+C~h=HL`-OB<+e40J0@MRPkBr 
z93XQ1_wf=;&F@FcWcnug7)#u=LD?+`17E+}7Gi|rTM)=$rqcC7!&9?DFnM-bL;;G7z~ z3Kgs&byeIr7y28M@wJnRbFclddRe!XCGAqdm_>H`AAt`f_CgDXHdnCC>6jb7@I#M$V|I;tcSH|HSO)~p~(T<6wzgYZOM)L zfI4fqRx7uO%IKsT@DuEHbEafRqOSxng%|(A_fJ)00Cl@Q`*ys3r#$@1~ zk5rPR>MLeX-BmKV*UVAuXa4a*Shwfh$Qk5sH$js}}3Tn^Ze7 z^x9@C*R^t)*`4a9UPdVSfhd3P;!sP@@k~}7ALC8#%(oTkDrm9v#$P?%bdP2cqu5(d z<)`3>1;F&Q?*Af?eWR~It68Z|(O83mSao{;OLPFH!;$eyCabdz8r zXE;PK9h#+v5;0sLz?WQyz>G0rGrT(zFP9Q|Ginl@(hrHHty63f%=PMzPX!)M32~1 zC+)OZ($r9%wuzi-P4-nzzH{&cotisWJZoL-w#M$ItNLt;Rm+xXdn(!bk*N*sFc zCR_z|yDJ-)K2~OY^6|~}7gybEi!%f;nQSG4?^k4gA;%b|Fi1O*+--ggVKJBV;ZGR@ ze!NMR#feRe+Rp4uvZF`|^yL`IAzf8by#6x9O~leK`}4}~b*h*ar_3VgX0z-WBFdvS(YX}RW)U6Xmn;qtF0cAKb@SG&r*??<_X^6^IY=Zz^hwb&M z_j(k!4&Rgzx=o?`;q+^OS z({l=e*|yGpV(5>6AIu^g-~8Mnb@I}7jmC|}=bKB!QXc*@T@zssrFo~>QybYQ+rQ9Ef zm5-eS#(OZ-vOZ6pS5)_GR+f=brb)62$vzkQiUlx6hHv0LwYnv@>7dMTjNR3?&Vquy9ioYLY!k)?JqFINm6t@2Z+B*Su0BILR% z-{yr9!1)0%zzkeqRLd_o1ra{QKIM@v{E2`95LTDGwbxufV!)=&J-^X>cDaQPZn9L^ z_7#K_-|*}%I=#wgeno}+mdZVU$g}Fp3S#|Vk%ZwaBQJe25asZN>4%-2_(O| zRXYpVx5c(xjfZu;u@3~|YMRVs45#0jVoz-o(kYwfFU7=QQ;xwzG~;#*J>a8gl@mOS zD7>PJ>w~gyRg@A(z8xZK_TSj_=+ad;6Hhn01Y<5vTo70&&1-1M1WbnfZ|d#nM-OwW z8-FPl6n{$o$(7u!?joD{-;8;mL0znBm$Aru&3A0Q_Ghh}q#7fpuNm1ccD|1l!B@m^ zj;)49poif9;7~WPfNs0CZC~A1?mOSHrd=4G08cDRmoIGgZ#7{KatuiImZzX$Yxb!x z$yC5*!xv&*Wh7GXs=6zGVN7O=E!z6p2Ajnnb(|4SC$Mk|3S9YCT@BIxBsY1_L!e=E zvkSs|QPmMc$8d7@-w_-Hokl(qnAE=HF&bfz_}O`^DP=5mZ*dVZn8k2-ShO~KPXph$~jq|fZ^(~lSfSX+Ac2C_va zl}|!IQQ-BW)kaz0c^v7fBN2MvrA1w?tGpDBCy$hP2kCI~yMp(FxY;=HyXO9_TT;l2DF%80v)Wo76$sJ=xae;i5Hd_+rAd}!j&w4Y@}7Ff6)xRVF_x%%v@ zj}FF|r!vRAW69xrarNz)kaO`g-%r+~qC-Gkk-!NUo|@Q67g24aOqX@|YYII9& z!e1rsWM8yITQykUkMj5-4r*v|X`-dkrTTDn$U_j3WwwS(=enb`csD#V{!Q#Qc@{VU z6e<$vGmHU<@5lLj+PROhkl@sZxY?1Czs9|{Ir>ZP);m)(JZkhhmk z7`tPWrscRt@p5Nv$7*fQ6@7o(#5@kMbCU+Z$jkAg-*KSzcgs5XE!h@Vd<4@9G*c79 zZ(i%ln*t3vXQs&M>c5X+*k-rMWTEwaS_oir*bI7Hd~-az5mN=8_=9%zuoP zG8ejH+c)pA3sC}J+J2xg{FmCr^&f@<*VOdz#w#kQWbs*BC_Z%s05FeLevMI-TK2;N z&kh<6{DqIc;ctZ6VU_dt&8?@_6Ea8>qwOu%=pVIe4xd91{- 
z^ccg&NIA)UvGV``-Of*Z`1fO^w}#5a>V0Ygt@zgWjBH)E;S_BAp@pY~+cs_taO6H-wnLJ2f*t__i@Z7fP zS{QC$R%(_=mkw;lFL}HtFRs~)#H+lU+d=~|4>~RwC*EGko)CfsjeMMCtIb)|Z4gN- zHe(MlZFq8N;AT9P$Ao>&k(~b|0US^-roe7n^!SlkZ%tnMxwBC+V*Y60Z)KB$&lf7p za0ZAmna~@Gh_`x|z&KpOHY;UGmhdwBO-u5&poRROzn*M`sDml(4wnYmsC&Y_&eOzy z+(l^2c>4drfNi@;usj16_g_^8`@1V1Gb&?=U^yA`13r;#jYD;hB}`nQWP`=1u!3+= zI#Vl)c!``XB041-y|igeR6Ped|9?)}j^{uGhYu4aHf6~dg0MyRq24q^KMKUj;%s4h z-~hrSs3>uxq2a=|spV6JaW##~)#LOCkyy2s8%G(GGWlQt&G;DWgz;=Qnx>GPS)kao zUMcrXC!ct26Hbdj@6P<_1w6f31HGtVrBw`nv%6PJKffAFA;^8>{dv_mqxyzojXA^K zAdyiN$_iH*hyIg?im%sKM}z5-2zN?Oe>KxJqlXB~?8`Dosx70VVb`v<#DPZgfpogJ zM)NivV3`z0j2Zt{=1T6a-#DLdt?{{cLZ$Lwoa`jYAUyn%eTy^-n)bW|6T1^obGdsw zMUME+TZ`)BRx!G>54m=InUbQ|VV;1S#+&oq%5d&_SFF+_MMW$+5=Q07dgtGTN?J^o zZkI;9f=6pxJSTIJ%Tbaqx9lDx_rHi5Vo^9~uxcehN9>Re=fT!OcuO(!N^L3G)H;B# zS=nF(*w>NVTv4@Oix5(bS45sCyE)Bc&u;EjG^o^cSx?c3zN+idK z!OCmA1R@pe+kA-4Xxdr+djW}g;Q~u#$g_?4qq5Rhn(=u!jMf^u_g59+dDB?gM#=a$rq8O({ojAYNGHjLpWYS8ec3+eVm-7lhwWO(R;2N0WCRJ_05rmOzE}=KgOI6w zDVuy(*&KnH>Qm-9I;-3duH6Coo9Hlc$sb70$cs!}d2aY7%*>JR@Z z+5G-4uQ(4%R8I_kE0Fv09?UZ9CsZ<^T(_Gd8JO&D4CPHGq4`QF-?M4j+sp}g`0L@p zobn_iJ-S^0Xp1Tr#x!II=!g$UlLOAMUK-O!OPVUWB%zW;fWn_^^BP^Vn!;N00ylVnqKx`+ybz6ym^KEHa@^J1%mGi3poLb zLLdL3hJlg)5Y-?!W8J`T7CN0r6P|PNEm%s(-R+Qs@qgha2H06a^FkiS5{VjA9t zE9KC9w~9{DV;2{iPo};^uQ~tj%HMg2@%Q*`3g|8vWAXaF5=fljrn~Wq;5f(u0~W4w zHOX~%7(q9(&I{OR?*go08-0>LfH}*-{q|2ZCrJh|CB5VjKN_!zRfe`AqUsNn&Yp@% zQ`0}o#(xa?2Vtg!&6Jy0Z@cgGq?Om(ujW3i+a|ce%4Bb;_-ixVV-?!C|MH3Ekww;F z;|Lf!wCG$In!0^FeELcVfFRF%sG;G!FQQj|0ae!AK@{ksQ#k=v(I*3}m{XZ`*k`4dthZj9t} z(zZwZE_7e!Nv>w=$MZVm6sU@=+x7*j0n&JvhV&^hP%cAKCochv^>+ySA0RwtLnWf& zk!{HbU|RA`{(X`D2PE{YH~?dNULNI&dGUv%_xIX+E4HPHnOS=Ut;9BFq zTFKY5v_FF!#neOYHYB>{I*IZjT+yj6Zbnt~2gDx`x8Qyo=ty)Oh|B41qRS_77A4Kf zC5PBmLQs*tkl!bW8_}CDg4FfObul<{P)G8|g5Fnb!{&)n3?2C*W<4f*u}tk{@tLV$ zVgk4=%RmGRis_cS-8?}c0Ouo~7LZ0|^fw+GeAW|%ddP}yEfcUhGyAr=BoM`Mz8h~O z;`{wM_MsXM;PTpyH+eLF_vW^sa3Sz!HNK^}$VvSa*e|-b{ru0yaam$=U7y7soLIb7 
z#c&SHcAJFG?lVkY4g+panwOmhHg14?r8Z%}?G=YAD63&Ct93aVc!@r??4w&aYPjCD z;q7c$Yns=>7qBO~&N3K!?eRKr6Go^+ZL1kf3C9+x0Fb6#n%6t$st0PON0FB=Hm9mZ zHkIEW&Tmd1o(L|eGi<+$D-57;mz(q96a4T|VPUbu1eG~vUjMZ@>$lNx@cotjr4iMnquYhi9(QMHP^zY2WQRnrV zea%g-m%|z2AJ|Fh(8Ea9qx^w!c)e^##1#nLnkS~4E6P9g)_#oU-XFrul)=i?P)rX> zaME`i-4lq1-$hl*!=h8tEXE?Nt#Q3vZgaQ6i#QP3S`;5CQQ?ZW_+I$9&BU2NUo{aS zQM`z!kJu^VBY24OXZpE&uh8EKJphxx+nO)$aKyB(9XJMhwi;!qK-!AKKr8e$-;~<_8W3D-CcTuB-U8}{HQ|@X z%Xq;g|K9kfdu2!(k@06qQ^(9uf2AA3h)=O??(qa)Zy#ZR-z?PwShzv_<^=#9*L1BZ zXG^4a09rTQJp`&-Np9rxa@bb$!^&_$mkaQKX9_t!{5&6`G8sgq{3QYHA7epCSOT6$$>qYb`;9!@~7J7Hij(2w6$r%sWA-a1*sW~Pw)S7?5RT*e|C;0^4Sy`LYUF98jlE@|Qb z_g_h%hHmte3=qoGZ~JrLTK3JCXFO$@yW~pT)r2Y-cs)9E>LzVC(d|Y(sRC~LHB~Y z_(UqParb|3N#H?$EDQc>-heYDcZH$8s&K7{28+6f8*~xBVAQJtMJ3$rXWeKSf324Z zGBgK0zYwp>okeJn7kgkTJ^m?!a=oOx60}X|qx8jt=hqN^c-!+i%O6_(|KQzLQb&G6 zj9Z1e0Sor=;~%Q$KZZYfg(;KMrly`Z)8S-x5AA%YBuB`Jgd(X+z!t=nnmO(wk=rngU))KBV4{^G+|2x+)dJqw~TR)kj#3tMU-m7ns*I6A6YRURDJ zKxT6`GB4*=#I7yZR!@5&llobAA<6?#TvQ+CxgkV*O^RYC%NfR|-oK z9;I&+gwETG@~*JTB^|W1#kp@!m;i-+sYv}f{lI37rMzGyXcDhjYH#_QxQFC zdJL~Cv&Jka8gsuG-7S|%rm>i=p4-ATw^||u8zzn{0r%=*oKo%9_xwXP!mzMW?>_W4 zHXaY0j+%AvgTYgfYCx#rDIe$%*b3mECCv9zdIA7)VvDGDeK(>^-5za7U3>J^n71Tr zJWm(COwbo2BQ=tq{K(fXUo1cU&9%y7av-|1Bw;Intup1Tf~c;2BL z1^L%S*rb!$s-m%Y|El|3*`0wIPTmPoT$Hpc$R~+$M)9GT4-tP?wU4mCR0*sPQ zx!H1g39gvlI+qC<*xBWOk-*Fcb3ag*LHPp-Pq0i511C_z1N&i!@>Co5yx^O0(XX#S z5fKal$^7NmT|@Uh1B4KR%pu7!r%kG@@b59x>O$YTHWHSsjqphR5t6}|gv8bR;ulvsJyfqZfF9V_qHRE5 z)|l$RmGL|(0kvWnw!21j*kmzZ66mR3d#|_-;2%D}WmuVK!S2mHG;aLQXLm3tN(Pvt zcXKBJw2E#Qf^F!>K$;i0L!Z`G>kG6mkx!ZhC*;}*B*_Mp^0zJHYxf}nOKKBo zcH#``(j|N-saj?m0sUAn@IPsb?Cv~IbY?@h;T}2#YAPks;kXYiPVB~xRr7!nUHtg? 
zY2A}!kr1s%m`o3iZ1r~J1dGYhqt5`&yM`_8eeqS#A|lUXizk$>p+Y5<)wY>yi;}qO zD}WF0KzdeFt$&o^M6#vj{#;JzUaWBZmgJaQjazM$qAr4zKQc*oFrCfodYnh@F*Clo zm62TdSlIDA_~j;;WMlKelZ}mFKXgyi_>L`)De{XR+ogok6Jd{>Uc@;bv3Vvz*x#xF zFm}9`UT|e~Ov9YS(G8)RB*!1aA|U;tW^+yGNN_9Qt8u2sVjo%^gzm^hKNVgfbd?39 z6eC;dvYEqd9y-Gry%#jR0Nh z`3O#WB)LLdxa!VYN8V5Hk7|m;e+HU+=IVpK6N{7}!0cG}_bmZr_|lZ1Zi0{qzfOav zk5#hy+I3z0>i2uWPRp|!*u*r?Gj?95rI{3``;dK~_H&-iLgiDJA6WbJ>Fx!%n=oB= zTcB29g9Bwut**{4y9~N-4H#bSqDD|Dr#@QaqwD?dF-=3qAR#GjH-EN1CZC{D`a!HB z@&8x=+p@Z2OuN!YOc;EJxQXnhDLuYAtZLW(94eDqjX5gs{K;AWz>zlMEw-MMhlmJl zju`xY?g(eJeT&-@{cZ}b)o_f53Oxe%I+8QMEUjC5Gh4sVcKmEQ=I3R;OqgO2|LG>&l345O{s#d(QxZh7}nf~D3LePeI#9;WTaKNxneMyDV6M+ry| zkOJV`wm^IO59)zk?T^kl$UK3>tX5Ds{2}e%*K=f7Ny|F2xNR!3L^`_&IAE7cfyYEC zuDEM>$de=|m*EvXJ{`Nb@Tibuajnqyr+1r7NOX?RX;h3Ql7}iF;#iX6e?T}&W!VTy zVOvB1&@=r#TBs{+s3cGOLw%{l+1ZE+I)Q59AnA!5)H?m1$sPL0eVwZF&mzWC+-9X6 z)gK>h2v8(s+TMZsH}p08>6bp$vt`1EfV_QAif0JHh+K7Kt^f+K=L_Mmp6*5hzRzpuS#Qd7 zR>$|gsi#Mpi-cc^AR~>M@PEvA(|_a<#yNX`MzRi938J$)=dogkB8%u3>i3W6VB)7S z!p|{X{p&J>rV>_YnldPZ*d*$S<*p^9a$DJAdtZZ8OED#t-~oH(yeZuzk0=t?L^)AH zw#HY=t? 
zjSIh;h<`mkfK{s&__jWEFBZVBP+3J;eQrXjuhQHfH#lS4x`UZ3Lfj~MJ@tZz>_|S@ z{}{Qz0wX{?&y*?gw&6II4W~zt(nV)3B3$oNZ|IRSRp5H2L{g>Er?!#D?c)Yo(eX!E zr5vl<557+VArz~yxY+n1!z15=*g%iPz+c$MaJ#-i`cTXFnTxJ*>+nm2?wy(Lk292+ z3@o^WwqhW4+~rupoF` z8d6Te)xmr=z9fStNz+qeX;Ra7wVq<^Mz}JkQZ@KKF=XY9kecEPM1p0?5?yZy@?+ea z*vejyR+1++@CU0XT^>h`VghP-$bM9s!S>U`iG%G@Lm@%@a6*A(N1;Xkk z)QQ%WJI7Fr7G76&Mc?<76H$w`4)y*4w@>snfZ1`{UTCe5dn)1&$tdtNwUp)3vI9X~OEM z7lV^czgoY_C|=5`W+q-)v~PLV#zNzZ(UpJP&y9E0_VKz+ttU4pmjys)nf%dYv;9E| zO+C;npy;%eWy){Ep(LAHvLUjsgpF*W+R5+|CN(ubafoC4#W>EF^C+95_2GRw}*)K~m#=evy zE05|6xS}7TA(}$JGM!qH4|%O#=MYzH|6$RX1x6g3Dv|U`}tCmp9J0lU0KEdQi}kvWMy z17=U_6XZhY5NuW(Q)U8!fgIEW$R8|c<`KVshpF^BaTDL23@fQ$Ix~!UlJRUI9k=$- zW?}y>1$%A-p{mU#mTwt{&n+-ZgiB5hSvChJ2FG;(ch756`A( zApduF^(8I+Q}Pdge{W7(FSa@QIB++U#pM)@1;TV^poR%imFlmxgHE4J0&OsY@wvkj zZuA6tAZ%3WqB)A@7W8_DSuJgSSpO$o)K`^+3=vWU7T)}oFlRERD-HPPONkeEyIxoq zzcLL4Uyhd?m2q*G;~hS$z5G*aVy3BlGTMP|OT!j<&EEu6uoScuFRo^@-o&@X%AqjDHIAA5>0j&;ac zoJ0cWkaj_L3@09{+5>&Tk$7~e3ySu?rZ3hPy+S+l2sri!rrYMB3=&aNt2ecXT%xKk zQC9_K35LP@pQF80#qYL%1zq_Q%k(;~f-!({(55b6OJ?b()Q@gkt8rki);-zmxUAT* zrGH>5BO4xk8y(Et{f>N1wrU?=MDghQ^yT?OA@bOH z)uVr-j8C9)fg6iKNHF2Kblos1QmM>HUTr_d%=k^$*6;OSxbb+h&%T!hNNjY)t!B{$ z7F>y>MgvB;XO8`hQ6X%~QI8_LP#QjtZSr+Cmjnd)%EMo66|g4N&g^vjs6hm}XYA=`jTa!071 zzYhn94yQ;XK6ZIsX!F2^N1Ejtf)`Rm$mien?BcGI+(d@Z$FiBnekE} zkeEr7fKMEN4)>Ib240d}LkS9oDIM?S{HIe+@N#C>W3@eT;0(E>4V^!q8rBP19YuDsXFJ}^K%UvXVymoyN@ zlo#bDCm`efrON&4`D>?zJ~ks!!YKT-SNuqz3X5$UrXTiU@XQi#)sH(PU|0jY50q8m z;L9O2qTVNL@cbCA6qNK;<2}6UlTDh}E|RcEuYg%HsU!h@<-a_-HZ7s>-o)%TT&9EQ zqi@{urK+u{FLE1gaqjfZ?5x=Xmu3oeASwuKss>-LUBbP<`*~~s%JHk;?w(}O*fa^) zlZ0?UWQ-E5EQ5?IdX>ofhOeJ*$iz49Xz681nLds6?WQ^fH1;Q)A)bw2S|85+K;pcm zoWsLLw-|m0lR%T~pr}u{O}J}UHJ)#!jDhwLk$qUkZef?az&Q~I?p{%FP)@cvyfOrE zLlO+^jFvT^xuxL7I1?Nhk#40qT!^ywgGNEMGOF)wc$OcHco*$TZ4;EudqM1kpT<)s z6Uh-uQ}DZ2(@Q#+*2}^cO-ZY8Kl|8fKGd_1eFa%%s6?O)^X1)M_<%Ne{ zd(Sb4%op<6r?A;6&5K>meQ;(sHi;NpTNu(WO6gWZ=gWb&{A#pnsYM0EYQ}+OjGG3l 
z5bD48YgT1#JGrrKY3Y63o95){xQ{g9;acp_cle4lu~ea2FEpjN#u$Awns9&2Z=Vuz z49jqZt4wu;n}$iaL`1!gn2qazlx)L2AbRs8Kj{AN0K77qyg=-=ZTO4+@iZLh>w=4s z1T%i?$5Iq^l|j5DFTgHHVmd)VA#Y=Ja}T)v-b3`;!W%^5jc~yH_9nVdH$XXoJWRhi z1dT4b9th)!)yoUDI?;hydG^ZloLCh!l|PB_$#1D*8?+Iq3Vmh+<+We?!3`|8pQD=> zgqj&J%k&}@wb`}GEd?*a)9tol|F~~;r1GRbr4UWsQt-Amr;mB8dBpGd#moZWWLiX`;)z^lwU0WvKSEfP0pkJSDug4A~|MWTA zVzB^;_mj-X?FAe6HcNsX#FWfeuk>mU2*fkd!2I-J^v)h4=MOT!dz}O<5hj{9RO$v3(;}n7NQH(|3H3;embdzB6SI4u{(C8n!TsKg*GMj2Ka#qH4=~iR z5;ofkcNV$e1syY;2bp&+#yB}adR07;p(W7Hr>I~;58;D~Qh{?Rz3+i2??J;p3$Or~ z3uRewaqe^qPcJXNQ0d(MFTahd_(oyI;#dow+wp=vsf|%9^PFtP7dC^^>FugLY;+K& zqJJJXNZ*V5HzWMMpDdld0`R`TDlW$NJ;p2o>JC&4BErQ$32>V({b_@x`ID)F574dX zp`g29l=E%Yp>5oaYqA7oaw`g0^H6g_ef@9sw?L4ac62c4Rx3Qdi-T{VPPs|`KbZu&-b(cP@fqNEVQjDrro4PhhzDOe6PSjGu#n#C% z8zDx;iw5H{9)~}^_t${f)P48abu3fEP#ih7J7kM}6^za{1>yuf!J^47VSOw({+ARp zlT|@;4`Mi`UC`+RXsVO%7miO1y!tb}jcSBFVOf(5-LYjt4a7Rs@Qk7FNoRqqlnezV8-|b1UDJfr?_3Fj7RQYLhpDrUis}!y z{>;#!)X*ssLw5)W0}|3D-3`*xJ(NL9cSnq_@pAAm&+E>v2L^_m$Jdn=ZJlCj2Akh&A`$cx}bLPXr2nPMy zKuevx16ZSc@h ztd{3hki9w$(_^f+r?Ll@qGBa5ksN8(Iv&@4U4$=XXG(-+MjVmndrJ+}=b zsZZsQimcc~kSSCN0F|%8hi7TzCgRRdEt?5SBcmt37}01h^TI{Gq_&j&(;$4SwT{pS ze@kl<9(M7Q-I<8xCs1Y(3(!cbszl&>DjWuBnOvoi6-)gb7ZHB7KJ2`n;j-U-(k28RUv!` zkM}gSVAihjLw_2Lf+UebulSf@?+G4qz-VW%#LFYEo-sZ$pPf;pv=GPMfEr3cb%rO8 z8@;=n2t|{iPwerOqd%I#7+nvfoolDDTP>vf+@{B7u5BC#E`O2|s`FSP&ucRL#jSO} zj(_di`eZd2hiFek3V)gJ;3|)+9ecCtrg)TONEi0<@xt-3PJ7j}wC`f!_r2wG>-N!1 z8f5zAb*E5n1ddl~L`NJ_MU7SWIlt;)>OIhyJLv_Kml|DtaC^4!i)p{hDXtipY6U^6 zk7!iYTg&ND&=HE0b%SLiL5!%S1L$d-@K_~K-MY59HgbbCe_+K^!<-btvcrj5MZuWP zI(#y2apZN0BX3Sv^8F8$ztx_^#$+_|0%H$(4%NKROGYNU#K;63>fsr7$swqnqTpYo zrytlzuw(@gGAo0zvP=@mmnpShRcLXL*3!s}=p^mgYcBr`a68YMd?FObG5|`>sS{k7 zn9x17aV5ua3}95yFP7S8t-#P5D*vK2$h}<&vBy8rBT}J+RN*CXXpNwX(^Xq|8!S7o zU~G}W_)VRT^j4=Qz|1TUz0I@m6}^{sf|b%lUBk@LiT3hqP@|iCpW!D{MIqf@itR?8 zawp<*bygp$SHG%nBK6Ki?}$I|d1O^`6*m7ayee&(CZqVK?;g#6Y)GfP>O8_$=l@#% z=kLEwI~;o3W6yo1sB1%GDMwG{S_%C)kV(#>TT^$YV`AQ7t^mf<`Y~{Y@AaV3vRrZX 
z{ioAYz-h*`QJB@E$F+Q6;}d)P-&4Ry55~V1B}6eijZMN{bSn5!@sWBwNGO(-#Q2%U zv}S0e{4Kgm*f3d4YTK;uqO3`$8O+M9YNNUweYG%fF@LFne$ z8heWWi(!WVbmEXRvFkXoxr3XAM-u8zf(k2rbOzG`4CrEnPPSbqU}Z6IPIydfyH10Y zfNeIUa2KW&f0&x_GdAo_tPUPBxolU3mOK9)^PwJlGrLt)hWl1$bsX%6o&v=}Rt&d~klGGT;;N529~I&gK*hKjbhzZsj3%0<4;|p} zKM5bPU(z-W`?TsjS<}B*y^a33O?LSxWIZx(c`x*UiRdcVYD_K{dF!b-r)9qBHZ&}9 zbKmze+O@1nem<|ICu_nLP)^xJ5TSG*UwA&Tf6lWr_RO8@7CHL6%L#c7HkbP@0%cbC z!;@I9Ixt^ck`K57JaRYQ97tK$`Q2X7me1}1t;3`7qc<#rB09k@vhAx-h2}pEZJ-x& z^@JVm_hJ)X?yA{kK`G>S;2Ur$by_(0Krip^MpKy&O2n!J@PxYr@%{R z<1Kz5OLUC`i|~W(7b{J)?GZIL(P>AFk2i{(#JBrYhn4&;5<=o!z%u#y6?f^eT{`+% z)MiBoer98ElXMCg7vFB^O;CI#@AICCWT#O=0{ircN`OTVYMz>zmcXNXMco90LM8Cf z>*JA1cDDt<(Q0T0Lq`8{X-rM%in4L$@h&uTaQQLLK!{fe(e?K80Nt%&=COPuYV;yj zk3ZdHk{!tZM$}4NXlCY-skA@;nlwrDj%1-HwSYNUJZNpet-k-XX{)sazq7r0=IHrG zmB7fUYrCdoGNKA z3ethQ6^LG8_M=Y+3UU0i7#h<&0^+b&1ZDOTqGpRV10ZBTJ#;G{PL{uUf(?Yk&(<_g z{&q<^7bbsT*4~#^cdzuQ_(mq9>$&l^k6RN84IwrD3P(3a0=FW4#*k=DD)}!N@JJAb zZhjchfn}%?lF@xp&{~McK5o$)MVWCnB3wQe8Y80(EiX~2%P|^qJhDc1!ab%Sf14~- ztda8b2FTSVP6Py80Iv8CnFZ%$wjO7Le2>N%+61TYRTBhcEnqsA-C-W?&|p84lgzaP z=I9@Sb;ICRl~JSfqh9w^g1MXcsrWM6c6$sTA>Uw5f$udv+Q*kA#UsRW8&{e>@>?Sd z9<@C_gerp=;A|=+?WCS3!kB!RvdiKk-$C|pjZQ&LN0HxtAoiP|IG`yic%R>PECubz zUlfVJcOsu4C`Rr>IR40syGT;#DoT|$6bJbu+|wNG(_h%;e}JOPs^x5}{f|+3+WxIj zNM{)sY(>WqHY<~t1l)J-ETR=Ty0(n%V;zpWn}FsEj863K-vqcpMyBOJR#ft3s=7rl zp%oW&ryn2A+{FWKBfNwiD&1jUVD3794>WeCJTO7$J!d_o)fktgJec4Or`_*`L`PZ7 zPSetI{2QrHlE&Lriuk_E?=Tvr$?f=dFW=KSCr&GJ1JE;a0=C*n5q|eeTuJR+U&l!Z zyTokWcLGtwb8;J~gA^vG%rT9V`S&bHh)fuEXM81X1#TBcK!{KfvMt83j9{#OcQW2S zNcN~F3s=hah|2c+0Xz(M&^|gwe%ADF9w#jVzv17KFGKV0St5J3WcX8xOj~Dcuo)#8 z9Z{@p`mEe^#quDo(X>?i&2$1^VDFUbkPKH!$p?MyZrjtCoyuWkigbRlA9z&8DPXASL& z0nzgVds81ML(kA^Jx$p1@SpU_dQH&ovhq|#nrpXhT6%!MMSRaF8I5;~pYeQ-f*zW6 z{DrvnI8+Lw)SxG}K8RnVH(~LY69}8-hVi#_)_z-Na6M9oVM!mT$_w4K>7!UKWksZn zT9~>DXcXjqN3`@i(jD~5Ta7jwZJDVYRcI|Oe#f)a^kCKsB)K8}wSQpuOb{iD9~FKF zRNcRWDqpZx^#f#cgNKiR?U8T`RgzYlVYZQDzW#ylD7D*8PIEMEd 
zsYCmm?>n=#Y9+UqcKNdGhcLzSbhRrv?=plv5{FrkpBYE4rdSA~SZ2ILX6{VXs>LO_ zWnw{e!KRKPZI5Pq{OoUG zHiX!UdSasXQD-HfAnkL|t_D-d1N&FWZIMBJ@_<3T!m$n&q)l%u`Z=PoQ># zl##h%A%yh-OL*!?o^C5;tjm*43dmFd*#_^tSuc2TEcU3=d#kf5~H$;^D2!~JE-9oLtk z0wr5%k?P7*v76l_$!KW4{?yb;T`osY_w18FoP7XXBhA=-2}Qlxarr-MWKhS@JK15e zI$21_HRD3DCQpezw z$>CRhL9c>1raiQwgyShzMe+`s1CBpjR!ddx*|*)1cye7uc)#tskfl560>g!AUi?bm zl;HSFh=t@KPJ{s=d{O*7e8wD_yt1Ux^w}P2YRrb8j1lnk*WwXBg^M-`)%o)U zMy$Q%J-lQjd`C2E0lsMApEXWS&RL;2#w8OdEfLu;sKJEb z?!YrZ542XarjJpf_`Jm!<9+#odhPibO)`P`STsD7o{3W#^E2AgM6I`O?=};B8ql-P zd{?`41q!FI-mPKTb9fSJnb637zrcvuAk%Ld>DCwzD2L&rdrQ5#-i-4o9(02r0q%8< z#8#CcEw4JdAl076=vXx$gzHAx0m%NCG&`}=0Ng}4(;!#vi*)VDpS>^0vLf{pc1tUp zG>XTpk&jcqz3T`v%Wm^l#OHa`3b0aA1HC(%4>sx~HPT5vygb33Kl zQV`8r$z4QFFo1-43(M>{J!pxPdW;~QamUPD_P=Vo)Fr=7u`i2;V5INmSxo=N#dK=P z=`K(miG3R3k+~A#1gUGF|M57=wlz_tR~bL7=DkulM*Z9vP-$2pG-e%MXl|2#aH)S5 zm(bG!zxtuAObJcsQi=Uvvyj>DEY7|te;hV+s zf40H4h@z8Xh{nwA67~57;DvKUH3;`!Z&~p>St|1I!4E}a;d>aE7nMD+JB6+)>5LyK zn6VGrBPZQRlA0<%0ERv7?N5H%S%fPaX!;Wivu(0HuZ}3QfM1}uP-A)S6NFVZ`9mCL zDRUA#yJ{Z)Btxz6LX{vU$Q@ZX)pved7E4^uj?%-deZk5~yh=z?E${Q0x_~@}ZH+d6 z5ZYKD*#Q|A`ela(Xk>BnJkkP!Tfix}lkS3P{rMY<=f}S%|ew{ z_4~dCY`+CdaLd!huux5VqZU}PZHuww@#Pq{cc!nQ6gnR9VeX+ygeLHo~Fz#EzU!yjK8JZy1e@r*o0T+e*=PdJ?M`w9($j<yZuo;-Qk1J5-J#Ae`kLnDMtyffeYriMA;aAK4{)@d|T;AH$<$O7S3_5$7t3%r-ndcTH zlkE09fg`O5RmN!<7-}r_5?Uy~1%hmCC42*MV5M5?$Te2>=h{+A=BUgAox0&z+-n-m zc~?S^PYMgHm8MyZ;RK?;{9gI5OnY?3w=M$yPN^dALzgVQ_R0rb%`Rb=!R=I~W6S&) z&yV5mAPWZ<nBBxLQjuc1@Uy6_Ioh`*Y5Z704_vQ02yq%wMci6L>?r95rt7(mC z>Luc-bgsv;A!Y$w7}2)N{Z(}LWcccIY)kYmRb6w$ffI%bO`wYk?Mh)T;z~gl0h|b> zTJKcDnLspcRtS7s9eu8^RxB(nriWmo_6Wa|B!u(*I3wq>FQ!D*kmD zd--a+*Xn!_o1*grpZo_(=nGGzt&O_2C@ajzb~9KJ!f(1$2#yf#OLz1|z(|XjhN?;! 
zP$KHRT>rD8071OmG3c4?(Yh)<|JA=qWe(Zil6-#X-q(plh`g9@Vpf;PG@4%lddyc5 zAd!*;tS&xGv5(4<;0UX)o$Z2=zpm8Y2tLoBzRXO1S#9@^wR;3RqwaYp>x@dfaRTNV zg+T$6wFKVI>p^cd(2*rQeGQ~W1Ruf6{up*H4SrLxHGM=0yBL}LwRo}s^u3pB>v{=@DJ7`kL-W#M_ZxjP+ymUpm?gYS6wQP8n$O3BjnZN2ie^%IqjCW+DxSslU_Ti&Q-)jd1?Y6)tU@uYd=PNM@T{4E(`dXVb?|bAMu7w%bZKHt9LO%gV5- zuoFxSCq`*f7+TE}YbfDrqksvqN1|c8WY|YIm9ksq19d zpF{%FDoaO`em$?`BA|Ab9M}HrqF?5E&&d%a;2CM-DlG zs%yRF)5MV{N_4#a;b1JFHq$vhmm9jKR5uCzFI1LDQT>&-Z*7;imiI3ZI4T_Md5Jpq zsNawH%NnPOAnafPoD#Sww6|y3M`zwoIT#8|&`@)E7fp3km8AhNm!h;PL`z03(NSb- z*GCFz3{z8bsfJd+t#~|%4$ueH2(DD{%akMRQrvXnwQIC$k+V%+Y=0PhMuiS}Zprp% zB(~g;YYU27?YTBIpLyqZ4+W8x%6wUUN&{B02P&B0C z4(G{FYdyaEmR_wy(%2VQ8MRyJG(F3Yd6!kk*HC&;+b=x$?rTg}P`*_CiJ2Tfjm$J? zrfmEe50q;7?^m&GC>C{6nz^eYS%PJ)l&55fEN`*JT&l)5kfHeq9)IB%q<^bo6myBi$2I9L!)GV=G5-F)MzwJ&y|g;1QJw0KOBxb)QDub4jAjZ#Ka*>%h49OLD@Y!aM*KYL%sb9U_oZ26yHAgB- z8+H_sRHE4n>}`0%i_5r;b+V(#q~=Zf44x6eyoP%{y{j0P{)|8cj7jNLJ4KbDrywMg zt0&NmIrc;=nev)UTYd*W`?+#|NLwB^3V5u7AU4j=<$hm9<^1Eudt4X`G))!e(t7Bv zX};@0l`J}KyWM-cu4-0)quH$ywV)J-DGfW6)7RpBV5sF-sPe^BDRbh9<9u79nfR@} z8HUJmu|UxT=}}4RM;)MS5Z!hIfC8f{QgC9pT|)gmkTr&xh(d#m{&n!v5@Oh{nKRXrto8=wBZM2hHxroY_ha=!!H?u?nMY0p`+aCO zO)h7w3}PI$tzDUJhzFP#U<3u|1h>ZVAd=`ri3XUn@kupM5Lyfrhv?5z(t!yJL12l-h7BX>TD z-I+K~7L4anI(VUS5ZQ)5O-;fwv_S>Iqmyg`OJT!^l|> zm)#uxl?}pSXtl~JA|Ei2{V}qkuR{1-)J1-jjByiJ@8T+j9^<+fBV_(WAL+brT%XsP z-}kpu2K%@CkZQY4Q1P$S%*$81f9+WIq!gvey*KtQZ@FN0J=9uZHhLN7q)bd^Sq7SL z)BfEskQVUdY3bjZAlGUg;I)@ufpPVvZYrF9H|stoUe@xAYs3qCZW~iII`1qOw5b!X z403^T6NzrIT3ya+ExfDAP*co>*JLYb{Tobee{;MOPrtzX}ZD*Qhn3JE)H1szo; z!(x=TJ;9W}x;{WxE4@dB0_u`3dj4sG+ndHw-_B3e_Q`7w?7|w=DWwF-^8br)LIFHd zP?Qi%&xlflyR7wQmz!NgxcL}I)n+2SY#)1$DoU86=3xwb*!U{3Yh! 
zyO}kl%3cm9nSWAn0nal9%kRLRyqV zbgy@Gim%DNtz~qkHeXv?r%cJGWJC@DO(JB`o9AIZ{|L48$DK<|%o`N51qc6>J~$5J z7&_ofepQ8C?e%F%jI7n|Em|9TEOsuCJU0E{Y1fv_oqtaqY}Czn7K}eiGQd4-pC*Gv ze5ZoC{-!mFqy2s@ki?Dmp0Yer|BVcVv*g$D6Psqu_3BGNZqG1S+SosJO)H4rG+mgH zTeWlkAkcBlHm!phImfG`)a%87YBp(%5)%;!pWHJQPU5s7D)sYNqTHTY?=Vk&8R0(!zJWNj z;ziYNKD{|^=&82>l(V-%1kxL#O!Nx!J)O8}dR+^I9XQ-lGNDg>K;YnTi{Fi2?H(`;UfYG+%SbQa({Vbc4aC!0kvnIyy~>DP<`O%Ac4D<^uJA$7Ww z3p|taQxnr*tIcji%ltcS;)YO{IR8f7t$uSz~8P?Z2htE4u=Z5{-$3Pe;urEAGZ`Fx_37c zc|E{V&bpcs^gJehXzVY?;+>`7~z7GXs#G zqzjBQo^__j(CibIdrF;Ln5S&b)!Q%B;q!l3@w-fTMbu_ezDG>!%cg2JwH6{r1~pF=BFlX@5ka#Y%1# zr|(KvVLWGKnZ~*OW6)xu*K2BC8MgtD?852Ir*0DO|G!HQ2EX!HM{!!6<$)W|wj#s( z)*X}?^WJUzvC1hLBU$y*5_rRfb;W(Tt#eZu)yAi3u5~7T;V-uXN-)hj$Uo(+`EK2Y z3voHQU3v7k5!-9z90N!W)iCgo7DvvrS4IoGO=pg_y6HR08%vq(q1q9jvA>UaCVs6L zX3ssOAmcKvF<$oBRwN1|yy{4KlPZ*I~8eZT#^c!S|S zHZYC6?5u|g(Fra)@P`GriyMv?Jl{-#sOU zT_i|ADLB>+2P69t)`db zjKpa{yIE)N5_{45JgG=34#xk6oTvWai9NwNB~_m%JDN9_r1(x`jJWNpdj#l<;QT5t zS}>wUlN0uGh?bnm@=%b>**Z$@ZSmhbnb%8=S#QnGc>YdgvE%RH(z)1+eiaQa%hnkw z6#t!aAcRwIQa1PN{Z6Jl(;-Z8Rj1^%q}#veeqmYqaLsu}Iz$>kvug6!4OUQ(1>w+742L(1X(0Up+>`y0af3Oe%3X zJ@{ud^#dAm7*fs0>WBh(NhS5e@u!qVciu*ZRx_ejLOJK#pH8farj0L#4t59mh+f^Y z$8hbUgs8KIv$gD%{Uob2QdXGdy9Q^XY8(2UPh|jfwz}%^1f6aH`7(>=`T%dRySowN z+MaZ&Sa>(%%7_9ly?A%htI3YW{lHKI=?}m9r?f-!O^7F zm$tp}bW>HIyQ4s|4~}Lzk*H9cLN1LmjzJix*edU!D%`Wmsb40k?e=FIISW$0Hf^eB z`(|9$o%t6Yb>a>bf+TY#1+T=vFhuKOwX_8iVcTd67c2WBakXn$eegzeh}Cgwf3*&E z8sRkErFxlvEX~>{uY$W{LL|w`eHC_pfDi?9u~i#oDa(r|oUYy!jU~1C4h+JlpLkfp z3-yj11B?DAnHRcMgAFpbjnvsV(It`WBpaCN`Cd@a=YAK@7M^ym(G2Gv(r&3V#>J

uP72NWycduRDuano_tg`oQrsmWl)u|p-K~Y=INTc zXTXoguF8)GX?j*vijhxIE9InLPysvaDQ4R82)OhJw@sB~iRvforC&*hziCg&pAScw zN8L`n{>mjV8O;gDK8|D#`6zZjDaQ71M;1)d-_Ur7qT+7XJCY$B{^`r;_dYK>&Ay)9 zP2}rmAPgDYx?!^FzU#;t9d-Sn6PYmOSG4-ng%!txpy3uZAjrFjJhGwUaOblx1*(C- z{{^8xsy`_2+7N}wnj!v7%f z%jRzU*1s^iKYz6u^Y5e0Bxtm;6eE2{Vb!^~_p^1gbtAeyFslm~ca6NgWmsZ7Nr{B^ znNgxp%(Q@-^qQOv>fH!(af!6KK4DDi=b1~;hfE$<_29~x2fj=C_JNhPgimK%9z1Vz zvf0x$|7|vv4s3{?`m4vM^>woH!8G62uhRNu-H_+TUl$GpEi`QK{jAObREz{hDn4)N z{FRK+o+o3U95*SXgpdCa`!x?i-d_G> zEH9-7q<#VGAuuyGU_y6`C0C&A7fc+Niula0%*tL-|5kui2FrL`Q9e@&7rE7%pNW_* zgXA4LGb`uQg}%|^>B#5;J}w}%$gzpl@KX^>_0AX+#u7!Cu}TwebPz_K2I2ooKP<%M zKAK`N0?WVLBQ*(jauk)OLa2_~4qi&o9(;P;Xf+f_6H!MctIs(A8};(n}2)x1ea_FDc67 z^;6KK(~hE2s%L^xbBF(&?IsRTGun4$SN5QzXQH-_uoG*r2wCE$ z!!J#5ly>0hm`4k*+~t?`lpufyF7Ty}RpETNJ*Uh@on?aJ@U|c@iS02HoIxwJf0lZz z#*znhmBK~YDRnwPMg!PEuLE-~>uZjm;sN75E+++o+=u+uqYHm@?RN#I^O<-V?LZ+= zOjGYh*VK)v^`B4lr88i!ihpP997SjCwqvLLZ<`l?lRc{tSFWi9E1~ zm)I5x<;zElA#_+$Exh&2o|{QuY7Psl`m@K}vhXv4Ftm-!4+yQyc>xhSA+g)JQB1Ly z8%#ZQ9vpT;lpxb`*cHn1iwja5OdXci(aR?hq53@THE8pesl}=6JhcCpaur%33zZA-!qC`%bs5(Tw#m2NTK5EXjw3q5_NLZ z6WP*=V=JQ1Ses`Y&es-u-^%&bSvFP>bixMx0oQ5SyT|SE6cs8x`F5T$z7j#{kK6!e zG`yaHlU#u-Oe^gADXB$&viW45X&+lPavf-@bquu)|=**uh+zIeAs9vW1=sFQ=Pjj4IVwP|_`^pO-w<1_3QCPG!)gJ=v}|sGzIkTF zXqD`tOLY$|DQ0;vxWB{EE8eUXQ zFuJX~F{V3p5;TW<&I-1Xaq?sVoS4-wnF0&`IT}24+ zNs)5PuaNre&uus7Mq}h!<@|a7z#r%m-0PONH;v&sKg(*)MQRU4*9M{Uz(4xW@^XE1 zW9O%4zplAhi=CBDx^ts^(@7|f>xqi*h1WG-GyV}*+*uNxJ|2HprZJ-~B@;yJ-;D`; z`PnavsNw`iCBvR+4UTf3W${aE6GW?G)I;QUty=Piv&RTs|KpH_*N=fZ`RTDOBCcx& zT#AHRZa5IiUEZ3w_Sx;m?GJumGnR(iD+ zpGyL|^1A-2-h7q~{_UQX??$`A|Oxu%jV!m#1VODS>jXv>s3yiHJs%wHFHT!VdoJ_c6 z(L0d!G%|#)o@3h5c9u*#|7zPmFXTCED56@>X<7q>r|2s)Mr({mf>Fq72({T$De=f* zVI95lno`f14}eglrijH(37W<8tsH@(-M}N(owk9U9lrReaBw)gPC+4P85v#Lm(iAX zu+<3*lX4&fxcOIyV5n*?0yhK{biKBcs_iVd@ItUMCXL;)cjL6Mbo*u$rLbJa>FbbK z=NrSJ3;6@&&GR>HHCeO#nH!qc9zU)#t zbEJapDUj)@Qj(TS$)Y?h!=!+`DJl9I3_5~PfL^!9)=oJZKAa{IgSLL7 zt)#S(8{putj^9Ba^`EslQF|DLal06mp}7R4m=!siGa)iXap1VoTqJXU< 
zj3SKazwGJLxL=jq$+@>TI5Bd!E2ifUf#a9xfV&NghpKdS)Ia5>6ibtJ{?Gn?@Yg^B z&OPD^!>RrDIiy&`)gE5l?eoFL;KGO0u46Pm%X7w6CFV_lT7PFAPqC@~xQH=H=rL+aZuR zq{iGNo~;EwmTubdq6F_y2hEEg*ZAc{Sy^38e&%uWKwgU_HJ9-y6LF_PiQ+A%6wxZW zWLj2LJW|3o;~tB#{R0@kl`#(V`GS3K{&g%Q%&iBfSOlk!I{<9CtZ`bqine`K6?QTB zfP<`ltm9I$AFl~#V-d+zsw^FaA{%ZJmJLXh91HkWB>EArG9N}i7WAWMZ3 zd*%0s4q0N;2fQJ7K^gv+lJ-iMbNg7+sZ>#FB?5dD{KV$$n`_yyj98d1=Z_R8UuY5; znUa3bRyoZ2T;IaZ9odyVMsY^-Ic<_6CX)N+=VvvCo{VW3T4|Ub8l5PSt(Atvms1vz zcyQW57s~D)xiNG5#?rKCXPVC)HO|5*7)1d5Pv{-zuI?W=m~^Yjx+u08X*yeOd` z`;Ji1G))ET#nNEw3_rrAl}z4Dnn_mmH&*)av!`icHsFxSmc&}-+Gbla`(b^n&4So5 zo@TRtuq3b-tr_8mrO7W5O&j|v&ZY|pzg`cnn^&>fYTTT(Tj;6=XgqEs1RL~JF&9hC z+k}wxavzf^MyIq%BR|+flamM!5n5|&fCWyj(JIY{W2tlg<%0P`?6RkMX$WzTYVakg~ssNpPZ z3Nu%T-5o>peALidIxG2P-1niMEzG!2ZiavGTI8>~)lvwlgLgc?Mxb?~)tGj;V!~CZ z*5_+9+)^JrelU&PXN%J6I?KOM;||Pde&Cl-o_>vzg%eI_B0OjYg}=mPMo7-$vzYK9 z)BNQ%!bG+vyMQ863MI>lyDL^o);;1;K*U>Gpi1pY`&|y(V)u=OCq4M9GR3bgPi{jL z{MS@Y?cg*-lcFCFJEBe7o(k0YOd^wih6$B|hvAE!ZpH#{)tNo8&0SA;u-77~gLw9c zOy2pjj!hLn#vp8qgxxw)tQwSl#;w3Iy-BM<#z-=ar--7v!T1OT>+I&khrW8DCrmSR7~1_Xmu~#)9(D?9 zGxb|b9Ws5|=s`iyoNd}}NOT-f)fYTUL@>CuveYtZa!sPq_ zirK_Zv?!ExvJwLjW5U$HWNy%krkefQKCw>eM$dk`yK~AA{d1h+Ixo5xJ*=~3ezsoL z1_;e415=x5<>HW5ot)lme%w8>I}V z(L^?z&qi3jt&R63#_VrE_7@moN&ki4Nz1W96_tJ)XJ9N>GhyJWvSQi7-jgVkB!U&sl_x?v zB)}~W3DIF{BWq(MKE?jfbMkb=#wQqP_6LR}L@Yd8qB93)qhf98<1KctK9K!T$NsYc z)OM&SaX%#{S58c2i5K?uUF2R}TF7D|qfD$*#9RLres)Xcv#+{XiJn^mo#KJsy2*c> ze!AUL9CR>G$9L`y;cv}`>5Q5f63&ouskMiC>Lkj;OCrhNsc+BHE|{c8Bze9MVhP`q zzFeGieELypY^G))}- z;-;?=q;g5*!*Sf+^oN9*(f!}g@=K5F$6Ym$c2^eD$8i{oOTU4R1wHn! 
z!g)`MgjN+;%43xJMz?GrU71b*U3r2atJm@F$eE4zh2(P~MYij-eBi%Cg-Dv3Ho~9F zalT25z{4Hm41V%^HT>XROD0IP($pMEP18B|Oq4}0G2*}1FmXo~r-A_RN10c)oYnZs z+~hYawhp-1+plFLDlJ^1%sz3edl#BVGjzFpMRM2fnNLs3eq>F%`*Hmz1RRup9)(lK zg=}@2sr-nwm+XO-IwuSxIfJ$J{_T%U4AQ?7VKEWg+EeyFjf>4zk7X=;Cip9vNC|sn2RkAM)U_#Dw2$KL5L(Iit&JA(#rN zGOpr3#xacEG?yKNWdZ5SAApFipD!IQrnj%x|#KSS^sKEFKMdeaYg{o}l@vO*3AxD7Z?ZFBgsr^xT2bnZG)MorXii zT$>%mN>zP11*e7m?!kjHAeY1;uQ?wlHlQ-u$Yq$1(gl%}#nZImOqdvIIT%bC@t z516bB+3#)ryy@a0K;3!cn+pO-oet~Y4fS62IdW7JA-VP?JNkqF0wD-Xmsp6MGExJh zS5NLycYD_D;!Yx0c$XX#iIN!Rc6^9IIP-{S!E7vv{M+tu!+*K7K>Pc(qVIF@a9~Qt z5nD9nmFYupE8>s_8%$qeeaQ`uT2<^D=1zuyKv*QHC+4f~U-g~6cVa8Pe)ftcD&Sj0 zNA!=|Pxqm$AkJhT)^d9h7th3Tf<3ttiNEY?8U_PM%ciILZ_6yAAn(iA@{-Y>_^DPv z{0#+nqm6v&_yvMWa37`3#LHu;m|3}p@80@lMMSGw<{;GJ842UzTu&x)Bfj(`YKi74=@2d%bVy~OlHs6v+bTDc|I~GLNy+v(r~RXOZT*>pZM<7T zHw13p(#MlYO)g*4j`l0O&%1Y}8Xy+$k%XQ+VIQF7)^+IRu^nvhM+7`0W6*EW9)kJ* z9e4n) z6>C!4lYRY4uRoPP)P^)JJFi&NkATP@+g{PU<;una{#h-Xufjrqi}e~$Q-#o36YfvN z>Gj;h42kY#gM(W--ccW|Xh^k7=v1EUo=;a~(n4yo9KaJ8dSOWNYAwV4oJrYB&SYJ2 zl4Rmzwa9;3OAsf9)58-XDQXpW&tNL1&foYaWkrr~YM4~!R;;B6%CCNzin; zzUx2zgLz%wlJ{glTMkD3~8Y+tKZ)2SX|jNR6wJ^vYHOM!F* zu3E*DT)_hUZgAGcc46!OPB|-0=hV;5|JRpz0D;R##LJa0!=iZ7ZEuMwE76s-mt%Rm z|3&Gp{R%LiEg!98Q7i`eznjzqD@RVDU-YmLW6KN5P&$mKht-3_$(}ITbypy5aVSLF zWA1R83U{aEcx>`(=m^8DP#ydiNyhG`;noryD;dA+zo&5YflBc3ZOY31m%F|{C%eS) z)edL>+cf?rSas8mak*I9Fjd<9qSS&@ju}LdKm zYU-bGMZ^H%Acx4enQ2XD;hYx$30CiD&-ax7ZTvLo^cj?P#?dF50rSRKxaum>wq-1t z^&xq%~1O*a?%4 z6+TeFN>!&qr4t75$ZVP{QK}|hXv?M6GCT$Rsr%jHUkChzotjf`{cq4YZ4B%TchyPi zcg{FhFaOZ|P;>3)*tfyif5~m&mZDE5HS|;R5y*^zNYrfdM0n{Ha6ku@tRxxWI=9sp8P3XtJf)DUM1M2**F&6bcC7iYalgweRUu^pACY5< zdzI}i>yv;j?OY=hHp34?X}Y4`Zdc@d7ipl6no$PUAS<>a>QAH!;8A2Z>OBh4n7t`h zu#LKiph~PK%O_T$5H|7e0bf7)PCleBUfKytFMR$}>UC0q<3*+#4O@17s0Sh$3md+F zrs_kKH?8F1tWNX@0PG|FLoZS6`JG|nXZ4aT^a!<*dEs7GcUH&n*fs7v+g z{T(>m0}hz{mht#zL$=38;_Co|iLnW@(zn36eV)k1nU`6g@vW#zR2^Kz)`D#hqdda( z_cK~n&+usRf5@RkBBhfn$@J=@^3PhG+T2R2pAc2}+Q4<3e;mfF#{X*BBg~4v|Na>P 
zF#D&C5cwq&WxS_Cd6TB>yT&KvzWv^*8j<-jEiDTXK~|$U*E7=sIhQ8kRPmM-u_{hYV`cU zTA1RF+oF-}Npz1c73*}_w(zhK7giU)F4V{?AH4*mKZs?X4H`-5D^QY7v1N^m5{15% zCxwvhm4)Z)iuE<^dGNb|LIRHq_w3`oj+lHQqHlQ~7?oj|pJ^l}7nldK#Gua!LLERBuPo*qYf z{TeefaWmBK8!EtqiWbYDWFHvwR&I9f>K)OB3RXf{W(#L)&ktT!RU93R-YGv(&SC&` z^5pFGg;Z+{95_HB}Lr1pVcc5ysd;_$cGTsG~Ru#4%%pkR( zlPNcY5|s5Q##CdZk-*|$8Viw5Af>ce`NqRTYZ;tNK4BJ|%(}+%>q|o$jhrt-ACL zb4}4`p>#5K^YoXQ$Lt{ug%Z1tj5#qp&{#r})|lJsVi1oD`nrRhw2iWptmWHxtrHh0 z+z>VJTtkE=TRuiiLfRpJAn3i{V;R{C)ZpNk z;X4FI>(+74B>2YhM^wbBdSA1-7v}n|Hqc3>p!q`g@tsnH$nv6a%TZ!{DQ2~KKyNK0 zsJ^+xv?aWRN*rkQ=4|}J$_AMulKmB`?v2DqP26Ranbu7#RERJ!K*piGKbZYXyTAz~ zh9nv&uhOPA9{LYE$P>1ONXp|@2x8n2k{&Y;Yj%g=`v%aUVv!z0Xz81H8aDb_!|=Z!qnw5 zmMb~9QsD$EiY}mPQVlkz+CUn{{Os@n11Fn#2;V7=9NpsMu_-l7Gig8i!5-spl)Soa za1PA_aaJ-s!ug;`e4Ns;)#AtNbfE~XY{4E~d8j=sjoP1U*DFudTldAs*NDlvQ=gXF zBHTK(3+>BevDsjyWa$>iwn0L3hBS=x}HX@WlnVGXeAHrUY|?RONx6&}Ys!?W1A zqEJ_9P;mE;FC&tlFt9T2S@8j~QazLR7`M`6c!39DdLL+T=23%?H29BwOsiw_iyI`5 zhqCEt!QklEA#YN6H&@Jpt%xys3Z*zUiJrDzE7c{eV_QJ5t2mBo-`YGoBgg#uz(Qj1 zVJw9d^?Xf`WVrSGN(xhKShhKJLK(#sGw zpf=5#1anU+l)q7DcNjuTk;auyU<+oPa@~6mnE}Oy2ddyNMZ05hL|2Ww(auf7j{HsW zl_cEA7YWygl+dIfGX`gl6Fr4FUhV{f%l$?pc(@rPEK?)icVUR&k)xay*ZH0^Rts}{2)ma?FU0MhvZ-GiQ;dAx3kW~8B1FLhox7Rr; zj{0M{@*W_YM~m@o#T^~vREF?4Jxer34D@~WMSRdvaxRBwwryBLQiX<9rqsv!$4`MM!FAFKqLZ3Q!&TyFUeH+G1NF9n-dHfBz#gkmc?V5xmHE=U zvxM*2CcByYgEW{%7Eqr0wgXwr$NqS>L}-KMuJoe(Zl2(}O))b|?gk(+r?R+9bBnq5 zK;G9j5R^%Hi2rNzl8g+L$i#(T!hLlA5L1FWgujv!1YM4dUdW zufGxaM16xvvmQbFCg2m+Fp$+u0`C?8M!-A4yuM8pW%7BJ_*Az$9d+$Y8DS@si~pu0 zX(-!?tLfyY;v*d}I9P!N+mgp?bA%+Kknt-;^x(yiAQzlN+W@Q&+m zKT04P8PaVuIv51fXKTMy*J>mRjT&4ljPiAs-iL7!so!Fv#p=5i53?UvtE=ew=Lz(9 zhAE+ZG1|{5ZtoH^yZoVl=H2RxsmuMNWVGO8#bIW_@0uI$%#Ag-BydbY2cgKqN8`j1 zRm{PsD3kc05PUU@Ql^HCs;$ZU$M{QK7y(r#ijSS4eWIIuSFd%k+zD&}7-~!cV)55l z0gkruoF-_6^S6N-N!eUe)du9ca!Y4!UsCTUOYmBy)es09Gfn=)0V{Y?5A;reXGOcN zRkZLF>qA3}5=u(z$H$11u|tnzDk!LuI6AVPc=c;S=~iixFP-1%PrJ#$I@{aWagAe0 
zLdxa<1z(yhirDaeUnBFys_?ns8nsi15_rLy3nkiXk1LmVsp9fWlQIg_&uHJ2k%(JCOv>i#mO0IdDYqI1oA3x`E7g*b zn2}F=<6LMSGud3xy5ES~tFOBbax69yw4f>qRAYc(J?&b_WveQr2f=^h z@ae;t-g(hL^V#XA3dzPkRdVlKzt}YUriyR7UqbhCUm!Ne{B!+kJv9A4<`lHlq`UqN zwdV?oM_BHDgg;sIyB2iJ8tZC7k&QzbM|%@G42k40t-yw4DnyaL9@7hFG1vU`9plHb zC8z%W0%?V$qN*tR#O;8ZKBYDF(UEWS_54v|x!9ztqT@{xyEm05>xH_hr>7>(y2pp! z_XAbJZdC^!3qDlV30w|?a|~y(uTLy9MGs|FP@TSKC$6IIQ`O#UTwx>n%ug0Kn27vK z&Uhp>M~9yv&j`f}S^cBi90)V#_6v56W3yhr1L$=|sg)fOeJuWxOlUm*T^c=JFZl=) zwp*J-EHgVQ$t+yU+R_>z={v1>4Mcs4l}R_8E!3veh_kS0?8KpAAV3bsN5@kSI2eVzoGD zQ~oC7K-70Oe51i<4kkpl`j=lj!;^Z7#sx+gUwQbi))$(>0D_>iOPbx2&g0%th4rz8 z>z*3V{8J9u*to`X>eI$ZtS5Bnzps8PNgs$K9OQW2dd{K+gxUwuc9k{^F|RV*kcCsX zuTuR;4~>s!Z@vXGo<-ZJdj(US=vG)g`&PE(21>BeTaW1pyyD*ukU>8+*!I=YhOCMr zBi|c3KmtSb$?rwilj`%u+MvE?tA<{+L1tb{50_qzR3qn}TeSLz_L;6(j<#9=b2bt+cy~!Hex9{&SpqW9~s=R&(;bdr;(2fUm`(ShebaZE>B-}ag{ z0F95tXgt)86<=%el)vM6O^T_T!k)GPO#}<`HHF2Wt|+gAbnS`*zip0SquWqAiLNi# zy6^-YN*p~)J^kEqKlK69tU9u-9J%8KG`Wm|F)j4zM&JCjFdKSw)e05t7M%0gkMZ7; zt7{8vpHe!H^Yfk{-v_p6c`t_cn*juLSyS0J%^)as-gl9_i45c_y@0o(|~56Ox6cgvR@0~XZawvFAbrfYNm z-MU`Rg_Q(N4bJK5Bxf(w=Sk^>U-l^!-0z{8Vx^of^ zfIby*`poJ_TM36sG)}!^qE`%x#}BrZXY7DZUm-Gw1;coTg%dv4s6|& z9=~2sN%ywjJd$00GXumhQN zr7t!db&H;U#PoZ`HS!$0EO%mih|OYo#AL=@I!*cCWTzHYlkxV~=T*=AY)zFz>K`~V z9#SJ2L#@^kL zL#`ZE=}A3%LBgb7!yiQIPf3iqwcgbe&YMc@w;V_3?KQI%!3Gd_mm8#=`W&@Q9~3AK zPaMLiqpwcna$AErdIferM;**~?H-8T>l34Q-1bS`9VV6CGIGCGMk$5}R;_L+dEO7G z?Mhk`pI_n*&G-FYKx~nGy3fZ$6tO%y1FAi+TrBZC93#TE-;vL(@ak~Xq06$g6R&|N zhqRKD&)a5-`$NgiyPze)+zWYviN@FdlELwMNL(Bc9|8lVS%qEtEiBCatAr$a*&vqZ z*RO+bUw?Ed}Bpq=L{neI$ALT%~78-zyX^YCE#u`QvPwUYX;wf5}sbtF=E#;71DvdzF&D;$2@S%FMWSD8FAv z^Q+$c`0d{G98s5--xxTqF68yV>j;kO&7)u+IW|7|I9+Atw?BR!cy8D(YZx3i0tnKe zo%AobQfWy$X1xw;TQ6T7UGc0#)CAPKeyE7I+r;n%@0*09sJ^O69aE=4VY1GiAaN+6riS&~)L;nGBKW5uP+14Xq}|iS5k8Pv1fy9j6iN zVOOK-&4)xkDE;XdqRq9w_nWU|idc(3VqM(Y^PEg0GGNBEi9HsL^F9+`9es|8ANs&F zIqcC#rCyc$D50e(zHl^h|3p%Da_h%QjGIUVtSEDitQ5MCJua8^309sC>j%^`n z06H>{hYD@Rtqqe;3Pz4m^T7;W?+%!uY$8`P8)3^@hFf4IO;0CuyB 
zxCV|s9)eg5D~SHCr3<5P{+eYZl^(<@bz8q>g}Ao~BT-q9IHF^ReBqNgpia|UeFK!d zDuYEvWP@bud81(GC=${%!L|X=!kjMBmApE6jeW zfyBB-X3)#d(MnZ)BSvji|Dk<8R+bU-0~z%5j(GXTN%y=|y&Bf|(8*rds#B zY~v1tdD|vga|&_R3r{2ZB45g84GQdf;#5RS^xgczJ8foOM<0zf*Nk4Fxga#N#AZzu zADKN7iZz#|wSDPQSX9?UgQmo&60~l!>+IOES#GN`$C_aY6t(_hD04+$^_((l=Ge%nD zgC*ZaZ)j9sQd~3$M=3-!MDYNBWM^@b!MQL;aH4x`r{b8vYPLr==uOFDK*wPZz6^sy zPlP59mTT&;LAKsSOyH_;jXI3NfE9)hey&Wg0ZW9v^d(dM*dbYC4YnyAJS!SFQ=&nf zXw(eN_Qd0RQhO1%In>XqB=C{mSvK;o&JR5|X03haxK!+;8!N zj$H)*GSj?+lF8K7?e4o$a&Pb7*e_0@35^;DbW{KM9(i?^HvktN|F+wxV_k*0gZ&Mzyh6Sh-=n!e%Rms3sg2k{y2 zN_L*9rg3p9^Y!6(%~DAT=xwF1rLwr+kLKKKT;xH*?$hSKU1oSTP~Vp2{7LYTD5JhU ztPYLLB@43NkX$w5SKQOz%04@VxaiR1hio$(Y}~&quZW9}mWI%~%VFR*$tOtLM$^uz z#bkiMDxv?yU;d2zkF`r0KREyGarSlZqz} z{>q%LtC9DG+QTi^56pE6(e1P94cs7dUHZDyFO}vXN+f@eYSj(^7M**7>``66w) zN!E|{y)G30CU%@?1HVPA-NO@SsSEv*T1zQ* zc~Ilx{GvdY5!vNjjF6P#>XYoXKlwc^l6ySu^IQV_?7l*0|a+w(Pyba8;9a~(zo(J&r^`(MKxqD*iBZznjUyAc(#~s8o&BhS|K;Q1Pv2~>C z`tvm`@WykvFY?*fNM0H{R4s?>gbX~&uGwgUeopaUoR&=k?5RHNVSkuK zV^!12b?Cv_Y_4k3dV4mf8roM{wrwgaLmzBjMeZrNz3?4xp4+rW@7>cVD}FSo635C^ z3$3YWf_n|+J^YP;IgtrZm(6eYd=oWCGB5ksLW=F@5LSDav#+S9a_-r07Hv3?xYw5< z9>wysV0%^C<$!u|Cr&Uy0}2FU3nm!2fik1i@7K&SxkH4)ztU=J(mjR6Lir)w^|w%Z zv_v~6Nd6W9vg;75E^Z8!EMB<==* zFNWV~?@p+p4oW5CYZy3H0y7(;urZQr4rcA7q=QFMykj+hEfGLcZE7NX`3VzxA;orr zQMxtcU`yw9rQACEdQJDDMKG^I2lItE*VRe`#`@sP^e*n%@0PLy z2a8sExunK7EE%WmKOM8(d_QD<@3Q!qfqvm)J5(M%IK`W%WzP z&SL9--KJth?Y5?eHcYG~#khc09=i5&_0xt^NC`ng${9aKt z;yqs6sP`6eops+Ido%Mow*Ia(hemTmqW)P%2AC%N9t2N zi+={}P3O6PcYiy#^sbqbJjX1Zw%wUoBD>~6M$lZhcQ-UI9@E{^9AY>p3{+;{mZR-j z_2(Ak_MNV`-RYm!ZZFstw;b&(Z%gl)-$~Ub$-V4*eUq#ERvI##+L6g5Hs}4Yx}rKV z2xxhRbiS7ZKI*mV=nIkT*w(y(C&&kQ7RIXpeVx2lmz62H!SunJef1zk5Zr)oI}bYf z)^stKgq8p-|H%^pw4KNB2-Qf#2~$(22w1@ep~?~)0Bg?^(bMTdYAx%_kMvKi6FzF# zp>Ptz^?nhwWVq1I!XHiWmVC{P4$j@Am`nQ*85H&C6iOEIVU=TzkC2V_R6;xCBLq^# z=yy3Pr))Zya+!00#obal_y-Cwm2*8Si6pLK#Q%j$ z=?+(!4i$RiT>C1`A%mP|RgvfbYo6HpW4nBM-udS)rhyS&Uhk{RQ8_%$&i3F%164Ep 
zN3(T0deUX42&DQPOz=wK&vIG%;ba?QX`gNUa-<%3lYK5o5>CJ~imSJUgR1uSlYP$X z^#nf{s@?XW8bN;mF{SrwGLm>0MtDt|siX;dF5vO}te@fixKtWl581W*2}t~XSbIE8 zXewX&=Z&F8TQITIq`vp^(nW^ux;2E%yfU}poiHtA;x|bsS`fCW`PN}jKg!!jfnnpy zrvVAG*eqH_3sBlKOKud402BKpen}y|7bd1@c2{*yu`_F)Il<@sLE0)-A`h5>qT|CU`XD9UCuGRy3()}|4OmB;c zQ$S2bRph=gYw&7)P8Hn}d^aE>0@0ScEBS|l->lsFJZ;ruVb-^GZq{4ka!>Hy9j|Kl zZj(B!zbtabSeeJWc>(_PtZ{l%_MT9WB2nz8#J1%+U=DC`kl=x8K6> zgP6{qGjzOiMcGt7RkQ&tZbFwLz5AVA+Un4cO5-IyfL$MPwjKEd;jvfcb-8qq08#^e z;q=C6fLsVWXj^>Y4xIQUgwTrQsolLHyOwZ1mGLKY#=1UCh-nwL;C|@J{Hg?9DcdJ!9M$CHe{@(A*&5F-maR*Pf1v+($5M7@bUQdhlLG1@4 z=PHjbW0{Rz@R~((7oGdixLq7**ykj>rLVO|?1%kL@JQQ)v5|p&`7R=XYbV!N*(8V7jKsdZQwutkp}Mu+nDC);3adCI@1qP`ebQE#BRZSw6drUS zwwtY!fa5G#oFj;|3(U*$cIPWVY6Qg+HY4|}ukZF2ox8z>BHsizB>VcZ7dObNBN1~h z^+yb(4`VVKkb!i+3Ddea8|fE z+!e03?q|){&P%E0Ot_bn(Nov+1)!|v3M#Sp3WLOsOe8~_Pig=BN*+zt%=CkOhH3BI z-W-ply@&2m26XyVU4E@NC3C_cMKruK8F>-#f*E`d2V-mm{dBN(f&l9V3LK}vQW79Y zbfEvqh!bTGHmh318&f_ZJWuACaV=pl{#aC^cJ#xeuh)JUICL?b=A7Mc&TjOzM;fUr zxSDJKe#>>qt#zZxLJnKcU5NoFav(7Ow*AyQf+)cC``;@(c!7lY7sqK&TtjiEdjEOZ ze;Mzn!{1!Cn364N`p4G}5}22+Q>npgV)1RLn<0wF62p`;B8zD!(sYXqr_xM|$+d$= z4MiKxscmw8U;JhdgK1AWh9>Y9Ep^3H`p?Y}`#M^Ejdi=%+0;3w`n^Y^uR%%s`F=%X zhesL%vcC>nH3zgLYE^$z>Yk8OZk*zmCbyyP&QCp0xJ_-qbz0Q*!7T;Mirm1>8I@&V zSd6E+y3}Q6eZc;NuUMb5b=%sS;|g-5+5|7^sP&Li>%?%;6#XW|JlkfjZt~dFIq8{Il%45IBz*|@9$=$D{RHS*4yI5nux!*`i;d{NWipDt;i04_JxQoXpv!7@H_%;Gy!MGuD-48{-NlwMT_lte`b!_8U z!7f&ub2y5QNb==j%J~Y!&*`)06WSV_%BZ&!On?O$_oeyPSl6BI&zQmtXP@oRGBJd(2&$NcURfDbLc++GgRvNw$V(6(99=Kno~*-sVbj752Hw)B<#muKuZLSJaw zihobTYiW{HYVR-iJ&m{2;_Jn;u*dRe4@yitX1J1TH1+sPIo`t~vA-@>SR*-_{Yv?Biiplvr2t-7oEH9(ufGD8qGDwuH zNSyh3%0tsx+%e^p5yimQV8?(q1}R$|3eIw|NtnVfX8tZT&KjkcP${UoA)sM)yjo{% zR`%zYAy7_4^IzEq==U3=6lGebHnYW|95&7{X+zy84YF>>-MZ9i1`GTI2&#@(Vz{|B zEZHsN-*z$4Ba;LFk!^pkZYB|qm4$YYqc-@g-dY}VVLa+)M~q6}L73pdpn}mf5UEck zLbEIiR-(*F40zC9>Ux{_3*D;IV921-bZ&uir?_W}U}RY0^kjUe5Rq;X7W3!r;j3qA zRgAS45mg5iEGDf#<6Y(oN&HQT-xaCv;?tY?$WI8z?-P{HW=XmrAlw?NXW?rPy$>Sy 
zEj8rKeS|B>XwWaxjcRj=)#VnX`XZDymvupT7#(UY*+X;5W4ZBn00u+GSunq#8X2iA ztZ+U`u|JJ^Vl8s(p?(wKTbAj;Pd!2n;qneGKYFsge5)YaYZg%7B)Pp z{G$P*v)dYaMtHE`IYhnc3F~w0c5A7#-noe9rNAxEui;4di=$B7@mXB*sll`vUb}i} zswQz4_{A?t8aaYckyWW464;Y`Q$>u5)DoL+7&#DYyVozFpSCJ(StnqBTcrY+Gtm&c zYz_Ny=6@P2ZhBe}b07w_RYEY&9LF^2Jm#}jxv5y(F*y#5;Ro-L5y~hJ$>?G5kQIkM z54t(*v&{ufiacYJkDr<~k%Y?b7m(NxqV_~0AnCNJ-=B{-xz?Wo#g#P11N*QHp-}le z_C&#Lc}@IoNQuujOqziW4vAur;a`@{cI|D=@ac+7hrfUl?JfWW-dN}xbgu4QwwP5V z^MDA}KxtYA_x>SM)3lAE7L`@p)6w!k{HTR@yT4bgBQwkqo?IVQvifQocg}HLy@#=x zEH%go4Q7Svkvc&bOT`-o8|i+|^spA~X?r9g`mJ^3Vo!pP&M^)KX8khUqa&!k3yUUD znzp+Cf+F&PmgX-y-H6nQ57Jh6*zJz#?TSR(@ZXJ_%i{rGRl&+!GWvr$Mqb((8TcW2 z2_}@)dNA#6;N4DZw}Gf#w4^U*vV6?BfEc&To3PJs+KTsemA44iwv^5AcgqP@rA>~N z23ufnHJoColH|+Ly_7B}jQ1~@hA~_#T<9sj@%^4w2H1&K-*UFEnEU#1!1QRPBa?I7@<>jn~xvxQAb&vFzu_(iZbERM*vDbFG0A$75?Yo zn9J3rd80w!K*D0@pl8;p?4lbWnz}CZGuUizJ0g}GS(LoHSY5jdD>-z3HpSk2!QR{& zxzJFfPgCD3(Yu9iz3HX9)kreaGv!eM9J2aMqurh5gM}Il4@csXxvVrTs{v^+;ZXFF zb|U*zK~Loa!xzp>YNINT@$~@Vvc>9b|DLb)Q4W3hq&={F`OzF5$glWcsnYr#9Gil3 ztieLbEKltRCKz@Aoh1<|-qM8!dIiYB`9q6{TifM2efeO=?bUWgOi_ky)^IWY?kwXG zK#QBX%diev3Xg9aR{K;By(9>RB-cY`XESAkOTH2F?lAs@CN2iuBQgY-3pmRd?`q{B z2J33#>8B^;%Mpb$(ku9+w{y{hmQNcjH{ApYNv_H{>EF2bC4Q&-InkY-p5rHv zC+h5z;@Ox>0tF0{{Y~?*YxcWxVWQBW|1DsJeUicbx5S(A!-S`)NrU?`iMdF}$Pdho zmpG!xMft@GhR!`cYoyujSiE2yL4m$8T)ei^+`T8+f>DxDCbmqTOsjarnNIXF&-ym* z@8FK=({#>M{818`tV4d1;nk)~0K-L++p*A)c~~zR#@%q=P=C1yWa1j3lg0MlmoeOJ z23w9wU}dmsXt%E_yfQLDS(4nm??9;qz5;bt{=AFbW*y}*f7l%Gbn;MFu)z-QbnuYg z&}XVFh_5iI_~%lg1>ZY>AsBQPscNGv2aJ!=>QWW5JP0bWYSG@+D>)Fx_PiO1g35;D z^IOZ=ux{n*MX6aS@u()^(||o|T%wOJ2okeoh!V3HkAJIm4LesH*LZ9ZOkBwg;+w=X zzPu@F=Cp$i<-oA*9=%nQ5u=l4fx*KuQ1Wv?znv!s6*Dhu-HajEcE6!YC@sf?>))tT zf8BB&!9(o|a zK-@hIXP@os$5Y(VOVHFyz!PnMB7T|li`@5s^o#06io!q#iDlV!deQopY4XbmKhdbA6HILH~-8Xxc_FmDbA@;e{Y5LFOcSb8ZPq z9U-jG6AmQ`g&s9DA~}b?&2|cmaKoK1dLGilsp-Xi%=M>H-hD+v7+~w}NO*dbt%-PY z=f-fqq6^^~jHV5f79&_l>gB#=h1108*EZ1`#U$0P-Ubt_DQ)*}V&Hqf2Pgd&f2#8z zp0c#`0mp?P;WTRZ#+Lx=)>wjdLcVku5S(1cFGJO?A@`|< 
zFpR#4bbMbzbi0PZ?^kVw?HEr&8jmsGaJmVG$zQl^I~r@cIad3nhFm@S;ul}Flm66# zS*zPFBJUEALno=C9lgKg8CR@q2WZ$-B@hBJ>n0FxV|zn^u_psLXupRm+Pdt52nVj1 zXql@|5}-t-VrkxQQTS5jC!pn3?^=-x;IN_4TXNEx@g1!R2I;{znD7?tRXjsS6x|t& zcq>Ewv4@O&dPbICJRO_V?a8=;0X}1DJfXu)SZe?n{bOb&{i|3kF?VDZ6{d{s4biGH zXvOvHy>##B4r8aRuX=7O>K*X>8UJwfr^eELoLMor?MR}Mhbf*N_9;&<_2XKrLX|-W z3V|c~)2{%c4UW|fvTL}`U?2sl>w3L)%BcP zyQ%yaYdAj9ZcVP6Snk>Fq>vO+T{dRXH;STNJC{3_g;{gx0#;;q7)MIOlUy!wdnA+< zzipY9BUqRNPutF{6fC$Inr;|Ac>ia~2{~W4E*$9mW_Cl*O*p~RYR_y<#`C5F6HC0D znY;?LoeQ1+4Dl15Ru?_+0s^k6j-jW)(>&}?LFpoV$F`G7n#v{ws*ng!)^~br$T$Q# zM*p;)b5jRO5K_4`8(VCf9y+u75eKC%FU^-0TIcdcn=;gNF@e#!jysBE$#t7xHnsh0 zVZ4P=M_JCkTs#YGFl+3&(mm8{mjE1pDwKs7HoHyY=V?|7FiLU}5$Y`A7}_8!Vk3+LJx^mc^j-507`n{PC^ z>;X#cr6i+D^X@*9*0wa}S(2RSxBE0Ehwyc%(Z*Q(p?fSZ~>Jy7=NAGUHR3 z`!Y#hyCednHVtrdvgF!e-JkNXFpWm@i!2_-yUQZpb@MZ2;RC`FX1HvWexTFi4wCbZcm=-qX&Ak4$=N<>Lcsnfvj0bj9fXY{n?9NnONWc0*iV3g zS)vSAhSR`ZpkA_B2247POmJN2i%=bKCr24P-q3j^cl3iz(jTl4Hf4Hc869H@nFiQg zs+DR_rS+CF7ykI1-;Nh{7B@{#ljaAUVNNYN-h5nRMm~7cv|X;G0mesa87T$Wzy{26 zCyB)(QuaXgg-AJYKiSWKti5QKB#>kM*R%lO4$W5=d<-S{*E{~7v_!mg*Xzj7RR5Vt zzz=y34A_q8{pUpT7D}|sEpk8O|2w~+0t{3;umjuW-!EUukm$ZZ=cx4GQ!ysGn;F&; z|JQ9z6Fe2(PpoD5@2OxT?m*$$K2K-=`xhEuz_ay9TofL1F#q-~ zZwPEy48HsC|9qIW6c}KSssE1)08WD#a9~g+dH*Aw^KVn>0R!(9jQ`^Tzywqw;m5ej zV*foAJ23F$W7&T{!~c67|M_PB?{)m=D*34T0cqU&iYZ3bY0Max~E&u=k literal 0 HcmV?d00001 diff --git a/assets/Picture2.png b/assets/Picture2.png new file mode 100644 index 0000000000000000000000000000000000000000..52ce63ac331ea5e5b99904d2e9e7ce64b6f952dc GIT binary patch literal 78978 zcmeFZcUY6l);D~I(0eZ;2B}JwCMZaxgNUF~q)0@1?@dY+6r@Q}KtKU0g3_c*kw^zY zQL6MLD2TKKHAGW*Z`^0^^PKZt*LOY7^X~V*GdQ__nS0I5T5D$3tSM&y<31YTJZETP z2tXhJkOBSy`xqd^Ai~oP0L;t)82|tn0b0l*fDS|;37`!T{tFvIjsei$=`;Y4=n2sN zgXRMGJP2TEzw`X@q$#HPlY;JWG4xMN(|16+zXfPmc=(6-2YdJjh^i=@05o(>%xDiv z1jBDE`5SUhilok_0}Zt6H<=!85_Mzuu^iV-^z;xGm&^@K&Kvv|gu}uqAi#%)699bu zLV_+C>55*lwG(An0l9$!EMR1Fat;pAv9LIQQ0AY{-{t@HL74u{J1`-CP}X0`|2@Fv 
z;u`D>3U2|DUv~+1_62bh0MMRv4hRYX0J=>uT|7J_0L1hlmjG`N$RLO%4zT+lSmOX+ z{{tWJ$s1&ONe`sC1~v(+lT(m80I(nMB^u`J4wl273gQ!fE}niM?gX*6vxk!lh{+(9 z^Y!%yF~>0wAG`iv%5ncM*vaYoUu8Nux&MX#kOj;MR;=$C9N^;=`TNV?`Qhsm3hL{3 z5C=b5J%WrcfluB8y?Oha9$;1w5BXicU=Ct#5Knr768!_OdW0HVf%t%)=p3vE>Js1w z02)>oCw)^89|p0QpU;H@JszZMyLlR#fEd(0jiqOZi6w}^yfiLB{+IqyK23ycu)$?8 z9juEc!YK$uf6yd{dRys$_@K-}4_A{v%WS{lVQB-VgM8Aw5A(D>7zOMgUI_NSe89s& zy^TgBZ*U zb#!+6&6^;Iy3M^HPR+sh+SUy-bEkErcK%Q0=tVxF!4%V#( zrv6u6L;sTapSt{q-kDA?(J%=xon<=q?;Nnpu&e!5%7Latfq&}a55BJdr7gX`wDf0t z{#pM&-*^HpAeHIAQ$p`T8=-^HZs;(y6WR)hLQ&9{&{62C1N@Kr4S%zrW4s8Dc4FG)O|83JD{BQo57Wf(fP9=kO*6KI3lRsn#A@Xs{y zU9j6{0KlvF&Y?kJf6&n$Jm>&sfCC&ULckF~95@C_qzs${G=Z~#0dNj52P^?wz!B85 z2iQ-8fN&rhxCJBtDd6mw4deqwKq*iO)Bp`YE6@pa0|USa@E(`~=E1)58Q1`D00AID zAP@!!8-xcU1c5;$A+iu_UFgKxtTMcxgmvq-YdrG-z~aOld4>uF|;E1k&7~xlMDA<{?c9 zO*KsmO*hR5%@hrqW`pJj6o9fo`JqRl@=y(^9@HFa2X%u6L1UrG&>Uz9v=-V9w)6xP z4c&zPq@}0jp%tZ-2m7lrtre{^Z6IwdZ7OX(Z6$3BZ6ECfEru3HOQmC@J4`1_cbd+G z?h2hdT{v9=T@Kw-x@Nk5aO`}hBhb^+^V3VytAk_Dmfnj#iawS8F?}8VYx)oLEA&4Y z7#M^YWEsvdm@_yrgfJv9e%9z@j#+W`ck(jxdrJ2t#TQYkw-(r5qT+7_gyukd8g@r|o z;une(aSbnl{v&yjQu{yAZvfg8@V13Ox%leItjZKQ}ESnu$2-`ij zXKcM}OKe1T9(DzG6LuGNBzpmS3;Q_xHU|@jB!>=%BgYMn9F9hgF^)}6CQd0%Jx(Xi zSk3~@HqIH&A6z_KN?hh#zFetX&$))VzHl>eOK}@;yKvv;F6Hj!Ug3fAi1X<2IP=8w zl=1ZQeCDO+mF6|!_2Nz8t>Jygi{s%p{^EswCzlnIttNJtT7_2POBUq@`@65~W(DzDNs7n@Hb~u999j z#&!&TEbv&#vB~4~$2E?7AAfxOy$p?vnv91`fy|gJjqFKTPua(^ALQudPRaSnmB`J= zv&!qphsjsTV-)xm&MU+zv?^>XN+{YXW-1OUQctLz@HtU-VqS?`>6}uWQoGWwvW&8` za)I)s3Y&_d3R2~z3SL!K)m61n^`jb(nweUnT8|p#r25IAleH(;)g{!gsXtPm(csmv z&`8x7Iz@L%_Z0F}=c%8kPo54r-EewGQ(n_s^SS01Eom)Rtun2ZGh%0)&OAAT))v*i zrd_O!h98AH!As!Sv*Kr6&OSZ6rgL1!OQ%|ATla)+kZ!Z?p5AG_7`@l}P<;Nt2frAyJ51~2nm zMqGY=nP91FnQl36b<8T%s>hnc+QIsn^`4EsO_mM%isF@+D{aBo=WXs? 
z;l1x;<5TC$=zGn#&5zs9+po|6h<}9thXC1tgaCBl>A>tjT##wd(_l#O)!>&Qyr7dk z8hR}BPADc!JFFm#7;YWj7{L|c7cm+s6PXmbcEj*Sc@%Awb5u|C(db*zm>8XyC$W&& z>#;pZapY~}=bJ`1D{nF1^1k&p?nGQx-1m5!`1ac(w{PBFxnp#vDuF#AC}Ad1GqLC{ z?Oo5i?~+uK@{*{@F3E3FPNd|dkW-yg-=ry}Jxtrb=XUR1`pNXd4EhYejOk2xW@Q#v z){U%{`)2oB9*90je(*gTkv*KFnp2d^oEw^pet7=j%RGs^j68C_NB&fSZb99n!;g|4 z?LBsRJYIOVu(n8~D5dCEv1jqeCq_?NN~B71O6g0(N>|IQ%LdCe%AY+w^fcuurNXZQ zU3sap@0t3uXU`8mPp_h>3awhNcBp<|qhIr~R=)O09e-WQ3*bfQi?8+9>Zcn_8+sZw z8f%**n;tduG$*$}S|VF;tsbrDm$olIw4H0~Y1eFT?ojBc=sen4@QUwMdKYuo?XG>) z4b+d<0k1c^-Md$M5Iys~S9&M=F800aH|-x9FdXO|)EPt#of+yJK0Vy_M&nKE$jOnG zQMJ+Lw`y;j-<^EdGNwNE^8Kmz?H{y0bdAHuyC?J~1}04=N2V@JeVDeI{y5_>gZ}99 z@yo2wEPgI*jx>LBfo>srk#jL;>F`p?^6}+5^hxw9j6P-*dl@^w;=Ho)Dfkol^X*l( z)toih+OsdpUpm(f*WZ7&|N40&V1u-ou*J3YX#3c9)6UtQQQQ^WXM7NTe>e4;(6@^3 zs^5ElSo~Nb_!3Ba$wVPyDgTHKkt&zgX4CIHDJUs!UO7qHE6NI0eb&=`{3~}VgEJ2#YodfcNhv018BG)P%g-R9{>Y+ zrvn`mF#Jgkp@GuU(K9eIF|&XK&71%Y1PZ00h0@U-EaV|E;Cq0Ui;nxKk`6u3MJEQa zKwjlrnNJwSb?dwNE`7jBs9X<& zvbF(TGG`Z8H+K(Dui%i-u<(e;8}YaABqZKVO3u3fAUh}bVP1YoX<7Nxippot8ycIM zTUuYXwfFS)^$!dV4Zj(mn4FrP`8Yd=!LEGzyt?*f{VRU=+xH)YJ>t(_2XcXX1b>M2 zmt_AY7Z)fO4J|E{mf=7y2u(N`p}&`%AEY$~6x<&@{gTl!gXM2ZciE=;^>f&j@;3^z@9( zjK2f(KLYFTzB@BgFAzo{QV_doncmw%fs$N!_tzfG4D|Iy{&rpw9y=<;vV<mMO?ulFlpa!Ao`F;zGw@)%G2s=o&2Hced;mOz~_6O zd~eoX3f3snMB5VW@#kdox(-G38sY71eFYfKp;}+MSiNS@TVD80s#Fr^ZayS z#nUhCq0{x%i~UYc&OBp*fq`QEtV)LEg(f0HR#q0KN}_BhRR=H=zb29fg{rMjlpA9l@-m zSEji{yl*|TX!I(m9k!)J(jXSuwBYuEL~WS}8_Ow07cTp`xs5OtbJesD;%QQHn^APM zGmm1IV7u-`{HRT6Fy$DumXe3`L_%Vxir80)xw!2yix(5Vb*mFDiImu2)kBIXJ8cE2p_=@1_v?`CV9>*jb zoUDqWOAXQxnoH9xcD~n}4{TM|iQ#tnTe01}a1iS3a7oeoGV-R*&n}w{5N--g(uPie3>`fHsXvSQ=+}xPAB5`x$@VG4B zka&P#ZG1O^OB+TunQ)no6^SS~kH1jFT6lTEGdyFdCkyg86{lJ0a~S>TJ%h8sHDW=( zqX5o2B@k;aS*a*X5?Y&Dr7&fg`YKs%(Uf`Q(;_hkHwr zeSPwka=io-u5voTBg`#~s*vpvOAB+hQKf}>Q72L})8+d6Q(DdgzG#XzW@A+59^hHu$bT zYO#E=hElCQts>dq+#{DcBFtc9C}eFd1ePlvf0qAPHH;^ss)x$_!B5L%MgIDj&bU0F z;)qx<^7iF)zSU)Gdokz&y}8}DB4+9bMB_eiulb?YDRf;@?4g-v?=1b3VW(d`?jJ2T 
zyf9Rn{rCm_1qk-$nXF{l-q51CaDp#xyDqrcdmrHIlzsZuI!N{Myu(JAk{RpWfMGGM z*QLZ<#_on%E{zv{oNs&XwarK$J zIc7C4*=o{l$3tVJ5fk#EHOvGy(+jWCT;o>f4Dy9aow6f7=~_Lwcbl6n)`zT4$$+~q zGg9XdwGJRy$roZwv37{Bzxt>`rwboTw0!%Df1|*fQ$L`fZ=8GH;xN|(xlNUq&~?SR zB0?)Mlq4SEZGboL^HsN++zOpygS)5~#DXz?ojYLBNG`gkFR4tSFux}LyyV%}G zhdxOXFvIiRsV(=BvdFrtC}vx_RGcB~&3f|!M=;_Z9A{tmQ+$~nwRr|k&1#OlNrn;I za1|K;%~@Os*%Y6XhJm+6nChQgv6__MOr>7EEljT0d=k?pH2=uO?WT5P-5FvY9)Ur1 zqu9~~$d~YZMF}BcdGcAsl5$s=nm3KXD z&+BMzNK&vq$WqiS z|GBT#Tx`UzisxvSHB(c-OWJz6bZje`dzorJwt}EUcq4yBwiI*B*CoLZdRF|B=tQS1 zUT@IOCLz?`f7&0JLgc|(>Nev^3gjEa8jL?b z*`N4`$}m$aC-z~p8rjseA&#U~S3XDA8J^jcu9z2Ok-czArx6~tkv?aR-3Jo7xT2-- z*5y&yMfD!4_7SW<<%;89(m-~8j6}=~P&tW# zm>9i*C9SEgCJ_cxgQ$Y}Hd>25kvN4bd*QAI7e|zsi?bl)B+x87Q$Nl)Die__r{JFE z$t@hu?Y?HDaMdDa=g!M7K{`g*o!xX-7NC!U^C`TP;%Gix5h024xKj^PBp6XEulf4M zkLFhkbDz^bH|X&4hjT5KZG;}n;*gD&eU(lZ9LFvQ+_332Pu>UEsLBxm#12gO-D7ihE2P5G_=IgkKGuDs z<=FV8cM)HTXIkDv3?e}bMU^AV6R(Oip}5ITrNoQ=;Vi+=m*;0A`;|IIzAdV+h-(HX z``?O{C(96v6_6M^;)o$0@v)`gR=F4d-hjwH5I)MO`QRP9y0a?ei{*jY|GT42)@*bP zk>^=NWdzR!&?V}q<1R>SJmg^4U$7kWRM`qcI{n7=8*d!5?*gOfJ=y8X|DV0ux6`5a zW-P{C(>r@CzS>NFl5^_nx!k!v>}$|lXAfE04DACA6|GX=5T1;1>${@Q8W|r43|?}KaX&z zzTo7pl!VYS5(;%?Cs}6uz_MRJw*H@}=-*V1{P)ely3ZSgUcA{1nHNgfdaunvKKhar zd#Ci_cR|JCK(%_~GSB8SCc(|;1LI!vxp9{iYn9 zv=MXKr5`6YJRwK>?=%w9uFz*E{+b1(Wisyo6xx=Sgm-hYrY>B)G%{lMlAoUdelg>B z=8ULj%h+5>a{Dqh4&R)8PLx+g(rz4DipR=&+)cME#3=Gd zMyXA;8Gp!~p9NWNo}i1(n;Ay5CX(~k`s+x*!ajI~=cfRbv3nnodQ!6we4Iz_17%@W z+I9N??NeNMA2`4rRf)s+Z-Y1LQbh6eRe_OCPwnsf$k#|@&&k@>x_FjFVA67qjl(3w zio3g+Um&{$k)I~jP$Xghec=1OH(gpdWIt+dhst~vKi}sl0gr=7x7x!+%G#a z(5v4rONjX)i__;$eeqQG-?Pm9_)F!b-AI zJRQT8l7Osj-|B)S2G+pLfgrdI5}k+KbuMCsZ`>>22P!n%0WxR?(a0tqKhmW|| zk7$bR2*i#ZlIsciQ1L2f&ii4-&!>|vInpNE91ry)j93J0p^^2zltjv7 znm@Lp=au`Vl=CyHc_A|&{1*L=-ucv)94Hq|uC=1I=W}824dyxePWCfmj)Tg?g5KdF zChuU?H)pW!#BeN+rekk~u`o$lvsh1h)-|4I_E-SoGmi{6tw-_`BM6C+I)!RN-QEX0 z+u+yh=7O*>VHE4_rRh~%_1<*u$(*-0%D1~01O3(7Rb+=>4SNh-%j1P$k+_R&j$>GG 
z)N}M8X=7E0CH@T2%HtDe@mmIjZr<=`Yvv)WMeRwS!}wRjSu<(b(a)7<7K=EM=#1rt z!Q>EoL!!IFGxQn98JCRZ*`<-#tPIPY9NCKKM>Lkb5?8Ksi2Bx*Aie!RdZYJ&t#D`e zaBxm<@+dq$?i3*xTE$j)qc2>!JK!Gk7|qapk4SY{YtH(sz1S1cXWzqp&54p18cV%m z596N`t1yM7A=88xBdaFmZ^)z5-#)$>^;vx&tGyAa{2I3ONckqmdV4aqOE_X~5L{N( zu|yk9h-OWX_1jt9t6^0}45NDq!I^|cM=CiFxiwv^2OzP}#kG&;SepW|OYFu~?k(g_pQ-rgl*zQQ*ql+=M zN@GpsRn77kmnj~zz8=63{CX8tk{gnC$V5=4QX(q=5HV4O5aq!E>Ox$`=Dfn!Z4P2* zYhBWHZ#AbWECtNgGikI=ivE0e@%)7Th9JP8+@gM}PLa}}<&Re+U??q{`+##gu#x5I z$o}5n?DlIW?a>+Q%)DmdjfcoESMQi~UdFdg59m)`*5rYcx##T~@MBwh_{71bBS|v` zQ%!Gg?9%XDueO=wm1@1wlG#XH`klvA+ILP8}%gE ze>K~swIfd5hjyf~j|e-7EV)4q?Sxmacbx=VWpTl)P$)3e+r4seZuCn{v($u^HX&T} zQ(#NPW2P_mPfiH|pt*~?>qS8FqtL~%JVbGPQ9n-#_*8rV|x({`+#?cuqH}?QmT)?aG#~yx#o(`w`uXV9G2Sw))D-MOPpJ- zlAWz)J019VEd4E#7qVL>^s(?|+*+DVKZ5m#@YB}j1cb7gg3YR1YvF*kv%BgO+dcc? z9Dw>A^8kpg1ZM_I>QUkdW_j~Y^$5D{X;XpYq}=y{j;b)1EEd_co#U*Xw-j@Z`Wag5 zYR(LzUhD&8o(+N;afPJn>8LVOD~(S~`Vo@hWn;a!J2zaq6vS3yuaCL-U!?#+S|&f6LcAA zW0lBupq8(Y5k$H`B;K2#L`-^?^6Ioy#(JHKomUMFu*0ivJulY2Fj6FCjJ3X)+HsAO zV;VlIPC$HsyPy*1P`&l12qLsL?iMs6JQ$+QeKclNtO}NzF$WD5RsW=EiSeEx0QO#XLDO-HjdvZ z)WF5LdPs28Uu1sJvla+vCadlP4?b?de(VEP;Lc2r9k9nil%FAI?j#}zM~e0V6T6p+ zvwBpfz%uexVoYhYGv#@wQI+?N#Jk_;PpQ5-%`#DNRegxXlaP|gZ9IB(mt2`q(o-z% z_il0riEc&g8g+>vKi)u6B)uqSC^_3Tig35|J3O=IBBKagvd7&cCt1t0-bgxSE}Rg( z*T)%I%&~TP(2)brK<1tx%;VmbV|;^lN!@mF^NT87=Vxw+^bH8lo0c2gJ1(6v;$~Lr zhhgoo>awEvf;xYR9DWFHG4YL~zQ;t zt%=L+b)$7eNff!LXaMyKHw~)&n=j%i3U?m$Q)w&#_G1uqz9t?o#=0WALG$&AOjX+l zW)fP#S+u|lKoI41WbytOE|F@5U*GltMWvZ9W8lm~Fd@3*!Tq0sQoQ1TO@4V{4Q}P^ z^!E23FA9%g^7>#NRxeGQ9yBE0j_FBceSLp7oBSD+bqYCbM>N_890icSE{nu2#g4;w z{q_M~)TY$|UkP3SH4KG*wEQByduML`b(aEOvG0|+eSQ1;=3~C1S7oqEyh@$gP6|>j zk8*~;GOjeo_EO(3Ge%3}gsDukD`U3{8N72pXlY{RhiaC#b>w@0HsqCgidgYBYNf_n zS*Y9uUe{&q1L(VVkeJKS#tx}zejit~RyKQ^W8Aj0O zfrsz)(Q&CLT&Ar*HS{qE@~fP53E3?j3$Z5)fF3yR^Z==sDvRrS92~Y1tw{V6KXY@L_RE>yP3b-#_gU4%0-XEGHwP#l2swrj||S-MDjgi*9Gp=*e8Zy&fm zMjtzP+C6DA`Z$(N50TriVmBPHSH&&o?bg6Jt(s;e@#b@qH2fHF?GqFaB{<-udy(_* 
zgb(;1Yg>rgF-n!(Ot9xK+E#O)nenpGB4?95xj4;ZhWtxn4JU+eG4UoZ*P+ zHgoKI$HT-OT>Qn!PVIadM~OMP`@$odciNk?W_3r7f1B{k7}D!}@=$@sx#%>d3Wd%_ zFjL_XMVEX4v8& zU8N2cHQOY>UG{-FJ?q^5Iarcn^``t1)(sVU^@4fh+|`duZyUnm-}N*FeUhnqK5*|C z^q0urbiDD`vbJulhdy{wOO+@l)2M7%0rbsu4h?O(le7TgnilvoSMm7 z)S2boT*ceVpmD}1QsnBsWZh(bH`N>?eRe{jxh9OjrhcIzt-Nq`)gVF8`C?9_L=c}P zdszmZ$IQ2I5`vhHRoudmLTsr+U2HS#1?XiK$5(lpB3mVG4R?wzO-d&Pu9^w#$fKhit4YPAJLk$C5otZ*{PG@0}5gRM! z-8%^a7FWn7_;-mI`>HZ!J@#kM>}^eDpm9U@jZ@!n(B3qFzeWjZb46R^W9Kn-Y2VHH z#yp;^rJtMht#e5dM60H--m~O+Ex}9{r6i!xkL?7u)X*rRCt}XuVXC>IYsd1` zwsz&tl8~Q&t%K^eOSb*>(xc3`MqUO~4aF`ZUp%J{aDG8F6>WZ~^UC*4D5@M}>oY$U zRil8twsaonAa?xZR=VW$r7J4O7B-GDLMXB`Oe;mna8|0I-TV^ByWf3`Y1F`R+WeaD z{IV*Q!&gSP3+^7yXj9uo#G!nmqQI4@0&H%rkKda>I>R36Cm;6UUX0_kb^Y$^^iie$ zz=(LKPi1~AA!O#!Xz9@>SSomR|CgIW9!F zwiBZ7IYwDXejbXttJ|aiu`jQ+t<(E*GPNQx& zb8wlu;&yucZx99+Qg^ZTU8`c>Imvcmu!T6KT4P?4ZrncBZaXkN*1;ye|l)Sn{71fmcPg+U-6Mu zhX<13>=r&_OFzY_jNnO)C@Qt+$Gpxy@ZP@)+y#!Cu{=77-u6?DE*CF*`CXbe7}36n8IEBU_x({$^Ni@zm&Kqz3a(!3l)PB4qOGWrT^o@|Mt-TDR=q_+F+8w`v4)0AfC-Gnz z`V=`Lyf4kcHrsQmygdW!+tst>l8I;TDcQ%;5uf1(v9|@(5p2;{%OYxJ#i|14%qXvL zcOJRwCosg^zV+J24RwOb4Z6OE_W}21*7apCVtiFMt}@Am1m+iwr8UO9i| zXtEzOI(pn_l@qi*g!{y9OxWE4M`EiP4t4j_`yZa)b)rIjStpk4=2Z-R|Me^b z`V;kgIa6QT=?dDbfuPv&B0e9OW%JZgesKp>W79+XPH1?};epXRgD)90*3`b&{jYrK zXs}ZT$M(P(P~QGD#Ew1`_m4Av0~@-NPnvFpRHZ~%4B&p=ewjZPWG`JQes2;&Ipfj3 zl!i5vnYoJFqI5lbx-6Qr^mW>@kg3!!{qWs$4TXK&vhnz_f#rAvtH0AktVG0)F0T1= zPh_Ufb1&=ZV3Q+(ublG@+#K*3N(fPM4F1T$+Ip9gs1TBVN99fHAqjC~9BFyz{ssRZ z$Cvap@56G%`;K_0C7ek6yeY$r$H6yYV1{)ZVz3U7-V!HWyy~Sb`wIW4Re(Q-$I(G| zTwl^-ZkvEPw=kmmE_JxYqRW=Of8^#(SX(ST@d<7Q8&f{;1UKKGZ=SqWyuIXNve1?VpZp^s9>UeX^l0z}1{RnI;#|$(nU0 zb)2n6t!C8hj?HNNh?~L{S&s1{;&JlO6L2*r+6AnW@ekqgvp5I7A>~9G7}|pg%j&9o`t;`>|Xv@KxOvEar7q+BAB&U*wgDG zIs9^it`hxztlV?(-@Wtc8{_QFxvMnwmuxhz#Q!iS`yja%QM*LJ@V%oqH2Tl{vZS7-ZUHHmW*R2G{RGY&^wLOo_) zC?qXx?&|mJ(#piI&Fr3j*O!UUZB1=ZOsFCX(jj17ZJ|#Dq$p{JxjxH?X(Uy$998bujV^Lb0C};oqbb=J|XEkwypg-Xo z?TL}May3T3EL?V;HIX82Db`dJF6qg!($Ch+XQ;H%W*H#fPw_&?JJI2kT#(79h!%!B 
zKfvX&OY@5l1Ghe#JRzPO_&P1o-j-#2Jn0In*IDey^DqLUhN# zIg#D&+B{!&BaR>#J!9bsGn69})|0h9wY6F=C&k)FbEn5m9{DYY3!gI?d4#`Ox2Z{D zkG}C<`-~U78MWnEIjETCXT4q{cc(90U3YuSZpn6AqD5*>V|?KuI*$7b*K3-i+@-N2 zAI`!ukxdM@55$9WAU(i&lWajL@WbG9Z)xfK9yvq5`%B2T#-OGwpk1x`d(JA&x5)H@ z<2v`_kB+_3zD~>~X&}4Lq5M$%Uk<5nF;sY>*tG=8OxAo7JlGyx80EIj8fPh}N=d0Z zmdTkneF7@e>FAt8z3oxV?%&2T7M@n}tF-wNA-w{2NV(InN@9K-eM&uuU3t?cbPWJ`2=L*g_h zSU)^rt`#{9Qz2+417x8|vKhGHQ-kNY4a0nkxG>$bZF%Bt&HQkg{a4mhMxXNv)OU#t zi3yUSUsHwif1#*M-$+2~W(9u*o(HQ?%HE8J^>M0|rFlxEyF0mz%cZgUU|1X845iDZ zmeT1Oy$;X)z64g2LMhgg2J2BGMv;ub^>5A(B83uL&wfC8oY6ER2BKeynQOYJob|zmUCSEH z$#7V7aJU+&fA{vg`kO*l*l&khzl$3H(ML(v(VWB`#n&8kL>S@J%qQGHGj4asJnBl< z5+Oaj!m}l4e$WF!f4o)w!Fe0OgtHZqXeh2Vi3|=tx+nPXo7=HMV`zCzv$n35lZ_iW zHiulh%09fD7nkd`IQc>InRxVxQmJsmi*cExdeB~hE=#u-xCeGdkDyBotkJ=eNWMk` zjtXtbF$UEs&DETCiT3s(ku-VCky)Bu{Xfe-LKsE50&vosx%eFu(BF9i`k75`6`1~3 zVwB{&Ct7xyubP}?7CdWw1?@w!Yaz|b63JcW6b?2?;s1tj_zvax8ud`=zH8uuL* zuMPSiw-Z3SG4;8vEoCdLrO-jqZ%8tz(?Oz?LeE@gSJRZr_ zrAZyV(b~In=Y~09`L2TE_n#}xBz^w*(zt8mFlDUqir``Ni!q-<*=?_5(9f`sQJC;F zZPwT~R4CCC&pjNn!{zb*Dy4ST^P13-bddqFc9| z@iy4pzA<)%bj;h>`OER4Hi}Hih>UCA5$Az}OXIsc-m}bl}C_)6-@l>>-(U&NX!39GB*ihuF z;{y6KilL;pI=(rpEF~?wI=_Zfba-ag=xp19luyV=F<=!tAfPqFFO zR-Fo!t@;*xcr2hL!(mX+G}O)bCRclhPny1WhgWp*p*N?B=|8&dFAoVS;%qBf^y&+}@{;;UA zVsK}Eg5XNL>Wr%c@5OBn-XNT8Gz&g6Nfn&Yo{9|Abbb2LV^(kYg~Tp1a2|DQ+okEjnddQC?&``yMK?L) z76l5ue_f)aQcpw&5od6y%Xo8eKBPhQf-|_sBtz=(GAtUreL%-g4973`I35z{qW_=}4OiPPCCqDdK6ufl7{JPXzOmkN z?Q>eIJ5*YYq}UX%tFCkz$eEXSAiF&k-vawp2(eOLE#L^*rUb(&xGc10@&l z8gJFf)k?VDJ9Uu@Dy!v9dhxY?m=;QiLx~~LO*!Ef82~^Lo4p_tHEDqw%d5;u4eZdk_EE7ZCx^fjrMnv$n&Z3 zzeI2wgcI+Z#y(~Z63!5FpRrwEto9S{c-6?pksBTuB~{#Dzx|1lkC_=!05U`sZYrGv zCP9mSgP3B1SKoBUHS8I?D$T7UogT2gt|nX>_CIzsNi$q@t&ec**-I1iie8+~j!iGs~Hz30@3v-R*q4c%8rFUQ+v zeb*TOo?(+?s}*Y|&{Nc)-oI$>n0KwEsMjtnPK8FNZSttMrYri0ooA-ewF?*M&7x0% zcG9xxWg@{9?>bp|Nj9B-G14W~kIC7E=}4)T`y|&~R}e9gqzTRs`+zSc{2Xq1@FRFC zn@DA9c&uEbsl5;2C38?E zr^#ObE8hS-s^y_FPF2>%lMKigafVGUW^OT(kE)-2&YD)QI*gfoRJEOVVIj=td);z+ 
zN`6jJX|xPJr>e?f(B0j$ba_~HLc@5)pf$)g?*(w=DT*KNRre7}xQ7FGwbi!_NJ8hN zCqre|LrvtJ+v0@h*>u0a=RWD&=waGglj|>)Gv7G`KQ^Fp$r}cs(B(xf&6M0YlxNgD zA_2)RKYD4Y*JHY3E1hXbY-!H3EMK?#3ezWJ&YNG#CyxUMcfZvBb(a&o7}4#8`cxAw zz7N!~peV=hbv1y4#WHpjb{OgQ7LIj*ABtAr2N*XCMRQHO`ADwi%lY!wU(O*vNIg_o zR?{Y^6sp8KtdV)w2&Tk6@XDhJ(Yu;_6o=igi%s#>5}0_kRsMXwDVM7~$3Dq9Ojrlr zVXYPN4g!$k3E=+LeJvvB_*@e%T1SY1tO&yY<%-!lr5f~Mi;|Wb{rk*;2&plWk>gBA z(pGiNXlC!~=*PK2m+H*gz^R8Eml2x(n@Jjo5Fqn+P|d2qdvXMId*pf-q3hlR^`!LX z2B@ofod1A%JW#neg3P+$-q7zBT*wOZx z%~?F`b$(bYUci!=q|oNITQ>I`Rp9f&0a7ot_VB^=Pb;6hhu5@X5^>U)F*2Vw;nYMk z^3F0FnXQc2+iN4>FW-3es@=kzk*t$rl%aY~I9FMR<%7q%aq5v?qn|rVSJcToGgM{< zJi-w?LL0j~#*r57R5!YEvDuV#?7n_u{IGY#Y*~(AtXo!euSb?)p(AB`JOh9i!~gYC zCn6h#)7M4eo^?qm7#g8-;zm9W#&^|6KXZ$U{Y?SAdww&YF4cw$ZhlsY~AH4)lK!4<(C2Fpu zr!6-~qIzI+ONeG-U0=SlQO$_YGzQ(SB2@E9&-;-5&z3&@-s??K6GE9mZ-2sZ^Mnvk zkVmn-T@uq;a2)%DIq?>{YW?llgKzf2Ldy?K?50<-ORG!h0fD!iZF+YuOFZGd$q_OM zN2h87Qy+M&i1k?0sno2J2u#NDGscULY`z*ScXK4(80@z9zo{()UYEff#YG=7t4+lr zPOeL}Hl~RuEv_(HyDC*xXKuM>FqoCmz>g_})Dh)u24jN8OlIEwAJ*PGs;Tea77YT@ zMVcrmq4!=yYEYyJNKu-UsB{pJ4gv|Hfb=FHAVdX4q(o|H5_(6ZL+GG_Kp+7O!4&WQ zp8M{2zjMzy_q{X5{UZ!U2s>-9z4m8)KJzo@+yyvz&yUUQjVS&UGIt__dwA6h4g4 zVe1!qeD0`|j`oNjU?6f10A##QnUuev6liTmueq7?uL;#e^T@&}8?W;W5pP?Y7IWFeW&0=+5q(hHJuq*?xvllr)H72Ji$%MxzPc^$fgc=q%=y#m zlt-^fS*l58KXGDkZRZ8pbzEpe+)$)2SV ziFT9WF?Hh>({lU!a!ruDl}A=unh&UVnpp)j=iXY<7;yi~D~PIqM+23wS9V1HK*B2im z&`96YaFtC}W*n5rGywTc7;po!zW>uJ5T;Jd!X2%bAkJ3f!F_ztcv=7Vhe8P3FLC`+ zVi?B}qv8pNrV6d=5}=Z48jRK5W2}+I%rVK&(WE7r&`%I8VOk5#mU@?^FPhbwT~$@a zbKh9-yUpwpe4XvPH&V!^jzRr z=&bsz=KHjc@()f%;hy1q=B(eEv!xzpo;5PE)|WZMD7cnx0J`^I;^fU1NpP-c7F<-< zlf+&~l4hM(iNGZJZE(1-iEzJX?t@gnBn8yI{q)$S^u}|w*TsQ?0^5ApTP!Il-R$x zzSV=~CQ^T!E)qR&bxeP^N_|=P^yh=|%g^7icP(BT#{$JTy?^i%gbxnUts%C+rCcL~ z+biNpSW3EJZfi-VY&&|dJwylUszu+&g?B>Y0iu83Dp-5>EisHcJG3$7Yl%_e-MEYI z%swiJ)(J~_+@-JE`^3WD2PX>i#+Uqrryw{QFKX{5@taPqwFkG}uwSwWD}bnvMkhRc zlBpweV%JAy#pU<1vpYu^4QT3oK)-`t9P7(HmI@3|$Ppe9ATIIv)d_zd+@X2H{?nCE 
zLTUdCdIU)q;L6|d?AeubTo35xm%Ak+W4e6R+fQCWzRV;{wlMDEN|=Dz`;DevD|cJx zQIRmpE+x6$07WZw=Mb7#SHE2e+_;t z{)WBgpWsCWngMHhe?f;ZINx8;ZV#}lPK`CfZ|0Jpk_7<19+nH>bYqYk7Kw28UAi{> zKw3bkpRkmfjB3;8D|${QsvaH}@S8(4Wd#mAOxjm3#w6kE3EsetmIM2WU|-zKCI`)s zZKHMRS1P?-t9|mtXI1Z;z2krT!)dW!*ZY|ubBp-ZR24~v<3NLGejwH#QYzIT804BK zVJl@+q~GOID|K{cRJyjhht5H$okOadDKoPZ)Zt@NYjgGEsl`S>cK}egVfpa@3CjkZ z24A-%t4#b(c@>gWRWtS)6~-}$?^1o(RkH7yEtqXAFP^OPHH^N?07NNyhb~gUXmzck zP-HIZ?sa0lFCKjtznUN)aA6<@`(vipG-!L0(Z6AL$TP#Og8zB$l~?B@(~>Vx@BpFM zdT!&O3(6SnQphvK)%(PtsZ`r+wXG>@lTDf7k-Ny_P9<7tl|ImuA3WquA4G#4HUn6_ zT*Q;gXkE%HvVQm)dt#x;CHm#6!(SWx#s!mFZM)6fP7QNzr#YJaW)w%F3$L5Sx82EBf;_s@nqN%Y>d z;BEdg8?ZL>b|NHVNv^*%q%e0vFm!>=F}M6xoQO4uO=N&S7B(~iO+w%-iot|lB2Z@m zMW8Qxnv2$K!#M4T*l1x`Y`)$3U#&UjT%8>@0G28yC)eYCxxzEc2BmT~7#JZvC#6zX z8(1%ww8cn*urZaPvWSd6rNzddrJ%k=-8ToUS8>CgloC}N6?0AU4s&iqw@EuKAuO31 z1z@0q2-J8d;yEwkqG|>P(yJ@eilbC~nQho^bhZWyGj^&B1&E^L<*hkd7DtV5_sMcM zaencoG8su=!T4KSmP9O`cB5}ArCsQ<`X8?Cxw$!%%Hoa}A*U?u^6qzd#iJyS`B>7E zd}bO_fB{?uApn1rfpPptfRMaF=&N9o`}3X*5w2K%ueQp4Rlp+@PjcBYc&4ut;s8L+WJ+pLG*Ml zHrunNFY$vw__C;U)~7HZ5M}Qj`XWH2g`%Ln$Rro78Hk4k=K9g5pOieTD>9h#-in3X zGoIGk))#ckft(4CZ=997T;kynuFeKrLs8;5KB^Paq|i0a5&`aS9Y{QOTy$3UExMxn z$Jfy}No+@LVF_!^c8(jy%s*_A%UsoH~C|Pjt zRSqX&;kXUjS$!Pcnq!@ZIouH>aAB5>`8-jr%bce{JdC2}Y}M`VkQ_*cWWNcxCp4}@ zir81lQ>w1eOw96a4^1Y=o~VuvYghF7ii_#x#dbEH4~Bs;YYBpUyU%pyiGu;VPgOL^s#f8VfMc1;3&+rDtxTA+(0hRVG7H5 zYhQ@@3=IVW8$}%Yp@Ds3&-xqsIF9dx8K~J8a;WC0bqFQ-9xpXh-(_8o{xOCf01^^P zr15|%Xq<(JGM?4G{*Kp?qWP7&vT-=tPjyUMOT!o^qlXu#A2e&v1w-Q?pFt4Z5gcPv z+ga2n0y$AM&;x&&2K0)sDC){82gDc6W>1vpgw`n}%Bx-R?N@Pmzh6oJ&WTM;PWu^O z0}6z3q?C>Z{&YI8nCQX(6Z#d+0|U??+}&c8?zs{*i?kC=e{BH&QnB7pN6?en z2LC&ey(O--GEf){gJ}4T*ek}VSQ4}IYIiiTOG6*Fn{pTq-KQmtLR2Cn0;EOLegw9J zKR@iL)cj0#2ENB&yA|Dwz^nq0@ujWW!$-0qiwi%!7L~f9s+KGD*(Czm_?+$|_JhC5 zOu^ThR|H?KL<{2@>|{lZe-|yhxy0;{Ru!nRXM9FaD;04Mv;||1W&w_xLS1m-gIsPm z!lUotu4Yvyoh#skI%8tqKh(NlzbDJ8c%AnA@Z}AoMYOAoOe1?dv{3=;KsraA{1e_8 zfEn-2AtJh;y#8EuVt1kpeNl~sj-*)EcQms+{${wNU-p{bf!nYSWOOXDmcjh1gC6Jd 
zbS;fX`wOH86H`!`F_5vg6V%n(;lChtctM8F?xaKs zzn5D}OV+~#G5P)P(w{yKiI#w9v%SvTlR535fp{So!Bl1m{DCbohNMJHIOqhks~Re^ zv^o5k>b;t!QWhRQ_gJ#UyJqNA&Q;40gt>Xg!lkH&45U(V2-uK|@Mrwi(+*)D2p?J7 z-|ubvR9VTv-GaB?cNMd{_lCDqZ?iUKU*ZTFwW3j_Spx1>^!JQtO<)r}{tNnZYvxZ9 zNf|!WLvNREa^NjnP zeMv2zrMrSZBAu|oreNw_dL*2@vm2<~daUbYBJwfdWwT{x+;#FWQH~@9TLCwOcP~QI zf$SSAQ%$t9!aD_g^e%j)J~zg4UEIuEs-Y!;|GLBqM-TVY^EEd*wx?-NsL7}51a;gh zRsq;svekB%CIFR{xPL%Ur?6*pQy?xR$@= z%6(d%Z3xaPV^PM6l1;t%D>Fh^QUX7h`o3`0%=e|Tz9)TaMN7L||6Bxla8hz9UY;}< znznhLeS%L07tjf*g_F4HSoD0CGIGrHvu1npm)wUq8O|k- zYY%cp7=Bu=XNG`lC9s}&()tM&97*DY&GL>{?clZZF#hF%(qq9_dLPGpko?i2b#H_J zqi%#-j*7qAoy_@E+7r%XG7MPK#nhR00{bxYk zy{-V&=JX4BP?hKcTl0@LQ=)SvtFSI8&JxKK5j5Vn!bT}qGY{y$`cz(gvZnEhv3>i; zwnE9R=0dN12D^;H!&No`HDCy89^{2atW-=7G}c)KMFaRS?phsUJDLdb(JP`Y~`@2B*Hh z0H%kmHQWO>sN<0))Kt68$JLb1$RjvPbroxqfd>0V3?5?uIfm4KS+V~oF0V)~;un%( zG#8OYH^mPh-j}z+m*1-(8YZJT{L_^~Z|lUE#A90Hj4RdgU6eWfRr{K{u!^(YQm-H& znaCp9&RHtb8{l(8wf264S#C4#?Bgr(O(kc6QC#?6qo_)Q_wupFhxm?j5E0f*ld8qK zDsn*4Hxv*+=Q{%weV>wbOwKg!en&3$P|c1u<|wiL@b{n?p0!nh2=In{8qgZy?KwFA ziz+_Z-k*ql;=W{;M}6oT6q#9H^J8%EtD!+?=&2Bp-QNf5K+9QRllMad@kmRW67c^3 zNaPtJgcLzc*??oq0hn&VXRG^aD=dG#rO{q^+8X=4IjdoaE^2|9zW0@c7pa9d*j4V2 zjUGMEsS@cdbqLak?1ZFtNaFJc;QW0XIt(_G<&7LhRZyR^o0i!-OxC|jThe%CE(c{O z;RTNrX;|kep8tYe{jZks224ES3sr!84G+yIZQwmvuNEiWxzKMRY4&I>^0_;qX_=(d z=G`CsY_j7zB@X^b4o>LC*(H(9f%$ce2!pfla4dYf_w`7$LiC1N@eZTlR?GOC0YUJS z{|bb<%d38M)^>8mAH(oy7NGp=Tx30AIRX0{sTMwq&1dV z5@7QEr!S-~v2p!?n}}MsOC?LK#Tb!9dY^nv`MBVn-w^^7>F!^eo3gbc+voL70J^M9 zfSyBOE?ul7?Kdyv7=kDfzBdK*lD~%OYBdp0HoO|=)rLX{lWQr~Ez=j4tNR8pZi|}2 z16z?`3Z3m>a2kHJTF_&=;d`k#Va9uZy_yJ+Ic--MF}p(e{2>YHDh_ zrrU2p4zdGaV#7pcMt@TsPbraY#Pw1ovWBEIIH#uPuQR9TWONYuI8AR7O zbv~gLA#2r;vwj_7)Zw`f)rO{{P;{c5l~1Oop}-wa6}c~a*E3`U1{_weM8_W^-GDe3 zL4K68S-$sn?cltR9n^PD_2S}CVHzS=7@0nDOs%S_Y7<cr7Rp0xk@vRO8eAux+fZ_pXc*TFK^Dl^(sz~O*%QfT`9yc}3 zDc_(7<=kw^oBVloi~nR9X|a#EMX98eQq?f9uV}zh3!g_aLbyEZN%F_K`)_E7kqky{ zd$xq)J{9OIb#=!*_M%;b6J^KAg&sgObhQNrUWUs39k9m*6#l`uB5LPjK-6FkeFkp= 
zq?bj8l7~^Hrk7C2QaZ;rVwGUFi7hL}uF>s5Jv4Es9PrZp2 zBAToPJhOwcA{!%*S?oi8wlqzjE0a4Tb4L=HptmOeG*b^E3 z$nT4+TJ7tQuk``_F}o+@8v=;8J}V&n)!dEPv{D!E{LiaQ*Ytfw&()tD6yawYXGrfy zzl1-dN>Iu=IAPybFLXen&wLY`+iU0ev}&+n8fvBLSH9{pdc*w8=hq+nMiY!*p6a@s z2+@qsz#ELnJSp}>3NAY!Zk0isCZ>#-IAFVFI_+X&O_`NJE}MG0atH1g*YKpmT!oe6 z!i?H0-!K3a3p(Z1pV2=3S2ueKevgyQ0x7<;FhKxEGt*TDdYKbZI4uI081B94xUB3w zX?~mBj&m~dk8Y7jdsw(4DcAd)uDLJ>*JT)LvBG>$uw=$~f^HbXc zL-J3WzCUy{wa;;XUxikpp9;xt0!h0|8s3pOE zLzAM?FDL@hG!B3mT!aClRrNejO`Iz&ix$LH5m1FIjc~q8?ytsJFW}lLRnKRsb%Vt< z#0vUFhx_Z8Dek~1B8i^ncnF#6s1wGC7ppBYpKVxnncTS`n_%kOMrsJmN}$<Kk~z3;PYM&w@glbIdNcB&ZC(;uAn_HJ``^w`?9b*oDd4t54)`c7$R9cD>y)jx{AF4%}Cxj)2Z&vPnYW2-axQwkU- z-2os*%|8ObgrF+H^F2<2p8l2Zx%RYiGMSWDXwLv9K&y;1gVF-dR^QLgL+|bIURC$I z@T*GU?Ax%}xxlm1CN!rs`J+h%Dve_xST5jXnE13EzIKJSAHuqiJ!qHv1uu{LmPKNxHcGP1?6}6$DtxFCBkuIE7pu}A6>fOeJ`*419Egk zr#8z>y zJ+mLb2x&jUzBGO57GeUA{nQ3}vWf*T1p0@#tpWCDe26Y|9PG?20Qc(D`|*XerC)MQ z(E5H@!#N>WmgiV;#o=9s)CCrXE}0*QZx%%H{Jqmn@WLFy69r%oy^Mw9P`rG3V9nBe zGtC}sg8M37?>kriVfCcZWgbsSLpzC0^E-q}U=pf`8sam*Aq#T`5jkgrAG~*2Hru?m zc5g?yc6VB;bL)feZpJ)d-ov(zSY-BtcJrmjt?=MKBtAmb-4pHqIlDiqeDkPBzE{%P z&L-lu8^aBx#Q100qx$txMGN3uKF{>xW!u!QILtl%I_rGl#>FTgc)d%eMs!?UbQ%Ia zfShv{4zmvk^%fN}VN5I4ObF03m88RGGXz4vVL|X~&^lH={|exPA;hA{Ds1-7fCIj0 z;eu6`^0Fzz$ne?=zU!G6H#IKjfhVwV%~h09Cn619hr(K-n@&256?gbVPvDq*bDp-P zzVa1qahdIdQd@aB|LMUoMK@-5uO7qhs{vL(vcHCM;mYs)K~}>JlOtx`B=hFf19c0y zs~GK$HqS)Hz2=O&I1O9Ti(8&nj@i#lP;<&F7Tb#nMC4E0T| zaSv6PqS%7=(zOs)kSj~RxS$)F+4|{S|0H00;R^665mzF%uH!NWz-PR(eKLu<{B6$e zelNd_UdUE9XB)ihJjQJ%cH|hoe3B+X-94ate2BN4A=vd6$R_t&R`w z{s%Ei1K%d@9VldYI2-;2*(rzyeJ-+^5No3;R9KFyP_DRpdY+te_*4(<34FcT-Q_ypM^#k6rrEAKC2GRS?86c&yNI={uNlXmi73a`;`0f)luz2=GT1lWYq*Avb3_xrj~{Z^NmqmGd@Db?7i`&vrtK#g zUou<2Al8`%3Q){s|Aim`8DEYc5x6ZM=EVb<&-uA#yBDz0H^{wZ+*|wm8BE)H&j81W z^K+*1CU9^_1tAP(s)4skj2=Ow%HLMe0)z)FpZ&qmg!r5$cPZ{nle%SWe(Pv+K z&6DRem!tvf!yNR3G5H(tPM}P^Nxk$1$K_KyRAu0CWCByA7|0eW$xR#3RdwqZK zgNQ%-uVQY?Gb++j9)ORp$u>P)3*2;q)!A5GjM{(|kuFn*7k?wzi)Q%J$ma}d$^-ou 
z=!U5K+5;yI&pV7Y%^K~_S73buqXt0s-{K?l1^O=&`02Ek~q@9P%jLP|WsCECzTuU@N za6A95-JhXPg{IRdw^u$y$4TH zy`tWG&#W@g_0yt$DBX)7B=@$c;hp~L){NJXz!-?~HZzEY4mOALY%7!}dJ)3|Dd&Ti z{I3nvO#O1=-J1Yn>)jc+Cp_`$>KU{b#Au>*g`n9eQ)P(Y7Yi6N;kFD-Z?f)jp zkwmj(Dd{zm5dK4(X$_qQ!QDN;WZWW-ZLXeEPlRz3W8S1aPZG_8FA#E6)5;x3Z$f>V zn}c>$D>G!39)j%O0m^rkHIKxPMAtP$&jFh+7--#=5u;2Rng{PkZ_5d%5g+p(NRc|h zR)s$yK9xyWhzEPhiS4B79sRYxp!1rVua)b59c5nMyN-NP1?7y0GQkHVhFSn>yO?sb z*=o@!uVQE7+V(Gu*UZK zmPP5Z3E3(aPRBEbMl?;v=(`akRAx9y4>rk{iw2_ie8&1R6;bUxM3ZmRlFUyk3i0OV zeBhQniIF8Eb$VLqC0@)kfUW=rNEddg#wR^U5<3tl%BrDn0L`1ODA8i=WY2CbAZg`V z3rRe2eVonTTv z1Fx{Ql|ltX4l1hX=Txp|RB5eMdZNFnuQxhua=hHgGm-#D(70F#_P4#DdF3zY7R#R1 zE(B<%SAcp~zO-@`1|$oNA+G>u-{B@~cm?{^FL`wy^*c)#$MzxzhqvkK5j|&2Sj(s} z)YZa#Yom*q`Nw+gbFCggg0$;^8v0KF zlvvS;nohvh zYjr>r56dK2|C*($5pBu`4-ONj8;Syz?6M}NhQHK016`#mCYs!ZrnajVdkokw_>+x+ zy;U6Ah?s+`?3UT&I9E@1Z>Fs)P%+WuP$Dl9wfK6GJn@d{yImR!+hA`jT)RCGxar;# zJ*&yG(;)}vqOVQA$ETOOi+!%2E?n`uB38_-ULb9<<#e-UC}S{n`0V7w{t^gc1b0O) z(sn_b*kcv6ZPpi=ROLKx7Rxm{OMBafH$~oUw-)`xJDQ+(QRL+fhs!+tL2yNwGLRiW z{(^ig7FIV6>q&alo_iB{i&ohi!S8!ym0U0bA!D+H4=h;($B~=&KZpjoG2G{9>lX_M z0IrihY#huShi(?^<@MbD@}eu45ToRObzC$cG1XkGM_MNSVMRMz zdTR7hCmd%#f@V;gdk*(mjV~-uB-@n-N=^zY=6Q(ax_zE@Pj-wJaxt3>(71p8z9mCA z3FszTkgsnZBwvNpZ()_zX@EPO59(CJ5;f?WH!1SOXdmmpx1W^9CFr9_hTLVdXXi2- z%J(gZ(yMzwV+TR&fq7GCeNbiQR~X{J78re8P*k$(;k!boS$xx5Ewj z7v?w}i@YN!8x`&f&7b-i(_opD0`%HjIB=Ri==3K6@zG4Uce?=fnGdMSy?y7*<$4V9 zos*7d@WPIrEi?k==#h;?J0yxOBGpa?AV$<(Tuv;0{ z*&FWJ*Y>}X?3B7L2GlFdWOt<|mw6s-5`cIVm$1FsWY<#(UD#X$_oTVrnby7@PIsj9 z`;$dnut-z3S_bn(*mn^1sV;Z41unlIo~X&tr*LLT?Q7zOmQ4#v z9+ahgtKFJj_JVPlZMp%gUZ{w~C1e*QNg0D8Ul=!M;$ST-Nv*rQ8C=YHh95_7v<|I> z*Lrpxa75El^2xL~WM7{lstjYUw&ye4&7VRl2oQZ1bir1!p6tMRVrHtzk`masY@w#&150QB|nUfp>`Q>KTRPhu@pX(+t^74-X`yid~y z*x00@dd3rBQh3TvJ_`3WH>P5+^pcc2?PyPl1`{+9!21+fbVdPCPglf8F>;RcYJPcF z#8$}j+U63hGA!WLDQoXQyzV{A;vS@pTuOV5;T9bRn6~9f?e0LdtUon()nMk;%cCbn9TRC| zl=2W3uyx?j8y4M5`22LZD|kMZh$6h4S?#0B;+(5@oyQ3a2Vkp~7W7MRTuhDEXn8~) 
zV@e%ZF34Cq_p|^9=|ACOC+%f(Mz1Cot-?UnXNL*{o-Aq2{o8`Fu$(a3OZAYfeleF#X@y9Yh6&8Gf z{%YKNBN8M+fy$j%x?agnJLGlf9q1&MSR$D1eYxRPe23@5(Qr;O`7cO!2h=+QQ=-lz zaOM_im~-*?x&ClCH;gahgTociw&py7N%ez}bWfD;2XE}Y-^>JtMkpTjG*99Q6-8aj z4{J@_bH(4L_PaMXwcw5?`-IOAZm&By7_Y^hyN(=&1K^V}Fq#tBRN?KMcVuWY?rXW< zI}5QsWR6^Qed+L9@K!RzTwis&)vcXF&|cM!u`Dge(4{x*bimR7{gWkvz+5mcKud%6T21YcpR6YP0*fkf zy(FW`si(}`J?znsw$JHC=}e^;SWkkrv*@y~2dY}_2Y;4lRsd?~|Ihuj^dk+S%`T#?7wrS)gn zm1=LSR#R2liH#&jYCoU)3m6^oDGVGj(x1n7yOd`f<1d#t^SstH{0xBJ-L5=rm648K zqrcBH1kQ$_1ik?Gy&#ZQFE&!j$ZBuAs7z6$t1W&ySeGB-E77@H@*Y~In_9|G+_1~g zO2FS~E}W7GO?OKJ5@<$_0or$#HF}__LvNQXS{N&VF7AxDX3C-d|-}BqMxnB0q*1q~9>&8cA z$KBGFGw8{pH(+g(HvC#{cT71r~kOm~^!pKJCXX5|@wPb)dO zovnxtsbnc0&kX*NVFrSIk67&cqX2}jv%V|Z_{(V*a@I#!stvI-;mV7cn^VR&(oHbs zpiphQjRP`g{>DFc0O@cpT}iSZv2@cJYK80NP^;4PoB7e(>J*N7zJVPpQ#PKsAt(Go z(8j7!+>-H17-a1YxbbW825?)q7}I|Bt>+AQ6}hjZ2O{9qz~KvxJE$>Z&(j+*e@=U-Osi$orEmd0{Cj{}8TxMi*Lb+6K)HBXlZS(Z@pmQZ0@ui}Pqo_X z%BvDuyV$>=Bq$?%tp<95%u+>G$A2wfcU)$1dA4`*_^Dg!P_z2*$l1GbGyr$!5*;6i zB14oQSw^^StnPLBl5XkLJKS^p6`tQ(T*x7^Xz@W7;hPkgZMgla-aZGKY@ofU!MYGtwl*N_c!HoTlQKhwk-mZj(y=yV_4 z0#TSLnYt`g44jrK<4e6nT*NbyjfkFsA55g7>S4E7_1%Gr=)USS>AEjBS$)Y zv5VjLw`MFv2|$O+R?q$gc>z-|z+}Ni=EoCp$OLVUQvDY*u?6vFT!F(Qx=JQxX}h;!4Ngp(y)Bq`|-*T7A743~@UHM~OH z@lW3Tz!8zFrI9*arp9!D+tlTTtQ#H?L!HW+INw8~b^1gy_Z=#;P)rp527A&8n3c@o z7ex5nR2&@oVNoS!D|vEuEYCEH3B%`D5HVzvRp4y52l(Hr^?s%fukfc|?H=D`;SPK{ zoWgd!7CZb53a}@-AXOnTS_D<1)!BqkO$Vh_;|(C)3Dr82#}OXiN5^ws=L|8!ENO_p z@TS0q688+=rT&lHq^%Er-oHaELc|_1Xg4jt-^kCjK&!ss;rL~8;p-sliwE#mQa#Nk z=izz?23;wl?`C-S0V~-K_iTT%-Q4}kl9z+?dhp>GT|5(1nMvP3kS=z|AR0ho=^dsT zDp%*9^`B@ymlFLG^EFGSRFQ5#%SZ911JibNg{V_{Z+Zd{+qz^@5c2`+#2{nNUH;%- zl#0-)MU6M?--gL~lyWlAAf*Qr7x1fVzYOUC*@cNF8Jw3(#z^x5$^+CA9kc1E7EQ1`G%kQlk~{)o7251!xyX#L>WGr|Q>7OOZK?a+sP^ z>;?)Iu{*cjPP5-oBXSinO9882{Yn#qKNUg z{;BOUo|AE7EERiN3pD}k{fid~7PIbt{eRiuK#{iBy@EvIK2I9>y0#hZT*uDKL(7 z#Z?e|Oo4v<3fVAKu((Bi_IuIM7qEkSzEun#69Ilb+1HrXpO^8Z(mm^hrW!HSm`4;zJ|b@0fZ?oVpDTiuI+L)lLybYj%6@e!X{zn88ZS{LW+zvcGpv 
zcH*pWX|v-1HPggvQ=LBdri6y=pQz1~c%aaQe%6^w=#bk%Us7YgoYE15e4^> z$EZjw?C&9y>`biO4!>I(&)i!JTo9R|x-}}cWcFaCF7LV1fA=2PeZV`8JM%|E$d#;Yxq7BO5kK*0-P%Rls z%Qr7|=h=TSIj;dybY5nI@e}h22+C)`&wcfBC?XzQ7lx8}N@gij8O|phjg&(c1pR84 zS62EHe(|mU1eHud{(R;xr{MEkgmx%q zbDjIoN*mjkjMR3u$4&Rf8e4iFxjEyU2QGQIB|p9h8lboZ(HzEFU<8Dw8mggs)5pa@ z+|5$~ejnm?G(c_bqCr|*lU0qZhjD2klO<+w!by|UHFxIL zzMlO; zFs{1o`iTJ^Ty4*I zOZMB=(Y(aSiY00Iqpl662TcGIQT5e@q62N)yrxHWPA_hMn_L>{zg3kwyz}J%`y()roy^%r2v1eCx`s=> zM*E5F%#YZu|PrqHey@#npKD*gXxj*|3fv0SF2`3wpEW}~MXL)w|; zKS1Dg1oyvT3P4W^-v2~BrQHsHc6Y-rPA!QXA9G`W*Xna|rb%G{)1m4rQ z*ILiB81@GeIuy1Myl~H(#PKzmg!FNzZ~h9{7opyM<@V6gqy;AE)fIQIRl3-1BcD?^ zmASs$+YVjJkhUMnXl-&#Di)41yw8*#Hhup^9oM&0YvYzHNmtl{0A*3!7L}>8EK&4n zIkCt;Mt3pl-*$6g{l{Gc9z9TjH@!hD_rt67VNfY&bArosN_Xb!p(YF%>+M^dCJRe4 zktT=xi&>d8q6j`p+rO!k(DifJ&2ncVm;JPwoN~`ilwyS6fqz0yXm})juTsP849z^OM)@z6+VhDESnbS(Ar^JeHe*)`ta2kvasRHUZpZc^pGp zhd^{lMo$OJmxPu5aSP3+@*+no`2&T$cT1h~AnGox=fq?Y>IU5Z9`Nr|s%R2l!(R|Z ztQ}bizmGUoK|Eh2&;kJQ4Ij`S_6^TxkEkNi*YI{dSiRgvfEO0?;g($jQzUJ7Z^(Cs z%|h}`;A{jUzV4f{C$F;0`zJM`@=a&t{P5jF%H#CY1t%Pf8QtjUG`J=J!7cc`CsyKZ zP3=z61Z{MZ0tRHYW*yN9{u%TU*AZMxI^v|l^Hk7ky8QbCz5Bby-Q}Cy=ZB~$1431M6(U500t&fc+p?P z%hMoR{i1Z|d82045no5gYtJt{F5^f~;fse32HpV<$87}|{HQ?AHzHoZKdY0uWq^dR zfuR?92AxEf+bFiUThe3692S#fmsQ5*9y0RdJ+{HE7q81?{wp(Ec7~#I$C>l4(%{>f z1V$0Tj6gkbnt%{6xeKuIus>qcRBoI_vKpJSWl@BJYi)f~q!yDz`u_SvOz-WO%530s zj%0e`ghW3WOr3*YI1a~kN6fAev`nR{3m5j^&jh%3lx#?r1t`lzj9zzLDIeu@cVx<= zRMz3<5>~>ZU$O!QbgW*2t3#a-R}SLC`YP9i|ews<~~WjYJD_t zqQ@%KeG2cn?RigTu;XmBLB>W#j~uv9b{%O3_xwUj^nZKmw%N3Qnca|Qx3MR==XRQd z^~c7P`qL|_zJ_JooAuLx%zsNw+UXa8x0-*&(Ln2FP{K`H>6p56(#!uUq5pq>rbse=b&!q!x;IS~2Y;u>Yi zj*u#v5_n0vF7{hn?5(m?J<*3~F0?yx5e?`jE`1m$sluk9?7s~rk4;mO9%;W!=O?Ka z+C^G%T4ydVjwpEx7r4{@^g{}BGf|{$qKZr%=i7@0ts};@{s~TeAhH$UAX0-l06G)* zB4h~nh#Hj@iiQlaX@DF1@#TGW-!wJYr68*;IzH)P+9pl7@Q4)6boQH?g9eW{{uiTm9918m zRDwFGXNxZf_QqX%DXajZiDmc<*iz+FnF9QR0b;{4z5^~rZmw#4wcJ$v30u`^W3tV} zJkOBxY}=OrAv6=!|AJVK2!VjN+_vyh;}r{w$dL7B39^|VV@m`73oV-uX*}a6Ea!e7 
zhXRhep0E+>dHft!17EZb0r+E41qbaS>qVrBwA5s4tt%SuN&X+(vh?2~MkHBEMBOb7 z^^ki}_7lfraH9?!wbnoXUf6rOCe-u0^Jcm5~y|2c48{;n9cj?2(yo1*Xj}2#&9}wU#;A@Rg)*3&- zI-)afq?eD14jH*@i$Vr?^K0SGsI|EmZ|QIwvV;sGF#`snl55JJyBIPJY#OKbWW!^U zF)}gB(G7RU=RlSWBY=HiEq>04l2Oy4=nG?_ju1bzo~!t%zdq$1oD=1@Rb&hs94BOZ z+)X`oeEz&?BF+F7PE??>n6B%n7!(;3f8gfqC&=y}s2dwsBySQXoebX?-_sg;P&ju| zAYgTr{6E3dV! zq)vSE95$8Atuqqf>2zKz>w9Pok4k@*OUyW~@lgOdgr8dlHq zSh}-@S*01{MqRv1)-Kj!K~k!wK4mUf0^nkuXa<{Qj$=Z5RtJLKQ&4);qj7fRSl&(y zj`qX8aj3oc$fb~qeShCR-bjH9Y88-Lt0zLd_XR_Jdv)e-Ur(=}5+TgaU`~4K;o6(V zQ<3(C0h^hVt+gtxCDAJo->S7Zc^Z#f!>OrT-s-FL6Ur%T(Mn+?TI;L^+RRH$Iy~ll zP76pgHk}zMK4uSDvAws1Ll97>^&IeF-Vu^-62LgRPn8B1evT_!HC0K|8*u2eP|m4L z2R4=hlMqR-;^Qw?HBOhXL?hlJeGn(a0dsMj$cOX05v6G}lYF+)t?3Q9|HWLFgMCry z>&$@P1PLP!8ZdjN^mLVJ)+W<8j?XO49Jy4H5zkWi7fvAs5ikoF zoE9C6_elC_IT#MP11GrT0l+u8nuog${38;VOg9C$#`Wf^Q&n4eD{v5HY6{-ibFpu* zk=>W9SLg^l9QWG~_^s(K0M(_ikD?4$SiL)*RC8r;M}Cyp@36DBu{h&9!0XHRE$H0? zi?8@ zrkO8;(e288T+7g3??w0s^6u|IKOl~YC`c`1iVGSt4sPb7a77zs>IOEqp7>I``NXZW z4L=STW4Jb9$l}L+%Uqh4G60#swskew1|H>woXSDCO(auW)5j z{Mq@KS*2SqXh$Ba4Rq2eUN`4;{=Ch!vq5B1Cf~Kg(vh5ic<$DcHT>P{dsK@+4TQyw zfnQ3)j&9?m0wh-KowBUYO>vjjyoxy*_3&wB|L0e}CnoZhsU-H8rYG%YcZm$jv%R%; zSSSrDn9M~;DJXq-%-X|9_r&RN3;Fsl`sdiIloSYHXQtFU=ietHKBBGPs|HA)l__dG z`j~$G66M|^=^e1gbl+GzIh~Aoi791nCRrRO<+RZqT{jbva*_Tr5&u0 zh4@ZkY)-9p_BUlBqsz|i5rny&UuDm1nZKODr79cHqRBf`Vk3Qr^gOdq$JT zC%=~*Sq@p-r?WCKd6X$=lTo2N7u0_bTnNf@q#)uwuU1sYlBwZ^H8)p_rj`fpD8|sU z-*!4Z#A6u^ zBS*fa>lPVf+*u|x9IWeYr=}hr3pa3! 
z=Nkh(qKp5SC_86|79m`Wt7C>2p_(rVo9VxImEi827ELE4PC8MA95hk83YqE&W$2k| zFx@+*wM7{MBJA!IkmAH60T^*fE9MiF>VwR~eZGQ3+|ujf5VY4-MZ>lh4u4mr>krq_ zg>Y*o5Lt}e1+I*2vHL37wzBh-^r4P2M0n*S5fip~32 z&7d56Hav5Wh{Uvj&s#50jwL0| zkbJ`K=*oLflN!sfN_=erA#f_!slD6ni9K3MBw) z07O`N$rIEER}r;jgHdrvm%K5$cS5Z1Ql$gO)cwnQlaJzWpY%0kxd*pIc0UdP(UbK?;AjH`uMy{%q!Sipbi%Z%qN+;ao zlLkj*n0$(1bh@bybpH!&?Mz+p)PO+{YVN^Cx@d-relh^E_zMJ7QxYuLzb-!f zU(OfNTRG8c|7RtD1$Qig8Rv3QF_*#v+gb-&fauXiCg3A)nhEKeuXYYzZSE@SbjhB=rf{0+PUNK2vC zfF{o~3ZarVWdoHm!P?CauXJm1IEQOsWy}MekS>9uR%XTaZ!0GE_)vT9r9C~R#{%97 zq`JGOGr3l_p*O-jEfBM*s?J8L`-^K7QPJ~fl;c0Tzht3+eW8F?L63W%zkyepX$Z$| zn;)pRD9Vv(+LEKyZo;o1_4M2FHw$caLEGu`ATbUcKin|4Wxs#AWEp z2PJ;eC)v@J6u0QMLJEzTk$B9^a)Ti;^3_aAYcpe))CU0Rl|GudpM2{qY`T5v-CuAU z{#Rh;FE|f%kb?Q!>@KB4+z|^WuhvLEkddUlty4lL0Yo`MPH>k8$XZ*Du$bQa7<44} zBMXS)6UgyM=1(#{9)9^hUZfyoG5}7X!&s-E%16~>S6G3x$iw?eBl@-0lQhdOTg2QJ>~^_5se;V=R=7%Q5!5V++qAbU?g?%j$dAc8 zyEZ}WwHp(g8SZPBCbKnud}?oX&(+IGi94PViZ)p3pss4Je9~)QDNPo%H?Dl|-2zm; z^wwt+BAdzl!$L^Mj@c^z6vb&^{jr2tw&TMHG0Qhf5K5cY(-qWYgywQheXT1c1s%b^E*KtjdnRCh3CmaWkSg zsvbqjx~9$erNE1W%enkUM-|8(1=Res%u{e%D`Liz*0q^qd8E#** zKe&N#^R^f`t98qb>iO@WpZ^L|>kr}A|B3S{HW7_TjHfmM6zU9z{QRsI#|s^1xKC!kY-KRf|X zxk|ObP@8*Lb%Tfd3-_RzUgwP)yvTc6&dvwX&GCpM=K-T07AYn*~5^z z!VgXYT$^_fL7kGthoH|7D0WFqI4Yt%`woDPc%KNgpSLjs$oM}39S<_Rn-E*t(C^{- zi0@1wuS3u+Pl`hj-EGn#qmo5CY|ollL1b@KFFH ztO4{}$@+~Dyrui8yMCnH*oREh{UBIcKT0U*&&T^=dH;>0cVUox%H!a)@uZRkUCW$4 zzf<_#B#i=wBkiEF82#J}n_X8yv?@m$c7KN`>puyz{=08TlfpIfAe<8QWX0&h<}56P zu{nCBPu!V%+{mbIVdC=ZeCOi3GurO9rl4a!y?5Tyx%??dKPciq#ry}w_O~brpfqSq zb2t6OqxwTodXMt`1cmCFCIwwE7;Jb(DopTlg$sPc!$$otY$e6=7XPnTUp}*UJw}{jd0H?);oys@wed zb0_|(UqR=8Uj6Hs{*+GhQP}-iFBv$G^4COd|M7p6cz_$g10WU8*a3nf)hK{2Suk&2 zbsr!x`|&8Lwi&()Aoaq@T2QH3@a`J`n&JHn*p`t_>|OaA9^@CS(%<@_Y(`mu_b!kx z04Q`#5+neXn?%<6;Sf=|1-bhT@zVPRRAqW&*9s9#z5_fTacjTmZ@nk7U4(FXgBTw< z)=ma?yd>TdUB2*qeiJ>bttgy;=ZJckk6pzVeup z#tIp<-CZm^9EEGm16B26JEa5@FUR=N=gVv_r1Z01vPtX*eTW7mT4GigD@Y@g*6H*< 
zJ?GnC=T)&tIjdTCutvW=S_7H6f_7wU2zTOKi~Bsu(SOD95l|Ameu6vVtuLKKhH>5F z53!m`eXk9iO>UjlIq~bz0}#*1!&0@`Q4po_pFh-p>Ren4GIupKzm5p@Rqc7X zdU6N(Vavus4SzhrrogiDvYWYcEjC^(&SVhBJ$J_K=$Y#ms-LsAF`q>@58byeZh(js z-0sb1tIodl++eWl{c&(ZY zns~-`XJc_xML7-=kY)~xJ|_Z^Vn5%7PyKwNvBNCvfvH%Eyi9b~yz^vPML1-oFo{V` zIPaP?x&yMmVk4Wb(f=^r&(mh64{SwK=xV&UrW*=rYRTh)WQm?CI{Nrb^Vg9125!$M zIB>Jexuos!9`8;L14EHS$-#-a-0Y82%{*DwSy^U^XY1>-Udv*XSWE;uvrEF@QnuN- z{cu6F_b8agR@lrJ+ofF|{*@Ba%qT!fyvv~bjP8+Uxfg$iwG2E+yw=A0R<&9eq#Sd$ zI3J@*iGXYp3^?T=%_elYQG??bjC{zyOlKc-iZ=kTNL zm_UUib+Kwg!gHvh^V}*nk9}d0ADc@>+fKAYYM*ZvK71k*!NoODHv~&;0Mp#hueNHf zobym;+B}zdpijKgP@`sDdn&eSloc z(YR#FuZ(NP3j9G=9z69BD8HXzP4Cd*Ok;7 z;ni3>iDMm+0B|Jng;1hpk z&El7w(99D#ohtm+vP*>+w~kYcuT#v zn|?2~FX2G3PWXgz#9wRmi2N%nW$x3$)>XC}V=EaLYDBe7 z8n_DaGNBY1)6T{Q-!o4?(kCC=gMux^7^i_2cq=q0}d}T@3c3L zX&G8^vfHsBpCN+*3hkUH=Zw$3beNY*2%ErH-@Nfc7N|TA5OTzxaj1Y3~>8%49BHfpAby^WHVWi)QRYbohwQRafjV&fipT5fZ1?`tHqbge! zcJF9BRU;#?=ghH-D6I4uUf-b*SSG>QGy`zclOD$$XlEyNP=M^ivO91+SxVYG9n_7yLi zRt59F9Jw=hue0-p?tRe{JiaHsSueuTR5KO<#2eE_+AWgKXV5EbH-#J%=r6PlD4&_U zaOZ*N>?2u+exF84?#2UwmEHZ(=8L5$#1$*%7<5xR@187!&e0f$vRaBS$3V2NajX%- zX;RC26`4*wtb8_=>InFpX^|s;z74?{q;M_Y>ZVonH%38*^3F?} zADo?z91EYEAg{(q`cm=x*tZ#a1EpD~7wgpAo^8vzrurtPv4n8*Z(LE)FOq+$Fua-G zlpa_k(gF{2UkeOUlaEN}8y;->fK1Vy=2mPzD$)}h;o8pH`h=s=K3-Ei)FuS=iThgZlbpHp|-5{+372 z0eF7cP;y+&8lrngNU7ERR;0hMrv+p3r07)thLqt z59bW`@2RXIbJ|I<)TWm)n1H+vow+ALrB_eA zjqczWp=wSKLi4w*(b}D{yz+2-y!rV^t8FxW*ZaJfBeG#8?b0i05C95fjbuudZA!9u zWiyeVRJUN|!*iVa*_nJ9;(%y0c?Ri5fX#>y6cxbV8aSdQAKn0Yyo^F1E^qR&a$bMJ z?LlE2C)io=u+!Dj^Gpb^OJE%HYUjW`51^IE#H%cQt0ZdPKwhpTh)i>8wY5%J`l^Z*KaDO5nK5dL!#YSRO2zXpoMbtdB^;oDzClo$?tVN@959cN8sY zvzZ8E?e*AdXGJQBhl`l^>88ts)~Fa^Y?!;}8dW`sWs6;2A|BrS><3D&;6?{ev0l?f%Z80{1#Shbg=0FeY0DjAjfkz(2-NfR`WNxt|{}w_jYV;A&==_sInaDVna>oypRHU__!f?aZ*ic>BosT@ciNF`Uj5{&Rb2O z_t@o11*e|Q?^dk&2`Ff#EJnQE>^+kc+?qS8YxiEY6D@@Z#^8wO^wXQdsWi};!}Ufx zMrA}5GL^OQkr5(`1;HazA=2bn^DRM9k-o)Ow@v%1yVD`UEEV=X_5(;G0B=Q`Gpn}2 
zx#A2(C+s$Ho7S|%_wQ*1l@vdsjH8s@eH2y*=`ZWySL+uX)acKjDJlz#DH?ApzMUDN z!33h@of}R32|3Bh3-A}@RVda>kLm4?G%Abi#-BIPMv9m#7~2+XZCd<`N=oPa??{Tr z8?;zP!V77KDZ^~X2E^#zfy7xYcm|mJk5Z98xFy@#@XpHLxGKQ0AVM6OM28dURbG!9*1ke&NgAJPB%ohq1!z|UiRyty7u zS=pKnIbA;2i@q@;8FkK$ire>NL0Lq2Wf&|FpuMNu64F)*Z6`~V8QoSJaj>wtCdAP8 zAt(N(EoX{fnIz1XJ?8cp;*WfmhIE1{y*In-J^o2@M{zsLAt?SNx`Fwt5_77C#PF=D zK*ySk1!ASEc#Axs=UF41T$=3St)|AB#!ZmL^|i#)nRv{MUy}+GtoV{y*Ad}wd?h0ggW{-)J%gYPuRYBGufKAyBxR*M%jq+sbY0r*Pllf(!Bl4p zoT%mN?P}oz^3^lXuM}+SUzRG3+usM@0_-R~FHWX|BZO7|jzH(_8%Lkt;G}U)4O}~k zTUc@$6R}9X8+kqPQCy*xaw<>h$1{Sq>x;XOqsz`n%TY#H@92zfY$F5IExgH(21NME z{UcPqJM-qAt?Qyy@@gAHCYI)iID)4IspRW4{oPa0u zbKoPyl1ABE^n#VB<#=_LG0GK_#Pq4#?3X@d8cU3q3FU5+5R~7#?9iB*l-*!Ep|or} z#v*O8X8M~)Ned?-l2k?fXdp;^4{sc8Hlvo#+On{Fx3bKmXN2Iud&$4!Zq!L`7oLd_ zJSGBsTJAgo*RGSUF<>Joyk+fNT@imm_u?(TMMw4{^ZHUW*fP6Fv?@VAUm}7FJ!Pi3{9- z7cQ9>+#+;GRvt?($@RT1*sb`H_l2?q4(Pg^Sm@2_`FzBi16~>=5?1lqkkX?u(PrLm z7dz=ATZOzuDy-9W+&E>GMql}&9L*oih7axb1w!e|G=tF`Q`}!3RaaI9%oK!7Uf@Z%1>-&H!qOb! zFe(T0GWNnDfy$Rxp3r_z=?l-Qd_lXE++?!$f~u&DPxtkmBlX7{vW1Nq~#T8n%G!)WPPx)ao|vmIX;zJJP+c7+Z$zbTR4kSa3-|okcGy-F0J9-qp1_S zBX{0i4!WYA@dP6sXP>f&x@(_A2*eI}I#&e@Y+#2oyDYz`f*&7es#MgNGMBXDLShr zVWwcqp;Y|dvV)*u@c9R=)Mwsqu?Ne!@NO5H?Zgdxb2el%2W)ssIOMukET15E$v2pA ztG4p62h3hQeTC7C_N{2jXep5z&;L@Q?+E%Ufu3uGM$kq0HqhU)lgB^4h-kzp=PA4%L(3$F)8_$-4-7*+YbVHM1 znqL+QB6jqaMp6uhmpq_}d-N7F>My$P55W!l7g6OnVsu50-O=SjzlDSK_v&vS1w6{O z;9lc}ErCC+S${IX_?-rF#E#khtK3zVnE|)u9&jBes5nex zf)sf1r7O^9fh|I^$#5Qvc=P0*hYdTEMdr$#Vk!86db_&=j`a*Z;%jWZ?9_KUAT)K> zXP@fL&D;ZtYN*@ekf(etUlo#E%Q36*wc*_BP9mckaIQ1q)T?ncS*WX}bi zHitCQ)i!G^I8+6X#vh^>}n8Ox7;X&m-#G6u1fuUN6gZ7-nvld`n2&%3ckZ7(&t5Wuzv)57bUHolrTI zbxF^k>g|NeY&Cy@m%6bSPNXI6=%^gEwb}Gd4?XeXc}?r9$6Y^|>Xp5EK4sEH#ldwQ zAYoY@-tv@HQKGzkH6|`**oNdN=Q(AomlWQHUIjb?59O5K^9z_}FlCX%XaqfCJewTX zY&>hsc>qMxI+*dyq#DH1Ly5UX%(?`Syc6k2-Gd+Wh~Pzfz{Bc*E6@#e;rRnsfZ{jD zseT6@`%eyz05F`Vh!Yd8fivh8$*XRaY^Sj^MbuJb6VDP@sC=Ajxn4eq9cXxyRU;Up z^~wCv4In7&!8YA^Gfoy)BDwaH&PBI)4inLkZ3}+d10W 
zKy{D4o#&X?c=pvd`;XsyX5Q9dwqJoOTeFQs&m5eTP=B!l$aBM1X;t`f_tR;MZ#h+_ zVclF}q*%Gjh06?2jai01eDT~B#_CgRM2*@-f;{yl zCTpH|FjYk;${vfJixMH>tmkohVHi3UlQd5C_FKY_<7$lLf$cC}yol+^ia?>%aG@>+ zQ@n&?bV`-QMt+Jsvz&IuxvJvQV_iqfE}_>eysxMl+rD;i9+=6&_zy0+)~cfbUYb{2 zPDaXi(^PaBFk*(CN>umUedhhlvRT_^(lhEHGxF4p?lhkV%9Z#M- z>EMke9S4CtG1&AmZRPq=jr!?STR5%k)z}Wb?qK#bTuGe`%d-8ZncSmCx^Gr#WmrP4 z?b~Z7MG9h5k)yYX2hjm7GnuMc=WN=9ri4eok4!0N58 z`B}(W%-Kon8Rzv#Yo5<%=OA~#-x71%==m|bmyOuG*@oKSzzZSsVYbPm&gj8;k{n(yCz?l7pYlG_ZRzXa+WjkJOovitNU9_3tUoxhY;1e6Jlp zH6Dbfy0KzhjIF05GaybcPjPILdq_Ff5|zo7-hC3MY{#QhEUta-D`4f)H;#bIF!5L^ z*a+85O(}Nc%W_R;b?I((5x_FupW)xtx1k5r>VO}QgQb#(WQ0cjs%Dj*W~egST4#oENVGpP=4pbNS+G6 z*UtC7z0VcLA)0f!jFdKX@`RK3yF#$Z?0V%a z{H@ar;yC%l7?CgbeyvQxPWjYZo*U$rSJN2 zNPqVVHm6OrX;6Dq$3xalzF{-=nCh!>o$RhLfbGg{9ehRtO1GqNdPJCb@3Z)zjeu&z z^-ap0BNBbO-XZeVbKtL2=x@u${I#G^^N=x?x#?;Po`_dIbb|`pUr?**xy!rH)v-wS ziKp~RFa49 zzqEJqW=aB&`e;Z|gyB>Cj5s~(@sv9zUw~#S)RAW` zle+hXNONs&FT*s!Z`=g^9G|_a?ABOd{RV1vOHB3Dxz?s<@YH=7X<^4-IpsW^b1(U6 z*`9|id_%q&dpvuF#NU|IiV0yoo@A4$qV<{UkrVgr>gB$ECAeBD_(kj#l#}k!U-0m3qUSCqu1bCu8>A! 
zFz&b?U7)8FRjdu`)M~q9%;)Pgyy0#7MY8`kJd}$$kMM8myaaXJjeErhD{?B(l^W@z z+9PQ^bV}4RXR~`3ZNvHOwDSQ_?HXOmAEm$LWOslBZHx$3~Rm z3muzxAc1+2XDf~N5agUdNT2fsve$i@d4F{6KRo_>se4lR?_~K60g@N*cjRki%b6bN zWG;YlAm81)58QPr10VGVIdjSd1UkJ*it9YVxuz$~*rv_zTGg{7Jv%P!d~FodoQ3Ug zb$GXOF+XSOH2=bK?m^oSB^Le?lZxtU&1|oe=+fq==q^3HE-G`oHaf~8#rBby59kvz zvitk05y#W^>GvPdW$!1E3u%~9!}J@zZTE5l_Y3*Zy-OQ*(Ocx=0xK?XQS5_J5tE{_ z&=6V_MVzsp-3*G(kd)SjaXwq@{N#mxVq%F3kTSdB1LAy;Cd-n0zh&UT zQm2qz52RisE55w@Nzqpq*8b#_SWRmodCe5W<347qN-}j7&aXG`<5FygK$GEuwcDLX z%XIfRik>;nUYihGf`a8tG6~uHBQ-%;rhrsA;qloK^uXF`9HTQ3Xq22O0S+VIp3^Z`3i;rhoG6c*>*gZgejgahSAA^WktoUh)Z9N-Mq=7bMG367KEfH*x*zYxCny=_`sb;l#Ram4Z7(06D=jd8ipPK z`w=HqfwSiMJoV{!nmXzY7TGrB%{1u`7mG#HB0NMACx}37DaIU&Iyg3k_!f_3Bt@a- zl9v%X=YXt?$~5vl5%vx!gx8!&HEd2pjG@Yfo>Xi)Z;UOxQy+gJ7PiRBx?RXWL)9!o z&{1Hk#_jGT>|~>Dr&fBtdxqD}hD)wZ?X`6;%J177NAOeX>Zb}S8e>D%muiU`s@FaW zo?VjKkWAc^T2p#I>8jlhbPRu&ar>OIWpf24VNLfL*F}ek8WZ~^PIINdZp|Kf-XnOJ zG+~-!=vK&f+w2gK`ezTfHy#b!D@Z&m|Ul3 z?}c!jX3?J;o79<$^F?5IJzyPJ`@JOX4@J3#9{|Z)NJ9x{*D6!{)>*72t>R-Scifa)ef3_Lhv~j`Ou#k`WO;?)()`>_vOjgb+eJn2 z6=>kjgn2S$(Z4Bona(X&2S_{})oHm&bUoR2@uFS0Uf9jtR?9=sbwHty%MJcHC_fI$ zm)|o9zky-xvi%mQ#Sg&E?UCe@-gh@T4?$A^P|e$toNEcd#>}DSe`v#gRlNMmIndv! 
zYWwGI{(kRW|ITCoj{$G~LA9R0m6LxCpl{%ZV#q(m=8weqcRa-ZUvZ#=KPxr|D*%r= zj(;|?Rg=l{JRa2uKgA!$i_>1|@_OP#Z6yC`Xr>~9A6Db2n9=F=wWIf1Bd;fb7&^tg zvkFwXQe$O1vV2v3E1*g9@{6OX*gu)UJ06 zKwQXYMb^du?lZbWkZ=JQ=$g>~I~Q9;jD{h}2*g~;A&6%YHJoopJRrjM-aOFVAYOux zmy(0PWFrG85HipB@p|MsbHwHWfGSPcquMVAh)mKEA%`HbQz4VU{($`Vh$eqhE%e*VL(u$i#vurghW^)$xer0Q#i-2nZjhKRq94hi z4Y(4j6_MzF%NFqae*1(oisZ@kt?P%N@5{j05!S#fZ7`sT66(7?A_|eGhuPaDAUCgk zEWrHc34c3sBv-(kE$))lM84B)TLbPeVCR^~Pnei$f1DBE34a(mBrBXq27;vK^sWi24lub83Grd_N4`pHutis{KoS_b;_-)31^L z0z!(=ksyu%6jFCd)nq@&vWW*@>*-dNM5{ftmt)y$5FMsFE-5C-Ns`heA4K) z65;Xq2J65!(^(ju@ZLoftzM6t8_aHZ=~`i()BjuWM5E^$^;H@?I6FGTF|v@_neZ+T(Pk z_helb|GK}%m$$nW>-K0=sK9;2=7)lk+3?Y zi+Ol(t9uV=`?YjQNyx&2zL}c_o0Vkk8Rsy8wA=c%AF`s?GhW71YG&r3gIlw1dF86jTzVW)4=Rq^ z_qe;si3IyEZRRZ(cob%HDd=X1c*Kn>FL=Vt54;E!atKSp{Wb(#z|Hg!^yO9D$QYvW z`mr3kPLAeRa7@9a&d6`1Ydd8%aA(l^AR_BN_k$;6&D~+xGKZ( zqe1y~LNm$QE+63ypT9f@EqAvQbZ)CLeu1r*6T~h9k$T?v-CB6gUV#F4yTgl|$$gRU z(1+ek-ox&6t?7(PGd6V1Z0{e|B=RNfiX5d!!s3Xn=N(>t;*9|v#lv-Ak)BKh>9?M2 zxYzDn$)Vwjv(FNCUzudTp`C0JzH@6;dRaxG86L2x-Bfoo{*t9! zXmDg#`DN6c)1@{Y-aU&-FX%L}Uc;;@O$?VE;n+GN$CKNbiCNGuu-f%1G)sFXppN2HGA zEe?AdTMV6rfv5Yx1S#yfBni9Y%ebfUMBDGlyV0>Ev^oZHORLEx~Apj-XuDA@g+VLif!a@}>MiVb^*6VC{)hTSa^7PosC@=Of)LQJf^E zu?`rghDgkAf_dH{=;fwDR0FDbV2uwoRLE0S@ya$vKSi01PWK5n#Wpg%9Ez5`j;c>k zBIsmH5e{_1*6dcd#?0BBH*M#*4E#vbEFMF;(9*Qjg2~o=Pm|G&I=7FvUGiZ{H(zD641a6cW2#8qhRA|0>|b$fYmtKra|?o%$k2kD&xIC728( z(B`t>YfR9p24HuFR{*w0ujpp8TkNJhmGG*rN~*Di2|xynKoCF#7>;&O6`wej1ki>Z zA#xEMaC2@A=lV^2@udylANDxTk+4r<9X2P9P?fuzGez)77F;_J-!ntYh@>M@$1(+J zt~I#i*Ve;{ba9bgn&V0{8t)bOEXkK}ld+S~P+xSJmhgi0x91^OWII?NdgQU~uk1Jw zsImdowSg{YQcGuCzC$xN8_lqN<|d*ldjmXc>wb*#TVjEbEw8V2(0y2i1Ml5VmbQ7- zbNAgj^Sh$IMt?~vHqH_Yv1ck0$I0H7Z6GDQSLOoqWok0YX~HTK5WleN?fHHEr}hHw3Tom-Wmz5*V*GeN?L2? zwXdlHrt)rs6!8)X(;~#-x^g{~Y`QOQB9d`~2s`I_eEXu9k2G)6zHi8O&Xdjg2P>J^ z3L~Aq?HVV$afp3tBMxC8-&d9^5b23c1a>UYxR_TD$ch82d70w);~>t`)-_)BKx41l zbk9bejKyZtUn(5M7vS(mwbb6ahKSY3!_*wCosrAkbt)TGj1d$}O+ZBT27Y!1+JUW? 
z^Hj#I!oLF|{5DW0n99w@d$kHYA-&#X;H4Pu^}Mu{tgjD!U4iiX&cC8_`4Ge=<6`JJ zvV#9N-I7Y6!&dq<@EM9vF$|kkr=34ofpx>~MK>}i672oj?90+#jlPG)8NXUmY5HJb zo$+NYTUYGi7nd8i&g$Os{|6Fg0^yNg5pcrjXElQrjuPCgcKTRXoY4l+aVlUjpwPvX zuNw%|Q)nA#Wyr+b%SyI;KNJ$j!_>q7*m;|#wYB~OgOO2ZQS%wW^;ISv<@+krr}py5 z=P<1zLGWq`;RQ;r(u#Lk_KA zTzGsUyA!+3439mh6oc3VERPh&W`NDTv?t;Yf6?!WzSdB)NMynEd}|eL=Nf|^g ziId`VffX*^awD;R$75Mzj>?Bs^6#pm){cDL(1+qey}!U26c8-dUx?5F-W8J!dWE;0 zhyb;=J*|4|=N;rbM|Dai+(k4h zC;e;2#kZIiuvc{;YH9f?>1o;K0#~xCALaXoT*{|8@;~IO{PsR{zf*4gk679Nc@4_X zL=T+uC+Hbrm@cZ!OX^|JdZ5nW1bV8iIm0b@w|Lz3nRBZfX1oFFd2v?T$5R702mg*9 zyUEsl30luQkxm(zuxQqCq2cZOdR5l)JFWL~U(cY8v|o_k zy$J>pAK|E4;w(L-axM*>|D4n`%_4Fhvg_AGuuFIciQ7eLAcgR|jmSWtlz+^2n*{9` zKHnIqRc2=A!6-5b5q;sjLsLbkx(S8=(Y_|TZDG_|IVomb^|}3_OY-cmt3GW5wNM2O znSsbcF3%FOzIRMNG+m@SGG8Z~s5j{X&&8})Zy#ftBU2EDr|E)XDpEhdCDMv%Z=%h= zvs-_A<8++=;OqLmBF-N7R<}1{LaL&3dFRp-$5hJ%ge_$OmjOa~{a4EIKZo$|`>uwb zBdjz|9W5pg-~)j_#rD@(Nw>?due|-z?F*sk{Wyq>Q7gz5g1fPh^;*Gv=J6`uh(8(wK zAc~<_V;U?JE%wR4`Rx=K7gdcfto@>IS)82DmFO0@K78Y#-JYi^{}P#EB~8q!j)1|g zpcpQnN0qDw7Y~e*VxptI&z3S|kJPU;r!nD1Ux{xI)QMz{FX@IJf&|yz>oqDAlzFxf zrNkO3^ie5eV{=4=cy{aI9U|IX&u*zFT-Y2Yjf>Rpb5W6E4{i}v7Y&Al8rZ%KAiBzf znQJmGuMVatvu;_EO(tM=(~?DvcTOk!O|8DN4z|F=KCQw#9-iOPS!OQGzk z^TNe`f-<~^6I30`;@1V;q@o#VCU{?DUc7syagH*sPs><~6pi1Qk?MJCOc(6U5lS`^ zNx6~Nzw_ZQJqCk@nn&@Ik9NmL|0o)0^=24;>PAaqpje{dktHj9?W2qvzT%|$$ipjUcZO$7QA`1HySLCCt53}Yr{an7cYONC}X zOS<%%c>_2Cu$a@XQkpifR)frnI7_jEuHbW5TAc5>nXQU?ey7L5=M>w45`?7&NtbgtK@Gyi&UMj@b1SjG#nUsUhRLpT5Gp$ zp|sJWHRr{tS>Tc2FGoKErqrVmYm&KD5ogynbJRp1KRLc3y=$uOB<|(yFS6_Ig6I)~ zcS29?ZMugp4_grRm(ViavZN6?XcN2?(;RskgRsSnt6`0tf-Hudf@#(oO(yL2&PKF8 zpkFbb8GLz*X%Sl-XRVf$pX`ko3Dn_`yPS7J4_K-TlK<(<{wsX{9+vAjLh!e9rcH+e z9oIKjubMkaq+l4MKfFXsUR{zY`>2}2P@??g^Gf^a55CezLy$kxk`i@D$tZwF%!ND! 
zH0vpRy30nAqL}Hce5X-aSt+V9QELR>iwtYEcPYse@b28Pu^XTBPB{eqHD+Ddf&|m( zA?OT>{2uxpPX5|VaykTM!}p|!lv|5`m7^7P^pg|-t znGEzN{&4_*!%Pc*Go71Od~8H~`YtMu??RGEn6N zA^}^Mza-TL|K$`SyPsYHT7CoAbuI87u#BF8|D5jsvz4z>jwD>}TfLEExm)9%inrPt z&wKMY0`hT7-e~G1>mB_g<OMY`&l6uU%T=Gh<@!O zcVcvTLi}5B?e`d2w!I@EEAM8upWb_(9a7?fmL$3%?S#Sv@J7-ds*WOdPy*Fb4WdyC zYQoZQ7KzoN>%nE7Eo#iBb#1*E_}TW)qU$PzK<8|qGDFP0Al^Ku#U8E zKbedC-$4NO?vOWcS%S>3y_5O`s-Wc_|2NN=9~G}%sW5Nx%aN* zoOAEl`#k&E&!*op|IsrdtPGX`BCL^c+zKGVItPfbF6Km1>mtb~w@wmsx~X$$GHMR+ z{?Qf_|1X~A_pLqD8s3P2o!8GVhyR9C@8o(!>3X<$lfZo9OEcC4CAqwj2p)CLk z`HbD@#($BZ*#f=>HhYr_Fr|`EcYl`R{%807pVSBWYZovh+tn&0-d;BA{Ybrwi$~d; z?~ur4Z6&iMGV?|ZQQ-4*dz6)bwL8ElGG3eBX;Dlv`ESw_&Q;o~bnd-U7)6bL2$Y$^PDphNI)yM`O z3U`){7?ZDv)-5VaGQc7~se1;7iX=pV_W3%+JQ1by_Yi%RduTuGFxh_sCnTxl!Zn0! z5X6+0g^N7VJjtR#7(TLLR>@EDHt_pQV3?$3pj|?QyCk`N`cPCc6mSLeNDM<;j9fbKFK8aYeNqaCY1uPK0huY=GU6M9G=H^KX$YO`ts z?Z%J1dA7?4ZKRHgL`F1!=!0bL?_AgCoX60S(;bpW7dx~ zcXV{-W9HJyNyIBPfVR@zZ~BI{hpk66gHDHY^|B3|HLgYC!*~Qf7PY(+2V2ZrBe!{T z_@OS#0nkhKU1I1x#HYBEgyE{8Xczr6+aG%zqRhXtt4BpBoBQ824yZbCeedBr8WI^~ zA7WM&m?JZ_T`kegZGq0m{VpOgnP;otO(DOrs>Z>e9xOp8};ANH0gOp{4o(ur{l(CD{87qlxWO*8f+b}lz_+T!tj)qQTmi-}`n zqer!c6GDZAVv5f5fj~1!U4UM(W*v%=B8@5N_8rK}%R?u;vB0tMC(sS?)-*h0@#f}j z>$c_GF~65#hQV}TxSWGpO4GIHVhxn0NCAGt`QvOA7LV^WT&a_2YP}a4Cy-*PzN*7z zZZp<^ubefLFko|Sg7i0f_$##IYzQ^F6+wz}D<9Pjx7)&Q|#h;|i_e9OL26aUVT>Jh>#|94~4ZdxT&wUeJ@(XFl7&eqzRnVXvdXL8hu4;Gp*q#d?ud zNT1!1b>OR~Pzoz+xmVMh7!S)M`&nsyKHpNw^&7T3=0F8_vku&?OI)w}=1%h(u+_@E z(w6-`n~inTVf|o{RlXXg=nPFxv9^o+#pW)%AV7U;{QXUdj%?#lujGrpV+&Lc0B**$ zr~WPz^>;QYJ-^Cg`NKHNpX(D)@C3bhFwWh(Uw-7Gq^Xnvo}IUPb-OL55V5_D$zERI5dlRK49qCiRGO{v8{qOVJkFQz+hmNF)c1Eb> zqE+APsCQ7^Bj>JbK6er^AIdTEHiHkzG4SvzLHcxgbuRQmE;enCU#X50$Uq52W9_F= zamHg54f4tximZ#Cthv6YXBFSE6w9~C4#6vD-q9zJ;!U8p7O{t5tZx>kRSG+D`f8lb zqh4vUyYsY!Kad*nZqdHn3Ns-ZY*FZU^asghrcD-Y8~%!Q+FUf_VVwc%L=8ayYjF&# zBI$wi%(bg1d@d!4XLE?t8c~(9vcyf$jvJFLjfiL;0`SwH4b9*mDIE- zj+hF-+gmbnyz-Q!-@^&I&F4LOJzr3f{Gz50%L;dCrBjlPc67fsa;xDLA|52fD)Rb;{!-TKjK6JlMJH)nND 
z%QbNo%mXrvDd)^gXYcmp1s%f40}K)E5Z)12bK*)_hr{*aAv$~R(Ps{8*CGRUL3hm1 zgXpi3wSc+JanJJZS2+1GdY<<*4=WqUkF5~hs~?n$L*`{TDWigUQiqmg5vDg&?s|Pp zj#oPk*y*>HKb~Lrb;8f>a%h!@~wkn2$d=EPLtqa=iitos$DBUi8 z*d0@0o=roYWWl`ndSgBkdc?|YK0{IQMu-%xM@C2csUCrndHPmdY5FjMoq1UXGvScuO`hB8E5F+1PoC zumhP;MxKu?4UJ0GD&sfy1+3tXlE|9V&GcwxFQ7&GOLv}3N^b*hpez^9X-c_EK zGeGXYx%dZ{o_ATIgR)&a#m*b~>(lG=YDeEyeGSY75ErBaiUICiVRzd1vPpfbORyMA z{uO^OK9z*d@|3H3@cf+1;;1S(0t4=(oj^SV*r2z9vg`<&!ZQ~-T8}=pZtlC9pt%<> zoN|0xen+i|)@)P|X_Ai=E&dYg7-xDs<0MN2LkEJ)p^N!M?8n(t3nuG!%#tB=Elrn1 znssk-$T>C)w0$vO@RxpvcDqnnPY+>*FdJKUrRvLHYDrsxeluyUva=OII-7?U+l?Xm z-0atm0;iVFLqLcNEk{NYFR7BDlXHC+?dNUuOC%+aOzB3cEGXdA8>D7#YA6(#$=P{b zSm^e3EmQYsb?!{j7E`q;dO@Bg>I_e!A7<{HK@k;udZ!9iLyaEQ=&80h3wX=P-)6BF z4uC9Y5{6c;cHzt6A8I?=Kh&Kl(%5HVg=gtvOH0hRDG%-)k>P=G63Qm~AB9}Hhop(a z^hETfTd1{rIkhQ8wO-k6$FEe;hz-KXI?Sq>={2CvYz#;abYP1wFI=zYNYT-zj(9@% z>W*Q5R`bX4ir8!xNrs1SS44^6&V%30p5gzhRq_jS_;)sYy?-~eHv@B@fFFdZbj)-p z>AKIWjLRq>3u{Eq3dfez97<4sF|E6*y(q7C{1*fE}oeBC6>s(+oyceu=w%OW{zaDwHb%oCNi z7FGUnmj%=Qp1Y9tYm+RcFMGARAhz!4k}`o@yKb5*e`&zB*nO{ta)g-!%5thXpK{(@C zHT{6Ibe{-18W|7OG_)Fp*XQ3s8`)UDO$`H&&` zMmB;2WX}&uLXr3#_g&E5`W>~;pEf9Cn?9l{T_&m>#iIx1SsJshU%AZ}!*%$TO08ep zdD|<~o}uG}XB6EIj1c^GaMBLXkYG_TK&Mb{R@A2zwb+yu#g}^Zr={SN%?ENyV_jWS zw8q3+J-hA1ro~NOycEa-U}8uMkoSpoVwR3lKCOKEV;{UwB=tz#-JMIQIVHFlY%40P z$aR_lkJz?DysSTy9-XyZz&000h0G|4$mTMwcKO|62!|;;PD-;bVvcrih3P*y!FoTl zDgJo_pNG#A<(Ym(qTnPtQd!j5EyHbzSS&qvedMaj6Yd4SL`LzN2;;MwN@tBP??(*1 zJ(#>0#H21~)kiuXA>@^fv!mkhFo@tXYD(*ydd zl;B^DWtPlTFxaWkSTu7ge<5HmHg88)EfP@KsN^9TUZ2xxYs(OkZ{do_6%Qc|;mQJS z%J{#$oaeBT;&>7d)Jho;%afU}4CEhe`D{G0gjiKn+}pd(*uf>QXe71g-5Q|A3fhwn zB*6Luti-MDUM#^k8&1*>^zhwQ4+dbQ1i2m;`1u{|1hVy>be&-+{Z#MR^8jk`-Ha^Z z0^ANb4L!#xFIXP$!ZV+DZ!s>IfyOxwKsWQ>0C~NtzD&T_TpY9-)^u`beRFE?xVpNG zqRxz1(}T_RPzEWr@87rlWkO9MQ|;TPLmLo~pTKGATRGU|Uh_@ETrt5AeT| zcR{p;yr)&;g;^A#5b>^ZAW*=hT|#U5qD{#xB1JFD4`bVI({~W=q0!E0k*E7+A1A(a zas5cnq&dOJ(Y#_`ngYS6XaG44(j##E_W*hj0AOXGrs{5We5X=H@Ulu-JFt#h8ri{+#+Lz6W$DUeVmXQw0}~$ 
zcMWud)sXDRT^GMCw?ZkF1aHY;Z!{~ZR2kJ6XNc(#5r*oW*SF&D*Ad1)gf05-S@1Cq)!;(%0qwQk^z0RmP(>3Wp% zfTm4t2Ew?9))hG^ojc=OziZUV@LHWpV7#bPi(+5n2U?ls@vBeWybN*$ePnI;;_6H*b>^S|;fmjj)(2u62 zPNhHFk+$b5@eCTyExRe_=Jbe!q_f;PjS%vS!Ixk}Z+47Lq+7gppxNp_G(Dku9OJm9=b>Ep7H9 zB!-ZE22GRMerNPqo?m?bPoMAqdH%20^M5^e-7}Xn=RWs2*M08mT+919w2!nAh{yDR z$pHukgPVv^i+6rI-I%{~#~_GvdnfiqKwt6LUs- zi{MzLrB+Fv_gk*@CqSi)Q|H<4%o6n@X`|dHObiU1Ev+mLm>e`*9SFCj%b7E$8F(Pb z*Y9kgm65)ElH<6f6{yD z^vRzzy`KVsN39G%ofF`ZaJskzdP2|!dcVX&Ts=WM8?FIb(a+7>574!M)^YW6aRW3N z(7Swn{Q=Fr1JFB8{)cuv{{!vfa`LY>U0gi>qJNGBTnTQx-#h5cX_v6oKmIE}d`|}h zeytu7;FHrU(AWxm3()!I<8MZ%IRX93@8ls1K=T3my*C)qAM}J*u(36u>Gg@OK?Z=A zkPrkhaJsqdHv{x$KyUXueTdFSdby5=_W=_?1Ku+n^*(EI6wu&W2Dd5Watd>wxPcT8vy+&$ma;X z5A^#^pYt-Hm#@|d47Q^889Z}1ayB5VL+d953-~8VYQ!b?)wb^4aNnJa`&_TqyO;4GiUYb?bGW&ICFY+-qYKx zbw6!HFW(O6_Mng>e_o3_3nm*V2Rvbf&YZQR^9S^eQQ(w|DSb|ZJ}}Bc2O$G!KO_zX zgYzWh51oR%+h?n0!CyarIRLppfsiNU4(Z9k>ID6Do&M^t z?obFQr~Fm=^V;2z$Lg;kt5^6y6W|WMkQsCu{2dHvcW}+0ce8=MfD-S&{{HjsuAr}{ z!AbvZO_@0NpJmL3%=?**nGZ1UWE5r8Vbo$g2tIYesl>RKQRB~k{av4Ff~kvXl<5Q0 z2vfhacU0h?HU4Z5GKWS$AAfSi9dPVFjE+$nw5rOe&$th?587rFVibjTGpc|)=`kX} zy;VW!f9z}UUnBmTmw)h`O_7a(Z6lj5+m2s*ut9c%>R+wUIVBGL&5NJ?I{6p34F2NM zpU?Bp`~UrmH{=Ftnf+Q5ei>c{Z-qC&U&CwRRggFw1Ahj82d|^kf877T>a+3u>uy%7 zPtFI7%KNW6f8Ngpl+f=XaV5n)8W2R6{q#Bzv?R1D(_uny{w@o60YMsZKyJ{*t~k9G1i1hi^n3z>_z8cP zNi2eCe*=Q*I$eVULw?p{q<=9%><~A2Q$(PxkOZ^?j7SMmgR~%B$PhAxETE&1J>&%V z>;>j&AQTFrpbJn8bQP?QNl+@33FSZ!p(3ahs)A~v2IwX97V3mPK!aeej6*XJ4kAD# z7!1Y& z&tVO)*RU>FKMV_-f-S;U7~l+?3<3;W7^E2#81^#gF_Q}? 
z`wWE)l?)9GZy7!?j4;eFe1k)94!96}8+FWdlb0e66Vzyslt@Hluf{64%GUJagf z4}1hZ3;)5$%*f9u&bS-QS7SzNMpwoF#z@9%jH!$d87mo^8G9J9j5tOr6BpBFCOIZe zCKDz*CQqhNrWmGVraYz!rWWw-j586KnVE%{Wti2$yJydQiaDJ58gn{x3G)l)F6J@j zZ!9b`NRR z9NRfGI1Y1obDZb6!|{aU6$h5%2PYq=ET;kIan4}Q>zw(VFF5--m$f9a+!U!284_g_l@&cK z8Y-G0+9Eo;Nnn%KCf7}In@TqgY-ZRjyV+`U_~x9=?>2wivUQ8mmNQ%KZfV*wyH#kb z?$%RVZ*Q&NIwi(0rX%Jpc3TW1HZ3kFt|xw4{Em3DIBwhKZN}Szw`Ff@-$vRlv)y`o z)b^6?LlW!~dnG(1Zc8*u;3UN)%_UKi1(E|&Y*Krryrfd3TBW{A%ShWw$4Xa8f0YrI zF_AeZQy}wc2iFe7j({Eacf8-pymRkPpPlJDJ7pPURb{;A63-JHAic8BaP*o~DJl0PUPC0`{!uOO-5pm0;6Re`Fgs(4!Qf#TpEzCEUU zqV`npSyYl$a#c!Gdaul-d_Wnk{7f0IBB$c6lA-cZm0#6dHCDAzm7=Du7N}ONHl;49 zenS18df#4wy_S2g?R}-eq@k~Y)~MC^p{b^MR9o$ z6IOeyP*$x+_>VXrd3=O$RR3tg(Lw7S*1^_|HrzJHZ64Wtx7}}>Xggx3U>9Nc)?U=! z+y0pYi-V0rp2N3ehR2eQjUQJ%9&^0cQN|JJ_}WR->9kXwGqviIfoE6P{yUZKXh#l5+zcs2Pd`I_ssH}QMo@5Ixtdt7f%P)o?T z!FZkgYzye)n^?)Gw$bJFW%mE_D6_LSh1kvj+PJi9A-_r_gv zs#ofVH2t)adzVeChs}@FA3b`!`Ef!4LqTxCRN?W$&Z7NA&x&^!XO{?-TzvvP z34SvD^u*KNQnS*=vb|-+F^ub;o!`=;ux+S|%^s_!b=RNE@r)!HjN)H|MaYIIh2X?N9kBf1-U_V>JeZ}R@_ zheIE_daZju_8spV`RMlXYyatf{6NS6Y4H3frcZH0JVVK!H-ElAymPo@L~W!FyC3^* z^vLMonCsZgm!L1?@k8I`{&DGQcSXXio4=SHX|!LJZ2vf1RL&egt_aE3FbR&Oo4=5NWif zEfB;~4MCJZ8jX}rqfs6JUB4fKUIhHK3F)=zHfWYSo$4H)r+@ul2kiyK%L=;=i-N

G+O~(8Pwy~`O920Nr3-_)dK3$Ykl$+Xmg~S{J5|>dk%B4SM{a!wDc;eI0 zo23rvw^LoACfhvrTC*!5p$$>>i6u!i=;URqVS*d(^qy<7G{|cV|6bla;;>WXkt@MZ z$_w_53uEe}{p0dq+Z>+lUt})9Wnw*BxChc+VsNG8O*zOal5#@^A4!H#pYDq$ptTO; zx$llg+*j|%ob(%vAGFnYQKCzOVh&M~(O=_<$~1^=PKyRzJB;o4q3M4aO^`;a)1X5- z5+&#o-4{4+qT52hC-w61MFcLR2SaQT!@V9Z?V#{tn9Qp<`Y)JROXv0j&r7Onj_0Ku^ea%DOx|s&u$|J^tH^TEGdU-=$FDyfc4FzDdZpsNtd}sdbFqUZ_rW~k}kvF0zCo3W~%z!_Z1xXFK_fRY+?DZAtV4!!IAae z9~}1v^y?m!0bRB>U{(U~TOc5cK>Og_x>oYmQETCZr{FcTC%&e?hL_RHL;J!ht2dzE z1j!PM_%c-wq{+v-Bg2%v#Ow4ceievLG-&k(|GyZ77_EoCrt5UkAWl3MQIU1sd71oB z15VLIMJ6pLq2@?3KZbEx+Pevto2}6K?nme06tzgQZI>$cOl?WBiz8Y=nzF(8z=fKX1jJTpA=h0EEBn4eF5|5=p=MSElCHaRV6i(nPN9QBz| z*EC9sH)_^#P9j98TP}sVfA?D?U;}Cb2EU5oZc^V5FI#AX)h;?YhX%bH3eVrUuxxuC z-DpHQg6`7Bc~XTO5@?X}*|jCGg>NHb6$#|jj+Yi-27)<~H}w@Pn+b$G8gxHwq=USx zLX7G$(?o;3>(GS6G=eX&WJAq}Gn@TX33hj|Uw$Rto$?N^ds>=L+p_)w`>W~avhllI zuET;^&OoI4I3T-?Y0xSE6`2gWp78cC4Vv&vSf}j#KI%WJymrbNktNX;>JrmJgJyONf8YOt4n2W#m6)+CSj%8le$RL2dh| z>=b*p-jw@I#{KWy{NK5`zNlDlZf4_Bzqj^*w>_o#O1*^FfJf{jZZ_mT*Ndj_CqA6; z(itA*EAFvyKk;V#Dnpbs1cfHH*iNrNJMPCpwD%MwCCS}{%7Aq6%DsLz0BJqy`%14i zO&HP9`Ng&~Y0-989-<_X`CzZa2Ao#{o|hE)yn3}$7Rk9!we*thkia>SIt9LVvsq)&zA6uy(Fz5di_YBa4B4u9bbOm;9<- zWO1YaFsgyI<|Fn)HS<~~u=Rj)ZQcX4jPq**)6dqy9k_pU2wzvN_W$VvfYUxu^e7i6 zs-979*K(;AW}G&s867L?LT-3Jh3&z`$`Rg^dQLajhKb5|7Mm{zxz!LVE*k`^|KH zO-Ulc#h6^uf2@IbG6YfnoCZCMZ;SA^wel46TqD8O=`j|S<$7ISEsC$go*!e&cSVwJN<88pvpaFk^8zt>5cYoDreN8rM z2QE4Nk@si5K0jZ-qH;M_&nV<+Y2!TP^qB~_~pN`xX86%lJ0#1q>@f}4G zKF*TV#;(g~cQoUH2RASScCS0g&3@s1y&tZN5d)SDKM?Q!_QHHx7r1*M+uv9-|HBds zR>9v{Gdo!@U7&HgKWUBW4J6Atn%d7bc6AY7=jHxa?y4Z_PP}I{iYwa@ng6o7EhTpO z*zSx7LUzEG+(I4KLhlK4Wm)-2ck9<8C!sHK(BiLtrEiloXbY8}yo=~e+C>haJ0(Vj zHZ?jAL~`~;_EB!;AG3?ddEHZt!l=*U^C)SS37yibb`d++I5z5Abu%FXqLgv@{~LxrCm^-6P zMHKo@g9L~cjrAIx#kIS~97W$Yi|(H9&}p@-x<23Qzq@z;(zd(yn|vOZe%Y+Z7|0a1 zR+LxMAW^a(Q32OyeN~?rKeoV4w6)3*e15&p`h)K-Po&6QA?xd5hvS3}SiU%sB%#6l zoFYR=qof5>Y&!Hi>*a`Mhu>F-g&Ip@0akE9hdyi-6NkMBp$s14`6wZ9> z!IT4Yl4rJ;&6%kIzjV%!-pb0ZLPb4BS-!pZp=j0VL=vXVCvlODGe 
zlMmpyW69Q!^wcPTTKiA~TuagN3s=rX?xk3V}k zEVQ86ya$rgRg(K`sx;>=DK}{EvNFz%pWwWHjlJ{Q6RaG~0Du|Ni!CC~0%|yYXDLeye<|hHED9%ObFaXb^GQwdzTVrAM5;%}|BtlRPNf zftcUm(3Wyp<=7DQ&pCCh{MRY@t}TN#|btw{L{7p1{8d~H|*QGvgD^Z*-1G_ zi3KkJXU#}as|;1z5iu(ff6)|dRy%jAY5PD&5XbQbfpO#b@o35D2ZpDG43m|b#5Hd{gS-yY zbvf3_IddRQxPde|FpVI0+lt{*-?sLO0ZadKq=b*A$MZ*Gz|zmjH0_b@&awlRKF`C> zj_}BL*o%Cl){H!A47qpU7)stZUt3*DTBO`ewcwY5klcKvnd9?{>dKqh8En`mcgK=< z^28V&fF<3IVrAU?VU09I6RzX96Y6)8H{$)9GGjF>@8(Q9N%bf!_e*~8m9CC5R@g0l zA;%^P>A>ODzivENIHxnQYeq%1BI>cA zEQ9c!D7McAOqSf1_iNFO0}8E!<{`afFYA?X?FILNvsY)(0@pU587zG`DlAmlC)6v= z*+=a}#2fbb2Qkhk*7G?SktH&Lzf}MM{H>el{#KLY=tds}F}uiaXTEL3_`<2wi3cdD z5yJHdxr1q@7fz7Sm-TZD_RW!$V>r#O9Ocpd$ew(H+oHGjNbvwF0_Pu9G+*geT@r&b zdw9IzDfivr{Lf!+44TvyS%V`jnud8B< z^K6FyEkeSRWBXc@CVV4+qk5>Ka#G8}bVO$xQGHaW1dnc{?#2la96Sl0F(||Q%AAMC zHj(V)OTKY!@N!LTm#fjpU2-XVKJZ%hu~O9Jd=3#kr=AtrZAopz2u}2&g0XUKs@hV; z74L;U^J6MA?3gaS+#l!JH(@uFvd!`SjhziUUigXw`{Ou}a7DG&-q{X=#JUvMbGG|yFd5SkIwHj)HuGFoZ6{3x1tCR^BgtLw_;Dtv?AZtb9wW>b*1Daz1mBIu0(ePv_x3gx!BH+eT|nxM66_r zpIS+2*@qzH;Y-MJ7TjI~-|voeutk)Rob8`?YZso<*|tb7DVwT}O{-C;2~DZuh#$N@ z3_uFMMjpPd4UhFsc>Ydo0|f2p@xot&Ai=<{DgTTinV6H*$hNVLIE71v%YC@y>Z&7y z$WrPAGE0pgykyS0(Qd)VlMW;+wnEhxpZzP0!djR7da;cBROr*1?vo~;zYvjTn;PZsT1g%-* z6Uy+SVi3`I+F{~up7)WLpH(o6LR_*w{aCld*c%P~CF2{+c=YoaZnV^w6zjZwS#3>& z5-Kud60Z7RCSBMqrYk41w(1oiCkct2Dg|w! 
zjyQ`8!0KWypVD)w#wm(3*@tl$)Fhn(zujMy~s^1DKiYZRVzQDyIrz`LPL~d4frmxU(!l(Pnf1bBFGskuL3WU zUzfxUe2!SvUXO0=4)wPr^`uhnX3bAD?A_yEzg=KWk-a+wAJ zEWlO}xfoLbP=LTHfL?PJn5kzbGT{|o)(cgWgeri*<($!szDDA+ke=>-W1oG?E$-Ez zqu#)qqhpM`H+6;A31I_slc#)mz--j^Jjx6=>4@XLl+}Z>&U2cwwK|6Erg$1FWFsZL z?vj_5w6XP))hnuNq!X;&-Qu+F9ro(|){nR1A+~^|F!ts@- z@OnYw!|s>DBO5J4xzDV#POXjM{O+9U1MsQeoPPAGVgT1thyO-{=A`i3{Vxxw-OZjh zXmsMX8|kjlsIU{|t|DG4DDUaFQ+j^#Mb?$`(3USxtyk8;to;em5CgE5W0Y$)hLlwD zR;f+LsM4Dw{O9GSMSq|PN>y=v1RorVC;nmPfbiD4m}jcrm|SA0ncNdXBXNed8ynE08>7lgt~kN)3Qg%D_J2=rrNSwuWd+7~e$DDw_Aigz zPi#@9pq1UZXwT_$l}Yypr6!(+aYfhO(mP#hvAsEd-}?eaUgkhpnETo5s?+Un9&_-> z>7QrcasRwFd`yOX+xY^5pz#?!EmyK0=5$S1sgouDGf~bIgD1q}MZG;llVkHvojb}~ zpm3w}RGGbnvS!jQM~6o7ONx;4mp!t!nZ%)yBHK~h2DBJTqcb;#%dEvo?8DgtdwKh0 z#*CG2V+n72!H?peOmPJ~%I7TvJDrGqreM{K$4z+UX(m4hjTiFCmfU#aeZ*c*sYZx% zODc8y`JIe%Umme<%KA{@E2rCCqB<0^xJApnCp&RsdV0bzhE+V;?Deiv4fe}%9Ljqj z$YgI87yEjyh0FqPO&d(Elb6HXJb;x}CVw^^A4b`y}@hdW=zf zbhmC>gM(c+5cqGSvFJc17s~aGp0iji6Adb%Vt^b8a5nF2Lp)u%O-V;dK5!NDr$OB_ zaVrkbahZ*=_sQ`-sK9U9Yu1N#BhDr0;)~+{b7I>6XqLTOtHe!xuUG9Z)Q-qOX{7GF zo?Gmlno+MoO#`yjtU8?u3!d(62SS5wk%!8EUSB zV>l&%x)*1YG)55`)bEu_-4yOp*mU4nyMfn5&uJRq>amLhRO;T{6c? 
zn2A?gFfP;=_|Ih{C8c5Yo`n6HM={<@YI95I@>zv|@i{G2-Q7=vRj3->eGK#{;Sn@1 z`fClcg9brlIU4jv6hM7}D7q54vk!$Bv$(W!=w>h>+=3sOI%)Bq|0E*QXX}Lmr@}CI zsXYNc9P|1`i*xPWFGkk5#L@Od6NSq<(q7KIs%YUEb)gKAx(EXckdyx?w;@R8R$GQB)v!NI!j z;s=%P_T3y`o!CUBgM&8+zP5VvVzV1>3ly@dMk~Yx55rLm#FQ~-4#$YA4j_0N6P&L9 zGUAHNtzd8WU7fE>4c&Z;v*z4o1EN@8m7FLMqhzCY7ZWE(Ha26JkPdxw)qR(>l?$f` zivGsrnSz>~E;0KSmF~}nz=ISI-%DgRhfb^kf&ACO0$2dB#*1>}-fNN;(&W`(W)%)S zvvO|5s@|7!Qy_m9k5$zen;Tj!Gkwy8-k@{2yXqyy;)W{v5)I)|@z%GI_(MMV4LomW0)dA$82n#@%;6M2kKc*;u8-CK z_oCVU!E?o4lY`*X_>qK7Qr`-kcjV%R&PvT(@FCL@W)RLrv?NsMD3jS|F)cZ z^jcGelVOZh2qeUtB^iA@L=@AAj?NT7obnf?!!>b!@uS1D9n46x+022J;0hYlPo4un zRL>;BF-{X@Eb!(P~-x;AlF`p1lWEx8knQ75cC3<5sd31qc1HXPP6&@I0udZ)QdqW6<(X+szZ_0_d}U8zrv>91dU&P5 z*2u~_=>j7kYYTF1bLX`YAYil_bD<++i@!RvYfI&yatOUtF_?C&z?Z#IWF%t~TZYET zo7+H~1}?Xgk%v{I1Q-x&vT-Yd5kyAzKWwkA97m_hBIIYE|=)1?Exi^!Qd;%nD<=^-XuXNh3?F z<84I|*yVP%EN;bhZhUy1Kz|MagjWCpT%C>pkHstxF#(<9cV8S_%mtECWLp{CVC~EU zIY2T!r^!-StvkDq=|534+}B2}zh!#!PDsJ4C(Xq#7#dX=Zxmc~rgQNl+FOC)t0j2{ zF44OxgAecij!=+o$10TpELdG*cVx4c{g{33(UWY4CC=-5bUa%l`!V>w)%mLf0LvT- z4b|#b(Pi4hrD&h2USQ^VE>-@>upN*9-eKvvXG0Z1_t>G4wFx-uWe{O-3iO6Ks6#Y} zy$3XW2~$ok0q=vGcWb-o4wj~uS&K5Ua}`|zPX+C^olLwRGOy&KX@tA_90I}^#oLtYGP1p{!_(kFC3 z##>>F*AalSx;0f9$%jjSCsdSeC%iplf(AL8UaM+{e-tG(M-sy8Xu!LA*rELUzW$txkSE3U<)L3V=@Q`n`?{J4_qXs^sG z8AABtc=u5*q3;bD>`Sx39LV`OkHv2myv7-B%5oCE!8XS)#z5C~vo;`Xh-@Uak0{{8 zY`PQ4ffSyqo{arQu?gGMl)Uv^N3P?PqW;51@oW+Qae4Mc(>(U`4b|M2onZ(!io0~eG_4!@@uB+(o>eC$uRV(#Zc5BYVMvtm|gd^!HWZrY()#z?j9); zecQCMJLRIb*%!Mh0)D`uX~OQ)E9!B@$3TnmI1Njd%rME?aBz z9h;vT-}pP0*!OImiT5wuMQad%YjJD9@S-Q-tQp_B`G$+JjsCIqFrp8L&>MHwZzlR2 z#1*F@F4c+!Nfjq~a$V!rawbNL+FWM@o&rHL-b4Y;5%&LQXVC|GDvUG$NO~r)y}^|; z)ekT3v3e0vjq>#0{B11Xbj7qjh>}Ri>?bwmZqOrzQjmbQAi(1?wr1{Uz>7x}o zuE->Ozj4fDlizhi|D@~67o%9kj`qD7=0lwH@4q^_5U(nB8O=i$p%jgB9eYhO?;u>y zX1V)i{|zJ3GkMqekXF%>H#vJ>zn08=GEy?jU1UaaC46p88vGa-v^3;E?KyNT$Z6p7 zkbKZ+M2W(@FItOw0XIzOr`)NRC-xj7hT)c8q6a>Uf2dt3JBIc=#S@mX-$FaHHe+CK 
z-|2ud)H~&CoQbd+nJX&J%+yY2A(Zo|S;EsPQL*!=oe^O=t+(D(R@UKnmDay%((*Nw zVb4v_WgD$E7@3^COG1!$Qwqt350U(htp_t3>xFUOsHwyL_4kf_n(h<*mi{HpBfaUz z6FpIvCZ@q8_U2o2i%5S%e99aX-m`J5WEIKdW-*ebFUUgyoJFRVQq{85Z)r zyMd5)HN8*CX!AkRyj#*sD@kFls%vACx0bb?sW}lQH0VTx|KQNE!SJnS!Nh$tgs<+g zD;??!VZP(x)NOe5n17Sl`Ol)2cyvpICyCp*w#H02pQPH-K66(h(b_z@>&@6s{(DJ- zFO?HvKi*Qe8j?1myY>Tf^*|8~dKJ9v-$H|;oH--4iKhk|#RQlA=i0Wn&neu$IHxW& z=;wOfIO+SK3I9FGV#QBc_*tGil6p%dFOQ{>8 zBs~36Ws&e16M5k>=;-`&-^p+)OHO2$lVlglog|7ngh$8B>-0bH$$<5i6i$A`|ETQK zJX+dJl4PHGF1%SOW(zAz90rdjR8TS_;mBlqx5(C%*n-avQp=VRF)Ne7<%uSI^&~GT zy_732IsC@8U|#JpWnKP=!;7j zdwCV>Uq+msY0~t_md+u}^bG|+@_sD;U~y{@w8Yqry1%a=^EM~vSsH}v$KX>DxL3|N zkQ4FApJYJIMhs-)rwC}0JGF*VLhu=Gdv;8rZI3nj{d92tfp`(?M{`3`Dp|wYDRL$) zGMl8rsv<;{@}17atu)#Cz3I~#y&osH1TSBUZVsoc2iZT*4opfu)LuLePk4^sgzdPp z+$Lt#ceah+G;yD;aoW)f;@#38WIU`dT0Zq%QoEnU_fd}*`|f<}JRQ-GZq%SB_!$7W zIB#_V4N|%Ni!q<`pL&D7!qF|701HwVGCkJd1h61U_v<^h4_X2!NOtg6UV|SgiZdaX zNtr?N;^vR_)0>4{|1osrJ{Tl7(FjR}8i{G0k0+OO`dbYX`A@G-GyNFQJzn1OJ70Ni!g!6*IV15X5$Z zS`YX8VEjr+(hoD;`(48(TE{;r^m`KwFug;v;W4WOxth!u>nj%RT5BCH6 zy$KdpKLRFt!qm*y>LxDo-W3T48nn62VvWlkEUB+V>vQoI7L-S|z{A4c(=?z~H-S`c zboQb4_4=JnkP`(i7mVEOieL7`{M-4V@wvvakg=*x1XhCb2=g@qsSHf6-GC=|t3SQ^ zz5-V5wLh#=w3-X>YfAGPgu=gv+JGX4o$p~P7G|eN$5D>Nq&(uS5wYe4qCOen`@q0^ zUnZe9KkrA%m4E<=wsCs{N#AM>3CAzmgAM0 zH2rxHnYTy?x2dX3)KjtPA)ecthkP?{_aN9vTuv#<_(vZ>gpvgWmw&u7Td=?2DF<{c zIbe5j)xH~bAA&#tg1}vm}%PD-*GKNY*d}XmxaY?YFnZ0|txl`SdqMz(95c~w( z9zAOdQky70wR>+(sIO|*=DqoBuPu|l^;t+@69$Y3_LQo~&8Us|jn}9OWS|mW%4yUL z?bjL1NBd8IuSchca#~VHrx~LO2fu>MUrygoIwzPSAJL!&b|ra|VH|zNGLm z{|g8&g#-j=aAx#Y1)M_k>~eh2EEQF$qgr=;JhCld%bU~Qs6DcrwWb>^$q&X@7{wTC z*+_aQW}<5juxny3pcKL$c%i)(rRwoTvnjg?X;QZ(zqyC|(?gYiP*K5c^(Q zXP&tf_|DCB^T7bw+-xS+a8;9+E8cBwGLt?b8o2(@(&nI6Y(+(cAIb$UL6D%-0^>S< z>t15~2>RNYuLJ&<7d+GgN-hVdg^l)D%}|${m$i zj&8&Zguc9qI)h&zOY`ZCP!!({52UQbe8aq#uYaK&Cdy9;@{H)CIA<<;wx#&@2M__k zAtY1g`L-|#bx#m(^B6pdS zg&)2_J&ng(V)2W+?^h0arK}rr_ztRYES22?*aF{akbF@%4Z2zIZR4-dft+6pN)Q*w 
zhu;OVTkjZo)RUNIzSMK=bKHRXQ{+7X+43*LuRqq2)R7XSwoM69)GavA77Cimm$1=$ z*oWkoLnyd9jA*Y(wL_!NK2n~I?$U7=9xO*?iQ*Op)kk9Ic7lBhcxVvOGgAzR9xlpF zN;V~*2Js)GD9<^2kq|&2jlJBd-J$UI*20+ILW;&-0^&C@QMIVD_@~H&UaLJaRuZ2%xOG$4vhRVV zqffy*a?9Zv$vVES>~#RkE!FB)nY*H;vw?zTG2;Brol>Z6By+F^l;0i(d)FX?hZ%^b z9p-2@pMv0hRaX_}J>%dmiZ@52msxF$tO5GcmGcb8L#^1g3C_V^1NyEAQ@0Tij?sHb zIzHihcc5(Q<}i&#gHB(tPXclyZNAOVN4kHGd?{8nL`cHA<1AV%i2CI<+?+&z3r%&h zF*gePjGi-mEf|kn;=)G)IyCd_u&X9kD18JQ}>nmvUMLLI+eN9KwHL&CQ z@_Q^(YmXismlB$EtGHUF+&kSWb^NhX>ca=~>7kUF(6B0c`t+0cbGuW$Y{oIetmA@}PeT9lM%emOwI{Q*d9jLa-ZcpG#EaKN}zYN<+Kv!V8q|g%@A! zD{bSW{K_|$p{`VD(I7Rv4cYvuowsR`Mr$55_Jh+p+tK`U*cbx1d)IUvzf}7V7Rd$b zF62h4P;6s|2ug(%O7J90RjcJZh$wlpRNp}$c~&i<>Rw|!D1)K$v#w`moE0OxM1fjo z(LXr)p*0@mj!m~Uv|vW2bX4|`mBaIgOw#4Q$fN|RRfSxsAe4Eqe9C$_VC+s<4`fyg zJlw!{`p+OT5I2AxBCwn3nn5;w$0^n!pc$l^uA=~~Qv@xLIr7Lydg#tp4uxh-pooMy zyHQ_t6$3S7Q*DR(oM)&fmN&JFX#~2rK3@WFV1m)JYzD_ z!5a2mGvan?MB&{J+NMX|R;EtO^_=pxdAIQVf=fA1LFYQE%nCDrpK=eFwH`458)>LL z_{@h(?SLaoDJP7DfLlD-XQA~{gKP{TUFPI zCeu?Pohc7M22iA2Q!#O4qs=)VqM75dM{~9I=pb#&O>CDE1QfFLOErs4bIgQY&pYaU z+Sl~!!t_UG;V;m)HS5>!R$CK~0xFX8zXzOJg|Q$&+Sysk?L(0e(QbuOLxapbOYX}} zCiI(HsE=%n&g`4R7uA~4d`^YGYX(+x>iiUBVZ7ATWU>qY# z;k8E88@Pg2y%e+ioR*qx)w}cvY2oR;-3|`>OX>$qEZs)~q6VL}E9xQ8hn-!b#*SNa zCe|CUw)R`!^BAea~3xV{(uF`6AM=#7JppS?xJ z`pLvXD-HQ$bLzZ<(k;oxKohhI92;j5(dHjX0say$R2W@(w8ceN3}xIbj>vfx_d{Y; z*_0|?AF*A<|0u;tSw$VF#PiNIT;BsaUaPU@DN*GG++scm z-t1)8M?R?@cXKT^DgEGJPEwu4mF}?ok1*QWreF6va=yK#0oP-RM_<;FC)?G0AX|h7 z+$Vb4S9YDHYge!*A~e(9bByP^H)Z=EUCK$VEl$tg3d9~eZ@O5modwZwVuF95(y04aV~Im zbh)ol(~4m6%>tjQdg+>~VT|Q_zFw_I-)j_?gx+u*3wXkFwN&uOQNOqD33`SQDjPrf zvf3878C-#0UOk^6&^b6jM88})vTOSBM1+X{^3+F0&rPDoLcQ7pEgzjIG)*X&8R7D$ z9y~HOO!?43(e?@cPLCnZrqlPd0^8;_cW|Sps@aJpR8gZbBeM1+Rb-ONr$7WQv1Lw@ zR6d1+2KfQoH9=-h437uCf3D()gG0>W~a_k$q9{-hi%$KlWuV z$*lw<4it?0M`uMLcJErmU*2%{Tn3 zaP9tV&42`($os^#J4dbSPyeCu>F17M>qzW1{`9|7QbEq^nwTocNnHbdI{WvgW9ta0 zb#^KH_R~4?Gwt>Fb}9P<<%R|ucdAj&2Z12s&5?cGDi6SRQd`~yT96OUSErIh>s5jO 
zH8Kj}<_wDwM&MOIUS0Z!h@|5m)z2wLnk{Mt?Fbw_Wq9@$xnn(_$9kiEFiHn50;kn4 zUWxV61ivzTr*EYx-H-RAw(Jh4LB`T0vFMQ-m_^?VDX`&MC78CxyX2PhTO)aTBDWzW znss2kxv5(a&DFu5?*>WL$iBPrS- zMA|2>u;(Ri8pV+JU`v?4R#G?NVs3e7PYH#SuWAWPR8(Gl7bGzoCw0`Z{aq^@1m4V% zp>sZXbTbBq@f;Rh73aAVCE1i%AkHNd`|EomnCRmCN;{v8#0H%c$~PMjZY5W#Qp6>a z4$*cLb9Jt0*SCnExz8w@osuDM2|iPEw@$v5ELOU?QzK}J|JnwYw=|%xr4U#M7K*jc{(?JvE_4q*;sl?#;Y~#yvy!Ll)Y(mXMRZh@0OF^yo#+GB zlYFuqnrNnyKs3XAK}=GkO(_yh8LYWL#(R+*t|M%ltcwe8)#ShRs#y;GFds0sM?O<3c}}60(p)TN zKy=0RHR(vb*Y?C>*rr4+F01;h&!xuaM%Qjk(LqJoqo&3T6D^bsH<0ekpAi0XtuMei zDYLX=P6D4ZikPdxhhtf;kqs$1STPP7TCmk-TN$lvuhXUx(B`d@Z@?Ys8~9kA7XCcVAQb zw*T3X_CCz&wgN05&rh9lz7*k>$>2%7pScoLUGmgtEb}7^)m-95=68Xg@-+VV9i~$P zeTn1*BlXahUi9ab^|!ft_3z@x)&%e!J@5VQ9uxm!qqwyQt5_8_xhBXb*f-nGK%2kGMY(mCmE^XYUFY51#iL*>Dlme=b+U6Fc^}T`GAg3T zP-OreQ$AC7XhJL4<}S7@I`5{dt((=jWi-=m{%bchN@OYb&|V9Skv*ST>+#>upAO!{ zrPFjcs041S&0FCq%?8ct+dmPr&sNrvHumpUW2Fv}I%M80 zsf~>fVKz@-PGZ{`a;{@H`}rQamw!3m=AOjc(D~Pnqyn^4Z00e6F(;M5uDr;b)Zuzz zyaI@+C2S%3H;S5neqQhy?fye3MA0NTNFn9Xz$?=BDUUZ@hDykBUOyVOR~0yEoH_B6e<^j~LJRG&BV0sPi|KMB_e& zs?OCvZ4>xZ@_Kg8y{Cwj`sR|gzu&M~q`J=gby76m4R1jGpZ2aj9Lltf&qkX_v`*<@ zQc8tvsg%Ra#v(b@b`ZrBwBi!y}m2cb+P+h-}mix?H~Lx{PE2FKJW9~_wWAwezzD$*58QD0rkT|oOTIXJm2I& zU%PyrmzYU_wsGBXPMeuuGUd@;1rhKa*Wb8lA`|B6D0;ebh>EX8X0%5Dj&kQ_a8fSe zu5@IyNfj5$H6FEbqI!BC2{fTg1|+{5xnv~!z9X#qR!FMLGsd?l*}`uNHpf&XtcY}o z3do^*BpUDVtR;F$1fnDxRk;=p>k6I%fT)bMTK{0QMI+IZ^Vlu!7iyEz^`3g%L&D_F z8)cPZ4>BGvSvuKP1W~qgwPp};BK0xbwJa&S-1@1@oJZVpSD2>8ZDV1Y#|Xg^QFn?JC;Qi*A9K zrkJS-HhKnu=jy`JC&rd|uT^M}(DMlMl5K9ku`l&trehq}i-`;bG|Dna4){xmY#^8s zN^Di1FX$QWoc4*CrVbDHSh;O^cgNS(s@qpS!CNW4cR8?^@m~n~zu*D4LK*ZIwJUMT zj99|q$l|=3_=o-0-l0hgL>dyK+Iy&-%b_;hJHv_7y5e3aC8jc0Dy2BDiJw<7MlM?` z5FImXGYhloo}}50OY$?T!_DWnJuR32Nu;A0C8hGB`BC5g#7)L-;n=jO#I&y4^_H$2 zDdT$2Cqnx&>#|gK@hvn0wOx)Kb9%FU%`Y;GR~<0?dGp$hqRDZ9OlMc<00^9V{Ciq_ zH~7Ku5>vp%Z3E={gefuJy8B}W_H1JsC^0gYbh@f6g^gHa>Z7%K`+VNKlVfy$G+a`oI2BZ&Iu-XpA? 
zBtw}*H1egb^Iw9OYlH{B>CzzFEI02(Dxg7@(A z=9nD)JbUgV}EvTD|t^8 zWStM1__N4r0G*qCL^WPV=v_^dn`xSwstoGe0%0}A!E6~Mbk+&M<3!I2qB7(SI@YC zJa}xkP4D8pZ!nghv%q@Z4?F@WR+l7poPB-wD%?;J1De>*HV%dPM38En#%irr>Cg;x zRV!N;LYI`_SZ3yaApWKjxghE{VUBuv|I6(IW@#hibJvN*-<}7z&wdmgIDwJJxg23$ zxuymK?$e!HZ`=12bd6)v?41(LMkBmhLktfSmc|jP{Q*+&C3lPy6f|ub7Lw_o@m

    KJPS?;6=bJ(!${ zB%l7i_i7u2qz$-?s zL}DmxP-18XkpVsA#SYn8wzx6!W)KH^3`jl^xg#NtI$ev#^eJ>vbVo8I=(=6-8n%38l6TmwOIyiXQouwNU)9zlU@@0 z)>>^c&@&WO1Ld~bxaGOfrm{D(EOgRPdnd-9-&S89@~%Q9`xfN_$OfI%gHe*B>&dk3 zp&msj>GXVp-^(7_@pkr|O$Yr?gY<4$#0koyHRHA(9C?W1G=nHT32rC=iR=eVF^ef- zP)NL_0wSN>48j$9GlM9Cf*x@JzIPcM5Q`_vFraTdwGqh-#EEeQ}vNTPJn5z}3~6E$x`hPG8F4p3OTn|%hh71<5)-%q0F)L} zoan(B#Ap``&`|{#12)G+e_F~A*Q_$LYKx6rSmz2H^?bN(P0VJmC~vHBiM5q=^Ly&B z$BCqU2PZc^D;(abiBji&$F?8sr5tIOWE0~dH)mNN=KyJ>^Rpb9$Ex#ubbjwW#(IFx z`_^px2DuxeEnd&`yK`!F!aQ?%e<;S_iw zcL#HX>vn|;@`X*dSFUh%V-8h3K2mf}ZPohS9_Z7m$NToF89(e6N!j9J<70+Oup9X4 z;2F>3vG`-Ka-Y$V45L7qIlI+ z>F4AhUcN+*M&YGY9K009F zh5aQa_}TwW2neP_c+nIIK!;g+p5EvCk{3{I%dlO>(V(}SfrR}v1Cr8Rd<|OkD2sxh zUo8J1Y)NX~DZ&1J0&_D*5gwaI572aeJwPyn34qie74~2DqKsxWyQKZMvq7Is%;FF` zlG#u?z4IC`JKp(%LEA%OTx&mhO!PVV8AuUxn^bqYbNo_NRCuL|fqT;+#{Jx&CupbG zRlhq~F&!`IaPFfj%usJk`(xH`>ZX}_c=u)ROmXmOEqY27IgFUb2Z6U^wKG}zg9IpR zOR@m1Gj0m+4vzlp377kr>KeLw1qYRwGKZDQ3U`oa3c{pY92hw z8nMZH#S*{qvN5`}A3j~B)%F0@+gf>5>dcL#a|`bZ3C%i1z}$j8 zoFARYNG#gXJ=juV$7p1rmyL{$d?)4)&ofWbjToCMY8!P!Gqw6I^87FH&We$PJZs$} z*v(_BMLSpO8Z&NU9iLxLI}vm~!Emf3T(5p@j@a5t`I9$R8@D2K^abeUKs8eWEdalW zY^x4>V5}Sevbbbo*pL;LJ!NY_MHRc=f!!mYv)o%qZauVnZn`dYAIG50*Klw?0*OyA zAvmA^LluzWA>=CN}b}^X@&6^p|H&oYb=)auizLGi*-_V)c&_Ti66<@SZi` zLJLbn5A|EKtb0IV`mI9nm}V&?YnwVHWZ77_sSoTfzT37n&d;zppRZRPf~eEgH8;EM z!7#MQ?toacMxC~n09t;*v*T{9-qQj&AYZ;U1#q4ze`?s5)E$6defbeMEj|tyoAo&b zn;`q^G#Lk7TAB0rV8Aq9S(@-?W>^2-H&myTDHa(wcZjdBI-5Apqcz7K9IN zmhRNF>j@YqYP{QT=i|N^L=YdVyaPfD9F`a9FRMX9QjY^Fx?xY@)3oD41)}b+@J6<} z)&&Nj(W^0=b>IbzdIfO@`gbpw7k}!H+zB-KzvKZ466)~BEPWciA~CmFEd}t$)J$-C z2B>I)oWg$}?K11+7t8_s|NRaUK%ugqcNG+AZ*}AvU^`tcs9dk7FqL>4mh0g#3B|oo$+j25?pD*RT(o%yhmhJyn&tZu5)`R1u&_0`#dnD-)G@~c{~4S>CfLk ze{1IT)J?3GplBV}LIzF@V3o~t+PaTf>)qI+z}iBy580g7WA{=h+_>MxI%tzpc*d#( z8cBy-Mi$14arZUZHRIRe7E$2Mjd7Q;@p-_x%cQSs=Pv92U++&)+Frze91X!5 xVIPSRf1SUcm(&g1o!6P?JmnbOB2|v#Ygf3p6tISC+NwiNHOGCPPzueo{0k`s+hPC! 
literal 0 HcmV?d00001 From c77af40c2f976f5bd1420945422742fced8e01e6 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 12:00:40 -0800 Subject: [PATCH 02/31] readme --- ImageDetection/ssd/VOC2012/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ImageDetection/ssd/VOC2012/README.md b/ImageDetection/ssd/VOC2012/README.md index 94c3ae6..3b39c6d 100644 --- a/ImageDetection/ssd/VOC2012/README.md +++ b/ImageDetection/ssd/VOC2012/README.md @@ -4,7 +4,7 @@ These sample .nml files are for training a Single Shot MultiBox Detector model u # Model Structure The SSD model structure uses VGG-16 as base model. As shown in figure 1, it takes conv4_3, fc7, conv6_2, conv7_2, conv8_2, and conv9_2 feature layers to predict both location and class confidences. -![Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.](../../../assets/Picture1.png) +![Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.](../../../assets/Picture1.png "Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.") In predicting process, each feature layer is feed into RPN(Regional Proposal Network) to predict class confidence and location coordinates. 
From d02de0f53137f490b314e4f7e8698eb60c0f41b9 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 12:03:36 -0800 Subject: [PATCH 03/31] readme --- ImageDetection/ssd/VOC2012/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ImageDetection/ssd/VOC2012/README.md b/ImageDetection/ssd/VOC2012/README.md index 3b39c6d..e5befdc 100644 --- a/ImageDetection/ssd/VOC2012/README.md +++ b/ImageDetection/ssd/VOC2012/README.md @@ -4,6 +4,8 @@ These sample .nml files are for training a Single Shot MultiBox Detector model u # Model Structure The SSD model structure uses VGG-16 as base model. As shown in figure 1, it takes conv4_3, fc7, conv6_2, conv7_2, conv8_2, and conv9_2 feature layers to predict both location and class confidences. +## Fig.1 SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence. + ![Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.](../../../assets/Picture1.png "Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.") In predicting process, each feature layer is feed into RPN(Regional Proposal Network) to predict class confidence and location coordinates. 
From e53900352ed9376a5b1f69e55fb49837f56f8f31 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 12:06:11 -0800 Subject: [PATCH 04/31] readme --- ImageDetection/ssd/VOC2012/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ImageDetection/ssd/VOC2012/README.md b/ImageDetection/ssd/VOC2012/README.md index e5befdc..633d7cb 100644 --- a/ImageDetection/ssd/VOC2012/README.md +++ b/ImageDetection/ssd/VOC2012/README.md @@ -4,9 +4,9 @@ These sample .nml files are for training a Single Shot MultiBox Detector model u # Model Structure The SSD model structure uses VGG-16 as base model. As shown in figure 1, it takes conv4_3, fc7, conv6_2, conv7_2, conv8_2, and conv9_2 feature layers to predict both location and class confidences. -## Fig.1 SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence. ![Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.](../../../assets/Picture1.png "Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.") +**Fig.1 SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.** In predicting process, each feature layer is feed into RPN(Regional Proposal Network) to predict class confidence and location coordinates. 
From 06ee8a445751fbddad0af54c065dccbb7dac855d Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 12:09:05 -0800 Subject: [PATCH 05/31] readme --- ImageDetection/ssd/VOC2012/README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/ImageDetection/ssd/VOC2012/README.md b/ImageDetection/ssd/VOC2012/README.md index 633d7cb..d84b9ae 100644 --- a/ImageDetection/ssd/VOC2012/README.md +++ b/ImageDetection/ssd/VOC2012/README.md @@ -5,16 +5,18 @@ These sample .nml files are for training a Single Shot MultiBox Detector model u The SSD model structure uses VGG-16 as base model. As shown in figure 1, it takes conv4_3, fc7, conv6_2, conv7_2, conv8_2, and conv9_2 feature layers to predict both location and class confidences. -![Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.](../../../assets/Picture1.png "Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.") +![Fig. 1](../../../assets/Picture1.png "Fig. 1: SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.") **Fig.1 SSD model with VGG-16 as base. Feature Layer conv4_3, fc7, conv6_2, conv7_2, conv8_2 and conv9_2 are used to predict location and confidence.** In predicting process, each feature layer is feed into RPN(Regional Proposal Network) to predict class confidence and location coordinates. -![Fig. 2: RPN structure](../../../assets/Picture2.png) +![Fig. 2](../../../assets/Picture2.png "Fig. 2: RPN structure") +**Fig. 2: RPN structure** A set of Anchor Boxes are predefined for each feature layer. Each point in feature map is defined as anchor point. 4 or 6 different default boxes(different ratios and scales) are defined around each anchor point. 
Classes and location coordinates are predicted based on each anchor box. -![Fig. 3: 4 different default boxes are defind around one anchor point](../../../assets/Picture3.png) +![Fig. 3](../../../assets/Picture3.png "Fig. 3: 4 different default boxes are defind around one anchor point") +**Fig. 3: 4 different default boxes are defind around one anchor point** For example, conv6-2 RPN is defined as below in .nml: From e5022d270b27a8f951ff5c326b9fc08376d1f6bf Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 12:09:58 -0800 Subject: [PATCH 06/31] readme --- ImageDetection/ssd/VOC2012/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/ImageDetection/ssd/VOC2012/README.md b/ImageDetection/ssd/VOC2012/README.md index d84b9ae..a7a9074 100644 --- a/ImageDetection/ssd/VOC2012/README.md +++ b/ImageDetection/ssd/VOC2012/README.md @@ -16,6 +16,7 @@ In predicting process, each feature layer is feed into RPN(Regional Proposal Net A set of Anchor Boxes are predefined for each feature layer. Each point in feature map is defined as anchor point. 4 or 6 different default boxes(different ratios and scales) are defined around each anchor point. Classes and location coordinates are predicted based on each anchor box. ![Fig. 3](../../../assets/Picture3.png "Fig. 3: 4 different default boxes are defind around one anchor point") + **Fig. 
3: 4 different default boxes are defind around one anchor point** For example, conv6-2 RPN is defined as below in .nml: From bc989cb91a2d58e6efe685350d424542617aff74 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 28 Feb 2019 12:16:16 -0800 Subject: [PATCH 07/31] readme --- ImageDetection/ssd/VOC2012/README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ImageDetection/ssd/VOC2012/README.md b/ImageDetection/ssd/VOC2012/README.md index a7a9074..dde79f2 100644 --- a/ImageDetection/ssd/VOC2012/README.md +++ b/ImageDetection/ssd/VOC2012/README.md @@ -94,6 +94,11 @@ In the end, all class and box prediction results across all feature layers need ``` + +## Loss +Neopulse support 3 types of loss for Image Detection, SSD loss, Yolo loss, and Focal loss. Users do not need to declare any type of loss. The according loss will be automatically set when users declare typical oracle mode at the beginning of .nml file. + + # Data The data for this task can be found at: http://host.robots.ox.ac.uk/pascal/VOC/voc2012/ To run this example, first you will need to download the raw data and pretrained vgg16 model for the VOC2012 task using the included ```build_csv.py``` script: From 754d78d33706c9d43bd51bc736014042d537efe6 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Mon, 4 Mar 2019 13:02:01 -0800 Subject: [PATCH 08/31] cnews model --- Classification/Text/Chinese_news/README.md | 37 +++++++++++++++++ Classification/Text/Chinese_news/build_csv.py | 26 ++++++++++++ Classification/Text/Chinese_news/cnews.nml | 41 +++++++++++++++++++ 3 files changed, 104 insertions(+) create mode 100644 Classification/Text/Chinese_news/README.md create mode 100644 Classification/Text/Chinese_news/build_csv.py create mode 100644 Classification/Text/Chinese_news/cnews.nml diff --git a/Classification/Text/Chinese_news/README.md b/Classification/Text/Chinese_news/README.md new file mode 100644 index 0000000..61e1674 --- /dev/null +++ b/Classification/Text/Chinese_news/README.md @@ -0,0 
+1,37 @@ +# Introduction +These sample .nml files are for training a classification model using Text data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). Neopulse text model support multiple languages, like Chinese, Japanese, Korean etc. This examples classify Chinese news into 10 classes, "sports":0, "science and technology": 1, "stocks": 2, "entertainment": 3, "politics": 4, "society": 5, "education": 6, "finance": 7, "house and home": 8, "games": 9 + +# Data +The data for this task can be found at: http://thuctc.thunlp.org/ +To run this example, first you will need to download and pre-process the raw data for the task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `shutil, tarfile, pathlib, pandas, requests, natsort, and sklearn`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/Classification/Text/Chinese_news/cnews.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/Classification/Text/Chinese_news/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. 
+ +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the AudioDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/Classification/Text/Chinese_news/build_csv.py b/Classification/Text/Chinese_news/build_csv.py new file mode 100644 index 0000000..c50294a --- /dev/null +++ b/Classification/Text/Chinese_news/build_csv.py @@ -0,0 +1,26 @@ +import shutil +import tarfile +from pathlib import Path + +import pandas as pd +import requests +from natsort import humansorted +from sklearn.datasets import load_files + + +def download_data(): + ''' + Check if raw IMDB data is present. If not, download data from the official site. 
+ ''' + + URL = 'https://drive.google.com/uc?authuser=0&id=1Jg3EcJEB48-B_dGeOGMoLzJg_HjSnY67&export=download' + if not Path('training_data.csv').is_file(): + r = requests.get(URL, stream=True) + with open('training_data.csv', 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + + +if __name__ == "__main__": + + download_data() + diff --git a/Classification/Text/Chinese_news/cnews.nml b/Classification/Text/Chinese_news/cnews.nml new file mode 100644 index 0000000..2dff682 --- /dev/null +++ b/Classification/Text/Chinese_news/cnews.nml @@ -0,0 +1,41 @@ +source: + bind = "/Users/hongye/Desktop/neo_examples/Neopulse_Examples/Classification/Text/Chinese_news/training_data.csv" ; + input: + x ~ from "news" + -> text: [300] + -> TextDataGenerator: [nb_words=5001,char_level=True] ; + output: + y ~ from "label" + -> flat: [10] + -> FlatDataGenerator: [] ; + params: + validation_split = 0.2, + batch_size = 32; + +architecture: + input: x ~ text: [300] ; + output: y ~ flat: [10] ; + + x -> Embedding: [5001, 64, input_length=300] + -> Conv1D: [256,3, padding='same', strides = 1, activation='relu'] + -> MaxPooling1D: [pool_size=3] + -> Conv1D: [128,3, padding='same', strides = 1, activation='relu'] + -> MaxPooling1D: [pool_size=3] + -> Conv1D: [64,3, padding='same', strides = 1, activation='relu'] + -> Flatten: [] + -> Dropout: [0.1] + -> BatchNormalization: [] + -> Dense: [256, activation='relu'] + -> Dropout: [0.1] + -> Dense: [10, activation='softmax'] + -> y ; + +train: + compile: + optimizer = Adam:[0.001], + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + nb_epoch = 15 ; + dashboard: + save_on = 'val_acc' ; \ No newline at end of file From 68b3a9eac67e9ae17ef421e377386137849e56c3 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 3 Apr 2019 10:27:42 -0700 Subject: [PATCH 09/31] remove .DS_Store from repo --- CapsuleNetworks/.DS_Store | Bin 8196 -> 0 bytes CapsuleNetworks/matrix_capsule/.DS_Store | Bin 8196 -> 0 bytes 
CapsuleNetworks/matrix_capsule/Audio/.DS_Store | Bin 8196 -> 0 bytes CapsuleNetworks/matrix_capsule/Image/.DS_Store | Bin 6148 -> 0 bytes CapsuleNetworks/vector_capsule/.DS_Store | Bin 6148 -> 0 bytes CapsuleNetworks/vector_capsule/Audio/.DS_Store | Bin 6148 -> 0 bytes CapsuleNetworks/vector_capsule/Image/.DS_Store | Bin 6148 -> 0 bytes ImageDetection/.DS_Store | Bin 8196 -> 0 bytes ImageDetection/ssd/.DS_Store | Bin 8196 -> 0 bytes 9 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 CapsuleNetworks/.DS_Store delete mode 100644 CapsuleNetworks/matrix_capsule/.DS_Store delete mode 100644 CapsuleNetworks/matrix_capsule/Audio/.DS_Store delete mode 100644 CapsuleNetworks/matrix_capsule/Image/.DS_Store delete mode 100644 CapsuleNetworks/vector_capsule/.DS_Store delete mode 100644 CapsuleNetworks/vector_capsule/Audio/.DS_Store delete mode 100644 CapsuleNetworks/vector_capsule/Image/.DS_Store delete mode 100644 ImageDetection/.DS_Store delete mode 100644 ImageDetection/ssd/.DS_Store diff --git a/CapsuleNetworks/.DS_Store b/CapsuleNetworks/.DS_Store deleted file mode 100644 index d886c72cf90085464fa5367d3330d0e0a49e4f14..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHMO>h)N6n<|LU?x)}lZa*l3$6x8kSH4y2w(x5O|VD^2HAup{3WwHBTP9v6Lw}d zfe?%*O0hioQ}|Q<9JCY<9t>4l>cN9bd9cKkmX;j6aIwmZ7Fu5Pb@yzFY=GP>3Z|>N zUr)dHy7ztc_4K@60KjD4Xa>*#K%&Z|s!(&C!t|`pE0JJiF-atU01L)p7&4&4=sfFi zAVeTUAVeTUAVeTU;8s9@&TLks8=U(>8@3?=Ap&st7_ge_nEftTLrUI_f2nDt?~$}u8iyK&6x#)Wgz3)`K&{l>Wpq@@?>nb z+jh!2DXcR&*S7b%p5*WA933cFo|U%E zK1a`*)sb;oo~co*tF$_8g{0O?gZZ>|Ja73Y_*^SjY04pkqS)gaY&JtRq`34HQI3 z>h550>m!Obm~pdN3dO6&reJtSLQ{^gReN;L?;SNA`c+@kQY`k)dQBT*x^}}b4M~#f zi_ED{G>R^dyFL*YPbxdNRze(-&<#iE*6ReEg_qzB_z0%q0(=4Azz^^XT!G)=54Z|{ z!QY4&!E#)Ll~{|baV<7rBR1p1*oqsm9lNm?AIE)|!b5l%hj9c=bkN5<7Vs%NgU{fz zcotv4*YQn!3*W}`_z8ZBpW)~D6@G`8@OzxW%XkHU!#~9@j1)J)o?<9M;#HChaXZ-g 
zkr%Je?I`E=x?7gpKUE4s-+fPQY|R53S~qV?&Jv=80&`e;Eh#w{1=%RF8%(kZr3zGPz6ut2kykj0^0TwX^#+TbWbIY!7+F38-MOPx9?M#2u)-8|IQoV|KGjc_~G`6L24B_xed`QI-Bf)B#* Q{Ga`Z4bOit@3`~-KOKxh)N6n<}$z)YqflZa-31y=(kNR*A_2e5$6X0ZrAf^0$({=)3em`pi4v+T@n zfDnu)3b8!-Q;3#72Q7tz2cb$!J$O(l4;Gm6ujJr`i&b8<(DI_MyJx9n`SE5^FkRLC zdiuTBJ@1>Zr}xbQ049q@8-NA?5>+Nug_;`_rf2oK5(!3XNh0|JxS268pRwJ{b++L^ zh(L%yh(L%yh(L(I?STND*{n!6IroJ&Y(oS>1nx=%*!v+ymB~aPCxzs%4r+J`K(d;K zd7(b#0o*4R$V4C~h2*X@r^p^KaK#|SK;cgM7&9lC2;`)Y!W~e!0|qN&kf9)7o%~|l z955kd*oFv%2waZ<%iU|B8;W3nH6wq&kjdMY=f~sUW2CCOX4Y&;s+DTz)b2}Ic9-7TZTy{dj~AXaL1Bv(J?$e zPg*D>NtWbcRU04gjIU{pt?KBSXpN0`b;Vm_?W;N`CS<8`Y1_Kq1G&RT#*a=MJ0+sR z;LQMa6;ZxAGepD7+8Me@W;HExd!#L$b#wV?&eOGCsYiU*h7w(KhvN z+Pr1!u&Ne4bEj$RzEv_)x^H^JYMnD>Q=pVBTe{@LqvZiBL&d5N?@~n()_Bnda ztdAU$<*5d>zD}F3EtJ$IX|R~F4i+u{FrRDDVolj=P!!u-gUx2Bh7{O0X-g&boV15& zdkQAC26t(0(Ud-qqEel}RC!ogDXYijzNd*7+@@`@qV?N)#5jZY{w4l#?81LpTwP*#=W=?hw%WK=%9~9Ea5YF98chLcoJX4 z*YQn!3*W{w_%VKhpWHf zDfgH6K;_=(CxV)XB*ry$Hn&zDj4qLhUaBN6o1@ER+5;+1Eu6nVThSVgsu9`+fwnEW zN+#N?Rbr)PrAw048g9B~t)i&av)-GXQM48th@>$h=p(oq*W(6EVh?V_Cjyf85k>bCMRm-gO*GBpK`h`n9>rrs)@M0Y zpT`&Q6+DHf@iijtJNPcXhwtNAJcsA;qkz0$;5Rr`LD#JnbiF*2t~ocAGKzi{;D^(h zbr;W8YK>cH^P{(VrI*qL$9A0&nvrdt8t8=_=r982^b>+KcWkK=k!dXd-+1%y|95Qs zVPGKwAp-RJt4^g;NkXFh6QU9nmTSkT9-zt!vzru>yHLZ^agsb8Cwc7;L+Z!Kl=&nA ZIVmKKQ2F0K1n{4r;rSn)|9AKK|1T{>V%Gow diff --git a/CapsuleNetworks/matrix_capsule/Audio/.DS_Store b/CapsuleNetworks/matrix_capsule/Audio/.DS_Store deleted file mode 100644 index 668fdfc3bf80f1979386f30c6c8ca8dde30c15c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHMU2GIp6h3ELV5Sq=X+=AQ1y>6cq{>zbr5IqlU6j8{Wm{VQT4r~~cH(qq*_qt} zrC6T`VvG;|1kw2OAW?kqK}a;}gAXe4!HA82kq2LRG4Vx%#uq*J-dO^ri@azWotw=4 zX6`xX{(SS@xqDgwz+}N_0nh+IqKl!bP&G|qc6QxRB0-Bxh!hWyg@?cd2l7;hC1fCE zAY>q9AY>q9U?ws^XErNRnR8!Q!#ZRjWZ-|v0Q)|~=wg@%a6%yebWp`p0K!UY=7sJl z58yGe022XD2;`yEr^p^K2*n`9KoL&%7;`5~1UMm3gae9jz+hwyG8E*aQ(TO@111E9 
zb;v--z>N&B+`S6Apa33N*XHk;Q7@OV-3-|;WOBCU`SJKS7^$e7HG7UERY_HItM;dd z-O;qK`votl=l8OwZrvZw1ou6zYhS(BGrKHvbWokw={mk{IcA;*TZTz4`}!=$a7UAF z!7)7EPg)oxNtWb6RT~>y6K`*htyT34+ZAD5;2B`xc>92hx#WbEkp zu~TAw7%LVqQ@U?@gKCXO6mH47PIulc5?y^+*Dj1WWU0;ScD6vy zW`}L3ts}xan|E!y+x0Bpa-H4%zIn)JHrCN~rv5#ioA*0;XZ;rSFTCp57&~$-*xP01 zJwjPNTU{T$Yw3!3Yx{=I$r`n`?zVY~vWH%W<6DnfX1;gSGW_A)x|cN_!*YiDik4?( zY_rGFN6gyDAz7ZRQ)_Fq25o_)HcI`4jCHVJ`G(=tuz@n}`56Syk(Dl296xZLv?sfPQsEK;;yThExbEb)fbYZR@o zD3+vd3mVrwplJQ{5=Uq$UM)5U%^MS%GQdXNu6zEDoaxZ4KBJ-7+Eb01wwu{H48t@e zNop(!pfRyb-12odCgS2rW#`yJh(i*#!T{vpFr0+v;T3on&cS*37(R!u;Rm=3Kf^C@ z1%89y5ix?5I18(>9v96551@$-`dGjsK90xn z1U`u;@mYKsU&Yt(bv%pj;|KU5euSUmmv|As!b!Y@m+>e3Rm?C_+8kY_r3l0;gyl3H zd+MFaG`~ zwTAf%q6?KfOS@s}F6kwynujE&I&}{BR_=~2mPu%;Bp#ch%VpXPDo!qFn6Is9jz-l8 z?T5hH5?v+J22d-+NKHzcB&)NyYx_DyQ7dUb$ilW!>5xeC>TJ<(N_kLb+fs$U74J=@ zdjT%Ok0jea;7<~371ohlVTy1UMm( bTB!W%e+UTAe<<_gH9Y@s{yX76b^iYa1xkYo diff --git a/CapsuleNetworks/matrix_capsule/Image/.DS_Store b/CapsuleNetworks/matrix_capsule/Image/.DS_Store deleted file mode 100644 index ff4ef2ae3bc32e8234afc999197fb9c8838ecd67..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeH~JqiLr422VaK(Mj2oW=uqgF&>g&_)j+YAINV{T$tw9|Tux5qW{+&14c~zhY-2 zBD%Pr=OPmk8Np3uW}#z>{ZKZtm9^|-f4-eA$LsatRlTgW8aRIA^*oO$BtQZrKmsH{ z0zX8+?rqp;9Lh)nBtQaB0``6=aMPOFLjBc&;3EKZfV3OdK1)E0C7?C6g(3sfXoW_r z`WRw)Z-cu-q*dc&NNv-yYBMhs@f#!J%G`@bVFWja|i4xEs!kEub~Eg(3svhk#>XAc3zE FcmQ?q65{{> diff --git a/CapsuleNetworks/vector_capsule/.DS_Store b/CapsuleNetworks/vector_capsule/.DS_Store deleted file mode 100644 index ae3a2e9dff69884e3f1d52357082c973a4cf7113..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKyH3ME5S)cXibRu=@_vCoFi|L|`2ikA2r0+~O7AM)#iud*ps*}UX=q?p+MBz* zojctWUM~RK9s3(#3t&Tc#L0)b`M&$iE-K<^amLVpz8s#1;lSm2=G--IaYT=IO!#Zu z;{#84!|Ttn=bfo6DIf);fE17dQeatuD$<*)QF3hXS_u^tV`4= z1*E`Kfy-QOz5d_P7yADxNh>KJ1^$!*w%R}JcYIRS*4g8{);9Vh-E+R^Zkz{&LzH7; 
jlw&Tu9A8IL<~5&lzbhOQgU)!+iTW9EU1U<=w-xvX%I6u@ diff --git a/CapsuleNetworks/vector_capsule/Audio/.DS_Store b/CapsuleNetworks/vector_capsule/Audio/.DS_Store deleted file mode 100644 index 0132d794c71906dab1bc6450b02dc03d813d7938..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKyJ`bL3>+mcOwzbaxxbJ9Qa}paiUR(9XmrP3I3&iWgCRx$;)Lli zu49%UHct?H;gHA-&5}w?s?~^LNoT%QT`wFGlMbul!|KUa6N<&td4G#?SWi@x0#e{q zf&1Lfy#FuhXXgJ?l6F!+3fzBnm{Iqx0;zVb~gt(6i*c*oo)o zD<)$Awz{2`z#PCQ-4)+H49pnMu*D7=>~T0<|IWXc%h}WT$h{uWd5y>QT$YG{2#A0P zh=2%;h(H|TJpaE&^h|mb5fFiK5b*Cqp}W@9)-^sI9HIrFPMHqlJbDRg@dUM|wyw<3 zEV~EGQj0dk^HEMMxv!?SuDu+V&4=Zk&AS+y^>SEYK(iX6K?Fo#Kw#42xu5?(^#A(* zL5V^U5P>%%VAJ(}z2Zyd+4}T)ork>r3CJ!R1)O? diff --git a/ImageDetection/.DS_Store b/ImageDetection/.DS_Store deleted file mode 100644 index c8a836c870f19a653ddf1583f4068362f9bb8752..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLUu+ab7@yy@z;3(HDIhHe~6al6+- zOR)wMV-(-~1JNgA4Aw{RPegq%CTNT%QUjXkgD>clF)>l&i{I?-L1}41d@vYxHZ$MM z_s#ds%x`}?n;FI!S_|qr#;O@(5@jKIITg1_!Y=Z(6!JB~2=ZrkE^Sz8YJHy0P3s*f z0t5mC0t5mC0t5mC?gj+tpDmI!!++ni24#RifWSS8fY=|Rl!c7>a?F$N)k)bAJb<(MZucLx9P0e@upClt7&Q~apg zogwDQpbQWQ5SWgD_-sm;O^=!$ntXmYblYiYxQb9o>8#mvB&kd)n_G4uF<=cPoRm|r zTT}UcqSc*p2GV}L$FhtY^;CMdt_>ym!ZyovQo5<-U1wd@sLTF7-BhiiR;ysDwi_qm z86-)TndolZriV32R$kqf&JZ z%F6I+nVV9NP4jbu)UQ>Po}={E`Li^b>%2^9l;mC`mDUVTu~B(M;+=Bepogrd+^lSo zDp4=@)62`!dTck`wpds>C`Ro{+0M?KX40%K)2zhW^l3E{m%Yif3m+2LOJGQ z9x70cI;_A-tipOUVgoj!6`kn9lh}zK96%Z>G#rM79FAcKV|W^;@eE$TOL!S)aSm_b z9ejw7@G(BYWqgAxn83IA5!dmvH@i@g*j>dXcoM&nyh&gG-YxXv?dcoY(Y|Am^7!3L z-|yymoV{;RRkUu^LygUkw%mB#Q{M6|M4Ze!mueoHT%HwKMOea)0CjnK;G$=88zUOvHC1tzHv}@DLH-Yd+i%QFsZ_U3RTaE0hSI zRSdI9*(?+1d8upLvXyMJiR6CNHf6gkzCX_XJxjk}SJ+R)(7(Yjo7lO4n7N#oxfadX zh8D!J9gh(wyRi!i^x`0rND(`aBI|Q=7$erLkF!pL7>FzR4vZ`+h@N2zmrfwp8$aXf&VE2D2*rLtu*`WQxv?2_!z-k4GRN2yURb**bq+9}FOD}+y-JPMGFrBG8v)e)` zsnJMGObp%!|F 
z2t)`(2t)`(2t)|n2nf)b%`at=b6;wsK0+Wu;Fd&ye;*RmxJ-p|R7&ybpiYzkBr9lI zTr{RSK=>p=nF{5ol){x_%IX23E5Z^3(w+1PZcZ{4%26q$JA-s*2vXA&J zH<$^sLD6r?7WVSHo!Ou-7mnLK&$&F#<~nR^ut%TS>bXJIcCCVNwoQvn_I28>=?%7c zMc4F2oQzZ`imIqRx-mSwa%Hk1(bU{D(vTQly_&|&O{+#mRAu&}#`SIc`wtx+J~DFj zWtkbmuLG+dm+31L({y?pGkLFn;0nueKsbJ>HRo_3Vjq@)I9CQjt(vAyH#WCz-lA>m zvh$vsu@754dbLkjw9)O(f>oltJM*4X?05Yhy(XV^@8ZY^rFib?I>YGZx{ao3nTn#+ zl_^k{s+U8Nx2p3)X+B%b*3co;&JHk-J^F9iU1EQ+zu7-v zP*DX9(=iJR(SZA~97(LjIy`{&Xu~daq8od#7bYz9q7VHj;Rr@>6vyx^j^jBzk5};) z-o`t47oXsBe1R|V6~4z=oWsvJkKb_-f8bB~oUtC$WHbFOC)oVrwb>jO zY+ic9v-#`{N#Q$Y%}y-3XL<9Q^(~hvFz%&a#mg&cDA*_IB`8-WmU#1H#+zSBnQJU-h{yF9?SxR<7;jRE z`uY^P(h_a8qUuwH>DqOgrdQBTP^E5zwoxIr>$l0cX>F6rH>A!crR{%`_6K&3U0|0G zLnYC)7ITTH^+eKDNMREm#ujYFc65Y9-A6P%Ks2>+5CKtj2#0VOPvA*BMTC7?(Denp zgcEojZ{STL?)&%vAK_!1#3`J{XCZ;V!x@|(N7-%TDErGq%JzHdv{?-DjQ#Z5h5GFA zYgw4ru^8yP-e Date: Wed, 3 Apr 2019 10:28:15 -0700 Subject: [PATCH 10/31] add .DS_Store to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e43b0f9 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.DS_Store From 7a56eabb3b10edfb067ac1f66cececd5a5405060 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 3 Apr 2019 12:15:24 -0700 Subject: [PATCH 11/31] Fix Sentiment Analysis --- Classification/Text/Sentiment/sentiment_call_auto.nml | 2 +- Classification/Text/Sentiment/sentiment_choice_auto.nml | 2 +- Classification/Text/Sentiment/sentiment_dist_auto.nml | 2 +- Classification/Text/Sentiment/sentiment_full_auto.nml | 2 +- Classification/Text/Sentiment/sentiment_multi-GPU.nml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Classification/Text/Sentiment/sentiment_call_auto.nml b/Classification/Text/Sentiment/sentiment_call_auto.nml index 0a1e39e..8833e00 100644 --- a/Classification/Text/Sentiment/sentiment_call_auto.nml +++ b/Classification/Text/Sentiment/sentiment_call_auto.nml @@ -3,7 +3,7 @@ 
oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_choice_auto.nml b/Classification/Text/Sentiment/sentiment_choice_auto.nml index 50ff86f..c9e947b 100644 --- a/Classification/Text/Sentiment/sentiment_choice_auto.nml +++ b/Classification/Text/Sentiment/sentiment_choice_auto.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_dist_auto.nml b/Classification/Text/Sentiment/sentiment_dist_auto.nml index 272ba3b..8222f43 100644 --- a/Classification/Text/Sentiment/sentiment_dist_auto.nml +++ b/Classification/Text/Sentiment/sentiment_dist_auto.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_full_auto.nml b/Classification/Text/Sentiment/sentiment_full_auto.nml index 5829e4b..5187b03 100644 --- a/Classification/Text/Sentiment/sentiment_full_auto.nml +++ b/Classification/Text/Sentiment/sentiment_full_auto.nml @@ -2,7 +2,7 @@ oracle("mode") = "classification" oracle("complexity") = 0.1 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_multi-GPU.nml b/Classification/Text/Sentiment/sentiment_multi-GPU.nml index 26487ac..b62388b 100644 --- a/Classification/Text/Sentiment/sentiment_multi-GPU.nml +++ b/Classification/Text/Sentiment/sentiment_multi-GPU.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = 
"/DM-Dash/dsl/sentiment/data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Review" -> text: [200] From abdeef738d3974a5b3cddca9edc51f94cfc25ad9 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Thu, 4 Apr 2019 16:58:59 -0700 Subject: [PATCH 12/31] neopulse3_0 updates --- .../Audio/MusicGenre/build_csv.py | 11 ++- .../Audio/MusicGenre/music_capsule.nml | 12 +-- .../Audio/MusicGenre/music_capsule_auto.nml | 8 +- .../matrix_capsule/Image/MNIST/build_csv.py | 10 ++ .../Image/MNIST/mnist_capsule.nml | 2 +- .../Image/MNIST/mnist_capsule_auto.nml | 2 +- .../Audio/MusicGenre/build_csv.py | 8 +- .../Audio/MusicGenre/music_capsule.nml | 4 +- .../Audio/MusicGenre/music_capsule_auto.nml | 4 +- .../vector_capsule/Image/MNIST/build_csv.py | 11 +++ .../Image/MNIST/mnist_capsule.nml | 2 +- .../Image/MNIST/mnist_capsule_auto.nml | 2 +- Classification/.DS_Store | Bin 0 -> 6148 bytes Classification/Audio/.DS_Store | Bin 0 -> 6148 bytes Classification/Audio/MusicGenre/build_csv.py | 14 ++- .../MusicGenre/music_classification_auto.nml | 2 +- .../Audio/MusicGenre/music_spectrogram.nml | 2 +- Classification/Dicom/IXIT1_BrainSex/README.md | 38 ++++++++ .../Dicom/IXIT1_BrainSex/build_csv.py | 92 ++++++++++++++++++ .../Dicom/IXIT1_BrainSex/dicom_sex.nml | 48 +++++++++ .../Dicom/IXIT1_BrainSex/dicom_sex_auto.nml | 42 ++++++++ Classification/Image/CIFAR10/build_csv.py | 12 +++ Classification/Image/CIFAR100/build_csv.py | 13 +++ Classification/Image/MNIST/build_csv.py | 12 ++- Classification/Text/Chinese_news/build_csv.py | 1 + Classification/Text/Chinese_news/cnews.nml | 4 +- .../Text/Sentiment/sentiment_call_auto.nml | 2 +- .../Text/Sentiment/sentiment_choice_auto.nml | 2 +- .../Text/Sentiment/sentiment_dist_auto.nml | 2 +- .../Text/Sentiment/sentiment_full_auto.nml | 2 +- .../Text/Sentiment/sentiment_multi-GPU.nml | 2 +- .../Video/HumanAction/video_class.nml | 4 +- .../Video/HumanAction/video_class_auto.nml | 4 +- GANs/began/MNIST/mnist_began.nml | 2 +- 
GANs/began/MNIST/mnist_began_auto.nml | 2 +- GANs/cgan/MNIST/mnist_cgan.nml | 4 +- GANs/dcgan/MNIST/mnist_dcgan.nml | 2 +- GANs/dcgan/MNIST/mnist_dcgan_auto.nml | 2 +- GANs/gan/MNIST/mnist_gan.nml | 2 +- GANs/gan/MNIST/mnist_gan_auto.nml | 2 +- GANs/lsgan/MNIST/mnist_lsgan.nml | 2 +- GANs/lsgan/MNIST/mnist_lsgan_auto.nml | 2 +- GANs/wgan/MNIST/mnist_wgan.nml | 4 +- GANs/wgan/MNIST/mnist_wgan_auto.nml | 4 +- GANs/wganGP/MNIST/build_csv.py | 2 +- GANs/wganGP/MNIST/mnist_wganGP.nml | 2 +- GANs/wganGP/MNIST/mnist_wganGP_auto.nml | 2 +- ImageDetection/ssd/VOC2012/ssd300.nml | 2 +- Regression/.DS_Store | Bin 0 -> 6148 bytes Regression/Vector/.DS_Store | Bin 0 -> 6148 bytes assets/.DS_Store | Bin 0 -> 6148 bytes assets/Picture1.png | Bin 70821 -> 0 bytes 52 files changed, 356 insertions(+), 54 deletions(-) create mode 100644 Classification/.DS_Store create mode 100644 Classification/Audio/.DS_Store create mode 100644 Classification/Dicom/IXIT1_BrainSex/README.md create mode 100644 Classification/Dicom/IXIT1_BrainSex/build_csv.py create mode 100644 Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml create mode 100644 Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml create mode 100644 Regression/.DS_Store create mode 100644 Regression/Vector/.DS_Store create mode 100644 assets/.DS_Store delete mode 100644 assets/Picture1.png diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py index c079336..2898eb3 100644 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py @@ -66,7 +66,7 @@ def write_file(validation_split): shuffle(train) shuffle(valid) - # Write the CSV file. + # Write the Training CSV file. with open('training_data.csv', 'w') as of: of.write('Audio,Label\n') for l in train: @@ -74,6 +74,15 @@ def write_file(validation_split): for l in valid: of.write(l) + # Write the Querying CSV file. 
+ with open('querying_data.csv', 'w') as of: + of.write('Audio\n') + for l in train: + of.write(l.split(',')[0] + '\n') + for l in valid: + of.write(l.split(',')[0] + '\n') + + if __name__ == '__main__': diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml index 27b26f9..24f779d 100644 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml @@ -1,11 +1,11 @@ architecture: input: - audio ~ audio: [maxlen = 1536, nbands = 24]; + audio ~ audio: [maxlen = 96, nbands = 24]; output: label ~ flat: [10]; audio - -> Reshape: [[1536, 24, 1]] + -> Reshape: [[96, 24, 1]] -> Conv2D: [filters = 32, kernel_size = 5, strides = 2, padding = 'valid', activation = 'relu', name = 'conv1'] -> PrimaryCaps_Matrix: [] -> ConvCaps:[channels = 32, kernel_size = 3, strides = 2, routings = 3] @@ -14,17 +14,17 @@ architecture: -> label; source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/training_data.csv"; input: img ~ from "Audio" - -> audio: [maxlen = 1536, nbands = 24] + -> audio: [maxlen = 96, nbands = 24] -> AudioDataGenerator: []; output: label ~ from "Label" -> flat: [10] -> FlatDataGenerator:[]; params: - batch_size = 32, + batch_size = 8, shuffle = True, shuffle_init = True; @@ -34,7 +34,7 @@ train : loss = "spreadloss", metrics = ['accuracy']; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml index f223bd0..3c43231 100644 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml +++ b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml @@ -1,7 +1,7 @@ oracle("mode") = 
"matrix_capsule" architecture: input: - audio ~ audio: [maxlen = 1536, nbands = 24]; + audio ~ audio: [maxlen = 96, nbands = 24]; output: label ~ flat: [10]; @@ -10,10 +10,10 @@ architecture: -> label; source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/training_data.csv"; input: img ~ from "Audio" - -> audio: [maxlen = 1536, nbands = 24] + -> audio: [maxlen = 96, nbands = 24] -> AudioDataGenerator: []; output: label ~ from "Label" @@ -30,7 +30,7 @@ train : loss = auto, metrics = ['accuracy']; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py b/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py index b933aae..96c34a1 100644 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py @@ -54,6 +54,7 @@ def write_csv_file(): Path('images').mkdir(parents=True, exist_ok=True) + # writing training csv with open('training_data.csv', 'w') as of: of.write('image,label\n') @@ -67,6 +68,15 @@ def write_csv_file(): imwrite(img_file, image) of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('image\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + if __name__ == '__main__': diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml index b41e4bb..89f8991 100644 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml @@ -32,7 +32,7 @@ train : loss = "spreadloss", metrics = ['accuracy']; run: - nb_epoch = 5; + epochs = 5; 
dashboard: ; diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml index 1b8c366..2d55cd8 100644 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml +++ b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml @@ -28,7 +28,7 @@ train : loss = auto, metrics = ['accuracy']; run: - nb_epoch = 5; + epochs = 5; dashboard: ; diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py index c079336..4fc7123 100644 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py +++ b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py @@ -66,7 +66,7 @@ def write_file(validation_split): shuffle(train) shuffle(valid) - # Write the CSV file. + # Write training the CSV file. with open('training_data.csv', 'w') as of: of.write('Audio,Label\n') for l in train: @@ -74,6 +74,12 @@ def write_file(validation_split): for l in valid: of.write(l) + # Write the querying CSV file. 
+ with open('querying_data.csv', 'w') as of: + of.write('Audio\n') + for l in valid: + of.write(l.split(',')[0] + '\n') + if __name__ == '__main__': diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml index 5ad61a4..e85a1c1 100644 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml +++ b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml @@ -13,7 +13,7 @@ architecture: -> label; source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/MusicGenre/training_data.csv"; input: img ~ from "Audio" -> audio: [maxlen = 1536, nbands = 24] @@ -33,7 +33,7 @@ train: loss = margin_loss, metrics = ['accuracy']; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml index db11587..94bd5d6 100644 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml +++ b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml @@ -9,7 +9,7 @@ architecture: audio -> auto -> label; source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/MusicGenre/training_data.csv"; input: img ~ from "Audio" -> audio: [maxlen = 1536, nbands = 24] @@ -29,7 +29,7 @@ train: loss = auto, metrics = ['accuracy']; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py b/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py index b933aae..f7bbde0 100644 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py @@ -54,6 +54,7 @@ def write_csv_file(): 
Path('images').mkdir(parents=True, exist_ok=True) + # writing training csv with open('training_data.csv', 'w') as of: of.write('image,label\n') @@ -67,6 +68,16 @@ def write_csv_file(): imwrite(img_file, image) of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('image\n') + + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + if __name__ == '__main__': diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml index b8aab34..37d74f3 100644 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml @@ -32,7 +32,7 @@ train : loss = margin_loss, metrics = ['accuracy']; run: - nb_epoch = 5; + epochs = 5; dashboard: ; diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml index 243859f..aed8e20 100644 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml +++ b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml @@ -29,7 +29,7 @@ train : loss = auto, metrics = ['accuracy']; run: - nb_epoch = 5; + epochs = 5; dashboard: ; diff --git a/Classification/.DS_Store b/Classification/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..0bfcf6fb600739ecf525172da4856012600e0ba9 GIT binary patch literal 6148 zcmeHKJx{|x41I=Fs>ITf@%{q;;Hb*LT*OREDuh&&p;Thb#^3JqM^U3FD+Ab)@8x`c zKIIK@3_zC0_6}G8n9~*Ut;^W-TzzC$5ow5GHM;ijy5Dx)ev~^EQ0@%(IHJWn2K*Tw z@qrEAu)8eV;0Zf?;qx5i)hzK$ddq`BiK$yNo(iM_sX!`_3jALMv~kkrH;%cc0;xbM z@S}i!9|~Qu2KJ8j>EL1`08xMA&G>Ay1hHsRVs5k*-(KVuedc)$>>Z8HxY3FEBcQsZr2@a9zzO~CBVzyn literal 0 HcmV?d00001 diff --git a/Classification/Audio/.DS_Store 
b/Classification/Audio/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..4a6be4886e59fcc6256e93eb6a789928b9a4861a GIT binary patch literal 6148 zcmeHKyJ`bL3>+mcOwzbaxxbJ!slfOBx z_wR?pe%dvvk^)jd3P=GdAO$8W;JueNzfV+@0#ZNQc&@8FMq*{#_mUQM@)%C(5G3l@xKCGT>HKABMo%gpWhxJ57DIf)| z6}Zjq!u$Ud{g?Uwnxvf+kOKco0h_H}S4+N9_14kLd9Q8sH@erH>26#Hg(2E8G1@UV f-i{xlDC?T9dEN_$#Go@DbfSI+To;)X_-h3& audio: [maxlen = 1366, nbands = 96] diff --git a/Classification/Audio/MusicGenre/music_spectrogram.nml b/Classification/Audio/MusicGenre/music_spectrogram.nml index 80ce904..91cbcd6 100644 --- a/Classification/Audio/MusicGenre/music_spectrogram.nml +++ b/Classification/Audio/MusicGenre/music_spectrogram.nml @@ -1,5 +1,5 @@ source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Audio/training_data.csv" ; + bind = "/DM-Dash/NeoPulse_Examples/Classification/Audio/MusicGenre/training_data.csv" ; input: x ~ from "Audio File" -> audio: [maxlen = 1366, nbands = 96] diff --git a/Classification/Dicom/IXIT1_BrainSex/README.md b/Classification/Dicom/IXIT1_BrainSex/README.md new file mode 100644 index 0000000..c2243fc --- /dev/null +++ b/Classification/Dicom/IXIT1_BrainSex/README.md @@ -0,0 +1,38 @@ +# Introduction +These sample .nml files are for training a classification model using dicom(Digital Imaging and Communications in Medicine) data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +Data for this example is from the [IXI-T1 Dataset](http://biomedic.doc.ic.ac.uk/brain-development/downloads/IXI/IXI-T1.tar). The dataset contains 591 human brain 3D MR Images with T1 weighted, and corresponding gender label of each image, male or female. 
+To run this example, first you will need to download and pre-process the raw data for the dicom classification task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script failes, make sure that you have installed all the package dependencies of this script which are listed at the top of the script: +`tarfile, shutil, pathlib, requests, natsort, pandas and random`. Missing packages can be installed using pip: + +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/Classification/Dicom/IXIT1_BrainSex/training_data.csv" ; +``` + +NOTE: Dicom files are big! Be careful with your batch size, or you may get out of memory (OOM) errors. If that happens, reduce the batch size. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) +For more information on using the AudioDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. + +You are welcome to modify these tutorial files. If citing please link to this repository. 
diff --git a/Classification/Dicom/IXIT1_BrainSex/build_csv.py b/Classification/Dicom/IXIT1_BrainSex/build_csv.py new file mode 100644 index 0000000..b0c9760 --- /dev/null +++ b/Classification/Dicom/IXIT1_BrainSex/build_csv.py @@ -0,0 +1,92 @@ +import os +import shutil +import tarfile +from pathlib import Path +from random import shuffle + +import pandas as pd +import requests +from natsort import humansorted + + +def download_data(): + ''' + Check if raw IXI dicom data is present. If not, download data from the + official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + nii_URL = 'http://biomedic.doc.ic.ac.uk/brain-development/downloads/IXI/IXI-T1.tar' + xls_URL = 'http://biomedic.doc.ic.ac.uk/brain-development/downloads/IXI/IXI.xls' + + nii_f = 'IXI_T1.tar.gz' + xls_f = 'IXI.xls' + + if not Path('raw_data/' + nii_f).is_file(): + r = requests.get(nii_URL, stream=True) + with open('raw_data/' + nii_f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + + Path('images').mkdir(parents=True, exist_ok=True) + tarfile.open('raw_data/' + nii_f).extractall('images/') + + if not Path('raw_data/' + xls_f).is_file(): + r = requests.get(xls_URL, stream=True) + with open('raw_data/' + xls_f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + + +def write_file(validation_split): + + xls = pd.ExcelFile("raw_data/IXI.xls") + df = xls.parse('Table') + img_plist = os.listdir("images") + + pdict = {} + + csv_lines = [] + + cwd = Path.cwd() + + for img_p in img_plist: + if img_p[:3] == "IXI": + pdict[int(img_p[3:6])] = img_p + + for index, row in df.iterrows(): + IXI_id = int(row['IXI_ID']) + sex_id = row['SEX_ID (1=m, 2=f)'] + sex_id -= 1 + if IXI_id in pdict: + csv_lines.append("{0},{1}\n".format(str(cwd) + "/images/" + pdict[IXI_id], sex_id)) + + shuffle(csv_lines) + + split_index = int(validation_split * len(csv_lines)) + + train = csv_lines[:-split_index] + valid = csv_lines[-split_index:] + + # Write the training CSV file. 
+ with open('training_data.csv', 'w') as of: + of.write('data,label\n') + for l in train: + of.write(l) + for l in valid: + of.write(l) + + + # Write the querying CSV file. + with open('querying_data.csv', 'w') as of: + of.write('data\n') + for l in valid: + of.write(l.split(',')[0] + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + #download_data() + + # Write files with 20% validation split + write_file(0.2) \ No newline at end of file diff --git a/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml b/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml new file mode 100644 index 0000000..15e3d7a --- /dev/null +++ b/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml @@ -0,0 +1,48 @@ +architecture: + input: + img ~ dicom: [shape = [64,64,64,1]]; + output: + label ~ flat: [2]; + + img + -> Conv3D:[16, kernel_size=[3, 3, 3],strides = [2,2,2], padding = 'same',activation='relu'] + -> Conv3D:[16, kernel_size=[3, 3, 3],strides = [2,2,2], padding = 'same',activation='relu'] + -> MaxPooling3D:[pool_size=[2, 2, 2], padding = 'same'] + -> Conv3D:[32, kernel_size=[3, 3, 3], padding = 'same',activation='relu'] + -> Conv3D:[64, kernel_size=[3, 3, 3], padding = 'same', activation='relu'] + -> MaxPooling3D:[pool_size=[2, 2, 2], padding = 'same'] + -> Flatten:[] + -> Dense:[256, activation='relu'] + -> Dense:[2, activation='softmax'] + -> label; + +source: + bind = "/DM-Dash/Neopulse_Examples/Classification/Dicom/IXIT1_BrainSex/training_data.csv"; + input: + img ~ from "data" + -> dicom: [shape = [64, 64, 64, 1]] + -> DicomDataGenerator:[spacing=[2.0,2.0,2.0],normalise_zero_to_one = True,flip=True]; + output: + label ~ from "label" + -> flat: [2] + -> FlatDataGenerator:[]; + params: + batch_size = 8, + shuffle = True, + shuffle_init = True, + repeat_per_load=10; + +train : + compile: + optimizer = Adam:[lr = 0.0001], + loss = categorical_crossentropy, + metrics = ['accuracy']; + run: + epochs = 20; + dashboard: ; + + + + + + diff --git 
a/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml b/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml new file mode 100644 index 0000000..b432433 --- /dev/null +++ b/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml @@ -0,0 +1,42 @@ +oracle("mode")="classification" + +architecture: + input: + img ~ dicom: [shape = [64,64,64,1]]; + output: + label ~ flat: [2]; + + img + -> auto + -> label; + +source: + bind = "/DM-Dash/Neopulse_Examples/Classification/Dicom/IXIT1_BrainSex/training_data.csv"; + input: + img ~ from "data" + -> dicom: [shape = [64, 64, 64, 1]] + -> DicomDataGenerator:[spacing=[2.0,2.0,2.0],normalise_zero_to_one = True,flip=True]; + output: + label ~ from "label" + -> flat: [2] + -> FlatDataGenerator:[]; + params: + batch_size = 8, + shuffle = True, + shuffle_init = True, + repeat_per_load=10; + +train : + compile: + optimizer = Adam:[lr = 0.0001], + loss = categorical_crossentropy, + metrics = ['accuracy']; + run: + epochs = 20; + dashboard: ; + + + + + + diff --git a/Classification/Image/CIFAR10/build_csv.py b/Classification/Image/CIFAR10/build_csv.py index 75df8a2..60f2228 100644 --- a/Classification/Image/CIFAR10/build_csv.py +++ b/Classification/Image/CIFAR10/build_csv.py @@ -90,6 +90,7 @@ def write_data(): for index, label in enumerate(names[b'label_names']): of.write(str(index) + ',' + str(label) + '\n') + # write training csv with open('training_data.csv', 'w') as of: of.write('Image,Class\n') count = 0 @@ -101,6 +102,17 @@ def write_data(): of.write(str(Path(file_path).resolve()) + ',' + str(labels[ind]) + '\n') count += 1 + # write querying csv + with open('querying_data.csv', 'w') as of: + of.write('Image\n') + count = 0 + for file_name in data_files: + image_list, labels = load_data(file_name) + for ind, image in enumerate(image_list): + file_path = image_path + str(count) + '.png' + imwrite(file_path, image) + of.write(str(Path(file_path).resolve()) + '\n') + count += 1 if __name__ == '__main__': diff --git 
a/Classification/Image/CIFAR100/build_csv.py b/Classification/Image/CIFAR100/build_csv.py index 11f096d..47d47c4 100644 --- a/Classification/Image/CIFAR100/build_csv.py +++ b/Classification/Image/CIFAR100/build_csv.py @@ -82,6 +82,7 @@ def write_data(): for index, label in enumerate(names[b'fine_label_names']): of.write(str(index) + ',' + str(label) + '\n') + # writing training csv file with open('training_data.csv', 'w') as of: of.write('Image,Class\n') count = 0 @@ -93,6 +94,18 @@ def write_data(): of.write(str(Path(file_path).resolve()) + ',' + str(labels[ind]) + '\n') count += 1 + # writing querying csv file + with open('querying_data.csv', 'w') as of: + of.write('Image\n') + count = 0 + for file_name in data_files: + image_list, labels = load_data(file_name) + for ind, image in enumerate(image_list): + file_path = image_path + str(count) + '.png' + imwrite(file_path, image) + of.write(str(Path(file_path).resolve()) + '\n') + count += 1 + if __name__ == '__main__': diff --git a/Classification/Image/MNIST/build_csv.py b/Classification/Image/MNIST/build_csv.py index 2f82682..9e8f637 100644 --- a/Classification/Image/MNIST/build_csv.py +++ b/Classification/Image/MNIST/build_csv.py @@ -54,8 +54,9 @@ def write_csv_file(): Path('images').mkdir(parents=True, exist_ok=True) + # writing training csv with open('training_data.csv', 'w') as of: - of.write('Image,Label\n') + of.write('image,label\n') for index, image in enumerate(train_images): img_file = 'images/mnist_train_' + str(index) + '.png' @@ -67,7 +68,16 @@ def write_csv_file(): imwrite(img_file, image) of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('image\n') + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + + if __name__ == '__main__': # Download data if necessary diff 
--git a/Classification/Text/Chinese_news/build_csv.py b/Classification/Text/Chinese_news/build_csv.py index c50294a..59375c3 100644 --- a/Classification/Text/Chinese_news/build_csv.py +++ b/Classification/Text/Chinese_news/build_csv.py @@ -20,6 +20,7 @@ def download_data(): shutil.copyfileobj(r.raw, f_z) + if __name__ == "__main__": download_data() diff --git a/Classification/Text/Chinese_news/cnews.nml b/Classification/Text/Chinese_news/cnews.nml index 2dff682..36bc2f0 100644 --- a/Classification/Text/Chinese_news/cnews.nml +++ b/Classification/Text/Chinese_news/cnews.nml @@ -1,5 +1,5 @@ source: - bind = "/Users/hongye/Desktop/neo_examples/Neopulse_Examples/Classification/Text/Chinese_news/training_data.csv" ; + bind = "/DM-Dash/Neopulse_Examples/Classification/Text/Chinese_news/training_data.csv" ; input: x ~ from "news" -> text: [300] @@ -36,6 +36,6 @@ train: loss = 'categorical_crossentropy', metrics = ['accuracy'] ; run: - nb_epoch = 15 ; + epochs = 2 ; dashboard: save_on = 'val_acc' ; \ No newline at end of file diff --git a/Classification/Text/Sentiment/sentiment_call_auto.nml b/Classification/Text/Sentiment/sentiment_call_auto.nml index 0a1e39e..b307163 100644 --- a/Classification/Text/Sentiment/sentiment_call_auto.nml +++ b/Classification/Text/Sentiment/sentiment_call_auto.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "/DM-Dash/Neopulse_Examples/Classification/Text/Sentiment/training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_choice_auto.nml b/Classification/Text/Sentiment/sentiment_choice_auto.nml index 50ff86f..1540e22 100644 --- a/Classification/Text/Sentiment/sentiment_choice_auto.nml +++ b/Classification/Text/Sentiment/sentiment_choice_auto.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = 
"/DM-Dash/Neopulse_Examples/Classification/Text/Sentiment/training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_dist_auto.nml b/Classification/Text/Sentiment/sentiment_dist_auto.nml index 272ba3b..4d9fa29 100644 --- a/Classification/Text/Sentiment/sentiment_dist_auto.nml +++ b/Classification/Text/Sentiment/sentiment_dist_auto.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "/DM-Dash/Neopulse_Examples/Classification/Text/Sentiment/training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_full_auto.nml b/Classification/Text/Sentiment/sentiment_full_auto.nml index 5829e4b..fa2a142 100644 --- a/Classification/Text/Sentiment/sentiment_full_auto.nml +++ b/Classification/Text/Sentiment/sentiment_full_auto.nml @@ -2,7 +2,7 @@ oracle("mode") = "classification" oracle("complexity") = 0.1 source: - bind = "/DM-Dash/examples/sentiment/data.csv" ; + bind = "/DM-Dash/Neopulse_Examples/Classification/Text/Sentiment/training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Text/Sentiment/sentiment_multi-GPU.nml b/Classification/Text/Sentiment/sentiment_multi-GPU.nml index 26487ac..7ecb2f4 100644 --- a/Classification/Text/Sentiment/sentiment_multi-GPU.nml +++ b/Classification/Text/Sentiment/sentiment_multi-GPU.nml @@ -3,7 +3,7 @@ oracle("complexity") = 0.1 oracle("regularization") = 0.99 source: - bind = "/DM-Dash/dsl/sentiment/data.csv" ; + bind = "/DM-Dash/Neopulse_Examples/Classification/Text/Sentiment/training_data.csv" ; input: x ~ from "Review" -> text: [200] diff --git a/Classification/Video/HumanAction/video_class.nml b/Classification/Video/HumanAction/video_class.nml index 185d2e0..97b7a4e 100644 --- a/Classification/Video/HumanAction/video_class.nml +++ b/Classification/Video/HumanAction/video_class.nml @@ -1,5 +1,5 @@ source: 
-bind = "/DM-Dash/NeoPulseExamples/Classification/Video/training_data.csv" ; +bind = "/DM-Dash/Neopulse_Examples/Classification/Video/HumanAction/training_data.csv" ; input: x ~ from "Video" -> video: [shape=[80, 80], channels=3, seqlength=32] @@ -33,7 +33,7 @@ architecture: -> Flatten:[] -> Dense: [512, activation='relu'] -> Dropout: [0.5] - -> Dense: [1, activation='softmax'] + -> Dense: [6, activation='softmax'] -> y; train: diff --git a/Classification/Video/HumanAction/video_class_auto.nml b/Classification/Video/HumanAction/video_class_auto.nml index c6f686c..8fa76ad 100644 --- a/Classification/Video/HumanAction/video_class_auto.nml +++ b/Classification/Video/HumanAction/video_class_auto.nml @@ -1,7 +1,7 @@ oracle("mode")= "classification" source: - bind = "/DM-Dash/NeoPulseExamples/Classification/Video/training_data.csv" ; + bind = "/DM-Dash/Neopulse_Examples/Classification/Video/HumanAction/training_data.csv" ; input: x ~ from "Video" -> video: [shape=[80, 80], channels=3, seqlength=32] @@ -18,7 +18,7 @@ architecture: input: x ~ video: [shape=[80, 80], channels=3, seqlength=32] ; output: y ~ flat: [6] ; - x -> auto- > y; + x -> auto -> y; train: compile: diff --git a/GANs/began/MNIST/mnist_began.nml b/GANs/began/MNIST/mnist_began.nml index d2f32c6..d3dbab4 100644 --- a/GANs/began/MNIST/mnist_began.nml +++ b/GANs/began/MNIST/mnist_began.nml @@ -91,6 +91,6 @@ train : optimizer = Adam: [0.00005], loss = l1loss; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/GANs/began/MNIST/mnist_began_auto.nml b/GANs/began/MNIST/mnist_began_auto.nml index c453002..5d63ad4 100644 --- a/GANs/began/MNIST/mnist_began_auto.nml +++ b/GANs/began/MNIST/mnist_began_auto.nml @@ -37,6 +37,6 @@ train : optimizer = auto, loss = auto; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/GANs/cgan/MNIST/mnist_cgan.nml b/GANs/cgan/MNIST/mnist_cgan.nml index 3923d23..12557ee 100644 --- a/GANs/cgan/MNIST/mnist_cgan.nml +++ b/GANs/cgan/MNIST/mnist_cgan.nml @@ -1,7 +1,7 @@ 
oracle("mode") = "CGAN" source: - bind = "/DM-Dash/NeoPulse_Examples/GANs/began/MNIST/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/GANs/cgan/MNIST/training_data.csv"; input: x ~ from "image" -> image: [shape = [28, 28], channels = 1] @@ -72,4 +72,4 @@ train : optimizer = Adam: [lr = 0.0002, beta_1 = 0.5], loss = 'binary_crossentropy'; run: - nb_epoch = 2; \ No newline at end of file + epochs = 2; \ No newline at end of file diff --git a/GANs/dcgan/MNIST/mnist_dcgan.nml b/GANs/dcgan/MNIST/mnist_dcgan.nml index 4fcc913..3583ee1 100644 --- a/GANs/dcgan/MNIST/mnist_dcgan.nml +++ b/GANs/dcgan/MNIST/mnist_dcgan.nml @@ -70,5 +70,5 @@ train: optimizer = Adam: [0.0002, 0.5], loss = 'binary_crossentropy'; run: - nb_epoch = 2; + epochs = 2; diff --git a/GANs/dcgan/MNIST/mnist_dcgan_auto.nml b/GANs/dcgan/MNIST/mnist_dcgan_auto.nml index 578a49c..e76a17b 100644 --- a/GANs/dcgan/MNIST/mnist_dcgan_auto.nml +++ b/GANs/dcgan/MNIST/mnist_dcgan_auto.nml @@ -39,5 +39,5 @@ train: optimizer = auto, loss = auto; run: - nb_epoch = 2; + epochs = 2; diff --git a/GANs/gan/MNIST/mnist_gan.nml b/GANs/gan/MNIST/mnist_gan.nml index 9a0d33f..9964446 100644 --- a/GANs/gan/MNIST/mnist_gan.nml +++ b/GANs/gan/MNIST/mnist_gan.nml @@ -54,4 +54,4 @@ train : optimizer = Adam: [lr = 0.0005], loss = 'binary_crossentropy'; run: - nb_epoch = 10; \ No newline at end of file + epochs = 10; \ No newline at end of file diff --git a/GANs/gan/MNIST/mnist_gan_auto.nml b/GANs/gan/MNIST/mnist_gan_auto.nml index ec16742..1e3d0a1 100644 --- a/GANs/gan/MNIST/mnist_gan_auto.nml +++ b/GANs/gan/MNIST/mnist_gan_auto.nml @@ -41,4 +41,4 @@ train : optimizer = auto, loss = auto; run: - nb_epoch = 10; \ No newline at end of file + epochs = 10; \ No newline at end of file diff --git a/GANs/lsgan/MNIST/mnist_lsgan.nml b/GANs/lsgan/MNIST/mnist_lsgan.nml index 8685323..517f74c 100644 --- a/GANs/lsgan/MNIST/mnist_lsgan.nml +++ b/GANs/lsgan/MNIST/mnist_lsgan.nml @@ -54,5 +54,5 @@ train: loss_discriminator = 'mse', loss = 
'mse'; run: - nb_epoch = 2; + epochs = 2; diff --git a/GANs/lsgan/MNIST/mnist_lsgan_auto.nml b/GANs/lsgan/MNIST/mnist_lsgan_auto.nml index cb908aa..9315e03 100644 --- a/GANs/lsgan/MNIST/mnist_lsgan_auto.nml +++ b/GANs/lsgan/MNIST/mnist_lsgan_auto.nml @@ -35,5 +35,5 @@ train: optimizer = auto, loss = auto; run: - nb_epoch = 2; + epochs = 2; diff --git a/GANs/wgan/MNIST/mnist_wgan.nml b/GANs/wgan/MNIST/mnist_wgan.nml index 99cb63a..19c9e11 100644 --- a/GANs/wgan/MNIST/mnist_wgan.nml +++ b/GANs/wgan/MNIST/mnist_wgan.nml @@ -1,7 +1,7 @@ oracle("mode") = "WGAN" source: - bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/GANs/wgan/MNIST/training_data.csv"; input: x ~ from "image" -> image: [shape = [28, 28], channels = 1] @@ -64,4 +64,4 @@ train : optimizer = RMSprop:[lr = 0.00005], loss = wasserstein_loss; run: - nb_epoch = 2; \ No newline at end of file + epochs = 2; \ No newline at end of file diff --git a/GANs/wgan/MNIST/mnist_wgan_auto.nml b/GANs/wgan/MNIST/mnist_wgan_auto.nml index 807406c..28a6025 100644 --- a/GANs/wgan/MNIST/mnist_wgan_auto.nml +++ b/GANs/wgan/MNIST/mnist_wgan_auto.nml @@ -1,7 +1,7 @@ oracle("mode") = "WGAN" source: - bind = "/DM-Dash/NeoPulse_Examples/GANs/lsgan/MNIST/training_data.csv"; + bind = "/DM-Dash/NeoPulse_Examples/GANs/wgan/MNIST/training_data.csv"; input: x ~ from "image" -> image: [shape = [28, 28], channels = 1] @@ -38,4 +38,4 @@ train : optimizer = auto, loss = auto; run: - nb_epoch = 2; \ No newline at end of file + epochs = 2; \ No newline at end of file diff --git a/GANs/wganGP/MNIST/build_csv.py b/GANs/wganGP/MNIST/build_csv.py index 0b454ab..8ac5f29 100644 --- a/GANs/wganGP/MNIST/build_csv.py +++ b/GANs/wganGP/MNIST/build_csv.py @@ -55,7 +55,7 @@ def write_csv_file(): Path('images').mkdir(parents=True, exist_ok=True) with open('training_data.csv', 'w') as of: - of.write('Image,Noise\n') + of.write('image,noise\n') for index, image in enumerate(train_images): img_file = 
'images/mnist_train_' + str(index) + '.png' diff --git a/GANs/wganGP/MNIST/mnist_wganGP.nml b/GANs/wganGP/MNIST/mnist_wganGP.nml index 285d4cf..9564c62 100644 --- a/GANs/wganGP/MNIST/mnist_wganGP.nml +++ b/GANs/wganGP/MNIST/mnist_wganGP.nml @@ -66,4 +66,4 @@ architecture name:discriminator: train: compile: optimizer = Adam: [0.0001, beta_1 = 0.5, beta_2 = 0.9]; run: - nb_epoch = 2; \ No newline at end of file + epochs = 2; \ No newline at end of file diff --git a/GANs/wganGP/MNIST/mnist_wganGP_auto.nml b/GANs/wganGP/MNIST/mnist_wganGP_auto.nml index 00ad5a8..478ab60 100644 --- a/GANs/wganGP/MNIST/mnist_wganGP_auto.nml +++ b/GANs/wganGP/MNIST/mnist_wganGP_auto.nml @@ -36,4 +36,4 @@ architecture name:discriminator: train: compile: optimizer = auto; run: - nb_epoch = 2; \ No newline at end of file + epochs = 2; \ No newline at end of file diff --git a/ImageDetection/ssd/VOC2012/ssd300.nml b/ImageDetection/ssd/VOC2012/ssd300.nml index ae039d5..6ee8e80 100644 --- a/ImageDetection/ssd/VOC2012/ssd300.nml +++ b/ImageDetection/ssd/VOC2012/ssd300.nml @@ -239,6 +239,6 @@ train: compile: optimizer = Adam:[lr = 0.001, beta_1 = 0.9, beta_2 = 0.999, epsilon = 0.0001, decay = 0.0]; run: - nb_epoch = 2; + epochs = 2; dashboard: ; diff --git a/Regression/.DS_Store b/Regression/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..6e6f4ea80854cbf9e8c8663bac262d00f6e8d2f8 GIT binary patch literal 6148 zcmeHKyH3ME5S)d8BGIIzykFoCtSBj{k*I+rq6xADq<6)4@oCIHgk)JRC}?0-+MT=g z&Ye7k*9*Ybr~M7E0igz5sv)8cYBH literal 0 HcmV?d00001 diff --git a/Regression/Vector/.DS_Store b/Regression/Vector/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..a246a9636cf21ea0ab8f9ab28c06c295a142a5ea GIT binary patch literal 6148 zcmeHK%Sr=55Ukcf0$y_TIKSW@tRWIFg5VE`21E$%A?|%oepf!t>IY)j=*^2%Lr=~0 zbj>hz*xm+Ut6_TuECDR&j`;9lYQFD2v9pReTAXo*du+D*=iO63$vz!$?i|+`@Qxne z`2EfabKEjKV2@Y4;U$mGt<&czDg~r~6p#W^Knnb>0Bg3};v!L_6p#W^;7bAjJ~X;x 
zS2!ldr-MVZ0K^%?VVp-VL2Mo%c7#H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0+cr!|2ucZpAYIZSJ%k7X(o)hLN`t@*F-S=%sDN~cfk+J9-5t_HH`2@i zBMbxIp5N_#zu$VF=lu`vwKj`2d)8ii@9R3R^E{5@Jg$k+)mFWA^WIGy9GqL~YRb=W zaPWt5aBx2o69Q*MyHhd1KU|+@s!BK|!;HUy40AWFBJlUJ2dPK_2?XrI$yWjPQzJg<6=I;gZeombxsGr4|aZ985>U{n>`Gd z_CmOyCEmefrT8DmXiDo4`YN6IF$;mx|2S@j5^p_>pdb$YzYjV^mUakErt8iB@eMI> zN#*}?mg2vc{CC9u8!-RN%J}bk`CkU*zv<$?3HHDB<-g_ie?X7_XSQ7agkSBTPaL86 z_jr3Jl)=6Bv-EaucsldtGLa*9;5V;2R?0&K$i&gj+1{_!RLrC`&^FUL8z8i7m44UWEtScFB36AI3} z+p3~T^A~Pemr<}h(~;gt+B_N8@i_2KX?W=0f?gPiQfu0^IE2q%Ow}+&hw)%8w-lB# zeWy(DQD$RL*v`2X&eeSoecX~Y4Ga69QXWH+)@W;LyJE{zU!iG?&Bqx1W^9HWQY<8X z%UiffAqT}WV8Qz?LpfI``6cC=@>nET%0kk)*2p5{!b|fP7Lum4UZX>L-iS6|RmGhRPiVVtY{Xs5+fvV&qS%ik8a_p$z1RLd)Kv zWRJ`N@cb6+OHrV8){Q?_m#cS&tgbJnH?{ri^(*ZAK5oYQH`+;mzgm+zFRq5a9BR2n zw@9LDd*s{=Zf|@fe&$ejb+H?l3q1;FZZw}qYRisH8e3V!H%&JkO#Zptb5d{-Y&>db zJ75N`5wB^%?8ikn9|j-J&7TkZ3tH|E2(>hV>zI#T6D7%r@cgLpE^b8hZ_MxX=(bYB zZe!lw+<8q;h@m2HL0oT2w!CnM446rPWws81sr9xDvE#s;)wH;k)NQ7hTQre`_%;Hw zy2R7qGqVMEfeCCFNc0R$H-p7MJcTYF{sGiqlz~j0RPv;s>fhM9R0*&1v#!f(>~>*{ zFgrm^BdT#rP9>sg?f#+juzR?;mUA**4~E+zC7-c%t6m@^-E_)ZC%_D0zV=d=i*4VYdW3c+X`OT zwVZAQ?&Y2hghh(XUd4#K!?hf0zG61y8l$l-Qzobx)UaR8x3YcONPzsF;SL8~pN|Oj zB(-a;Ft4WgTT02`YNk>T_lHfJEh0WWTNJ37FOOcei^s2-oXpLb=<*tPRv<204 zxUkoV9KOZ=ngWdt?=D4_ge@R|99n#arSpZ`U+osxxMu?cwXhA>{#-lDiCrs>$%+xk zqE)TU|L_(IY8G7*VZ4%912+F;L!$?Hs8I4CYf4Mz^Ip}by%nXdOe;f@OO4SJ`Z{t| zCV~4^8Sf+)Yo#x?zho4a)c=fDTwGg=mn|VaG;T&Dt3=#PJDW?)xmu8$%sOT`z7wC~3BwM#H?sY1b19>aCRJYJa6-Vum#58q#g)$}e-U8N zi>T(!xuq*VBoLep>n_a_W$Ff0WsUvf$HSsbYLkUp$}k|FhR8$ybaX?kPV(+1L*|vr zNKT04wjT}Y=Uhyrcin7(bi^FygEk%vUF^cF-jei3_wZRB#}Yc(AQ@asANB3_C|r3R zBdFnx+kty!s>km`5Q#bbtI$b9V}UUAYMhWD;p=x2@E_9Xy$TJcw|FHv($mm6&yHK{ zna1GA^2fvxt>#qGp4l33Y2*JSNp6DZCyNd_={rlx0>=;5(?-oiDK7?){N{i9`C$Aj z3retHFcg~@B}W3LDq^>N`9~_^Z2CjOSw&GD23mX@Ixr7;oESATeXSE8(~F`NT4jV@=!*5 
zGJPxjQ2w7HQS=^X1(n(Z+T`-dj4{g7XWPp@`N}QcI2>`jQwXz!a?kHX5x*kyGEw;< z-PD;Y-Ho77a=TtM2w4+>Vs@>p5btw>qUG~4h~@Vy$Dc-5EyZ~HB$7N?w2ONky#1J& zw>?vhR&+SdY`}PAEbw6Qdadn?_E)OB;Q@5RoO=V!2lEk)2~)$y;g*A;eAG3<{d-WOQy5ZsN}V?Igb|gt_nMWcoqAaM8x?!Zg9_ifBvv+Fh_i3 zM25fC1QLFewT4&f5Nsh;_^E7D{Rh>i_;FaxjZ$x^ZU~9?=T)KL-x>q*`^f68i0MY& z@Qdq0b)%_Go^(ld(DTaAQUezBooSbK9t^*p~xixfnm7j z=RnrTdY7LYEUQ_@@=zbkC2pNYi8!7Ve~)q@KY8$l(Gz}fbc`L7Tj2+!B436*^!(R1 zjSQa=xtgRL%GX4TL}Gjq)W0^Lh8zYi@i7YPGA(_i*q3ZRe&-M>To}sNus@vf??S`0 zrB{FNfyTerZ|hAG3d4w0-F04q7y)GRZo9csq(DShqIz%BSryDJ+6fsOBmKFpJ#T0s znNerkgkV4dJ)kpFMq$si_nV`Aw61s=1Av`8{%22edE+JV4Ek3j z^9<<3JW3GooUJK5b5$mYeVT%%8lTJDn)&W>Bkcl$mv_-e2@Zh-#%x1laKpeG;#>%l zRkxaj>WC!j>cGP$|Fkax%-LRxb{ltSg$SU15ynf0S4Mt(?+<}>-6dQ6Hoii%ju_%&c=-G_24tp z2R{CgH!h+N1~NGbiFBu)YwD9Gc3PPir(Eh+bfsQC)3a*KG=`8^HiG-< z>7y4?8rL~FNzn*%GcWVznZAV4%`QjHqYz};{;s4iOE09_g55#;ADpkp1}xmC#_SFx z|A!BYuqgg8dpNjTMf{sQXc@l?2_jU{p~Px+)=vmTdmv9{ymkK_4T{A4mUxW|zPBrn z{A~`^|3P@~u?X)y%zcOU-{6D-uR>WdwL#ZqWyQA)W+G!)ehYg$2scFnve7E+&i_vX zyr8C_3Z8XHNxo5iaVQefLN&MvQhhkGB;K0_Ji%r>P@Lz>hSUDftXP{8xYw?{!2(&K zxL~c_);-35Zaew_5CyfpxGMg$R4KOOUmF%cGGQ%O7YCwR+W#u><6Q4iQaTJKw^Lp4 z20jV6JfCMqypMW_#hssW7WrgL03?T~Cp(Nesi%O|9^F>EM%Bk-tFI&pYsFI6T`O}! 
z*6=ekXx=rtqwrD0VBdHi&&SZW({7jNBO!u?G1XRvh>)lazQ0PRqoyL=2^J6|(s_|F z)UEye(=8E%x6eDb#|%wcEHvqHfLq*Z7n43mNXn7Faf(N4_g2j$YXn% z3=49g+$8uDO2jmCxvi?e+XErrZo$m8B%_?qm~yV8{>5WjHHj|)NGCAzaOs~JrNBd~ zvV4Xkd*q$6*+KzKaNHFavbc!J+nDQ_9(kjpV*tCEQU3A>oSRFR zonN^<)JDWSZ|*y3xFNMV4V|{|w-9q~+2Bt56YhJR=`;M|Y%%bVZ6d4YyS}#cxAQAh z%hKg;%asdq*uS}=ZeibjIx`5-=j+q_S1Y!5#>+MV4YOr}5LBq%nIj1bb z&}3uFx`-FoM^Or{*@&e4BpQ$q0NXc#vg+9>|7{LmK`XyG%aa#vspX7P{~8dpBQ#)NYIBxqquC;);h^^C~u z7Ey6EpTS*HmPocEMyq~r9R4fPY)Dc+ousr}ezUp6pqH-YU)WESZuK4iE84cw z6N}jum#b@<+$igN7=1L64NI7kI)G{zwtqSF$w5Cv0ViUoQ2?h2tl0Tk_~}>quod)t zLuGIgnT_IC%1F+R=6cONH{v25kT~t?3WPuBL-b?j3*EJ>$wc;xLI5#Kc>b#5e3JAV>>D6fV{#A#DgL;=) zp=%)X@@wN=M7hA4s`>BLN|Ah>vYg>8(d3{(_KnTHfhWV2J@_xMQsL=E;royoH8<8 zt-{~(#unGRwE(m>-{i%&_lMY&K4xN|Git zZL0|Je)eerh>pg2#DpPb$QOJh7WdrHX>E_7C9|Jr&rMXaro+p&$oBmQr*}?Z(Z*)& zTKqs|u1)UdjP3eAe59ymF`x^Vk^_haeRg2Mi)+uiWRp+~BtQo3?ybPvzmdFHeID?q zgV_>VqdN`Eb|ZpfIcC$w-E3oD_0F6}#gK$qvsJJpS6RcxCo_p%SPQhzn5m|5L(7A* z{Txej(^TEsXqmj2ZQqLnUm6-qUqjTaN{?XY=F_7td9_28fW0KsH zh}V&=Q>HG(2ziYqaTD>ttt^0|Z)1t!MzSub%hIr`LX$;M)J1*LGv45N~5-LO~*Jw=sL zexx^P))~uo%~6*Ds_=ZuW6Q7Mh|TcWHJ6{mJUar?f}C7K-UnW(lw_M-9`jr0E_wWo zTP_M^r7l@;AmHD!<9ghn6hWj0-;&_!iF|Lw94CY@CQ{|6 zvI<`lxXWLwsFD1wveuU*bgyIOe>>STT*JQ+VuvB-buVZvPy`|HwJ^PYPy#5mvs{^% z|H{#{H*m)#G-=fT1u(c3g|D&21hzU@G^E#J`e5k?h1!p^ALcPhb}YDslZbWVcW1n} z?>D1FN4|YCpog9oTG5d18yJf>0YR1l)Uc9?p;cQYsRr$S-~FoTPsyE_cQHqNaRs+l z!?%EhKMTHp_md^09dAKf`87>{vMjP@&K}`UJfHM>vzjjK*+1OcZ_dq__Vy0Vk8p(Pm;|5g zK@)26-So>ek9}4kQ8P$1?3HWy6q+skJX|3->*LOuX9O>Ysz?PoOvlR}%z!S&5}L>G zPXKc54yYS7Nda~_Ky(XB#Tnm`;IRvNfrQ&AYy~H3qX>Yz$OES`Q>*U zVz(9?g&#gaW0fe^#^dfGpg_wru{`UiNM#GHGi1`|H}E43mB2kTzxoFdtln+mIkcV< zL8HT&FRhO^W6vS*M(ZMnD7Q z<Yf6!wibbezhQ>I-{930SD*Ft-xLY{_H^fJZ)Z z=0&^g9o)spgJ9X65a^}M6WK>B1>|BpwDe{VdgrXS&+D$hODzQn>1b! 
zfDd_B49S)Y8}l>0pF%vu+24S$a^q%E#W1hyMr1KD?(_yWw?BSw1?YB>L?57lB-~<7 zj;27(xB;pfXJgPWJSRYI`HSC*C@3D-1>Ci(f8o&mKuo>csRa-eGqBK+-OjTGNT?a4 zg0ojEEh9& zu8T4wkYlRM76d|VHP=^YNvrdIRL<(qHgrP1O7cRVEWCx8De$8h-4#ExV`N2;a0Nf4a#} zU8s!E3Aq@5^I7qH(Dy)#DdIWRkaAO0;VvuLf(QS2e^}uOpgPpHz0VK8!WM_XnGU9@ zOF-rpvUCqRT6D?@zDNGhAtt3HCE$wrFMaV*ZLOG}39|`FO#nIWlx4qz|3d;rp0RHw zxp6T&tSK(ruVkv5W)iG=S!PZL7rkui=PkTb*7v19j;}1X8PKVPl#u%#A%T=`o%EFb zh3blk2t+(pG-!afJph@pWDYfRDXzMgm&B6vy7=y ziqjt&7+8h9^0(X}MCZ6vew8wT=lqXqG*#$OhXoSG5UB`p7@BTAtFk2)b~0tlBXK${ zC-*q`Wad7b31F@v%fNj@&@NEZ6Z=7qc*(7t@spE}`|Fraf2byVEV^%jI~W#qcML`r z&{{?1?N>b^m#RKpG{z4#xHsJkomzmx)Ec}(YV{m+w&{j!;_q>${ipf~Zlep%H2pfx6Jxek$PHIatrT9_KdnB^$pZ;)` zM6#I;!E3ImyBjBI3t6a)feM`wZF-LQ*zbdtP9Y?dTo+*V4I$>RgxU?QGcAsq#HASJ zEq_g#ksTKMw5S;f6CmRYge(=7GPjQ_S#?Su3V9VIqVyE9Eq%U3W|2}plek7T=(-o$ z$vjEBE&peks=IhNoGKJOmH@4kKAW-ApsNcI<2~UzrET7r2OaD@VX+H62N;1q*=?o8 z;`XNB-!o_(H=i*FY=7Wb*5$itT5R`IE;s<(=+xv*7y zi)6`2aJzBf3s5+SG^8I=SG=diLcO$OCy$g}JdURlz5Yxd{Da@Rn}=nVX>8ud2C?*) za}b9PlS~{>lrCl6<)@FQ0eVqAJKh4r?M9!Uf)S2X5pHAfs}QtDwj=A%CK+{wQe335 z8AaT35i&XQjv4V=<5{RuGbZP#*k%&+vwv)OX>uFWvK0L&&e25i?eogo6o?22ru6d9f*yekXPIqI}TTphq6rV zoylKvqD)Parkg?Y5n>YZ?Ngkq=r(ewC*`mZxQ+CtD(g&t)VEzvMCUGy7&`r}JhLe1 zyy=142yZ?cr8z^~xQH(Co}cAs8WZ~hHZHuhF?9ZK>MW0ih&d-d2M;KhvuI*nSzN9}MqGj=S#TY#dm1#A ziRY?Y>^%p?UQ*QdFa;(>7E8?9CyE|5jRYU)ZxWdu&w?Cmh6x+us}R+p&2{6Q-$tGA z9k*78Hp)H`+h5ldOH#tI&C7w@S3qXwhvjLREOeRVW=$P~7VVJ!5 zNI<+P_vg(`QR;5rWi3U@QrfwI@r*?Sq)_HEGY8$OF{%`oAMEf4oyT}i-;ytzq zx)ik9cz>j;fUe8g^{N)gpnw`V`&-SBb8i$--bZ*AMQ-nLzwCh0-gMv5)HVbjun&WK$seC0Gu*D0 zfJUdS2~1CX`IN~;;hZ;tC(+M)pME|%5c0r|<`)@0JS|{A_Q7AJNy&QEur`ze?~8UT zOZ;7=$w+_^rDMN~qG_aP=*!m{l}$i@OklNcDrTAM>8gxkKSD-`C!u(TX#(jEOciZKZ4W;y&1ghy7g}L3#227w%UOCY&n$)*_sScw}@l)rQ7D+0p4x?G5 zg+W+nVnH$G=Ln%=)sugeETwo6hr;4&UQ5eGOB6o-I~N2XzcnkHPb^jPWgGKl#Xa{7 zsr{sUA9v$&vvk^T5P5-ZTGHi2yP^m?5;}7?g2;ILRes$3?XLr9$$lG>XsS;H>DxE; zh=;eXW^CgG`p=`Kw%F}4B^ZhVHa_r+m}i*R82BNPFS99+Ze3Qmt^pvFhXf@#{6o=m z0vQ!h8-3J^FT+x8qK(t^NCMGoL!>UJ;L8P!Q9cZ7h2^Tjg@y 
zlBMu@TZUoK$ZPvr`H>qn{Pm;iwn{g`ijSCm{c8_l%j-irwp_`AfsZke$$e!CN@8*8 zs00u=s_ZBJ?|Pj>N!AI+pN&m1UTrR|MR%HqBGFpO}=>!T7&pBD5GcYy-#qT>GR zqXKne0WY<^r=7ITWDh$Z+GwyG-=UJr<1J%cNuCBu?fsP8K}r)yhV#nr{^9Lb2Y z_7PMT+*W2MFLS9zA2cj&Lq&5IYy+!=*yraZ;a|?!Ur5S#O0jC-h_T)G?qyY}tgp#3 zI|;9+@aU}Q+5~G%#wboWHUH@nY95nQ+)K-A9d`{u{X|gwq;2*|5G|=wt$c*0TmjcJ zPXREB?P>|tjG`H74pi!`S`(fsiFxsaBUqNUaqheF%y4b!{m`LW4$6*;PjDxoo2Wb? zPOt|{c%QPgp#qkTLR;R6Deqg7+iSRTl(oANPo_JiIcg<6C8i36$Fx9d_oKKMpFd#l z2-_V2chBVBuY9&Ca~R?J&Z71{hAjTp)GgsT$n4Vft@n{U z3Z8&gR)}g4Hqp@+Z7E6I(&VDZ?%%J;pIrN;+Qk{K_Ftufxmx!8;CS$x!9};p<*#Op^h-s zB6#Pw1%EkEC1*DRwz0B}ib{9Bb6b;+`YDv!Z?}e9{y65_tvU(#2bZ})_KlOHnxeHOgLl~&AQ^GCXy zM<)Oldc8G!zMyg1e5U0&wSL9~TFO<587GLgnosK5_%rQQjB?>v8*i~ER{tsN{ zFbFh)zix{SjSc-mA;;>!kDJ(X3k=31V{>B$RG*Fv8w$sx7o~=~FL4|_)xN3|CMYKb z@8&KYHlZnr#9vLErSCZ@nvUC}rSsctviJ@0ddkTUza_eixEpxAN1c=VLu<3uCP z)=MuG6@F&)&3$hyQDoC;@Rf@xNaSvR3t@`^*)&|x?1x5kRDwlQG{I1kHlbI$eI z%*_SAx#BxY4L(EWh3U~I`|SpV`>F4z1UXt{zc0AYM;*A~Y1d_H$Wm`=Ovtu$ZGqJV zJiiORb%uu{>J?rtHg6n$(T10LIzoE&rK?nZJ5`^_XKEUDtMcI(;t}ZPc|dGultOlWXe0-Ax|F6RYEYe-l8&;nbo_bzkKgQ3C@*8%zlKihRaoHbPpMlo}T5n zD$MOdn)ZsZN)Z0dyo$T$JN}1I z!ZLzb+?t?UAFKErDqx)r&$le6f_D6i!>^G~R`T7O3M_@%vshB!TF~f6+=;vf@4D-rVdjv_(nTPU& z(A|tF<())v~wIWTxi(~xTMHwOdgt7`NyZv^GEE8i? 
zqg@~Oqi6i2N$lNy14H+?jZ&3S545(U_?)V#Ge1IntRM(9);RtVGIk(~dL$n9o0|T{ zL?T_+5bq*rU%5Uou5Due$Ga1S|-%FFAL)dBH=viItg2#aO3H*mcS= z>ng$;2Xabvi?HE$Ii0y(t;_dnN9c*@)=zLF>UO6jls<(cfEjMhA$ ze3ZalVr={R_si#_hFJ~$lphE=2t7f!3E4L(&TcD`;SJ?fEad()HXpHwae{u|DSqY@ zHmsQCX*Sr0Gx0i2P1sr7X`sjv01o zIfLxxs^4P#B2;08_CKex>i<$yT^O}QqT;J7pZ7~%xd3x{aLD_W#1kz{P+Jr6p8D05 zmHxX*U7;;K==qFJT~=c0X-&W+W`g|jCEbXpw-t&1S%X?^08@{xz12oV%LQ*vBZafg5nwYY-q|Y5!;5sfu!8q@cFB9{0pU94o)mCMW%*2wXfssR!vBPdq6WZ_z zrrGN!xAx!W7#4Qn%J&K>y$dwSs-vRiSYC;w4C z-pe5AT>;nI5M&lz?&)qf)5WFf)bCFk3nvXbj-Z;Ep8D~P8r`#m9>}MXuMFcXTMu*~ zPf7GakoJbX@26&3GMV3~k@Arui0C6cI;nX5XFB(J58vQ^z1{U%w0)b|cr4Qi-*+XG zaO9NcS%_bI@>5t>`4Uk9vn4zVr`^b2t=rl2vZE94l ztJN{be{pmn+^tEzLf<1*ROE0x{3x_p z)lBQn#^3$G2Z}w-7EFJDWKrHi%iIcXgAq zSTU0a?eU;)8~*o&w*jIo+=B4^kcojz#NU$Ls>G_h^r@}4M`{}aN>e-?b)c_sJ35g&Pf1z=&bqV1UJq-&>HXRKdcsZ$=xB8uXCTIm z9&+&taC}+&eJgaWfHOOWg;mJiAk1Y#-y@1I=)GFfD;b$@(Ol(7KppLQkp26sbUkZ~ z12UC_F-s49f4G`@I)dD5|INOwNP#-(!#IYC$Qu>+NV=<-EZ9Z;fAK{YS-NakzWG=k z>BF@(eh#>a;3^U6KK3QjLI*z-dXq*NAuxGS@=HKdI@0`^WB0eG#zOF2u@&4_`RIuY zX)rw>ct4x?(dYiAH9~EQHQK`p1;9Zbs|Q8z*9gUNdgDY3b7ao34(s!D_wI#7UBujU zAloh`5|meG)dkp{F_q7Wz$M(A0>*Hh&c@Vp*8kJp_7JCPqQ%ex@;ybf5pF+EwZcyj zEv~cGuQuutIqOwue0QU26`YkZD3vF7RDU}$+Jmna`H=M6YUF{%J4ouK)9!O}cL!AK zSwNH*57MwxEpfc8;hxzV{`tMOlrZMidbk1+sZD*2-5>n>P!P<)`)> zI=-;wy_D%6nD2vm3llwj%0o^@Dd`hU_wII=L93X~4?eYeemQj?D0lW|zcWZKa(6P* z@o>nMlyI&|V`Tp3>a2@yME6_UUn>_U8njY&@75o|;%uv|Fux8{yAT;__}$ce#nYvi zeNx)V>K2(=f07dXCZWWd;yHEmIt(kJsF!gvsKAKI!{m)!rnHYYB9;?;h9XfdY`3ht~`~X`~xK9`sb(58gwCfZj#KOh|5zacw&bE)1&8V zRDPos;=tAjbb12k$Z~J6+wmW?{i7A-b|ASH{oXClIQ6PDX1kNlBO3}!q4#4tEp;#* zJBB~DFkIZpgkz-m-`3;>I0XL1fp5idM{GZYw^!1mo0`_J7GaYYb4#5M538o9-_4mX z@5qHXG?>}F5r#s9#!@d4R#c?-7ieZ6J^Pkl+ z#&L)ry?+|l=HyRW^#-6PJRsIv4>F>x37#jzL(bXKUIA~mb$*Y5<)&`|TR!UkaN}UL zndZ~yLgcGFw<<|q(~vkt53O-+4$fAG{8AvU{Y2Z1u(o}^jkrIel&c~pTuqE&=6nP{ zyqNw?3{QGN2Ja6_IR{+tZa$rqs!4`w#+Hx`QRW8H&%Hkd;EJ`G=Z-rv+I_2UzNgi~ zw8Vag#avLYzx8aqYeH?4X{y|R571n6NhUeY#C_ 
zE-H_qdv?rxPjGGvyzzb#)E$a-FDi$}+hI08ZrI8gM<9vK-3reQ6N*Yr3)yOJgtt8y z5j%EmeYtp48mI1S=vK%^9a30S-v6$DKBMLK&)X3(c{?UM3mzg@OXB_*3oDNJDA|StPcL{klalg2Xm!!C6~t zc<;8Q*)vtw+bOd7&4F$-xrVdap=>4~6)T4+ZAZtu+);i%`TkZeWbJ!8)`n+WWslH&TCmy;JKUWK-iU0QUCq98>X=o+U>hJsaT-&LH>%;cFKv-FFZ&UTlna~86 zRU2T>Pr#@zr>Em?dO-#1LAt(gA7{#)+sMf$#E-jK^;pQ1$;>Q<+lp<5;c-O1=1^2=HOQ$;+?YG`4}USHNW|!=D6&maxHF0#bUE}#M68TJ7js7&a(to*Wj0go`h%5~ znSlK(4yMsic++S9C&NlD~Th zVWBU4iOSCCWpZDQeQdj=^2L$&m6T(UI@v={z$ zT}w`{W=^`lXq_ne^ox08cLj~Sm)Ww+QBMkJ=BdDi*~L6O7werp2!h(tc`|h~ z2fw`-SgNQxYl2r#h93*xc(YvP`CgnNN&Wt<%B(mqw=CbO2jM4+n9OM?UF<_Wf4xT$ zE#)RbBnszhoiSUSl!IXBknkW>R(UXB!4rJLNjAZNucXqH9K*U2Mk>hT>(f-&>m@ki zj?~{xB7L)Wk>T{dyk5S4Hvq0v2S-DwqoFwZBeZt3*C~v!U`pmime7>ob=6V3qIY#Q za~^jd5N+j1a|@?=P7PPy-#2xx7FXbZ&d0mWXLbJ6#KtU;R^(goOo(}eSTEc~{7yad z{BE`^yyS)52(Y_ABjlTc%d|i{WH@8HH&ggCoZaWal^zATdpIfieNtJI7Y%~x_T=Qk zaq4N|ZFugSw&Cx+G$NuRN5Rsduea28@crj^e)&R39!{t9bhnP@+RfN6|FB3?T{t{- z>ey6Z=n14F#UFnHHX+;FdJHPeh;l?Pc9a$d>NNNZRUV$}mHyJ%NWaH(toyum)c4mg zzu%k5(!SSl^npOE+Wa<(%=}|Lt935_;G&%4L31VMZ0@cIr|r{*ht^z2AQxgS`~j>< znb;&;!&dv+!1$Ouq7pYawR)v_rHl1X=19Y{4ZDUH50q1VQe!!ovpwbnZgc-ql=Zfv zh`?^Aq4eliSED}kRXB%eQ@x3(Mvq7R-~<(472YQJ;^*Mm_$p7qh5-3fwZ(+7SJn*D z?`kD#Q&hOGWo7??CGQZ}70G+*$F%4krfU*CkF_=3qLiJ5A~r0beHxa}rubOXlgWwP zGE3KK67z37jsdgj-$^g0EJv^`THGCp(UO?``W6>_O}05%cl(IM-tCx)5g%TUji;?b7)15xFj)nzeBtdgWdH;NfpoemNAacgK=d=o-=!*M8uimf-d&x!%ei9G$SfH> zEr$B}#~@!@Z%i8uZOZ zMgyNZ+%;SP|IODB3gK;$=!YJ?%e{|(#(iy~TfXO-1)}8Y34FK{)1z$G)4A-?tlxy9 z3!Kj&yko&}9uQI$xb%8SF(~t+r$q3lP>DyAlZqXMyWm4jot0Z!JMlP_uDY6cChc`K ze~9S*o{KTX5O&<|U?4A~2vCYq6az~*a}nOk-Svq0VHcyGmuZvKE3U>VRh3As11ig$ zs*C0y!C8%+PEIZCzS~3B+F!wi%JQ!2cnq;gwvy(rcY2f~$L*L`kaIQclXM&jPRcJo z2YMSlOPw`W-E+4}3)HXIAVuVCOiANSFH5-Xz9I{9mUf_LhLSD6{r9&_oW5DkC8^%` zEE#_W?%S_%Ra(y2)!|?UjCS8Nce-&tAGXos6FTD=J$n+?aBsn!dio9V8R!G|m)Fga zxtwItrl)?{7a1=7y=@dmT~!w%qRwKVuK22)07h){h|EmwH`XdALM-em)t&df|A&Cu zj>fBjmU8c=owIWNEBXg`aGH@qmqU4bMsG|F&^_F=I1Dg;VOH`P(=G6z!YLWOE)Iie 
zA+d%D2w=FifFA=ccnh-ji%^^Hy0vbO4GWRb5P5o|Mvt@9$9PQfOqomvgugnqWg**O zmimHQ%Uc=*^oo^4bo-*(8y>XI_;y+4xP}0@rF&SuE|exLSldjfX~BX2IGmn|^RqF0 z`Um$S4ZeT)x*qs-9SZ!M6CY6mWco%IyVl)o$>;R>_MRm}rqS^@+q}p9^vk5l`~A@q zhO^z$%!?+${&Po9k0>fTlie=t!{VNErA8~UHbk&|p=ITHHRJO-x~N6u52Xddb7V?q zL*+HKE2yhPU`(Bk6z$Jknb=kw*mCp5+=*TP^PD)smbbl~_aE&9wZBO=R{n(S3y&_$ zOIcoDn5;<;EM#o&h_QH6;}RkU3}jRmKpROfn>)Z=L^1vxTnW~%gsM)>yoPf@HthdytaaKVo~wJL8F zd@A63Y=%zXI6NSB4lK!7XPSg+xp@u+*Wb%|WxU!Z{ab%NrQ4`m=^(^ri%oVy<4%O` zD;J{Fo!l!dKKm+eK*j!N-$or&F?>cXT~yOqK~#eq6ihedU`Y1q78g7`{dcBSRAa4n2cNI+*|y- zwV76`S!0|G)38pq6t{~Wp`aB~2WF+T?T5r%e@Ilf#uFwkNpzV%cl8(SDfP?sB&jHj zYj-}tClkpO11hIufU=O>XTuWOoTPr={b? z^yH?^W_6($rBIz8g4T3L>iojY>MPTZA`V5B)LOm*tDl@kk}8xS0Hc!Ex{TiNNj0!o3x-y*4|}6zn+{>Ad(~ZAw;z z9nyBnn3h52N_@9QU7}YF5_0;Wgaw4TN*@*gn7Pn^GIv2`yNowW&Y`upJhqP0_Of%d zEi0;Eq1DxW9Ww_W6FJe=y27~#f1#SRB=zP4{9rzjb&tzLN9|UOHknxC&4J0ge_HA~ zFYIhTuqt~CL^?4j#^H<}ip3L;*+;N{{YlwZF*5%O^#cPIAmO*2_MJFP&0|0QLzjwp ziTK4Ehsn(6T8-u5g36I{qM8GrS?NB`fHbOnmzsh^qQr=QB$m3Jvi@Ai@89>($KB~4 zwnS~$t`R(jlC zke_YZsi0F4bYch9H};V~HneKttLYW)4m&HWUCm{l5HvR%JmRhmetADDg|b6#aGPg> zQLOtegkwku_YF1k{-Cc!yjM$o_7zOXNyNaf0Mhek(Er!L zTjmp$Y~!mbq3xOzq5~&A8sKLE#O&U0!?pl#=El`49gY3^72&-| zCNC=z`n|A@Tp45~9YNsx0Npc<11+1l1G#WTR7YvSBJ$t`TX^X<+mg+IVFI^NDfjz} zh5SY{LL^9&klpZTV$JEIxP=wL5q?&J$a(>xZTGt1n^w>85rAA@?xSRS?JJWj+g|b< zXJDdTTjp9Y85p;kmgeu(OBWf30hSOjl)~i9LqTH)+HZt>U$Y*Bs17+@y|NonF#2l5 zb^8DDbd~{8wc*wty1PZXk?t5mq#LBA8|m)umhKP`QIKwiknRDIPHE|ouCwPo=llM` zADG$udG1*2TIVS(PutTEEfya;xA*cpkJ~f*S}qj%gM;5FT><-4{>32h1K1e(D5COm z4*b#g4Mz9FhnJzT7E7rJ{GuyJC+2T8&AIl8^MjKuL8{vle4f z;xEGpba(T2Q-!hjd{DnaNB6D{Ql=~6z@S`YI170yDEFU*>H^nj`|W!L%>JecuHRge zb{*ArVuM|nrnLIfuY9~Fyz66%ybg0Tmc1OI2P39Eq-=hgqD0LHlXp1!=QA<3=A@%zm+4Jx_D7$s=eQq!Vsyy9=b*ALH@>WtKg6p` zx%!$fXGy~UE6_}T1fla#SZ(cEK6#L$KPMSj8~x$gZm$5 z1gA}h(3rA`3m%>&hKc+TxU%b6)g_Z-6Ngh0xZe-UmJcH>*kFI?blwomeX`#WaQ}tX z>9E;=>ZRz&dHFu$=V|u8%ogk}N9QaH)(77xUPSG!qRVFJ>T}2LG1)gLU77vCP_a?B zm-toon)_YnO-s<#af``B!@73|#a-T4qZnZ)#H?<*+=rCBmvuwBz!u-GCakt7{5t(* 
z@wfLSB%KuzI}iSb_~A(+ttdG8dLhX!Yk;gY-fZqSj>Tt~ZgQt>z)U+)V@tC;-TfLB z&@WLgtGauI`|pz$V7V>UcT9`+#f+Mo7&CQ9 zfz3nX<*o1ja3n20=7NWr9#`u>^^K7Yeg+qxCOP5r#|ypeo3F=ppXN~>0y366hhA|8 zBu)<%yDV%MdMB=4Rww>!t!;Hx0OKZz?IzF8$6lew<@}a;AcQk@>7zS6HBRA63vgFP z9cO^MY1yy+%LpBIRmL>DKgbnANR`bB;a-Iq&=Ni_eb*ZDMQmJuEGqV3N1Jw4I_PO| z5DBYwfHJ+OQm(}1z6TZqwaO^xvOw4G+R@&iy9Rh^ES7yI*~XkqjhR2wO26}eMm_Hd zm-m_T*30eY&Qm}!`0>doUrvs(G5XVcIa>P5qGyH7tT0Iny{QpzJmfDFcvMpMzKxye z$Tu3Q$jK)NIKuAr<4pz7?KM7T%TBM;^9a^?*h$dOn=&kFRlV6t|3p9xCZR=C{<+^f z{Mm#+YA9xS#@Rr6c}##KKywl{sU?+xkEq>ep&1!`E$zl1uw zqU5ty1_V=NkS8^6EUZT{)jQ+biXD>m(IEL2C227s%Is#A{+_?qmWltjYuDJ}8i5pN zRblbr!5V?=8*o}^c-Cr&RX#qaaEd*Vcji+ym>@x(maB@%ET!TAi;PKt#U3^_ALoZN zcVP`vH*w^6h7J0S7yDT(azipII{WVEsP5$Txf~21jCXeFaVf4~n!$)K@kb4lNljX8 zE{tyOhKGXl#~v7+S)`4F2G?XN6J($i$$^%baQpYRX7P7?4 zbxe$i7;K}3<_x0_c}mLNFriNpJmK0{eb-!-$!M=x&N>Fyj8C-nFxD%8vW8s%7 z`;C-LwzHbW#2XR+`zpnl@WeMi)W~`CQ2Mgv%SSd^O_FKusX4guC>vF#r2*oQJ(6a*$9a7@z{(V|C^NH7xHIxJ9^P1zIfWMuTFx_7~O&;yk zxapqv{AlfHbFK0QB2SYvjgk1T*JhAeV|qHOMP80K&R zJZE~gi2nv?i8{$bxtdcOyB1E5W%j@~SF@O@0L8}LR}{Xq^Ssb?vC{bJdwu3mC;^7O ze!ACIq9CQOh5u=j134VHsOn~Ou@11kX3Ur4{W4N!KM-?%mRJLIS(Bkv<@F` z8F4-%r7a=Y{b59GknzwX@pb7qL5#U-HZ8)+7jcJ=hFFa3G;y=(%GKuR_k1 z)rc`YJO-r=1ol8g7)L@7x3`^G@GB@Mv5QV=X+~-H9ifqK*~??!PuC7hly_Ce<_$gW zcnCcL)#gP!o_;vfy2%W4?VeAnUB}h@&HwN;p+GnKqgf@rL(qDxSg@ep>OZ|aDFZxW zeb=vasuZQQFUPAJh5Ch@P3w*8U$Tr1=6+K%U#-txDjMEPY;~STKMit|DdlQp0w=3!h93auwL97nAJvyPYTENV`N@wiX0QK5HfHByuX$23 z+ELAl4qvW(@pj2}!xG5^ZL!!$3IUcYQRltzNwGx%977!QLiJ4h&!a{wOeell5Dr{s zt+p|LzxsQRbAO}KP;ThZ=oex$G6B2NT~TsOzfgh!Tn#P!-?ZcJbte^1McpUv`g zGLG*N8GS#H_uE({KvgpNOrfgY;ce?dB>i{rc$5NV3?v+8p~7%K{vNMg63sFbmt;jx zOG}nQO6YeQPDCL=z8v{8HXGMqqHk%Sb8n5`Mo%lGt+Tghh=#TJVitw_`b>+971jQf zR#S?ygkuGTxX~9FY>PGGeajSH|4*i?j!N3%2p9yd*ar5G?o_#+@8`RX#Rcf2$n3~$ z9WNhmj_l&MVceoN;A6?X0@i7ou3`t{zlTU?+<()i4gMO$8+^>0Ds>Bg1FU(`+;{5e#3Q<| zIbsCz_9&wvVjD3wVL#vGwr6iC1aKCdt(>D?Y}bz``E9b?m`S@)jeB(%;^Evd_GLUk zJz}eM-e%Q!gXjH&R-|@F1*GMCuA(cVs)@aLyYv6V$jNj;SpU!X@@@+Pw 
zW|eIQc|W{gb<&6dHcvKIZBHDKf&P%6oRweP$F1>o`a3?>peeOS@X4*x`i+ zRO3(gD1LORM=2FnqKxUI-z0OV{rz#77$>h%KrGFzvc?v%&~1=thoao*E==SR^^Hm8RUpya+Rl`sS{4DZ20C$m*j@GnOKv+ zj1GdGWwz9#e31E{GQcz+)g|n9pEHpP)h>`lV}$GQN_p*8pTI(fW_;UAlIiC^@fk@-rR6)c z)oT?zv*#8dsOo6>Y0M%xeYu|$xqkx9viJh&UFjg{so*nELQKCjjG5 z%4sdU)%U%*&*X$3#(P_!6<_Y~Cq+CdggNt~U)4}@i#c4F()7Y?c;?w;+Tk)kY&vmb zc#e^>a>_X(8r=QQ@Z08%{T=@-40JNB8_fSlNo&9Y{q}8sPtvPT)}#TSi_+)$OTv@J zO-5m(Cx-qHkFqFp&Nj?ikE>UV=oi`Gnvl#r0GPS8OUh$xWi1PBFGB-udfDS`yNQ_TxY0YA_{s1(TIGW1`lyhkkZryBr^CXd&4W*6 ztAAqpnV$xO)N#lD`-p|M=QWCBaV}0se8B%#uxQXo68e|M4hYJe-EuAPxZHQJ=gNH< zpJ5*Y7ZohNNS@dG9V1U&7+I*d z5E@xkfNF(t@dy|V-(X0PH7331x&6_rPcdY6a%>3Py(-|2Nux;>Vn7fo0fzQw5B>=q zu~05EkRE#%)J5>w0q)IB*{7EO#)6@V51~FMoqKy7YvGfpK>{fGri^YdlE&K(HQ&5Y z3NvBA)kJJ>G*ppjj_^?8`rV^w48@a_# z7+j%HfnUpC-FUIt2dL>er|c)jVq?%S6olL_gIh;%dBYe?! zh1mEiQ;7&XJ*3`EC=enqsvZPg&FYfi;HZ%%Cns5ojPUPoiYV{pdz$R*A??8JXj`&7 zM1ProB0g-no49I{=xvVuz9zs$AP`ZZOQZkUsP&P#TlWL`p9v2t+;6>2k;qVQ10HTp z<%MRiLmHJ8&*_)!;6ds^yyyxg1C( zOr&?ngd9Ao8ml90o2{FHXX+C+U7H;JBT16UIserLyympb^Vy}KS8lA+P+4l6Z$gW= zmc;E9M}HG&@Mp$8o0OXCU-bn?Y8&L>Ac$Iab$%nQp^3{Dh6`M zWX69)Dwq}CLKglP;VV^iD2eRawO;D}QSX_kFN?Je#3}DtQ+cpv1Q;t%g`T@CdplV8vO}dsHzHqO`{7yg ztA*uzf5NlSO1t=2iD}iF?Moqc)bxyot_}B7|NBG(x7u_7>1= zn6ZQo6Tt~WJUx8WTf!gq750YO=hWyI1ZMbHjs1wO)O>mgMD%6WP31+cEVus7 z2ieCMJCj*o1P8LHMP{sYyp}$gAuW&C#3ASAN%Pyu2A<#nm5}L9)ffAkx1Wo6C!7t_ zY-;v;$=A-JetGA0>3uEE*&3HDeo;XWRWxObZ0NNg?n4###T9I$`IPMofF^^qkqaS9 zPttee?uBb9lK@#zqCH(^$VR*pyQav8A7o|q`wxMdvBVT--S>+*eTYKglMbuEzJOdI zjT}9qMmZhhIOKyaX@}R6bNAYHCHrW!t@<_ViQjU?OT)8)rJ^}ale6>3)9B=AgcRHF z;V-&BO(lH)lpIz4c+&A7)HK>LtHb^CMCw<=yOut)M-%R#~{ycKzrsbwW7GZ#Ej5ai$}`ueA5GS z&ELaCbyQz?zU-SwW6j3o0&)Ww^Qp%H?+lY)z~^Ck-MFx=oWxbYH|T*>&WG6FEBV7O zptz*~Sa<`)tgJth#tv>vo`Y8oGA|}6L+gkp8=~oj)uKZ(;$p?-hFSZ|-2<{7L?q4w zp6~Xu?g{{;tIm3;fU`YOtj-Rj%L7BW7J@wv`lUZ=ofqwM3IG#^A7`l2>>qcw4!z50~bhk?|K$ve6fubu0_?rx83s+|ux9dw*KJTjlf zDwrw)P)A218lGbPJ~tl73lX4?N=}W=zZ>>%)-SGWC~`NO`JlVQ6cw7`79ze)H;&y~i=BrV+C~h=HL`-OB<+e40J0@MRPkBr 
z93XQ1_wf=;&F@FcWcnug7)#u=LD?+`17E+}7Gi|rTM)=$rqcC7!&9?DFnM-bL;;G7z~ z3Kgs&byeIr7y28M@wJnRbFclddRe!XCGAqdm_>H`AAt`f_CgDXHdnCC>6jb7@I#M$V|I;tcSH|HSO)~p~(T<6wzgYZOM)L zfI4fqRx7uO%IKsT@DuEHbEafRqOSxng%|(A_fJ)00Cl@Q`*ys3r#$@1~ zk5rPR>MLeX-BmKV*UVAuXa4a*Shwfh$Qk5sH$js}}3Tn^Ze7 z^x9@C*R^t)*`4a9UPdVSfhd3P;!sP@@k~}7ALC8#%(oTkDrm9v#$P?%bdP2cqu5(d z<)`3>1;F&Q?*Af?eWR~It68Z|(O83mSao{;OLPFH!;$eyCabdz8r zXE;PK9h#+v5;0sLz?WQyz>G0rGrT(zFP9Q|Ginl@(hrHHty63f%=PMzPX!)M32~1 zC+)OZ($r9%wuzi-P4-nzzH{&cotisWJZoL-w#M$ItNLt;Rm+xXdn(!bk*N*sFc zCR_z|yDJ-)K2~OY^6|~}7gybEi!%f;nQSG4?^k4gA;%b|Fi1O*+--ggVKJBV;ZGR@ ze!NMR#feRe+Rp4uvZF`|^yL`IAzf8by#6x9O~leK`}4}~b*h*ar_3VgX0z-WBFdvS(YX}RW)U6Xmn;qtF0cAKb@SG&r*??<_X^6^IY=Zz^hwb&M z_j(k!4&Rgzx=o?`;q+^OS z({l=e*|yGpV(5>6AIu^g-~8Mnb@I}7jmC|}=bKB!QXc*@T@zssrFo~>QybYQ+rQ9Ef zm5-eS#(OZ-vOZ6pS5)_GR+f=brb)62$vzkQiUlx6hHv0LwYnv@>7dMTjNR3?&Vquy9ioYLY!k)?JqFINm6t@2Z+B*Su0BILR% z-{yr9!1)0%zzkeqRLd_o1ra{QKIM@v{E2`95LTDGwbxufV!)=&J-^X>cDaQPZn9L^ z_7#K_-|*}%I=#wgeno}+mdZVU$g}Fp3S#|Vk%ZwaBQJe25asZN>4%-2_(O| zRXYpVx5c(xjfZu;u@3~|YMRVs45#0jVoz-o(kYwfFU7=QQ;xwzG~;#*J>a8gl@mOS zD7>PJ>w~gyRg@A(z8xZK_TSj_=+ad;6Hhn01Y<5vTo70&&1-1M1WbnfZ|d#nM-OwW z8-FPl6n{$o$(7u!?joD{-;8;mL0znBm$Aru&3A0Q_Ghh}q#7fpuNm1ccD|1l!B@m^ zj;)49poif9;7~WPfNs0CZC~A1?mOSHrd=4G08cDRmoIGgZ#7{KatuiImZzX$Yxb!x z$yC5*!xv&*Wh7GXs=6zGVN7O=E!z6p2Ajnnb(|4SC$Mk|3S9YCT@BIxBsY1_L!e=E zvkSs|QPmMc$8d7@-w_-Hokl(qnAE=HF&bfz_}O`^DP=5mZ*dVZn8k2-ShO~KPXph$~jq|fZ^(~lSfSX+Ac2C_va zl}|!IQQ-BW)kaz0c^v7fBN2MvrA1w?tGpDBCy$hP2kCI~yMp(FxY;=HyXO9_TT;l2DF%80v)Wo76$sJ=xae;i5Hd_+rAd}!j&w4Y@}7Ff6)xRVF_x%%v@ zj}FF|r!vRAW69xrarNz)kaO`g-%r+~qC-Gkk-!NUo|@Q67g24aOqX@|YYII9& z!e1rsWM8yITQykUkMj5-4r*v|X`-dkrTTDn$U_j3WwwS(=enb`csD#V{!Q#Qc@{VU z6e<$vGmHU<@5lLj+PROhkl@sZxY?1Czs9|{Ir>ZP);m)(JZkhhmk z7`tPWrscRt@p5Nv$7*fQ6@7o(#5@kMbCU+Z$jkAg-*KSzcgs5XE!h@Vd<4@9G*c79 zZ(i%ln*t3vXQs&M>c5X+*k-rMWTEwaS_oir*bI7Hd~-az5mN=8_=9%zuoP zG8ejH+c)pA3sC}J+J2xg{FmCr^&f@<*VOdz#w#kQWbs*BC_Z%s05FeLevMI-TK2;N z&kh<6{DqIc;ctZ6VU_dt&8?@_6Ea8>qwOu%=pVIe4xd91{- 
z^ccg&NIA)UvGV``-Of*Z`1fO^w}#5a>V0Ygt@zgWjBH)E;S_BAp@pY~+cs_taO6H-wnLJ2f*t__i@Z7fP zS{QC$R%(_=mkw;lFL}HtFRs~)#H+lU+d=~|4>~RwC*EGko)CfsjeMMCtIb)|Z4gN- zHe(MlZFq8N;AT9P$Ao>&k(~b|0US^-roe7n^!SlkZ%tnMxwBC+V*Y60Z)KB$&lf7p za0ZAmna~@Gh_`x|z&KpOHY;UGmhdwBO-u5&poRROzn*M`sDml(4wnYmsC&Y_&eOzy z+(l^2c>4drfNi@;usj16_g_^8`@1V1Gb&?=U^yA`13r;#jYD;hB}`nQWP`=1u!3+= zI#Vl)c!``XB041-y|igeR6Ped|9?)}j^{uGhYu4aHf6~dg0MyRq24q^KMKUj;%s4h z-~hrSs3>uxq2a=|spV6JaW##~)#LOCkyy2s8%G(GGWlQt&G;DWgz;=Qnx>GPS)kao zUMcrXC!ct26Hbdj@6P<_1w6f31HGtVrBw`nv%6PJKffAFA;^8>{dv_mqxyzojXA^K zAdyiN$_iH*hyIg?im%sKM}z5-2zN?Oe>KxJqlXB~?8`Dosx70VVb`v<#DPZgfpogJ zM)NivV3`z0j2Zt{=1T6a-#DLdt?{{cLZ$Lwoa`jYAUyn%eTy^-n)bW|6T1^obGdsw zMUME+TZ`)BRx!G>54m=InUbQ|VV;1S#+&oq%5d&_SFF+_MMW$+5=Q07dgtGTN?J^o zZkI;9f=6pxJSTIJ%Tbaqx9lDx_rHi5Vo^9~uxcehN9>Re=fT!OcuO(!N^L3G)H;B# zS=nF(*w>NVTv4@Oix5(bS45sCyE)Bc&u;EjG^o^cSx?c3zN+idK z!OCmA1R@pe+kA-4Xxdr+djW}g;Q~u#$g_?4qq5Rhn(=u!jMf^u_g59+dDB?gM#=a$rq8O({ojAYNGHjLpWYS8ec3+eVm-7lhwWO(R;2N0WCRJ_05rmOzE}=KgOI6w zDVuy(*&KnH>Qm-9I;-3duH6Coo9Hlc$sb70$cs!}d2aY7%*>JR@Z z+5G-4uQ(4%R8I_kE0Fv09?UZ9CsZ<^T(_Gd8JO&D4CPHGq4`QF-?M4j+sp}g`0L@p zobn_iJ-S^0Xp1Tr#x!II=!g$UlLOAMUK-O!OPVUWB%zW;fWn_^^BP^Vn!;N00ylVnqKx`+ybz6ym^KEHa@^J1%mGi3poLb zLLdL3hJlg)5Y-?!W8J`T7CN0r6P|PNEm%s(-R+Qs@qgha2H06a^FkiS5{VjA9t zE9KC9w~9{DV;2{iPo};^uQ~tj%HMg2@%Q*`3g|8vWAXaF5=fljrn~Wq;5f(u0~W4w zHOX~%7(q9(&I{OR?*go08-0>LfH}*-{q|2ZCrJh|CB5VjKN_!zRfe`AqUsNn&Yp@% zQ`0}o#(xa?2Vtg!&6Jy0Z@cgGq?Om(ujW3i+a|ce%4Bb;_-ixVV-?!C|MH3Ekww;F z;|Lf!wCG$In!0^FeELcVfFRF%sG;G!FQQj|0ae!AK@{ksQ#k=v(I*3}m{XZ`*k`4dthZj9t} z(zZwZE_7e!Nv>w=$MZVm6sU@=+x7*j0n&JvhV&^hP%cAKCochv^>+ySA0RwtLnWf& zk!{HbU|RA`{(X`D2PE{YH~?dNULNI&dGUv%_xIX+E4HPHnOS=Ut;9BFq zTFKY5v_FF!#neOYHYB>{I*IZjT+yj6Zbnt~2gDx`x8Qyo=ty)Oh|B41qRS_77A4Kf zC5PBmLQs*tkl!bW8_}CDg4FfObul<{P)G8|g5Fnb!{&)n3?2C*W<4f*u}tk{@tLV$ zVgk4=%RmGRis_cS-8?}c0Ouo~7LZ0|^fw+GeAW|%ddP}yEfcUhGyAr=BoM`Mz8h~O z;`{wM_MsXM;PTpyH+eLF_vW^sa3Sz!HNK^}$VvSa*e|-b{ru0yaam$=U7y7soLIb7 
z#c&SHcAJFG?lVkY4g+panwOmhHg14?r8Z%}?G=YAD63&Ct93aVc!@r??4w&aYPjCD z;q7c$Yns=>7qBO~&N3K!?eRKr6Go^+ZL1kf3C9+x0Fb6#n%6t$st0PON0FB=Hm9mZ zHkIEW&Tmd1o(L|eGi<+$D-57;mz(q96a4T|VPUbu1eG~vUjMZ@>$lNx@cotjr4iMnquYhi9(QMHP^zY2WQRnrV zea%g-m%|z2AJ|Fh(8Ea9qx^w!c)e^##1#nLnkS~4E6P9g)_#oU-XFrul)=i?P)rX> zaME`i-4lq1-$hl*!=h8tEXE?Nt#Q3vZgaQ6i#QP3S`;5CQQ?ZW_+I$9&BU2NUo{aS zQM`z!kJu^VBY24OXZpE&uh8EKJphxx+nO)$aKyB(9XJMhwi;!qK-!AKKr8e$-;~<_8W3D-CcTuB-U8}{HQ|@X z%Xq;g|K9kfdu2!(k@06qQ^(9uf2AA3h)=O??(qa)Zy#ZR-z?PwShzv_<^=#9*L1BZ zXG^4a09rTQJp`&-Np9rxa@bb$!^&_$mkaQKX9_t!{5&6`G8sgq{3QYHA7epCSOT6$$>qYb`;9!@~7J7Hij(2w6$r%sWA-a1*sW~Pw)S7?5RT*e|C;0^4Sy`LYUF98jlE@|Qb z_g_h%hHmte3=qoGZ~JrLTK3JCXFO$@yW~pT)r2Y-cs)9E>LzVC(d|Y(sRC~LHB~Y z_(UqParb|3N#H?$EDQc>-heYDcZH$8s&K7{28+6f8*~xBVAQJtMJ3$rXWeKSf324Z zGBgK0zYwp>okeJn7kgkTJ^m?!a=oOx60}X|qx8jt=hqN^c-!+i%O6_(|KQzLQb&G6 zj9Z1e0Sor=;~%Q$KZZYfg(;KMrly`Z)8S-x5AA%YBuB`Jgd(X+z!t=nnmO(wk=rngU))KBV4{^G+|2x+)dJqw~TR)kj#3tMU-m7ns*I6A6YRURDJ zKxT6`GB4*=#I7yZR!@5&llobAA<6?#TvQ+CxgkV*O^RYC%NfR|-oK z9;I&+gwETG@~*JTB^|W1#kp@!m;i-+sYv}f{lI37rMzGyXcDhjYH#_QxQFC zdJL~Cv&Jka8gsuG-7S|%rm>i=p4-ATw^||u8zzn{0r%=*oKo%9_xwXP!mzMW?>_W4 zHXaY0j+%AvgTYgfYCx#rDIe$%*b3mECCv9zdIA7)VvDGDeK(>^-5za7U3>J^n71Tr zJWm(COwbo2BQ=tq{K(fXUo1cU&9%y7av-|1Bw;Intup1Tf~c;2BL z1^L%S*rb!$s-m%Y|El|3*`0wIPTmPoT$Hpc$R~+$M)9GT4-tP?wU4mCR0*sPQ zx!H1g39gvlI+qC<*xBWOk-*Fcb3ag*LHPp-Pq0i511C_z1N&i!@>Co5yx^O0(XX#S z5fKal$^7NmT|@Uh1B4KR%pu7!r%kG@@b59x>O$YTHWHSsjqphR5t6}|gv8bR;ulvsJyfqZfF9V_qHRE5 z)|l$RmGL|(0kvWnw!21j*kmzZ66mR3d#|_-;2%D}WmuVK!S2mHG;aLQXLm3tN(Pvt zcXKBJw2E#Qf^F!>K$;i0L!Z`G>kG6mkx!ZhC*;}*B*_Mp^0zJHYxf}nOKKBo zcH#``(j|N-saj?m0sUAn@IPsb?Cv~IbY?@h;T}2#YAPks;kXYiPVB~xRr7!nUHtg? 
zY2A}!kr1s%m`o3iZ1r~J1dGYhqt5`&yM`_8eeqS#A|lUXizk$>p+Y5<)wY>yi;}qO zD}WF0KzdeFt$&o^M6#vj{#;JzUaWBZmgJaQjazM$qAr4zKQc*oFrCfodYnh@F*Clo zm62TdSlIDA_~j;;WMlKelZ}mFKXgyi_>L`)De{XR+ogok6Jd{>Uc@;bv3Vvz*x#xF zFm}9`UT|e~Ov9YS(G8)RB*!1aA|U;tW^+yGNN_9Qt8u2sVjo%^gzm^hKNVgfbd?39 z6eC;dvYEqd9y-Gry%#jR0Nh z`3O#WB)LLdxa!VYN8V5Hk7|m;e+HU+=IVpK6N{7}!0cG}_bmZr_|lZ1Zi0{qzfOav zk5#hy+I3z0>i2uWPRp|!*u*r?Gj?95rI{3``;dK~_H&-iLgiDJA6WbJ>Fx!%n=oB= zTcB29g9Bwut**{4y9~N-4H#bSqDD|Dr#@QaqwD?dF-=3qAR#GjH-EN1CZC{D`a!HB z@&8x=+p@Z2OuN!YOc;EJxQXnhDLuYAtZLW(94eDqjX5gs{K;AWz>zlMEw-MMhlmJl zju`xY?g(eJeT&-@{cZ}b)o_f53Oxe%I+8QMEUjC5Gh4sVcKmEQ=I3R;OqgO2|LG>&l345O{s#d(QxZh7}nf~D3LePeI#9;WTaKNxneMyDV6M+ry| zkOJV`wm^IO59)zk?T^kl$UK3>tX5Ds{2}e%*K=f7Ny|F2xNR!3L^`_&IAE7cfyYEC zuDEM>$de=|m*EvXJ{`Nb@Tibuajnqyr+1r7NOX?RX;h3Ql7}iF;#iX6e?T}&W!VTy zVOvB1&@=r#TBs{+s3cGOLw%{l+1ZE+I)Q59AnA!5)H?m1$sPL0eVwZF&mzWC+-9X6 z)gK>h2v8(s+TMZsH}p08>6bp$vt`1EfV_QAif0JHh+K7Kt^f+K=L_Mmp6*5hzRzpuS#Qd7 zR>$|gsi#Mpi-cc^AR~>M@PEvA(|_a<#yNX`MzRi938J$)=dogkB8%u3>i3W6VB)7S z!p|{X{p&J>rV>_YnldPZ*d*$S<*p^9a$DJAdtZZ8OED#t-~oH(yeZuzk0=t?L^)AH zw#HY=t? 
zjSIh;h<`mkfK{s&__jWEFBZVBP+3J;eQrXjuhQHfH#lS4x`UZ3Lfj~MJ@tZz>_|S@ z{}{Qz0wX{?&y*?gw&6II4W~zt(nV)3B3$oNZ|IRSRp5H2L{g>Er?!#D?c)Yo(eX!E zr5vl<557+VArz~yxY+n1!z15=*g%iPz+c$MaJ#-i`cTXFnTxJ*>+nm2?wy(Lk292+ z3@o^WwqhW4+~rupoF` z8d6Te)xmr=z9fStNz+qeX;Ra7wVq<^Mz}JkQZ@KKF=XY9kecEPM1p0?5?yZy@?+ea z*vejyR+1++@CU0XT^>h`VghP-$bM9s!S>U`iG%G@Lm@%@a6*A(N1;Xkk z)QQ%WJI7Fr7G76&Mc?<76H$w`4)y*4w@>snfZ1`{UTCe5dn)1&$tdtNwUp)3vI9X~OEM z7lV^czgoY_C|=5`W+q-)v~PLV#zNzZ(UpJP&y9E0_VKz+ttU4pmjys)nf%dYv;9E| zO+C;npy;%eWy){Ep(LAHvLUjsgpF*W+R5+|CN(ubafoC4#W>EF^C+95_2GRw}*)K~m#=evy zE05|6xS}7TA(}$JGM!qH4|%O#=MYzH|6$RX1x6g3Dv|U`}tCmp9J0lU0KEdQi}kvWMy z17=U_6XZhY5NuW(Q)U8!fgIEW$R8|c<`KVshpF^BaTDL23@fQ$Ix~!UlJRUI9k=$- zW?}y>1$%A-p{mU#mTwt{&n+-ZgiB5hSvChJ2FG;(ch756`A( zApduF^(8I+Q}Pdge{W7(FSa@QIB++U#pM)@1;TV^poR%imFlmxgHE4J0&OsY@wvkj zZuA6tAZ%3WqB)A@7W8_DSuJgSSpO$o)K`^+3=vWU7T)}oFlRERD-HPPONkeEyIxoq zzcLL4Uyhd?m2q*G;~hS$z5G*aVy3BlGTMP|OT!j<&EEu6uoScuFRo^@-o&@X%AqjDHIAA5>0j&;ac zoJ0cWkaj_L3@09{+5>&Tk$7~e3ySu?rZ3hPy+S+l2sri!rrYMB3=&aNt2ecXT%xKk zQC9_K35LP@pQF80#qYL%1zq_Q%k(;~f-!({(55b6OJ?b()Q@gkt8rki);-zmxUAT* zrGH>5BO4xk8y(Et{f>N1wrU?=MDghQ^yT?OA@bOH z)uVr-j8C9)fg6iKNHF2Kblos1QmM>HUTr_d%=k^$*6;OSxbb+h&%T!hNNjY)t!B{$ z7F>y>MgvB;XO8`hQ6X%~QI8_LP#QjtZSr+Cmjnd)%EMo66|g4N&g^vjs6hm}XYA=`jTa!071 zzYhn94yQ;XK6ZIsX!F2^N1Ejtf)`Rm$mien?BcGI+(d@Z$FiBnekE} zkeEr7fKMEN4)>Ib240d}LkS9oDIM?S{HIe+@N#C>W3@eT;0(E>4V^!q8rBP19YuDsXFJ}^K%UvXVymoyN@ zlo#bDCm`efrON&4`D>?zJ~ks!!YKT-SNuqz3X5$UrXTiU@XQi#)sH(PU|0jY50q8m z;L9O2qTVNL@cbCA6qNK;<2}6UlTDh}E|RcEuYg%HsU!h@<-a_-HZ7s>-o)%TT&9EQ zqi@{urK+u{FLE1gaqjfZ?5x=Xmu3oeASwuKss>-LUBbP<`*~~s%JHk;?w(}O*fa^) zlZ0?UWQ-E5EQ5?IdX>ofhOeJ*$iz49Xz681nLds6?WQ^fH1;Q)A)bw2S|85+K;pcm zoWsLLw-|m0lR%T~pr}u{O}J}UHJ)#!jDhwLk$qUkZef?az&Q~I?p{%FP)@cvyfOrE zLlO+^jFvT^xuxL7I1?Nhk#40qT!^ywgGNEMGOF)wc$OcHco*$TZ4;EudqM1kpT<)s z6Uh-uQ}DZ2(@Q#+*2}^cO-ZY8Kl|8fKGd_1eFa%%s6?O)^X1)M_<%Ne{ zd(Sb4%op<6r?A;6&5K>meQ;(sHi;NpTNu(WO6gWZ=gWb&{A#pnsYM0EYQ}+OjGG3l 
z5bD48YgT1#JGrrKY3Y63o95){xQ{g9;acp_cle4lu~ea2FEpjN#u$Awns9&2Z=Vuz z49jqZt4wu;n}$iaL`1!gn2qazlx)L2AbRs8Kj{AN0K77qyg=-=ZTO4+@iZLh>w=4s z1T%i?$5Iq^l|j5DFTgHHVmd)VA#Y=Ja}T)v-b3`;!W%^5jc~yH_9nVdH$XXoJWRhi z1dT4b9th)!)yoUDI?;hydG^ZloLCh!l|PB_$#1D*8?+Iq3Vmh+<+We?!3`|8pQD=> zgqj&J%k&}@wb`}GEd?*a)9tol|F~~;r1GRbr4UWsQt-Amr;mB8dBpGd#moZWWLiX`;)z^lwU0WvKSEfP0pkJSDug4A~|MWTA zVzB^;_mj-X?FAe6HcNsX#FWfeuk>mU2*fkd!2I-J^v)h4=MOT!dz}O<5hj{9RO$v3(;}n7NQH(|3H3;embdzB6SI4u{(C8n!TsKg*GMj2Ka#qH4=~iR z5;ofkcNV$e1syY;2bp&+#yB}adR07;p(W7Hr>I~;58;D~Qh{?Rz3+i2??J;p3$Or~ z3uRewaqe^qPcJXNQ0d(MFTahd_(oyI;#dow+wp=vsf|%9^PFtP7dC^^>FugLY;+K& zqJJJXNZ*V5HzWMMpDdld0`R`TDlW$NJ;p2o>JC&4BErQ$32>V({b_@x`ID)F574dX zp`g29l=E%Yp>5oaYqA7oaw`g0^H6g_ef@9sw?L4ac62c4Rx3Qdi-T{VPPs|`KbZu&-b(cP@fqNEVQjDrro4PhhzDOe6PSjGu#n#C% z8zDx;iw5H{9)~}^_t${f)P48abu3fEP#ih7J7kM}6^za{1>yuf!J^47VSOw({+ARp zlT|@;4`Mi`UC`+RXsVO%7miO1y!tb}jcSBFVOf(5-LYjt4a7Rs@Qk7FNoRqqlnezV8-|b1UDJfr?_3Fj7RQYLhpDrUis}!y z{>;#!)X*ssLw5)W0}|3D-3`*xJ(NL9cSnq_@pAAm&+E>v2L^_m$Jdn=ZJlCj2Akh&A`$cx}bLPXr2nPMy zKuevx16ZSc@h ztd{3hki9w$(_^f+r?Ll@qGBa5ksN8(Iv&@4U4$=XXG(-+MjVmndrJ+}=b zsZZsQimcc~kSSCN0F|%8hi7TzCgRRdEt?5SBcmt37}01h^TI{Gq_&j&(;$4SwT{pS ze@kl<9(M7Q-I<8xCs1Y(3(!cbszl&>DjWuBnOvoi6-)gb7ZHB7KJ2`n;j-U-(k28RUv!` zkM}gSVAihjLw_2Lf+UebulSf@?+G4qz-VW%#LFYEo-sZ$pPf;pv=GPMfEr3cb%rO8 z8@;=n2t|{iPwerOqd%I#7+nvfoolDDTP>vf+@{B7u5BC#E`O2|s`FSP&ucRL#jSO} zj(_di`eZd2hiFek3V)gJ;3|)+9ecCtrg)TONEi0<@xt-3PJ7j}wC`f!_r2wG>-N!1 z8f5zAb*E5n1ddl~L`NJ_MU7SWIlt;)>OIhyJLv_Kml|DtaC^4!i)p{hDXtipY6U^6 zk7!iYTg&ND&=HE0b%SLiL5!%S1L$d-@K_~K-MY59HgbbCe_+K^!<-btvcrj5MZuWP zI(#y2apZN0BX3Sv^8F8$ztx_^#$+_|0%H$(4%NKROGYNU#K;63>fsr7$swqnqTpYo zrytlzuw(@gGAo0zvP=@mmnpShRcLXL*3!s}=p^mgYcBr`a68YMd?FObG5|`>sS{k7 zn9x17aV5ua3}95yFP7S8t-#P5D*vK2$h}<&vBy8rBT}J+RN*CXXpNwX(^Xq|8!S7o zU~G}W_)VRT^j4=Qz|1TUz0I@m6}^{sf|b%lUBk@LiT3hqP@|iCpW!D{MIqf@itR?8 zawp<*bygp$SHG%nBK6Ki?}$I|d1O^`6*m7ayee&(CZqVK?;g#6Y)GfP>O8_$=l@#% z=kLEwI~;o3W6yo1sB1%GDMwG{S_%C)kV(#>TT^$YV`AQ7t^mf<`Y~{Y@AaV3vRrZX 
z{ioAYz-h*`QJB@E$F+Q6;}d)P-&4Ry55~V1B}6eijZMN{bSn5!@sWBwNGO(-#Q2%U zv}S0e{4Kgm*f3d4YTK;uqO3`$8O+M9YNNUweYG%fF@LFne$ z8heWWi(!WVbmEXRvFkXoxr3XAM-u8zf(k2rbOzG`4CrEnPPSbqU}Z6IPIydfyH10Y zfNeIUa2KW&f0&x_GdAo_tPUPBxolU3mOK9)^PwJlGrLt)hWl1$bsX%6o&v=}Rt&d~klGGT;;N529~I&gK*hKjbhzZsj3%0<4;|p} zKM5bPU(z-W`?TsjS<}B*y^a33O?LSxWIZx(c`x*UiRdcVYD_K{dF!b-r)9qBHZ&}9 zbKmze+O@1nem<|ICu_nLP)^xJ5TSG*UwA&Tf6lWr_RO8@7CHL6%L#c7HkbP@0%cbC z!;@I9Ixt^ck`K57JaRYQ97tK$`Q2X7me1}1t;3`7qc<#rB09k@vhAx-h2}pEZJ-x& z^@JVm_hJ)X?yA{kK`G>S;2Ur$by_(0Krip^MpKy&O2n!J@PxYr@%{R z<1Kz5OLUC`i|~W(7b{J)?GZIL(P>AFk2i{(#JBrYhn4&;5<=o!z%u#y6?f^eT{`+% z)MiBoer98ElXMCg7vFB^O;CI#@AICCWT#O=0{ircN`OTVYMz>zmcXNXMco90LM8Cf z>*JA1cDDt<(Q0T0Lq`8{X-rM%in4L$@h&uTaQQLLK!{fe(e?K80Nt%&=COPuYV;yj zk3ZdHk{!tZM$}4NXlCY-skA@;nlwrDj%1-HwSYNUJZNpet-k-XX{)sazq7r0=IHrG zmB7fUYrCdoGNKA z3ethQ6^LG8_M=Y+3UU0i7#h<&0^+b&1ZDOTqGpRV10ZBTJ#;G{PL{uUf(?Yk&(<_g z{&q<^7bbsT*4~#^cdzuQ_(mq9>$&l^k6RN84IwrD3P(3a0=FW4#*k=DD)}!N@JJAb zZhjchfn}%?lF@xp&{~McK5o$)MVWCnB3wQe8Y80(EiX~2%P|^qJhDc1!ab%Sf14~- ztda8b2FTSVP6Py80Iv8CnFZ%$wjO7Le2>N%+61TYRTBhcEnqsA-C-W?&|p84lgzaP z=I9@Sb;ICRl~JSfqh9w^g1MXcsrWM6c6$sTA>Uw5f$udv+Q*kA#UsRW8&{e>@>?Sd z9<@C_gerp=;A|=+?WCS3!kB!RvdiKk-$C|pjZQ&LN0HxtAoiP|IG`yic%R>PECubz zUlfVJcOsu4C`Rr>IR40syGT;#DoT|$6bJbu+|wNG(_h%;e}JOPs^x5}{f|+3+WxIj zNM{)sY(>WqHY<~t1l)J-ETR=Ty0(n%V;zpWn}FsEj863K-vqcpMyBOJR#ft3s=7rl zp%oW&ryn2A+{FWKBfNwiD&1jUVD3794>WeCJTO7$J!d_o)fktgJec4Or`_*`L`PZ7 zPSetI{2QrHlE&Lriuk_E?=Tvr$?f=dFW=KSCr&GJ1JE;a0=C*n5q|eeTuJR+U&l!Z zyTokWcLGtwb8;J~gA^vG%rT9V`S&bHh)fuEXM81X1#TBcK!{KfvMt83j9{#OcQW2S zNcN~F3s=hah|2c+0Xz(M&^|gwe%ADF9w#jVzv17KFGKV0St5J3WcX8xOj~Dcuo)#8 z9Z{@p`mEe^#quDo(X>?i&2$1^VDFUbkPKH!$p?MyZrjtCoyuWkigbRlA9z&8DPXASL& z0nzgVds81ML(kA^Jx$p1@SpU_dQH&ovhq|#nrpXhT6%!MMSRaF8I5;~pYeQ-f*zW6 z{DrvnI8+Lw)SxG}K8RnVH(~LY69}8-hVi#_)_z-Na6M9oVM!mT$_w4K>7!UKWksZn zT9~>DXcXjqN3`@i(jD~5Ta7jwZJDVYRcI|Oe#f)a^kCKsB)K8}wSQpuOb{iD9~FKF zRNcRWDqpZx^#f#cgNKiR?U8T`RgzYlVYZQDzW#ylD7D*8PIEMEd 
zsYCmm?>n=#Y9+UqcKNdGhcLzSbhRrv?=plv5{FrkpBYE4rdSA~SZ2ILX6{VXs>LO_ zWnw{e!KRKPZI5Pq{OoUG zHiX!UdSasXQD-HfAnkL|t_D-d1N&FWZIMBJ@_<3T!m$n&q)l%u`Z=PoQ># zl##h%A%yh-OL*!?o^C5;tjm*43dmFd*#_^tSuc2TEcU3=d#kf5~H$;^D2!~JE-9oLtk z0wr5%k?P7*v76l_$!KW4{?yb;T`osY_w18FoP7XXBhA=-2}Qlxarr-MWKhS@JK15e zI$21_HRD3DCQpezw z$>CRhL9c>1raiQwgyShzMe+`s1CBpjR!ddx*|*)1cye7uc)#tskfl560>g!AUi?bm zl;HSFh=t@KPJ{s=d{O*7e8wD_yt1Ux^w}P2YRrb8j1lnk*WwXBg^M-`)%o)U zMy$Q%J-lQjd`C2E0lsMApEXWS&RL;2#w8OdEfLu;sKJEb z?!YrZ542XarjJpf_`Jm!<9+#odhPibO)`P`STsD7o{3W#^E2AgM6I`O?=};B8ql-P zd{?`41q!FI-mPKTb9fSJnb637zrcvuAk%Ld>DCwzD2L&rdrQ5#-i-4o9(02r0q%8< z#8#CcEw4JdAl076=vXx$gzHAx0m%NCG&`}=0Ng}4(;!#vi*)VDpS>^0vLf{pc1tUp zG>XTpk&jcqz3T`v%Wm^l#OHa`3b0aA1HC(%4>sx~HPT5vygb33Kl zQV`8r$z4QFFo1-43(M>{J!pxPdW;~QamUPD_P=Vo)Fr=7u`i2;V5INmSxo=N#dK=P z=`K(miG3R3k+~A#1gUGF|M57=wlz_tR~bL7=DkulM*Z9vP-$2pG-e%MXl|2#aH)S5 zm(bG!zxtuAObJcsQi=Uvvyj>DEY7|te;hV+s zf40H4h@z8Xh{nwA67~57;DvKUH3;`!Z&~p>St|1I!4E}a;d>aE7nMD+JB6+)>5LyK zn6VGrBPZQRlA0<%0ERv7?N5H%S%fPaX!;Wivu(0HuZ}3QfM1}uP-A)S6NFVZ`9mCL zDRUA#yJ{Z)Btxz6LX{vU$Q@ZX)pved7E4^uj?%-deZk5~yh=z?E${Q0x_~@}ZH+d6 z5ZYKD*#Q|A`ela(Xk>BnJkkP!Tfix}lkS3P{rMY<=f}S%|ew{ z_4~dCY`+CdaLd!huux5VqZU}PZHuww@#Pq{cc!nQ6gnR9VeX+ygeLHo~Fz#EzU!yjK8JZy1e@r*o0T+e*=PdJ?M`w9($j<yZuo;-Qk1J5-J#Ae`kLnDMtyffeYriMA;aAK4{)@d|T;AH$<$O7S3_5$7t3%r-ndcTH zlkE09fg`O5RmN!<7-}r_5?Uy~1%hmCC42*MV5M5?$Te2>=h{+A=BUgAox0&z+-n-m zc~?S^PYMgHm8MyZ;RK?;{9gI5OnY?3w=M$yPN^dALzgVQ_R0rb%`Rb=!R=I~W6S&) z&yV5mAPWZ<nBBxLQjuc1@Uy6_Ioh`*Y5Z704_vQ02yq%wMci6L>?r95rt7(mC z>Luc-bgsv;A!Y$w7}2)N{Z(}LWcccIY)kYmRb6w$ffI%bO`wYk?Mh)T;z~gl0h|b> zTJKcDnLspcRtS7s9eu8^RxB(nriWmo_6Wa|B!u(*I3wq>FQ!D*kmD zd--a+*Xn!_o1*grpZo_(=nGGzt&O_2C@ajzb~9KJ!f(1$2#yf#OLz1|z(|XjhN?;! 
zP$KHRT>rD8071OmG3c4?(Yh)<|JA=qWe(Zil6-#X-q(plh`g9@Vpf;PG@4%lddyc5 zAd!*;tS&xGv5(4<;0UX)o$Z2=zpm8Y2tLoBzRXO1S#9@^wR;3RqwaYp>x@dfaRTNV zg+T$6wFKVI>p^cd(2*rQeGQ~W1Ruf6{up*H4SrLxHGM=0yBL}LwRo}s^u3pB>v{=@DJ7`kL-W#M_ZxjP+ymUpm?gYS6wQP8n$O3BjnZN2ie^%IqjCW+DxSslU_Ti&Q-)jd1?Y6)tU@uYd=PNM@T{4E(`dXVb?|bAMu7w%bZKHt9LO%gV5- zuoFxSCq`*f7+TE}YbfDrqksvqN1|c8WY|YIm9ksq19d zpF{%FDoaO`em$?`BA|Ab9M}HrqF?5E&&d%a;2CM-DlG zs%yRF)5MV{N_4#a;b1JFHq$vhmm9jKR5uCzFI1LDQT>&-Z*7;imiI3ZI4T_Md5Jpq zsNawH%NnPOAnafPoD#Sww6|y3M`zwoIT#8|&`@)E7fp3km8AhNm!h;PL`z03(NSb- z*GCFz3{z8bsfJd+t#~|%4$ueH2(DD{%akMRQrvXnwQIC$k+V%+Y=0PhMuiS}Zprp% zB(~g;YYU27?YTBIpLyqZ4+W8x%6wUUN&{B02P&B0C z4(G{FYdyaEmR_wy(%2VQ8MRyJG(F3Yd6!kk*HC&;+b=x$?rTg}P`*_CiJ2Tfjm$J? zrfmEe50q;7?^m&GC>C{6nz^eYS%PJ)l&55fEN`*JT&l)5kfHeq9)IB%q<^bo6myBi$2I9L!)GV=G5-F)MzwJ&y|g;1QJw0KOBxb)QDub4jAjZ#Ka*>%h49OLD@Y!aM*KYL%sb9U_oZ26yHAgB- z8+H_sRHE4n>}`0%i_5r;b+V(#q~=Zf44x6eyoP%{y{j0P{)|8cj7jNLJ4KbDrywMg zt0&NmIrc;=nev)UTYd*W`?+#|NLwB^3V5u7AU4j=<$hm9<^1Eudt4X`G))!e(t7Bv zX};@0l`J}KyWM-cu4-0)quH$ywV)J-DGfW6)7RpBV5sF-sPe^BDRbh9<9u79nfR@} z8HUJmu|UxT=}}4RM;)MS5Z!hIfC8f{QgC9pT|)gmkTr&xh(d#m{&n!v5@Oh{nKRXrto8=wBZM2hHxroY_ha=!!H?u?nMY0p`+aCO zO)h7w3}PI$tzDUJhzFP#U<3u|1h>ZVAd=`ri3XUn@kupM5Lyfrhv?5z(t!yJL12l-h7BX>TD z-I+K~7L4anI(VUS5ZQ)5O-;fwv_S>Iqmyg`OJT!^l|> zm)#uxl?}pSXtl~JA|Ei2{V}qkuR{1-)J1-jjByiJ@8T+j9^<+fBV_(WAL+brT%XsP z-}kpu2K%@CkZQY4Q1P$S%*$81f9+WIq!gvey*KtQZ@FN0J=9uZHhLN7q)bd^Sq7SL z)BfEskQVUdY3bjZAlGUg;I)@ufpPVvZYrF9H|stoUe@xAYs3qCZW~iII`1qOw5b!X z403^T6NzrIT3ya+ExfDAP*co>*JLYb{Tobee{;MOPrtzX}ZD*Qhn3JE)H1szo; z!(x=TJ;9W}x;{WxE4@dB0_u`3dj4sG+ndHw-_B3e_Q`7w?7|w=DWwF-^8br)LIFHd zP?Qi%&xlflyR7wQmz!NgxcL}I)n+2SY#)1$DoU86=3xwb*!U{3Yh! 
zyO}kl%3cm9nSWAn0nal9%kRLRyqV zbgy@Gim%DNtz~qkHeXv?r%cJGWJC@DO(JB`o9AIZ{|L48$DK<|%o`N51qc6>J~$5J z7&_ofepQ8C?e%F%jI7n|Em|9TEOsuCJU0E{Y1fv_oqtaqY}Czn7K}eiGQd4-pC*Gv ze5ZoC{-!mFqy2s@ki?Dmp0Yer|BVcVv*g$D6Psqu_3BGNZqG1S+SosJO)H4rG+mgH zTeWlkAkcBlHm!phImfG`)a%87YBp(%5)%;!pWHJQPU5s7D)sYNqTHTY?=Vk&8R0(!zJWNj z;ziYNKD{|^=&82>l(V-%1kxL#O!Nx!J)O8}dR+^I9XQ-lGNDg>K;YnTi{Fi2?H(`;UfYG+%SbQa({Vbc4aC!0kvnIyy~>DP<`O%Ac4D<^uJA$7Ww z3p|taQxnr*tIcji%ltcS;)YO{IR8f7t$uSz~8P?Z2htE4u=Z5{-$3Pe;urEAGZ`Fx_37c zc|E{V&bpcs^gJehXzVY?;+>`7~z7GXs#G zqzjBQo^__j(CibIdrF;Ln5S&b)!Q%B;q!l3@w-fTMbu_ezDG>!%cg2JwH6{r1~pF=BFlX@5ka#Y%1# zr|(KvVLWGKnZ~*OW6)xu*K2BC8MgtD?852Ir*0DO|G!HQ2EX!HM{!!6<$)W|wj#s( z)*X}?^WJUzvC1hLBU$y*5_rRfb;W(Tt#eZu)yAi3u5~7T;V-uXN-)hj$Uo(+`EK2Y z3voHQU3v7k5!-9z90N!W)iCgo7DvvrS4IoGO=pg_y6HR08%vq(q1q9jvA>UaCVs6L zX3ssOAmcKvF<$oBRwN1|yy{4KlPZ*I~8eZT#^c!S|S zHZYC6?5u|g(Fra)@P`GriyMv?Jl{-#sOU zT_i|ADLB>+2P69t)`db zjKpa{yIE)N5_{45JgG=34#xk6oTvWai9NwNB~_m%JDN9_r1(x`jJWNpdj#l<;QT5t zS}>wUlN0uGh?bnm@=%b>**Z$@ZSmhbnb%8=S#QnGc>YdgvE%RH(z)1+eiaQa%hnkw z6#t!aAcRwIQa1PN{Z6Jl(;-Z8Rj1^%q}#veeqmYqaLsu}Iz$>kvug6!4OUQ(1>w+742L(1X(0Up+>`y0af3Oe%3X zJ@{ud^#dAm7*fs0>WBh(NhS5e@u!qVciu*ZRx_ejLOJK#pH8farj0L#4t59mh+f^Y z$8hbUgs8KIv$gD%{Uob2QdXGdy9Q^XY8(2UPh|jfwz}%^1f6aH`7(>=`T%dRySowN z+MaZ&Sa>(%%7_9ly?A%htI3YW{lHKI=?}m9r?f-!O^7F zm$tp}bW>HIyQ4s|4~}Lzk*H9cLN1LmjzJix*edU!D%`Wmsb40k?e=FIISW$0Hf^eB z`(|9$o%t6Yb>a>bf+TY#1+T=vFhuKOwX_8iVcTd67c2WBakXn$eegzeh}Cgwf3*&E z8sRkErFxlvEX~>{uY$W{LL|w`eHC_pfDi?9u~i#oDa(r|oUYy!jU~1C4h+JlpLkfp z3-yj11B?DAnHRcMgAFpbjnvsV(It`WBpaCN`Cd@a=YAK@7M^ym(G2Gv(r&3V#>J

    uP72NWycduRDuano_tg`oQrsmWl)u|p-K~Y=INTc zXTXoguF8)GX?j*vijhxIE9InLPysvaDQ4R82)OhJw@sB~iRvforC&*hziCg&pAScw zN8L`n{>mjV8O;gDK8|D#`6zZjDaQ71M;1)d-_Ur7qT+7XJCY$B{^`r;_dYK>&Ay)9 zP2}rmAPgDYx?!^FzU#;t9d-Sn6PYmOSG4-ng%!txpy3uZAjrFjJhGwUaOblx1*(C- z{{^8xsy`_2+7N}wnj!v7%f z%jRzU*1s^iKYz6u^Y5e0Bxtm;6eE2{Vb!^~_p^1gbtAeyFslm~ca6NgWmsZ7Nr{B^ znNgxp%(Q@-^qQOv>fH!(af!6KK4DDi=b1~;hfE$<_29~x2fj=C_JNhPgimK%9z1Vz zvf0x$|7|vv4s3{?`m4vM^>woH!8G62uhRNu-H_+TUl$GpEi`QK{jAObREz{hDn4)N z{FRK+o+o3U95*SXgpdCa`!x?i-d_G> zEH9-7q<#VGAuuyGU_y6`C0C&A7fc+Niula0%*tL-|5kui2FrL`Q9e@&7rE7%pNW_* zgXA4LGb`uQg}%|^>B#5;J}w}%$gzpl@KX^>_0AX+#u7!Cu}TwebPz_K2I2ooKP<%M zKAK`N0?WVLBQ*(jauk)OLa2_~4qi&o9(;P;Xf+f_6H!MctIs(A8};(n}2)x1ea_FDc67 z^;6KK(~hE2s%L^xbBF(&?IsRTGun4$SN5QzXQH-_uoG*r2wCE$ z!!J#5ly>0hm`4k*+~t?`lpufyF7Ty}RpETNJ*Uh@on?aJ@U|c@iS02HoIxwJf0lZz z#*znhmBK~YDRnwPMg!PEuLE-~>uZjm;sN75E+++o+=u+uqYHm@?RN#I^O<-V?LZ+= zOjGYh*VK)v^`B4lr88i!ihpP997SjCwqvLLZ<`l?lRc{tSFWi9E1~ zm)I5x<;zElA#_+$Exh&2o|{QuY7Psl`m@K}vhXv4Ftm-!4+yQyc>xhSA+g)JQB1Ly z8%#ZQ9vpT;lpxb`*cHn1iwja5OdXci(aR?hq53@THE8pesl}=6JhcCpaur%33zZA-!qC`%bs5(Tw#m2NTK5EXjw3q5_NLZ z6WP*=V=JQ1Ses`Y&es-u-^%&bSvFP>bixMx0oQ5SyT|SE6cs8x`F5T$z7j#{kK6!e zG`yaHlU#u-Oe^gADXB$&viW45X&+lPavf-@bquu)|=**uh+zIeAs9vW1=sFQ=Pjj4IVwP|_`^pO-w<1_3QCPG!)gJ=v}|sGzIkTF zXqD`tOLY$|DQ0;vxWB{EE8eUXQ zFuJX~F{V3p5;TW<&I-1Xaq?sVoS4-wnF0&`IT}24+ zNs)5PuaNre&uus7Mq}h!<@|a7z#r%m-0PONH;v&sKg(*)MQRU4*9M{Uz(4xW@^XE1 zW9O%4zplAhi=CBDx^ts^(@7|f>xqi*h1WG-GyV}*+*uNxJ|2HprZJ-~B@;yJ-;D`; z`PnavsNw`iCBvR+4UTf3W${aE6GW?G)I;QUty=Piv&RTs|KpH_*N=fZ`RTDOBCcx& zT#AHRZa5IiUEZ3w_Sx;m?GJumGnR(iD+ zpGyL|^1A-2-h7q~{_UQX??$`A|Oxu%jV!m#1VODS>jXv>s3yiHJs%wHFHT!VdoJ_c6 z(L0d!G%|#)o@3h5c9u*#|7zPmFXTCED56@>X<7q>r|2s)Mr({mf>Fq72({T$De=f* zVI95lno`f14}eglrijH(37W<8tsH@(-M}N(owk9U9lrReaBw)gPC+4P85v#Lm(iAX zu+<3*lX4&fxcOIyV5n*?0yhK{biKBcs_iVd@ItUMCXL;)cjL6Mbo*u$rLbJa>FbbK z=NrSJ3;6@&&GR>HHCeO#nH!qc9zU)#t zbEJapDUj)@Qj(TS$)Y?h!=!+`DJl9I3_5~PfL^!9)=oJZKAa{IgSLL7 
zt)#S(8{putj^9Ba^`EslQF|DLal06mp}7R4m=!siGa)iXap1VoTqJXU< zj3SKazwGJLxL=jq$+@>TI5Bd!E2ifUf#a9xfV&NghpKdS)Ia5>6ibtJ{?Gn?@Yg^B z&OPD^!>RrDIiy&`)gE5l?eoFL;KGO0u46Pm%X7w6CFV_lT7PFAPqC@~xQH=H=rL+aZuR zq{iGNo~;EwmTubdq6F_y2hEEg*ZAc{Sy^38e&%uWKwgU_HJ9-y6LF_PiQ+A%6wxZW zWLj2LJW|3o;~tB#{R0@kl`#(V`GS3K{&g%Q%&iBfSOlk!I{<9CtZ`bqine`K6?QTB zfP<`ltm9I$AFl~#V-d+zsw^FaA{%ZJmJLXh91HkWB>EArG9N}i7WAWMZ3 zd*%0s4q0N;2fQJ7K^gv+lJ-iMbNg7+sZ>#FB?5dD{KV$$n`_yyj98d1=Z_R8UuY5; znUa3bRyoZ2T;IaZ9odyVMsY^-Ic<_6CX)N+=VvvCo{VW3T4|Ub8l5PSt(Atvms1vz zcyQW57s~D)xiNG5#?rKCXPVC)HO|5*7)1d5Pv{-zuI?W=m~^Yjx+u08X*yeOd` z`;Ji1G))ET#nNEw3_rrAl}z4Dnn_mmH&*)av!`icHsFxSmc&}-+Gbla`(b^n&4So5 zo@TRtuq3b-tr_8mrO7W5O&j|v&ZY|pzg`cnn^&>fYTTT(Tj;6=XgqEs1RL~JF&9hC z+k}wxavzf^MyIq%BR|+flamM!5n5|&fCWyj(JIY{W2tlg<%0P`?6RkMX$WzTYVakg~ssNpPZ z3Nu%T-5o>peALidIxG2P-1niMEzG!2ZiavGTI8>~)lvwlgLgc?Mxb?~)tGj;V!~CZ z*5_+9+)^JrelU&PXN%J6I?KOM;||Pde&Cl-o_>vzg%eI_B0OjYg}=mPMo7-$vzYK9 z)BNQ%!bG+vyMQ863MI>lyDL^o);;1;K*U>Gpi1pY`&|y(V)u=OCq4M9GR3bgPi{jL z{MS@Y?cg*-lcFCFJEBe7o(k0YOd^wih6$B|hvAE!ZpH#{)tNo8&0SA;u-77~gLw9c zOy2pjj!hLn#vp8qgxxw)tQwSl#;w3Iy-BM<#z-=ar--7v!T1OT>+I&khrW8DCrmSR7~1_Xmu~#)9(D?9 zGxb|b9Ws5|=s`iyoNd}}NOT-f)fYTUL@>CuveYtZa!sPq_ zirK_Zv?!ExvJwLjW5U$HWNy%krkefQKCw>eM$dk`yK~AA{d1h+Ixo5xJ*=~3ezsoL z1_;e415=x5<>HW5ot)lme%w8>I}V z(L^?z&qi3jt&R63#_VrE_7@moN&ki4Nz1W96_tJ)XJ9N>GhyJWvSQi7-jgVkB!U&sl_x?v zB)}~W3DIF{BWq(MKE?jfbMkb=#wQqP_6LR}L@Yd8qB93)qhf98<1KctK9K!T$NsYc z)OM&SaX%#{S58c2i5K?uUF2R}TF7D|qfD$*#9RLres)Xcv#+{XiJn^mo#KJsy2*c> ze!AUL9CR>G$9L`y;cv}`>5Q5f63&ouskMiC>Lkj;OCrhNsc+BHE|{c8Bze9MVhP`q zzFeGieELypY^G))}- z;-;?=q;g5*!*Sf+^oN9*(f!}g@=K5F$6Ym$c2^eD$8i{oOTU4R1wHn! 
z!g)`MgjN+;%43xJMz?GrU71b*U3r2atJm@F$eE4zh2(P~MYij-eBi%Cg-Dv3Ho~9F zalT25z{4Hm41V%^HT>XROD0IP($pMEP18B|Oq4}0G2*}1FmXo~r-A_RN10c)oYnZs z+~hYawhp-1+plFLDlJ^1%sz3edl#BVGjzFpMRM2fnNLs3eq>F%`*Hmz1RRup9)(lK zg=}@2sr-nwm+XO-IwuSxIfJ$J{_T%U4AQ?7VKEWg+EeyFjf>4zk7X=;Cip9vNC|sn2RkAM)U_#Dw2$KL5L(Iit&JA(#rN zGOpr3#xacEG?yKNWdZ5SAApFipD!IQrnj%x|#KSS^sKEFKMdeaYg{o}l@vO*3AxD7Z?ZFBgsr^xT2bnZG)MorXii zT$>%mN>zP11*e7m?!kjHAeY1;uQ?wlHlQ-u$Yq$1(gl%}#nZImOqdvIIT%bC@t z516bB+3#)ryy@a0K;3!cn+pO-oet~Y4fS62IdW7JA-VP?JNkqF0wD-Xmsp6MGExJh zS5NLycYD_D;!Yx0c$XX#iIN!Rc6^9IIP-{S!E7vv{M+tu!+*K7K>Pc(qVIF@a9~Qt z5nD9nmFYupE8>s_8%$qeeaQ`uT2<^D=1zuyKv*QHC+4f~U-g~6cVa8Pe)ftcD&Sj0 zNA!=|Pxqm$AkJhT)^d9h7th3Tf<3ttiNEY?8U_PM%ciILZ_6yAAn(iA@{-Y>_^DPv z{0#+nqm6v&_yvMWa37`3#LHu;m|3}p@80@lMMSGw<{;GJ842UzTu&x)Bfj(`YKi74=@2d%bVy~OlHs6v+bTDc|I~GLNy+v(r~RXOZT*>pZM<7T zHw13p(#MlYO)g*4j`l0O&%1Y}8Xy+$k%XQ+VIQF7)^+IRu^nvhM+7`0W6*EW9)kJ* z9e4n) z6>C!4lYRY4uRoPP)P^)JJFi&NkATP@+g{PU<;una{#h-Xufjrqi}e~$Q-#o36YfvN z>Gj;h42kY#gM(W--ccW|Xh^k7=v1EUo=;a~(n4yo9KaJ8dSOWNYAwV4oJrYB&SYJ2 zl4Rmzwa9;3OAsf9)58-XDQXpW&tNL1&foYaWkrr~YM4~!R;;B6%CCNzin; zzUx2zgLz%wlJ{glTMkD3~8Y+tKZ)2SX|jNR6wJ^vYHOM!F* zu3E*DT)_hUZgAGcc46!OPB|-0=hV;5|JRpz0D;R##LJa0!=iZ7ZEuMwE76s-mt%Rm z|3&Gp{R%LiEg!98Q7i`eznjzqD@RVDU-YmLW6KN5P&$mKht-3_$(}ITbypy5aVSLF zWA1R83U{aEcx>`(=m^8DP#ydiNyhG`;noryD;dA+zo&5YflBc3ZOY31m%F|{C%eS) z)edL>+cf?rSas8mak*I9Fjd<9qSS&@ju}LdKm zYU-bGMZ^H%Acx4enQ2XD;hYx$30CiD&-ax7ZTvLo^cj?P#?dF50rSRKxaum>wq-1t z^&xq%~1O*a?%4 z6+TeFN>!&qr4t75$ZVP{QK}|hXv?M6GCT$Rsr%jHUkChzotjf`{cq4YZ4B%TchyPi zcg{FhFaOZ|P;>3)*tfyif5~m&mZDE5HS|;R5y*^zNYrfdM0n{Ha6ku@tRxxWI=9sp8P3XtJf)DUM1M2**F&6bcC7iYalgweRUu^pACY5< zdzI}i>yv;j?OY=hHp34?X}Y4`Zdc@d7ipl6no$PUAS<>a>QAH!;8A2Z>OBh4n7t`h zu#LKiph~PK%O_T$5H|7e0bf7)PCleBUfKytFMR$}>UC0q<3*+#4O@17s0Sh$3md+F zrs_kKH?8F1tWNX@0PG|FLoZS6`JG|nXZ4aT^a!<*dEs7GcUH&n*fs7v+g z{T(>m0}hz{mht#zL$=38;_Co|iLnW@(zn36eV)k1nU`6g@vW#zR2^Kz)`D#hqdda( z_cK~n&+usRf5@RkBBhfn$@J=@^3PhG+T2R2pAc2}+Q4<3e;mfF#{X*BBg~4v|Na>P 
zF#D&C5cwq&WxS_Cd6TB>yT&KvzWv^*8j<-jEiDTXK~|$U*E7=sIhQ8kRPmM-u_{hYV`cU zTA1RF+oF-}Npz1c73*}_w(zhK7giU)F4V{?AH4*mKZs?X4H`-5D^QY7v1N^m5{15% zCxwvhm4)Z)iuE<^dGNb|LIRHq_w3`oj+lHQqHlQ~7?oj|pJ^l}7nldK#Gua!LLERBuPo*qYf z{TeefaWmBK8!EtqiWbYDWFHvwR&I9f>K)OB3RXf{W(#L)&ktT!RU93R-YGv(&SC&` z^5pFGg;Z+{95_HB}Lr1pVcc5ysd;_$cGTsG~Ru#4%%pkR( zlPNcY5|s5Q##CdZk-*|$8Viw5Af>ce`NqRTYZ;tNK4BJ|%(}+%>q|o$jhrt-ACL zb4}4`p>#5K^YoXQ$Lt{ug%Z1tj5#qp&{#r})|lJsVi1oD`nrRhw2iWptmWHxtrHh0 z+z>VJTtkE=TRuiiLfRpJAn3i{V;R{C)ZpNk z;X4FI>(+74B>2YhM^wbBdSA1-7v}n|Hqc3>p!q`g@tsnH$nv6a%TZ!{DQ2~KKyNK0 zsJ^+xv?aWRN*rkQ=4|}J$_AMulKmB`?v2DqP26Ranbu7#RERJ!K*piGKbZYXyTAz~ zh9nv&uhOPA9{LYE$P>1ONXp|@2x8n2k{&Y;Yj%g=`v%aUVv!z0Xz81H8aDb_!|=Z!qnw5 zmMb~9QsD$EiY}mPQVlkz+CUn{{Os@n11Fn#2;V7=9NpsMu_-l7Gig8i!5-spl)Soa za1PA_aaJ-s!ug;`e4Ns;)#AtNbfE~XY{4E~d8j=sjoP1U*DFudTldAs*NDlvQ=gXF zBHTK(3+>BevDsjyWa$>iwn0L3hBS=x}HX@WlnVGXeAHrUY|?RONx6&}Ys!?W1A zqEJ_9P;mE;FC&tlFt9T2S@8j~QazLR7`M`6c!39DdLL+T=23%?H29BwOsiw_iyI`5 zhqCEt!QklEA#YN6H&@Jpt%xys3Z*zUiJrDzE7c{eV_QJ5t2mBo-`YGoBgg#uz(Qj1 zVJw9d^?Xf`WVrSGN(xhKShhKJLK(#sGw zpf=5#1anU+l)q7DcNjuTk;auyU<+oPa@~6mnE}Oy2ddyNMZ05hL|2Ww(auf7j{HsW zl_cEA7YWygl+dIfGX`gl6Fr4FUhV{f%l$?pc(@rPEK?)icVUR&k)xay*ZH0^Rts}{2)ma?FU0MhvZ-GiQ;dAx3kW~8B1FLhox7Rr; zj{0M{@*W_YM~m@o#T^~vREF?4Jxer34D@~WMSRdvaxRBwwryBLQiX<9rqsv!$4`MM!FAFKqLZ3Q!&TyFUeH+G1NF9n-dHfBz#gkmc?V5xmHE=U zvxM*2CcByYgEW{%7Eqr0wgXwr$NqS>L}-KMuJoe(Zl2(}O))b|?gk(+r?R+9bBnq5 zK;G9j5R^%Hi2rNzl8g+L$i#(T!hLlA5L1FWgujv!1YM4dUdW zufGxaM16xvvmQbFCg2m+Fp$+u0`C?8M!-A4yuM8pW%7BJ_*Az$9d+$Y8DS@si~pu0 zX(-!?tLfyY;v*d}I9P!N+mgp?bA%+Kknt-;^x(yiAQzlN+W@Q&+m zKT04P8PaVuIv51fXKTMy*J>mRjT&4ljPiAs-iL7!so!Fv#p=5i53?UvtE=ew=Lz(9 zhAE+ZG1|{5ZtoH^yZoVl=H2RxsmuMNWVGO8#bIW_@0uI$%#Ag-BydbY2cgKqN8`j1 zRm{PsD3kc05PUU@Ql^HCs;$ZU$M{QK7y(r#ijSS4eWIIuSFd%k+zD&}7-~!cV)55l z0gkruoF-_6^S6N-N!eUe)du9ca!Y4!UsCTUOYmBy)es09Gfn=)0V{Y?5A;reXGOcN zRkZLF>qA3}5=u(z$H$11u|tnzDk!LuI6AVPc=c;S=~iixFP-1%PrJ#$I@{aWagAe0 
zLdxa<1z(yhirDaeUnBFys_?ns8nsi15_rLy3nkiXk1LmVsp9fWlQIg_&uHJ2k%(JCOv>i#mO0IdDYqI1oA3x`E7g*b zn2}F=<6LMSGud3xy5ES~tFOBbax69yw4f>qRAYc(J?&b_WveQr2f=^h z@ae;t-g(hL^V#XA3dzPkRdVlKzt}YUriyR7UqbhCUm!Ne{B!+kJv9A4<`lHlq`UqN zwdV?oM_BHDgg;sIyB2iJ8tZC7k&QzbM|%@G42k40t-yw4DnyaL9@7hFG1vU`9plHb zC8z%W0%?V$qN*tR#O;8ZKBYDF(UEWS_54v|x!9ztqT@{xyEm05>xH_hr>7>(y2pp! z_XAbJZdC^!3qDlV30w|?a|~y(uTLy9MGs|FP@TSKC$6IIQ`O#UTwx>n%ug0Kn27vK z&Uhp>M~9yv&j`f}S^cBi90)V#_6v56W3yhr1L$=|sg)fOeJuWxOlUm*T^c=JFZl=) zwp*J-EHgVQ$t+yU+R_>z={v1>4Mcs4l}R_8E!3veh_kS0?8KpAAV3bsN5@kSI2eVzoGD zQ~oC7K-70Oe51i<4kkpl`j=lj!;^Z7#sx+gUwQbi))$(>0D_>iOPbx2&g0%th4rz8 z>z*3V{8J9u*to`X>eI$ZtS5Bnzps8PNgs$K9OQW2dd{K+gxUwuc9k{^F|RV*kcCsX zuTuR;4~>s!Z@vXGo<-ZJdj(US=vG)g`&PE(21>BeTaW1pyyD*ukU>8+*!I=YhOCMr zBi|c3KmtSb$?rwilj`%u+MvE?tA<{+L1tb{50_qzR3qn}TeSLz_L;6(j<#9=b2bt+cy~!Hex9{&SpqW9~s=R&(;bdr;(2fUm`(ShebaZE>B-}ag{ z0F95tXgt)86<=%el)vM6O^T_T!k)GPO#}<`HHF2Wt|+gAbnS`*zip0SquWqAiLNi# zy6^-YN*p~)J^kEqKlK69tU9u-9J%8KG`Wm|F)j4zM&JCjFdKSw)e05t7M%0gkMZ7; zt7{8vpHe!H^Yfk{-v_p6c`t_cn*juLSyS0J%^)as-gl9_i45c_y@0o(|~56Ox6cgvR@0~XZawvFAbrfYNm z-MU`Rg_Q(N4bJK5Bxf(w=Sk^>U-l^!-0z{8Vx^of^ zfIby*`poJ_TM36sG)}!^qE`%x#}BrZXY7DZUm-Gw1;coTg%dv4s6|& z9=~2sN%ywjJd$00GXumhQN zr7t!db&H;U#PoZ`HS!$0EO%mih|OYo#AL=@I!*cCWTzHYlkxV~=T*=AY)zFz>K`~V z9#SJ2L#@^kL zL#`ZE=}A3%LBgb7!yiQIPf3iqwcgbe&YMc@w;V_3?KQI%!3Gd_mm8#=`W&@Q9~3AK zPaMLiqpwcna$AErdIferM;**~?H-8T>l34Q-1bS`9VV6CGIGCGMk$5}R;_L+dEO7G z?Mhk`pI_n*&G-FYKx~nGy3fZ$6tO%y1FAi+TrBZC93#TE-;vL(@ak~Xq06$g6R&|N zhqRKD&)a5-`$NgiyPze)+zWYviN@FdlELwMNL(Bc9|8lVS%qEtEiBCatAr$a*&vqZ z*RO+bUw?Ed}Bpq=L{neI$ALT%~78-zyX^YCE#u`QvPwUYX;wf5}sbtF=E#;71DvdzF&D;$2@S%FMWSD8FAv z^Q+$c`0d{G98s5--xxTqF68yV>j;kO&7)u+IW|7|I9+Atw?BR!cy8D(YZx3i0tnKe zo%AobQfWy$X1xw;TQ6T7UGc0#)CAPKeyE7I+r;n%@0*09sJ^O69aE=4VY1GiAaN+6riS&~)L;nGBKW5uP+14Xq}|iS5k8Pv1fy9j6iN zVOOK-&4)xkDE;XdqRq9w_nWU|idc(3VqM(Y^PEg0GGNBEi9HsL^F9+`9es|8ANs&F zIqcC#rCyc$D50e(zHl^h|3p%Da_h%QjGIUVtSEDitQ5MCJua8^309sC>j%^`n z06H>{hYD@Rtqqe;3Pz4m^T7;W?+%!uY$8`P8)3^@hFf4IO;0CuyB 
zxCV|s9)eg5D~SHCr3<5P{+eYZl^(<@bz8q>g}Ao~BT-q9IHF^ReBqNgpia|UeFK!d zDuYEvWP@bud81(GC=${%!L|X=!kjMBmApE6jeW zfyBB-X3)#d(MnZ)BSvji|Dk<8R+bU-0~z%5j(GXTN%y=|y&Bf|(8*rds#B zY~v1tdD|vga|&_R3r{2ZB45g84GQdf;#5RS^xgczJ8foOM<0zf*Nk4Fxga#N#AZzu zADKN7iZz#|wSDPQSX9?UgQmo&60~l!>+IOES#GN`$C_aY6t(_hD04+$^_((l=Ge%nD zgC*ZaZ)j9sQd~3$M=3-!MDYNBWM^@b!MQL;aH4x`r{b8vYPLr==uOFDK*wPZz6^sy zPlP59mTT&;LAKsSOyH_;jXI3NfE9)hey&Wg0ZW9v^d(dM*dbYC4YnyAJS!SFQ=&nf zXw(eN_Qd0RQhO1%In>XqB=C{mSvK;o&JR5|X03haxK!+;8!N zj$H)*GSj?+lF8K7?e4o$a&Pb7*e_0@35^;DbW{KM9(i?^HvktN|F+wxV_k*0gZ&Mzyh6Sh-=n!e%Rms3sg2k{y2 zN_L*9rg3p9^Y!6(%~DAT=xwF1rLwr+kLKKKT;xH*?$hSKU1oSTP~Vp2{7LYTD5JhU ztPYLLB@43NkX$w5SKQOz%04@VxaiR1hio$(Y}~&quZW9}mWI%~%VFR*$tOtLM$^uz z#bkiMDxv?yU;d2zkF`r0KREyGarSlZqz} z{>q%LtC9DG+QTi^56pE6(e1P94cs7dUHZDyFO}vXN+f@eYSj(^7M**7>``66w) zN!E|{y)G30CU%@?1HVPA-NO@SsSEv*T1zQ* zc~Ilx{GvdY5!vNjjF6P#>XYoXKlwc^l6ySu^IQV_?7l*0|a+w(Pyba8;9a~(zo(J&r^`(MKxqD*iBZznjUyAc(#~s8o&BhS|K;Q1Pv2~>C z`tvm`@WykvFY?*fNM0H{R4s?>gbX~&uGwgUeopaUoR&=k?5RHNVSkuK zV^!12b?Cv_Y_4k3dV4mf8roM{wrwgaLmzBjMeZrNz3?4xp4+rW@7>cVD}FSo635C^ z3$3YWf_n|+J^YP;IgtrZm(6eYd=oWCGB5ksLW=F@5LSDav#+S9a_-r07Hv3?xYw5< z9>wysV0%^C<$!u|Cr&Uy0}2FU3nm!2fik1i@7K&SxkH4)ztU=J(mjR6Lir)w^|w%Z zv_v~6Nd6W9vg;75E^Z8!EMB<==* zFNWV~?@p+p4oW5CYZy3H0y7(;urZQr4rcA7q=QFMykj+hEfGLcZE7NX`3VzxA;orr zQMxtcU`yw9rQACEdQJDDMKG^I2lItE*VRe`#`@sP^e*n%@0PLy z2a8sExunK7EE%WmKOM8(d_QD<@3Q!qfqvm)J5(M%IK`W%WzP z&SL9--KJth?Y5?eHcYG~#khc09=i5&_0xt^NC`ng${9aKt z;yqs6sP`6eops+Ido%Mow*Ia(hemTmqW)P%2AC%N9t2N zi+={}P3O6PcYiy#^sbqbJjX1Zw%wUoBD>~6M$lZhcQ-UI9@E{^9AY>p3{+;{mZR-j z_2(Ak_MNV`-RYm!ZZFstw;b&(Z%gl)-$~Ub$-V4*eUq#ERvI##+L6g5Hs}4Yx}rKV z2xxhRbiS7ZKI*mV=nIkT*w(y(C&&kQ7RIXpeVx2lmz62H!SunJef1zk5Zr)oI}bYf z)^stKgq8p-|H%^pw4KNB2-Qf#2~$(22w1@ep~?~)0Bg?^(bMTdYAx%_kMvKi6FzF# zp>Ptz^?nhwWVq1I!XHiWmVC{P4$j@Am`nQ*85H&C6iOEIVU=TzkC2V_R6;xCBLq^# z=yy3Pr))Zya+!00#obal_y-Cwm2*8Si6pLK#Q%j$ z=?+(!4i$RiT>C1`A%mP|RgvfbYo6HpW4nBM-udS)rhyS&Uhk{RQ8_%$&i3F%164Ep 
zN3(T0deUX42&DQPOz=wK&vIG%;ba?QX`gNUa-<%3lYK5o5>CJ~imSJUgR1uSlYP$X z^#nf{s@?XW8bN;mF{SrwGLm>0MtDt|siX;dF5vO}te@fixKtWl581W*2}t~XSbIE8 zXewX&=Z&F8TQITIq`vp^(nW^ux;2E%yfU}poiHtA;x|bsS`fCW`PN}jKg!!jfnnpy zrvVAG*eqH_3sBlKOKud402BKpen}y|7bd1@c2{*yu`_F)Il<@sLE0)-A`h5>qT|CU`XD9UCuGRy3()}|4OmB;c zQ$S2bRph=gYw&7)P8Hn}d^aE>0@0ScEBS|l->lsFJZ;ruVb-^GZq{4ka!>Hy9j|Kl zZj(B!zbtabSeeJWc>(_PtZ{l%_MT9WB2nz8#J1%+U=DC`kl=x8K6> zgP6{qGjzOiMcGt7RkQ&tZbFwLz5AVA+Un4cO5-IyfL$MPwjKEd;jvfcb-8qq08#^e z;q=C6fLsVWXj^>Y4xIQUgwTrQsolLHyOwZ1mGLKY#=1UCh-nwL;C|@J{Hg?9DcdJ!9M$CHe{@(A*&5F-maR*Pf1v+($5M7@bUQdhlLG1@4 z=PHjbW0{Rz@R~((7oGdixLq7**ykj>rLVO|?1%kL@JQQ)v5|p&`7R=XYbV!N*(8V7jKsdZQwutkp}Mu+nDC);3adCI@1qP`ebQE#BRZSw6drUS zwwtY!fa5G#oFj;|3(U*$cIPWVY6Qg+HY4|}ukZF2ox8z>BHsizB>VcZ7dObNBN1~h z^+yb(4`VVKkb!i+3Ddea8|fE z+!e03?q|){&P%E0Ot_bn(Nov+1)!|v3M#Sp3WLOsOe8~_Pig=BN*+zt%=CkOhH3BI z-W-ply@&2m26XyVU4E@NC3C_cMKruK8F>-#f*E`d2V-mm{dBN(f&l9V3LK}vQW79Y zbfEvqh!bTGHmh318&f_ZJWuACaV=pl{#aC^cJ#xeuh)JUICL?b=A7Mc&TjOzM;fUr zxSDJKe#>>qt#zZxLJnKcU5NoFav(7Ow*AyQf+)cC``;@(c!7lY7sqK&TtjiEdjEOZ ze;Mzn!{1!Cn364N`p4G}5}22+Q>npgV)1RLn<0wF62p`;B8zD!(sYXqr_xM|$+d$= z4MiKxscmw8U;JhdgK1AWh9>Y9Ep^3H`p?Y}`#M^Ejdi=%+0;3w`n^Y^uR%%s`F=%X zhesL%vcC>nH3zgLYE^$z>Yk8OZk*zmCbyyP&QCp0xJ_-qbz0Q*!7T;Mirm1>8I@&V zSd6E+y3}Q6eZc;NuUMb5b=%sS;|g-5+5|7^sP&Li>%?%;6#XW|JlkfjZt~dFIq8{Il%45IBz*|@9$=$D{RHS*4yI5nux!*`i;d{NWipDt;i04_JxQoXpv!7@H_%;Gy!MGuD-48{-NlwMT_lte`b!_8U z!7f&ub2y5QNb==j%J~Y!&*`)06WSV_%BZ&!On?O$_oeyPSl6BI&zQmtXP@oRGBJd(2&$NcURfDbLc++GgRvNw$V(6(99=Kno~*-sVbj752Hw)B<#muKuZLSJaw zihobTYiW{HYVR-iJ&m{2;_Jn;u*dRe4@yitX1J1TH1+sPIo`t~vA-@>SR*-_{Yv?Biiplvr2t-7oEH9(ufGD8qGDwuH zNSyh3%0tsx+%e^p5yimQV8?(q1}R$|3eIw|NtnVfX8tZT&KjkcP${UoA)sM)yjo{% zR`%zYAy7_4^IzEq==U3=6lGebHnYW|95&7{X+zy84YF>>-MZ9i1`GTI2&#@(Vz{|B zEZHsN-*z$4Ba;LFk!^pkZYB|qm4$YYqc-@g-dY}VVLa+)M~q6}L73pdpn}mf5UEck zLbEIiR-(*F40zC9>Ux{_3*D;IV921-bZ&uir?_W}U}RY0^kjUe5Rq;X7W3!r;j3qA zRgAS45mg5iEGDf#<6Y(oN&HQT-xaCv;?tY?$WI8z?-P{HW=XmrAlw?NXW?rPy$>Sy 
zEj8rKeS|B>XwWaxjcRj=)#VnX`XZDymvupT7#(UY*+X;5W4ZBn00u+GSunq#8X2iA ztZ+U`u|JJ^Vl8s(p?(wKTbAj;Pd!2n;qneGKYFsge5)YaYZg%7B)Pp z{G$P*v)dYaMtHE`IYhnc3F~w0c5A7#-noe9rNAxEui;4di=$B7@mXB*sll`vUb}i} zswQz4_{A?t8aaYckyWW464;Y`Q$>u5)DoL+7&#DYyVozFpSCJ(StnqBTcrY+Gtm&c zYz_Ny=6@P2ZhBe}b07w_RYEY&9LF^2Jm#}jxv5y(F*y#5;Ro-L5y~hJ$>?G5kQIkM z54t(*v&{ufiacYJkDr<~k%Y?b7m(NxqV_~0AnCNJ-=B{-xz?Wo#g#P11N*QHp-}le z_C&#Lc}@IoNQuujOqziW4vAur;a`@{cI|D=@ac+7hrfUl?JfWW-dN}xbgu4QwwP5V z^MDA}KxtYA_x>SM)3lAE7L`@p)6w!k{HTR@yT4bgBQwkqo?IVQvifQocg}HLy@#=x zEH%go4Q7Svkvc&bOT`-o8|i+|^spA~X?r9g`mJ^3Vo!pP&M^)KX8khUqa&!k3yUUD znzp+Cf+F&PmgX-y-H6nQ57Jh6*zJz#?TSR(@ZXJ_%i{rGRl&+!GWvr$Mqb((8TcW2 z2_}@)dNA#6;N4DZw}Gf#w4^U*vV6?BfEc&To3PJs+KTsemA44iwv^5AcgqP@rA>~N z23ufnHJoColH|+Ly_7B}jQ1~@hA~_#T<9sj@%^4w2H1&K-*UFEnEU#1!1QRPBa?I7@<>jn~xvxQAb&vFzu_(iZbERM*vDbFG0A$75?Yo zn9J3rd80w!K*D0@pl8;p?4lbWnz}CZGuUizJ0g}GS(LoHSY5jdD>-z3HpSk2!QR{& zxzJFfPgCD3(Yu9iz3HX9)kreaGv!eM9J2aMqurh5gM}Il4@csXxvVrTs{v^+;ZXFF zb|U*zK~Loa!xzp>YNINT@$~@Vvc>9b|DLb)Q4W3hq&={F`OzF5$glWcsnYr#9Gil3 ztieLbEKltRCKz@Aoh1<|-qM8!dIiYB`9q6{TifM2efeO=?bUWgOi_ky)^IWY?kwXG zK#QBX%diev3Xg9aR{K;By(9>RB-cY`XESAkOTH2F?lAs@CN2iuBQgY-3pmRd?`q{B z2J33#>8B^;%Mpb$(ku9+w{y{hmQNcjH{ApYNv_H{>EF2bC4Q&-InkY-p5rHv zC+h5z;@Ox>0tF0{{Y~?*YxcWxVWQBW|1DsJeUicbx5S(A!-S`)NrU?`iMdF}$Pdho zmpG!xMft@GhR!`cYoyujSiE2yL4m$8T)ei^+`T8+f>DxDCbmqTOsjarnNIXF&-ym* z@8FK=({#>M{818`tV4d1;nk)~0K-L++p*A)c~~zR#@%q=P=C1yWa1j3lg0MlmoeOJ z23w9wU}dmsXt%E_yfQLDS(4nm??9;qz5;bt{=AFbW*y}*f7l%Gbn;MFu)z-QbnuYg z&}XVFh_5iI_~%lg1>ZY>AsBQPscNGv2aJ!=>QWW5JP0bWYSG@+D>)Fx_PiO1g35;D z^IOZ=ux{n*MX6aS@u()^(||o|T%wOJ2okeoh!V3HkAJIm4LesH*LZ9ZOkBwg;+w=X zzPu@F=Cp$i<-oA*9=%nQ5u=l4fx*KuQ1Wv?znv!s6*Dhu-HajEcE6!YC@sf?>))tT zf8BB&!9(o|a zK-@hIXP@os$5Y(VOVHFyz!PnMB7T|li`@5s^o#06io!q#iDlV!deQopY4XbmKhdbA6HILH~-8Xxc_FmDbA@;e{Y5LFOcSb8ZPq z9U-jG6AmQ`g&s9DA~}b?&2|cmaKoK1dLGilsp-Xi%=M>H-hD+v7+~w}NO*dbt%-PY z=f-fqq6^^~jHV5f79&_l>gB#=h1108*EZ1`#U$0P-Ubt_DQ)*}V&Hqf2Pgd&f2#8z zp0c#`0mp?P;WTRZ#+Lx=)>wjdLcVku5S(1cFGJO?A@`|< 
zFpR#4bbMbzbi0PZ?^kVw?HEr&8jmsGaJmVG$zQl^I~r@cIad3nhFm@S;ul}Flm66# zS*zPFBJUEALno=C9lgKg8CR@q2WZ$-B@hBJ>n0FxV|zn^u_psLXupRm+Pdt52nVj1 zXql@|5}-t-VrkxQQTS5jC!pn3?^=-x;IN_4TXNEx@g1!R2I;{znD7?tRXjsS6x|t& zcq>Ewv4@O&dPbICJRO_V?a8=;0X}1DJfXu)SZe?n{bOb&{i|3kF?VDZ6{d{s4biGH zXvOvHy>##B4r8aRuX=7O>K*X>8UJwfr^eELoLMor?MR}Mhbf*N_9;&<_2XKrLX|-W z3V|c~)2{%c4UW|fvTL}`U?2sl>w3L)%BcP zyQ%yaYdAj9ZcVP6Snk>Fq>vO+T{dRXH;STNJC{3_g;{gx0#;;q7)MIOlUy!wdnA+< zzipY9BUqRNPutF{6fC$Inr;|Ac>ia~2{~W4E*$9mW_Cl*O*p~RYR_y<#`C5F6HC0D znY;?LoeQ1+4Dl15Ru?_+0s^k6j-jW)(>&}?LFpoV$F`G7n#v{ws*ng!)^~br$T$Q# zM*p;)b5jRO5K_4`8(VCf9y+u75eKC%FU^-0TIcdcn=;gNF@e#!jysBE$#t7xHnsh0 zVZ4P=M_JCkTs#YGFl+3&(mm8{mjE1pDwKs7HoHyY=V?|7FiLU}5$Y`A7}_8!Vk3+LJx^mc^j-507`n{PC^ z>;X#cr6i+D^X@*9*0wa}S(2RSxBE0Ehwyc%(Z*Q(p?fSZ~>Jy7=NAGUHR3 z`!Y#hyCednHVtrdvgF!e-JkNXFpWm@i!2_-yUQZpb@MZ2;RC`FX1HvWexTFi4wCbZcm=-qX&Ak4$=N<>Lcsnfvj0bj9fXY{n?9NnONWc0*iV3g zS)vSAhSR`ZpkA_B2247POmJN2i%=bKCr24P-q3j^cl3iz(jTl4Hf4Hc869H@nFiQg zs+DR_rS+CF7ykI1-;Nh{7B@{#ljaAUVNNYN-h5nRMm~7cv|X;G0mesa87T$Wzy{26 zCyB)(QuaXgg-AJYKiSWKti5QKB#>kM*R%lO4$W5=d<-S{*E{~7v_!mg*Xzj7RR5Vt zzz=y34A_q8{pUpT7D}|sEpk8O|2w~+0t{3;umjuW-!EUukm$ZZ=cx4GQ!ysGn;F&; z|JQ9z6Fe2(PpoD5@2OxT?m*$$K2K-=`xhEuz_ay9TofL1F#q-~ zZwPEy48HsC|9qIW6c}KSssE1)08WD#a9~g+dH*Aw^KVn>0R!(9jQ`^Tzywqw;m5ej zV*foAJ23F$W7&T{!~c67|M_PB?{)m=D*34T0cqU&iYZ3bY0Max~E&u=k From 315cc21b2e04f3cf999f2a9437f4a9ad962b43f5 Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Mon, 13 May 2019 17:16:30 -0700 Subject: [PATCH 13/31] add unsupervised learning examples --- UnsupervisedLearning/.DS_Store | Bin 0 -> 8196 bytes UnsupervisedLearning/KMeans+PCA/.DS_Store | Bin 0 -> 8196 bytes .../KMeans+PCA/image/.DS_Store | Bin 0 -> 8196 bytes .../KMeans+PCA/image/MNIST/KmeansPCA.nml | 24 +++++ .../image/MNIST/KmeansPCA_batch.nml | 26 +++++ .../KMeans+PCA/image/MNIST/README.md | 39 ++++++++ .../KMeans+PCA/image/MNIST/build_csv.py | 93 ++++++++++++++++++ UnsupervisedLearning/KMeans/.DS_Store | Bin 0 -> 8196 bytes 
UnsupervisedLearning/KMeans/image/.DS_Store | Bin 0 -> 8196 bytes .../KMeans/image/MNIST/README.md | 39 ++++++++ .../KMeans/image/MNIST/build_csv.py | 93 ++++++++++++++++++ .../KMeans/image/MNIST/kmeans.nml | 23 +++++ .../KMeans/image/MNIST/kmeans_batch.nml | 25 +++++ UnsupervisedLearning/PCA/.DS_Store | Bin 0 -> 8196 bytes UnsupervisedLearning/PCA/image/.DS_Store | Bin 0 -> 8196 bytes .../PCA/image/MNIST/README.md | 39 ++++++++ .../PCA/image/MNIST/build_csv.py | 93 ++++++++++++++++++ UnsupervisedLearning/PCA/image/MNIST/pca.nml | 23 +++++ .../PCA/image/MNIST/pca_batch.nml | 25 +++++ 19 files changed, 542 insertions(+) create mode 100644 UnsupervisedLearning/.DS_Store create mode 100644 UnsupervisedLearning/KMeans+PCA/.DS_Store create mode 100644 UnsupervisedLearning/KMeans+PCA/image/.DS_Store create mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA.nml create mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml create mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md create mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py create mode 100644 UnsupervisedLearning/KMeans/.DS_Store create mode 100644 UnsupervisedLearning/KMeans/image/.DS_Store create mode 100644 UnsupervisedLearning/KMeans/image/MNIST/README.md create mode 100644 UnsupervisedLearning/KMeans/image/MNIST/build_csv.py create mode 100644 UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml create mode 100644 UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml create mode 100644 UnsupervisedLearning/PCA/.DS_Store create mode 100644 UnsupervisedLearning/PCA/image/.DS_Store create mode 100644 UnsupervisedLearning/PCA/image/MNIST/README.md create mode 100644 UnsupervisedLearning/PCA/image/MNIST/build_csv.py create mode 100644 UnsupervisedLearning/PCA/image/MNIST/pca.nml create mode 100644 UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml diff --git a/UnsupervisedLearning/.DS_Store b/UnsupervisedLearning/.DS_Store new file mode 
100644 index 0000000000000000000000000000000000000000..416063bf61615bf7b6b360391ba833fc48b40f22 GIT binary patch literal 8196 zcmeHMU2GIp6u#fKl$mx)r+~C9gFhxjJDYzzKoEoI#i~1T!OuP~gu_ zc5z>wAt5jnYN7X5mWb0^l5I!uyxZuevsU1d*(6I$jX(G({h)s4H}aR)yj(MrO{|_ zE^VI5ncf+-R+7Y6E242()uqEa&9>Lkxux#wud0nk5AoiXtU;x&zMnT)8+}MtZK=zp zjrpo|8x*DMBt2pEIa&6Hjjm8OOLDiRr437TY*n^NYKPo2EIcbZcPNcgCFvqMO649ir-jI&+**i7Y+c#v`v?{*$Xl-@P%6hH@-G1*)+W z%TR-L*n}n|(2OKH(1|Co4_!EdG;|m^1_whpg%M2P8Jx$nco7%z5-#B~UdKCl7w_RJ zKE(~(#4SwXD}0Zi@H74p3ykEsyf?oSf%ubf+JPl52Or!b7w^vH*zUI7?R)NFE`PIt zKg30gA6QZqU%l?(%?*z=Pd)pB$32aa|0bf(J3%dVw3zG`C=$x4XvVv=NwqNt3YNclff_7=O%ejvL34Tch;>LQ})N<4@S zXu#t{(N^rlE+XiYND)c9aTEj4h^i-H!6vGn!YIz-9G)h+K93iOtQUx^FZ*s28 zyoI-M1=nyLAL1i?j8E`6zQk>OTR_;(0>XY*NZ8?2*0vn`IQ8dAKHVg2&5Z|C1B z{@*$K_y4(BOgKyEfzSj0iw98Lnrcnb)U&;gd8{3$JV2Qzt~VjjH=)80#|iy#oN(%g fA=Trg%54$>P6(tPN^|}pAiV#>``>5Y{ayS64H6N8 literal 0 HcmV?d00001 diff --git a/UnsupervisedLearning/KMeans+PCA/.DS_Store b/UnsupervisedLearning/KMeans+PCA/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..ab17254a4587713c22cf9cf77cd488a4293b2b18 GIT binary patch literal 8196 zcmeHLU2GIZ9G~A&V7DxETd^!q;NU_bTBQvir5N;R3$_*l;R#L?84&Gkb(9lt)8kCz<*0 z%>QF&e*4>*{V!t-tp&4*F@rItP~}peMa@+T*YkQsiG?Fv3G!#m$_)EM8OO_9VH*uZ z2t)`(2t)`(2t){69|+Ky&5JU{xi7U*8zB%Oa6=-%_lG!DE|Z}gmr`sU)QA*-WCab2 zLf4cB2%mT;lc5}!Qn*q~nLQwMMVMkhx|2T6%}FLhIWDDiXOQj;;hhm?C=hoizql}G zNJ<&C5dskcS0cb)o9WDEIhJMvW%>J8nIXsagT}_Qh)t`QK4Ye$R4SFTEALGWdc&z8 z9Tfc5bbc@I?MMfMnee*X^PEf9>CA518t&KUws~%lwp}YPoNd!0lYKq5YkI@2Ucohe z5ho=TilQoNziy08mscNdCzfnc)lIjp4-XdwLL}Kw=<5lYuGk}!M?PgwOrG72lf#1ZeSmHzLdJfeUw@(=8uA<5yw`r?X{g~Q)kcc2`nigxuE+?I_99254U8fm6MY%M+D;(T# zhi3HBBh1m1{c1GOuvlNxXjSzumHYVJx2FAI`;g_*thmld7+Vyjk83uWre!LL(&JAg zSB8M;{0v&e>e+fabGq39=CKFa!WuoosQ3}6sB6mbM&IErI<5>Mf2JcAeUDqh1$ypH#9 z3LoG@e1UKA9nRnae#FoC4Zq6?#!3X=TAI6*_=99Qg~NYvu_vxh;W|O#`s<#;Kg{9J z?xwjn*UpPCzh%{$^&49+J@-jZxr~UH^G{Gta!T;5#3spbWB!7Lb&IqmQ@t0vh?&-i 
z!bzsjoLD%2fw6R1ou(^9GgWl!S5O!9Ca4o4RzuzG3Q=B>(iumh1RU7 z`gGyhvO&}I3Zj4Ld6%|HA&ToWgto1nw2Z%s>eaYZhw%^|#v{bi#{@^8#dA206L=Y~5L@5E z+js}>5?fE>V|;>7@fkkHH#j?qr(KhHdaj(OIj_ClECgA`&i(V|OOKwItTnHp9U8pW zCSFYCUB`0=C}QcIDW6Kz;|NTXFc>`l-#qpIe`yu95dskc|4Rf=(Vl8=rKL@^S@K*v zPIW(3Ubx-3l){A?k&crT={U(S9T&E7GUYzWP>xGUBUJwL9|7UlWOV*V=f5D}4chz* Dq{u_n literal 0 HcmV?d00001 diff --git a/UnsupervisedLearning/KMeans+PCA/image/.DS_Store b/UnsupervisedLearning/KMeans+PCA/image/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..a0b0e52a5d8dd223a55795b656b000e75adaac46 GIT binary patch literal 8196 zcmeHLU2GIp6u#e5V5S}D6cEbt(}e~EDO!G{5Cpf|<)^d-bX!_}+ufa^oiIDI?98@6 zDb`?OjPi&I@x>TJ6pTC|#-GFoV*%ki6;Ie#s@w3-dRFh;Kjs{bZ#>D z+`%S#jArVkphq` zqhV2~O?iOui3Ktq$YCjkE6pjh2L!GNQVd9U(#N4u-)^Tfdn|LHTc6eBI)2)6%z|*X43kU_bXtz#4kX;7 zV|ZeolvF5+s;J$1_2A&+8yf0jYuBtFs*4RaG}QC(;BXX**lA)4thg+bQdWNn@2> zw&2?KZr8K;G<&;z^O(I@F8P=geQU zbVdEz4V#-rD)p)v+AM8$XTRlH8Qbg_unfO%Z`#Y6j$t{y`v`K!w+>roL9L1$Q`?<% z&K#McS5;QejYhkQ8S7}#@{jAaiX!J)7>%jAp&T%1wKmt_o`$erTpNw<=c7#plWIeM zujH~U`iQDKO1npa3Ddfjn$~`Vc3*#9RmHT?h1wcL?Xc4s)0PA4wDpSKqIUL6WaZ#S zZIe=kI<<>7Fh}?8$$N@N6RLhqImB0QO?&>Xyy?(g@z|ra(XEQImungg!!(rI(!STm zm&v9Gxi(%ednz473)pIwWUZ`+71$sd!K#EK4&BB7WpNHJYvv+f>o5SV~=kI`ig6t7}%&tW=2idPL|Rt0&zg zV!kw8yHu;uC(|_CRNJ6!REhq2nK0e7nRHW#?{bkulcwp6Kac8vBJz{R-yq<5 image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[]; + params:; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1]; + output: y; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8] + -> Kmeans:[n_clusters=2] + -> y ; + +train: + compile:; + run:; + dashboard:; diff --git a/UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml b/UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml new file mode 100644 index 0000000..72c1c92 --- /dev/null +++ b/UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml @@ -0,0 +1,26 @@ +oracle("mode") = "unsupervised" + +source: + bind = 
"/DM-Dash/Neopulse_Examples/UnsupervisedLearning/KMeans+PCA/image/MNIST/training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[]; + params: + batch_size = 16; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1]; + output: y; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8, batch=True] + -> Kmeans:[n_clusters=2, batch=True] + -> y ; + +train: + compile: + batch=True; + run:; + dashboard:; diff --git a/UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md b/UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md new file mode 100644 index 0000000..7380904 --- /dev/null +++ b/UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md @@ -0,0 +1,39 @@ +# Introduction +These sample .nml files are for training a PCA + KMeans model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans+PCA/image/MNIST/KMeansPCA.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. 
If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans+PCA/image/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. +**KMeansPCA.nml:** NML file defines a pca + kmeans process where the model is trained with all the data in one time. +**KMeansPCA_batch.nml:** NML file defines a pca + kmeans process where the model is trained with batched data. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py b/UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py new file mode 100644 index 0000000..fcfe45e --- /dev/null +++ b/UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py @@ -0,0 +1,93 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. 
+ ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + # writing training csv + with open('training_data.csv', 'w') as of: + of.write('Image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + ''' + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + ''' + ''' + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + ''' + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/UnsupervisedLearning/KMeans/.DS_Store b/UnsupervisedLearning/KMeans/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..50407ec90c7445a32d3ae0a93a90725a678cb356 GIT binary patch literal 8196 zcmeHLTWl0n7(U-pV5S}D6e^VE(uIZ!(rUR#AqZ}_%cZnPbz54l?d;CbPMDopc4oIg zDb{FWjPi&I(FbFUQ8e;^7%zzr#srArg&LKp55~k76HUA%#s~e+oLNF!;Kjs{bWSqo zpL71p%>SM5%*;QGF(eB5YQ}09V-g))suk4Trg1$#CbdW~!<8U^#*Flkn@d|xdXjB8 z5h4&G5F!vF5F!vFaBm<$?`+BB>>4X zn)Vx=Qyt*@!~z))0{iSWIT|gLJD^V;m#0T89{{t|LWuy z^UWFJLWX^aK#0I(1o*a@&PT!$N;m~u 
zcl|gip^zk5lKa$};o*ljHaEo9t=%xv5F2i8ZsOwgcx&;8sEw+ju7?BGl0B)23kjpESSSwWu7o-uF}oq z9n0!;T$6{{+v^#}Jg%Fh>fPr$d9T$E)@xFHVWk#a;|YU;nnfc?p!s?jck62KQ=i#;|qM9@s~a+n#yYH1cv)+qOBPqWg7hfSx z{gmtCO=2X|i)b-h!;-9n^|L%1W~bT9>@{|gU1A@w&)64ijNM>AvR~M*>^Js1`wJ0N zU>0Vh5;a(Wg;<1@Sc5HyqXh}HqXUm&H@a{TY3ML;7!GnciXn{RDLjp5@B+@@MV!Ss zypDJ9F5bfhe2UAsf~y$Am-rq(;b;6IZZJ~f`L0qXLgG)7DAaeTa$hCb^Av3$yv2qlP4Bwx5_u~OSj zWHgRV+#ID%qt4`{lg7C9(q*-ZDiPace^^mZL!AZ-7es4nSJgI3#CtX3>mF(%-85pp zFkM%#)T+}d3^!FaDw|}Yzgp&-Zr(z=8N_#SkwmMasEqGN`9Bi&D!a~pAhP}qh8aZE zxkS>XxF3yZ#-l{gB(`E3(erVnh@#y%gnnp5)FZH96H$-i1Ww`b3R7 ziCXxvnR1_aAV-Cy8EXIgkAUEVGJOAs L?|+|wQ?&aRj-^XJ literal 0 HcmV?d00001 diff --git a/UnsupervisedLearning/KMeans/image/.DS_Store b/UnsupervisedLearning/KMeans/image/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..472cef4727ca390d308085ac2e376d6a17e936b4 GIT binary patch literal 8196 zcmeHLTWl0n7(U-pV5S}Dv>=q_(uIZ!(rUR#AqZ}_%cW3gbz54l?e5M%CroFSo!J&B z#TreFQ65nt`e2MPibfs~<0bLIm;f=pP;W$iFeJX1XyPR?KInhWnI*IZUQ7&0=OlCf zIp@F3{NMS`%>2U`L!w};WUPuYrqIEqUPj$*8rSnUqk}q80+1}B zY0>DM>Hy&r3uHWyBT@=iiYcoH1g;253`lp<$GADkcpyillf{#_ z<_vKu!#+eHL|`HUeA`TAJJ~MQ!ct@E_bZv4ZFzn}!`FyRDVaKLx}uaSr87zor25>! 
zl%MtsULu{}$4BjHzb_M)>0S-oz!E;iKI*uc-%4iBr!+@&iww)FNNKQVN2_|#dM z6@n)ND<7BTxv?qQhgYVuZh!6;t6KoDy@6)=10!;w2*C9s0BUKOHY2*NW!v^0T8~~i zK)JQ$e9Lvb-Dch+*`t?xghxx(b=vbL*LP)IyU_1=J$gkpZD$L1+BchQJ7t|PX|CMM z=3U!vcRh=T+0*Tt$9%4vpz7Z1xp}`?1nagazO>Q{p82>*L9GEZNufO*>iL;-7c8z_ z(XeLyrsmOdy`oZ^tEvPSJO$T>}zXIk`dG zs8pa%?WP6nr~CHgJ;kF5Rllqpl?$Mg)7Dd^^HBE+L8cI!Z-D~2@ zU3vKAY0c2pn#38uI;V1?%f+z75p2qVygBNfX z=kOZd#yfZy7w`!#;|i`~6kp;8{DhzJhrGc^k>@*$nMjF0NseW5@GCBI=Jrgk-nwmT z%l3Pi$=}Z8OE_!xeRJlkyVH&=e#VvphDtuz#@lf)8~D-uc)ok%`^!9uON zlgMZu8^1Y%~i|HC-XLsbW}GOGASO^XEmYs#jFkE5v&}B6JTmkZuYw zUz)C|)vEQW6o#8>>$MFk(O)kSrW-erZW{4jUL?`1X*%QkQT>mEy~?h!ABn7ggJBvG zbrz9yG44k_8u2I*G>OgFLiBtLDWYg64x$%nBI*&?aEPc!aRMiC3QrJOpT%=T)zd`P z7X?{g#_Mdbq-U+;?@~^E0c9JMJ8Uw>;KJn{{KH&;R*8&5ttMRpd^_} zCMeZAJsx?j9iyX{4&J!kh?K&GI#G_36y-R{pd1&rF*4;o@j#A9Ni)>`_a6bl2W9yF M58wZSfRnWQ7vJ1T^#A|> literal 0 HcmV?d00001 diff --git a/UnsupervisedLearning/KMeans/image/MNIST/README.md b/UnsupervisedLearning/KMeans/image/MNIST/README.md new file mode 100644 index 0000000..591bf83 --- /dev/null +++ b/UnsupervisedLearning/KMeans/image/MNIST/README.md @@ -0,0 +1,39 @@ +# Introduction +These sample .nml files are for training a KMeans model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. 
To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. +**kmeans.nml:** NML file defines a kmeans process where the model is trained with all the data in one time. +**kmeans_batch.nml:** NML file defines a kmeans process where the model is trained with batched data. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/UnsupervisedLearning/KMeans/image/MNIST/build_csv.py b/UnsupervisedLearning/KMeans/image/MNIST/build_csv.py new file mode 100644 index 0000000..fcfe45e --- /dev/null +++ b/UnsupervisedLearning/KMeans/image/MNIST/build_csv.py @@ -0,0 +1,93 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. 
+ ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + # writing training csv + with open('training_data.csv', 'w') as of: + of.write('Image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + ''' + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + ''' + ''' + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + ''' + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml b/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml new file mode 100644 index 0000000..c48467e --- /dev/null +++ b/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml @@ -0,0 +1,23 @@ +oracle("mode") = "unsupervised" + +source: + bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[]; + params:; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1]; + 
output: y; + + x -> UnsupervisedFlatten:[] + -> Kmeans:[n_clusters=10] + -> y ; + +train: + compile:; + run:; + dashboard:; diff --git a/UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml b/UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml new file mode 100644 index 0000000..f3984aa --- /dev/null +++ b/UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml @@ -0,0 +1,25 @@ +oracle("mode") = "unsupervised" + +source: + bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[]; + params: + batch_size = 16; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1]; + output: y; + + x -> UnsupervisedFlatten:[] + -> Kmeans:[n_clusters=10, batch=True] + -> y ; + +train: + compile: + batch=True; + run:; + dashboard:; diff --git a/UnsupervisedLearning/PCA/.DS_Store b/UnsupervisedLearning/PCA/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..59a13628f493b21bae6315b3a5ab353c5d6c9a22 GIT binary patch literal 8196 zcmeHLU2GIZ9G~A&V7DxETd^!q;NU_bTBQvir5N<+6{Hpd;R7+piN=H&H33aDD(VxO_!ad*B{l)U7ax2uCK_Kb(Fgs{%pTzi<v-9#Y@>k) zfe3*Jfe3*Jfe3*c0|7d-c~Pb~_oX&!BLpG@Zb}6B{t&0iWipiGQi`pE8j%8!tfFC2 z=$i5X;S&#KGL+*|3Rj9Lvj>E(2vZD5chbkXImu)w$EB3+4APw;yfeZK1>)}H7Z>IX zNhzZ?LLfrmY6SReGo9Hi&oXSFB7grXTX1YYXl_1-*tDwYGiEAEwNgF1`u_BwH=GVK zLCJ5=6!-Do&P*_v4X?XB&$)b^$?mbO;eLH?%5#H^?OH|QY?~ID?C-H%(;II0O0MaP zIBBU+6jf3Cbz^jNZCh(oe9h|hV@>hV*4Aczy>4twRpu>k+1Pm?fB4Ag(XnGM%2grU z8CcDvRlZcYL}#ESo%8aAE3B>n!10Gt@&`ujR2hH^WdPLbS=wx4YvN!R=sC_#&$k2HbNhM& zYa}QKs!;Xr^}S+{62ba)z2sZFEQ)0hSsfISLe=rR?I`dt{vNC7lT0L=<-*iA=gezZ z+IV~Onzp-B6E%8moi2KGVQDyp@y5w+XR z(@zGS#qSn;k zR}`UGvC`1GeZoL@6;%egLtCZl$JOpbL0!JwiV8fK#& zOAyC0B+!gDY{tFVicai85Bjhd`(VPt00xoA5RPID$8a1^;b}aBXYn##$4Q*R8+Z?= z@c}->7x)(6;VdrVNBoT6@VlH~tW5B2<+)3VKS)+mIQ$0}d-B>8ZV(i1yx}SQ!yNwX zZkc;){k-_{+g7bvzp?%DbD#8-D~Na{{{-bErv%T+Y?2H&<}X;-ut-}n)qAm 
zPZyqT8#GO?BKn7(_h_3GqPRXoXj2`eW&BlCuf^5tGW9IGzwf>-exvGpyy zjd$=avGoi-#wYj`pW$MJlBp> zJwTNgZZ|HaaG^$|<0M5oPBKi#g>9Tnxlb~b<5JQHmH+%lK=?Hoo&VAKF9>*(Hva;k CJ3+|+ literal 0 HcmV?d00001 diff --git a/UnsupervisedLearning/PCA/image/.DS_Store b/UnsupervisedLearning/PCA/image/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..58f0a55a68b88cd2e0dee073c01ede0f960535ec GIT binary patch literal 8196 zcmeHLU2GIZ9G~A&V7DxETd^!q;NU_bTBQvir5N;R3sMV0xI$Y%%iZnLuI%mB+r4X{ zl+Bu3*$Of)9Ms0nDIQBj}J#IL9iDzOPjeeuBuVxsW{6MfMC%} zGyjjB`R#9K_P>lVv=+=J#tg=oLX}H>7B$x?T+iz@B^HiwCCHz#uFQ}>m~p(!HMY?} zgg}Hqgg}Hqgg}JA&4B=&*}N!IocmH6wGjdl0=Fare1C{j| zhNP5H8zB%Oa4iD-wVBSgvj^Hz$Ig0g z%06QC=~X^q&_Ugud8R;FKb!X)XQ$`ef$h1y zJ%KeGlmeBhdiMHWK4=rc`gFbETe~cZWf!e>ib$bq_}z94@-Y4$EANv`Bpc7Akr|=_zFFM+_F$f$l1*404yYO4X06U5AJW!lr4lX6$y-8Ou?n)7o{K(Or~F(>ue# z4fkk94?V&hP1&zT0}YGyC5={9|4Mn7-+f!!4|WV%F3pPTjD)d8QF^&%lWAI}k|;g? zL~>;an9k3jMXa8!r!%LE9b_JRj6KCpvNP-~`HD5vA@Bfq7oWr zqZUgL$1)_)h!$+d{n&~Q>_Rtsu^0Pb!a_d=kV6qiF^Xe2j%V;Jp2PEa1#jRKPUB6y zk2ClXAK^=UhwpI?6Zi?g;5Ym(Cm1Ucd|PSmQsNJi@bYUd=y2Ims!(vl5#m!;Se17S=7&mQ3|t>?USf z!wM&vK68BG`~}9+Wp$da5Y1H4tzSW1(3_x+i&zbHcPm7BJtn$ZQ{8Hn_^wZr+7()} zqUzIyXUhgn(<_Mnq33J6ECj$LGz*k!~plek%f1;oyk z#LRU_VheHe0d!(J_Mk^_bHCuGjYn_@0SXwxFpl7HJb@>Pr6&YOU%-nviC6I&UMIG` zjd$=a-Xpf2#V7a_pW$GoP@$^DDPjg;-yIBabj9vKWROh9M$y)OU+QESv zZQ{jL-gP{;pCXptnewSbJ&wRM34_7&|IJhX|Cd%#8zB%O@V`U=7450^R$AIrn +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/UnsupervisedLearning/PCA/image/MNIST/pca.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. 
If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/UnsupervisedLearning/PCA/image/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. +**pca.nml:** NML file defines a pca process where the model is trained with all the data in one time. +**pca_batch.nml:** NML file defines a pca process where the model is trained with batched data. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/UnsupervisedLearning/PCA/image/MNIST/build_csv.py b/UnsupervisedLearning/PCA/image/MNIST/build_csv.py new file mode 100644 index 0000000..fcfe45e --- /dev/null +++ b/UnsupervisedLearning/PCA/image/MNIST/build_csv.py @@ -0,0 +1,93 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. 
+ ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + # writing training csv + with open('training_data.csv', 'w') as of: + of.write('Image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + ''' + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + ''' + ''' + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + ''' + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/UnsupervisedLearning/PCA/image/MNIST/pca.nml b/UnsupervisedLearning/PCA/image/MNIST/pca.nml new file mode 100644 index 0000000..879ddc0 --- /dev/null +++ b/UnsupervisedLearning/PCA/image/MNIST/pca.nml @@ -0,0 +1,23 @@ +oracle("mode") = "unsupervised" + +source: + bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/PCA/image/MNIST/training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[]; + params:; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1]; + output: y; + + x -> 
UnsupervisedFlatten:[] + -> Pca:[n_components=8] + -> y ; + +train: + compile:; + run:; + dashboard:; diff --git a/UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml b/UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml new file mode 100644 index 0000000..f1e4c99 --- /dev/null +++ b/UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml @@ -0,0 +1,25 @@ +oracle("mode") = "unsupervised" + +source: + bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/PCA/image/MNIST/training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[]; + params: + batch_size = 16; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1]; + output: y; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8, batch=True] + -> y ; + +train: + compile: + batch=True; + run:; + dashboard:; From 7efd7dbafc552593d8d1bb5f1cca9e528e3e0ca9 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Tue, 14 May 2019 12:32:34 -0700 Subject: [PATCH 14/31] .gitignore data --- Classification/Text/Sentiment/.gitignore | 3 +++ ImageDetection/ssd/VOC2012/.gitignore | 3 +++ 2 files changed, 6 insertions(+) create mode 100644 Classification/Text/Sentiment/.gitignore create mode 100644 ImageDetection/ssd/VOC2012/.gitignore diff --git a/Classification/Text/Sentiment/.gitignore b/Classification/Text/Sentiment/.gitignore new file mode 100644 index 0000000..cb2b32c --- /dev/null +++ b/Classification/Text/Sentiment/.gitignore @@ -0,0 +1,3 @@ +aclImdb/ +raw_data/ +training_data.csv diff --git a/ImageDetection/ssd/VOC2012/.gitignore b/ImageDetection/ssd/VOC2012/.gitignore new file mode 100644 index 0000000..91e6ae4 --- /dev/null +++ b/ImageDetection/ssd/VOC2012/.gitignore @@ -0,0 +1,3 @@ +VOCdevkit/ +raw_data/ +pre_trained_model/ From 87be99d1bbc805ed1586b8e96c9b6aca9cd4077d Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Tue, 14 May 2019 12:41:08 -0700 Subject: [PATCH 15/31] Move capsule networks to the right datasets. 
Remove images and .DS_Store files --- .../matrix_capsule/Audio/MusicGenre/README.md | 38 ------- .../Audio/MusicGenre/build_csv.py | 93 ------------------ .../Audio/MusicGenre/music_capsule.nml | 44 --------- .../Audio/MusicGenre/music_capsule_auto.nml | 40 -------- .../matrix_capsule/Image/MNIST/README.md | 37 ------- .../matrix_capsule/Image/MNIST/build_csv.py | 87 ---------------- .../Image/MNIST/mnist_capsule.nml | 42 -------- .../Image/MNIST/mnist_capsule_auto.nml | 38 ------- .../vector_capsule/Audio/MusicGenre/README.md | 38 ------- .../Audio/MusicGenre/build_csv.py | 90 ----------------- .../Audio/MusicGenre/music_capsule.nml | 43 -------- .../Audio/MusicGenre/music_capsule_auto.nml | 39 -------- .../vector_capsule/Image/MNIST/README.md | 37 ------- .../vector_capsule/Image/MNIST/build_csv.py | 88 ----------------- .../Image/MNIST/mnist_capsule.nml | 42 -------- .../Image/MNIST/mnist_capsule_auto.nml | 39 -------- Classification/Audio/.DS_Store | Bin 6148 -> 0 bytes assets/.DS_Store | Bin 6148 -> 0 bytes assets/Picture2.png | Bin 78978 -> 0 bytes assets/Picture3.png | Bin 36469 -> 0 bytes 20 files changed, 835 deletions(-) delete mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md delete mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py delete mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml delete mode 100644 CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml delete mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/README.md delete mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py delete mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml delete mode 100644 CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml delete mode 100644 CapsuleNetworks/vector_capsule/Audio/MusicGenre/README.md delete mode 100644 CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py delete mode 100644 
CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml delete mode 100644 CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml delete mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/README.md delete mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py delete mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml delete mode 100644 CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml delete mode 100644 Classification/Audio/.DS_Store delete mode 100644 assets/.DS_Store delete mode 100644 assets/Picture2.png delete mode 100644 assets/Picture3.png diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md deleted file mode 100644 index 9308533..0000000 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# Introduction -These sample .nml files are for training a Capsule Network classification model using audio data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). - -# Data -Data for this example is from the [Music Genres Dataset](http://opihi.cs.uvic.ca/sound/genres.tar.gz). The dataset features 100 audio samples from 10 music genres. -To run this example, first you will need to download and pre-process the raw data for the music classification task using the included ```build_genres.py``` script: - -```bash -$ python build_genres.py -``` - -If the script failes, make sure that you have installed all the package dependencies of this script which are listed at the top of the script: -`tarfile, shutil, pathlib, requests, natsort, and random`. Missing packages can be installed using pip: - -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. 
To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/music_capsule.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/training_data.csv" ; -``` - -NOTE: Audio files are big! Be careful with your batch size, or you may get out of memory (OOM) errors. If that happens, reduce the batch size. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) -For more information on using the AudioDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. - -You are welcome to modify these tutorial files. If citing please link to this repository. diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py deleted file mode 100644 index 2898eb3..0000000 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/build_csv.py +++ /dev/null @@ -1,93 +0,0 @@ -import shutil -import tarfile -from pathlib import Path -from random import shuffle - -import requests -from natsort import humansorted - - -def download_data(): - ''' - Check if raw music genre data is present. If not, download data from the - official site. 
- ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://opihi.cs.uvic.ca/sound/' - f = 'genres.tar.gz' - if not Path('raw_data/' + f).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - tarfile.open('raw_data/' + f).extractall() - - -def flatten(l): - return [item for sublist in l for item in sublist] - - -def write_file(validation_split): - ''' - Iterate through genres and write csv file using the supplied validation_split. - - 1. Data from each genre is shuffled and then sampled into the training - and validation sets respectively. - 2. Both the training and validation sets are then re-shuffled to intermix - the different genres. - 3. The resulting - ''' - train = [] - valid = [] - - # Sort the genres alphabetically. - genres = humansorted([str(p) for p in Path('genres').iterdir()]) - cwd = Path.cwd() - with open('label_names.txt', 'w') as of: - of.write('Class,Label\n') - for index, d in enumerate(genres): - of.write(str(index) + ',' + d.split('/')[-1] + '\n') - # Construct lines for the csv file in the form: - # /path/to/audio/file.au,class_number - # where class_number is the index of each genre class. - csv_lines = humansorted([str(cwd) + "/" + str(p) + ',' + str(index) + '\n' for p in Path(d).iterdir()]) - # shuffle the list: - shuffle(csv_lines) - # calculate the index on which to split the list into training/validation - # and then add to the respective lists. - split_index = int(validation_split * len(csv_lines)) - train.append(csv_lines[:-split_index]) - valid.append(csv_lines[-split_index:]) - - # Flatten and shuffle the resulting lists. - train = flatten(train) - valid = flatten(valid) - shuffle(train) - shuffle(valid) - - # Write the Training CSV file. - with open('training_data.csv', 'w') as of: - of.write('Audio,Label\n') - for l in train: - of.write(l) - for l in valid: - of.write(l) - - # Write the Querying CSV file. 
- with open('querying_data.csv', 'w') as of: - of.write('Audio\n') - for l in train: - of.write(l.split(',')[0] + '\n') - for l in valid: - of.write(l.split(',')[0] + '\n') - - - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write files with 20% validation split - write_file(0.2) diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml deleted file mode 100644 index 24f779d..0000000 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule.nml +++ /dev/null @@ -1,44 +0,0 @@ -architecture: - input: - audio ~ audio: [maxlen = 96, nbands = 24]; - output: - label ~ flat: [10]; - - audio - -> Reshape: [[96, 24, 1]] - -> Conv2D: [filters = 32, kernel_size = 5, strides = 2, padding = 'valid', activation = 'relu', name = 'conv1'] - -> PrimaryCaps_Matrix: [] - -> ConvCaps:[channels = 32, kernel_size = 3, strides = 2, routings = 3] - -> ConvCaps:[channels = 32, kernel_size = 3, strides = 1, routings = 3] - -> ClassCaps:[num_capsule = 10, routings = 3] - -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/training_data.csv"; - input: - img ~ from "Audio" - -> audio: [maxlen = 96, nbands = 24] - -> AudioDataGenerator: []; - output: - label ~ from "Label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 8, - shuffle = True, - shuffle_init = True; - -train : - compile: - optimizer = Adam:[lr = 0.001], - loss = "spreadloss", - metrics = ['accuracy']; - run: - epochs = 2; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml b/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml deleted file mode 100644 index 3c43231..0000000 --- a/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/music_capsule_auto.nml +++ /dev/null @@ -1,40 +0,0 @@ -oracle("mode") = "matrix_capsule" -architecture: - input: - audio ~ 
audio: [maxlen = 96, nbands = 24]; - output: - label ~ flat: [10]; - - audio - -> auto - -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Audio/MusicGenre/training_data.csv"; - input: - img ~ from "Audio" - -> audio: [maxlen = 96, nbands = 24] - -> AudioDataGenerator: []; - output: - label ~ from "Label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 32, - shuffle = True, - shuffle_init = True; - -train : - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy']; - run: - epochs = 2; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/README.md b/CapsuleNetworks/matrix_capsule/Image/MNIST/README.md deleted file mode 100644 index da1bb84..0000000 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# Introduction -These sample .nml files are for training a Matrix Capsule Network classification model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). - -# Data -The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ -To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: - -```bash -$ python build_csv.py -``` - -If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. - -Missing packages can be installed using pip: -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_matrix_capsule.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. 
If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/training_data.csv" ; -``` - -# Tutorial Files -**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) - -For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py b/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py deleted file mode 100644 index 96c34a1..0000000 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/build_csv.py +++ /dev/null @@ -1,87 +0,0 @@ -import gzip -import shutil -from pathlib import Path - -import numpy as np -import requests -from imageio import imwrite -from mnist import MNIST - - -def download_data(): - ''' - Check if raw MNIST data is present. If not, download MNIST data from the official site. 
- ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://yann.lecun.com/exdb/mnist/' - file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] - for f in file_list: - if not Path('raw_data/' + f.replace('.gz', '')).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - with gzip.open('raw_data/' + f, 'rb') as f_z: - with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: - shutil.copyfileobj(f_z, f_u) - - -def convert_images(raw): - ''' - Convert images from the MNIST format and return a 4-dim array with - shape: [number_of_images_per_batch, height, width, channel] - The pixel values are integers between 0 and 255. - There are 10000, 28x28 1 channel images per batch, in row major order. - ''' - - return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') - - -def write_csv_file(): - ''' - Save images as PNG files (lossless). - Write absolute path to image files and class label to training_data.csv - training_data.csv should be of length 70001, with the first line containing the header. - The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
- ''' - - mndata = MNIST('raw_data') - train_img, train_labels = mndata.load_training() - train_images = convert_images(train_img) - test_img, test_labels = mndata.load_testing() - test_images = convert_images(test_img) - - Path('images').mkdir(parents=True, exist_ok=True) - - # writing training csv - with open('training_data.csv', 'w') as of: - of.write('image,label\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') - - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('image\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write the data to PNG files, and create a csv file for NeoPulse AI Studio - write_csv_file() diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml deleted file mode 100644 index 89f8991..0000000 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule.nml +++ /dev/null @@ -1,42 +0,0 @@ -architecture: - input: - img ~ image: [shape = [28,28], channels = 1]; - output: - label ~ flat: [10]; - - img - -> Conv2D: [filters = 32, kernel_size = 5, strides = 2, padding = 'valid', activation = 'relu', name = 'conv1'] - -> PrimaryCaps_Matrix: [] - -> ConvCaps:[channels = 32, kernel_size = 3, strides = 2, routings = 3] - -> ConvCaps:[channels = 32, kernel_size = 3, strides = 1, routings = 3] - -> ClassCaps:[num_capsule = 10, routings = 3] - -> 
label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/training_data.csv"; - input: - img ~ from "image" - -> image: [shape = [28,28], channels = 1] - -> ImageDataGenerator:[rescale = 0.00392156862745098]; - output: - label ~ from "label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 64, - validation_split = 0.2 ; - -train : - compile: - optimizer = Adam:[lr = 0.001], - loss = "spreadloss", - metrics = ['accuracy']; - run: - epochs = 5; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml b/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml deleted file mode 100644 index 2d55cd8..0000000 --- a/CapsuleNetworks/matrix_capsule/Image/MNIST/mnist_capsule_auto.nml +++ /dev/null @@ -1,38 +0,0 @@ -oracle("mode") = "matrix_capsule" - -architecture: - input: - img ~ image: [shape = [28,28], channels = 1]; - output: - label ~ flat: [10]; - - img -> auto -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/matrix_capsule/Image/MNIST/training_data.csv"; - input: - img ~ from "image" - -> image: [shape = [28,28], channels = 1] - -> ImageDataGenerator:[rescale = 0.00392156862745098]; - output: - label ~ from "label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 64, - validation_split = 0.2 ; - -train : - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy']; - run: - epochs = 5; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/README.md b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/README.md deleted file mode 100644 index f69475f..0000000 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# Introduction -These sample .nml files are for training a Capsule Network classification model using audio data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). 
- -# Data -Data for this example is from the [Music Genres Dataset](http://opihi.cs.uvic.ca/sound/genres.tar.gz). The dataset features 100 audio samples from 10 music genres. -To run this example, first you will need to download and pre-process the raw data for the music classification task using the included ```build_genres.py``` script: - -```bash -$ python build_genres.py -``` - -If the script failes, make sure that you have installed all the package dependencies of this script which are listed at the top of the script: -`tarfile, shutil, pathlib, requests, natsort, and random`. Missing packages can be installed using pip: - -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/music_capsule.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/training_data.csv" ; -``` - -NOTE: Audio files are big! Be careful with your batch size, or you may get out of memory (OOM) errors. If that happens, reduce the batch size. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) -For more information on using the AudioDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
- -You are welcome to modify these tutorial files. If citing please link to this repository. diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py deleted file mode 100644 index 4fc7123..0000000 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/build_csv.py +++ /dev/null @@ -1,90 +0,0 @@ -import shutil -import tarfile -from pathlib import Path -from random import shuffle - -import requests -from natsort import humansorted - - -def download_data(): - ''' - Check if raw music genre data is present. If not, download data from the - official site. - ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://opihi.cs.uvic.ca/sound/' - f = 'genres.tar.gz' - if not Path('raw_data/' + f).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - tarfile.open('raw_data/' + f).extractall() - - -def flatten(l): - return [item for sublist in l for item in sublist] - - -def write_file(validation_split): - ''' - Iterate through genres and write csv file using the supplied validation_split. - - 1. Data from each genre is shuffled and then sampled into the training - and validation sets respectively. - 2. Both the training and validation sets are then re-shuffled to intermix - the different genres. - 3. The resulting - ''' - train = [] - valid = [] - - # Sort the genres alphabetically. - genres = humansorted([str(p) for p in Path('genres').iterdir()]) - cwd = Path.cwd() - with open('label_names.txt', 'w') as of: - of.write('Class,Label\n') - for index, d in enumerate(genres): - of.write(str(index) + ',' + d.split('/')[-1] + '\n') - # Construct lines for the csv file in the form: - # /path/to/audio/file.au,class_number - # where class_number is the index of each genre class. 
- csv_lines = humansorted([str(cwd) + "/" + str(p) + ',' + str(index) + '\n' for p in Path(d).iterdir()]) - # shuffle the list: - shuffle(csv_lines) - # calculate the index on which to split the list into training/validation - # and then add to the respective lists. - split_index = int(validation_split * len(csv_lines)) - train.append(csv_lines[:-split_index]) - valid.append(csv_lines[-split_index:]) - - # Flatten and shuffle the resulting lists. - train = flatten(train) - valid = flatten(valid) - shuffle(train) - shuffle(valid) - - # Write training the CSV file. - with open('training_data.csv', 'w') as of: - of.write('Audio,Label\n') - for l in train: - of.write(l) - for l in valid: - of.write(l) - - # Write the querying CSV file. - with open('querying_data.csv', 'w') as of: - of.write('Audio\n') - for l in valid: - of.write(l.split(',')[0] + '\n') - - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write files with 20% validation split - write_file(0.2) diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml deleted file mode 100644 index e85a1c1..0000000 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule.nml +++ /dev/null @@ -1,43 +0,0 @@ -architecture: - input: - audio ~ audio: [maxlen = 1536, nbands = 24]; - output: - label ~ flat: [10]; - - audio - -> Reshape: [[1536, 24, 1]] - -> Conv2D:[filters = 128, kernel_size = 9, strides = 1, padding = 'valid', activation = 'relu', name = 'conv1'] - -> PrimaryCaps_Vector:[capsule_dim = 8, channels = 32, kernel_size = [9,9],strides = [2,2], padding = 'valid', name = 'primarycap_conv2D'] - -> DigitCaps: [num_capsule = 10, capsule_dim = 16, routings = 3, name = 'digitcaps'] - -> ClassCaps:[num_capsule = 10] - -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/MusicGenre/training_data.csv"; - input: - img ~ from "Audio" - -> audio: 
[maxlen = 1536, nbands = 24] - -> AudioDataGenerator: []; - output: - label ~ from "Label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 32, - shuffle = True, - shuffle_init = True; - -train: - compile: - optimizer = Adam:[lr = 0.0001], - loss = margin_loss, - metrics = ['accuracy']; - run: - epochs = 2; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml b/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml deleted file mode 100644 index 94bd5d6..0000000 --- a/CapsuleNetworks/vector_capsule/Audio/MusicGenre/music_capsule_auto.nml +++ /dev/null @@ -1,39 +0,0 @@ -oracle("mode") = "vector_capsule" - -architecture: - input: - audio ~ audio: [maxlen = 1536, nbands = 24]; - output: - label ~ flat: [10]; - - audio -> auto -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Audio/MusicGenre/training_data.csv"; - input: - img ~ from "Audio" - -> audio: [maxlen = 1536, nbands = 24] - -> AudioDataGenerator: []; - output: - label ~ from "Label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 32, - shuffle = True, - shuffle_init = True; - -train: - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy']; - run: - epochs = 2; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/README.md b/CapsuleNetworks/vector_capsule/Image/MNIST/README.md deleted file mode 100644 index 6b6ad80..0000000 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# Introduction -These sample .nml files are for training a Capsule Network classification model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). 
- -# Data -The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ -To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: - -```bash -$ python build_csv.py -``` - -If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. - -Missing packages can be installed using pip: -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/training_data.csv" ; -``` - -# Tutorial Files -**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) - -For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py b/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py deleted file mode 100644 index f7bbde0..0000000 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/build_csv.py +++ /dev/null @@ -1,88 +0,0 @@ -import gzip -import shutil -from pathlib import Path - -import numpy as np -import requests -from imageio import imwrite -from mnist import MNIST - - -def download_data(): - ''' - Check if raw MNIST data is present. If not, download MNIST data from the official site. - ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://yann.lecun.com/exdb/mnist/' - file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] - for f in file_list: - if not Path('raw_data/' + f.replace('.gz', '')).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - with gzip.open('raw_data/' + f, 'rb') as f_z: - with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: - shutil.copyfileobj(f_z, f_u) - - -def convert_images(raw): - ''' - Convert images from the MNIST format and return a 4-dim array with - shape: [number_of_images_per_batch, height, width, channel] - The pixel values are integers between 0 and 255. - There are 10000, 28x28 1 channel images per batch, in row major order. - ''' - - return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') - - -def write_csv_file(): - ''' - Save images as PNG files (lossless). - Write absolute path to image files and class label to training_data.csv - training_data.csv should be of length 70001, with the first line containing the header. - The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
- ''' - - mndata = MNIST('raw_data') - train_img, train_labels = mndata.load_training() - train_images = convert_images(train_img) - test_img, test_labels = mndata.load_testing() - test_images = convert_images(test_img) - - Path('images').mkdir(parents=True, exist_ok=True) - - # writing training csv - with open('training_data.csv', 'w') as of: - of.write('image,label\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') - - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('image\n') - - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write the data to PNG files, and create a csv file for NeoPulse AI Studio - write_csv_file() diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml deleted file mode 100644 index 37d74f3..0000000 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule.nml +++ /dev/null @@ -1,42 +0,0 @@ -architecture: - input: - img ~ image: [shape = [28,28], channels = 1]; - output: - label ~ flat: [10]; - - img - -> Conv2D:[filters = 256, kernel_size = 9, strides = 1, padding = 'valid', activation = 'relu', name = 'conv1'] - -> PrimaryCaps_Vector:[capsule_dim = 8, channels = 32, kernel_size = [9,9],strides = [2,2], padding = 'valid', name = 'primarycap_conv2D'] - -> DigitCaps: [num_capsule = 10, capsule_dim = 16, routings = 3, name = 'digitcaps'] - 
-> ClassCaps:[num_capsule = 10] - -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/training_data.csv"; - input: - img ~ from "image" - -> image: [shape = [28,28], channels = 1] - -> ImageDataGenerator:[rescale = 0.00392156862745098]; - output: - label ~ from "label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 64, - shuffle = True, - shuffle_init = True; - -train : - compile: - optimizer = Adam:[lr = 0.001], - loss = margin_loss, - metrics = ['accuracy']; - run: - epochs = 5; - dashboard: ; - - - - - - diff --git a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml b/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml deleted file mode 100644 index aed8e20..0000000 --- a/CapsuleNetworks/vector_capsule/Image/MNIST/mnist_capsule_auto.nml +++ /dev/null @@ -1,39 +0,0 @@ -oracle("mode") = "vector_capsule" - -architecture: - input: - img ~ image: [shape = [28,28], channels = 1]; - output: - label ~ flat: [10]; - - img -> auto -> label; - -source: - bind = "/DM-Dash/NeoPulse_Examples/CapsuleNetworks/vector_capsule/Image/MNIST/training_data.csv"; - input: - img ~ from "image" - -> image: [shape = [28,28], channels = 1] - -> ImageDataGenerator:[rescale = 0.00392156862745098]; - output: - label ~ from "label" - -> flat: [10] - -> FlatDataGenerator:[]; - params: - batch_size = 64, - shuffle = True, - shuffle_init = True; - -train : - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy']; - run: - epochs = 5; - dashboard: ; - - - - - - diff --git a/Classification/Audio/.DS_Store b/Classification/Audio/.DS_Store deleted file mode 100644 index 4a6be4886e59fcc6256e93eb6a789928b9a4861a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKyJ`bL3>+mcOwzbaxxbJ!slfOBx z_wR?pe%dvvk^)jd3P=GdAO$8W;JueNzfV+@0#ZNQc&@8FMq*{#_mUQM@)%C(5G3l@xKCGT>HKABMo%gpWhxJ57DIf)| 
z6}Zjq!u$Ud{g?Uwnxvf+kOKco0h_H}S4+N9_14kLd9Q8sH@erH>26#Hg(2E8G1@UV f-i{xlDC?T9dEN_$#Go@DbfSI+To;)X_-h3&H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0%xDiv z1jBDE`5SUhilok_0}Zt6H<=!85_Mzuu^iV-^z;xGm&^@K&Kvv|gu}uqAi#%)699bu zLV_+C>55*lwG(An0l9$!EMR1Fat;pAv9LIQQ0AY{-{t@HL74u{J1`-CP}X0`|2@Fv z;u`D>3U2|DUv~+1_62bh0MMRv4hRYX0J=>uT|7J_0L1hlmjG`N$RLO%4zT+lSmOX+ z{{tWJ$s1&ONe`sC1~v(+lT(m80I(nMB^u`J4wl273gQ!fE}niM?gX*6vxk!lh{+(9 z^Y!%yF~>0wAG`iv%5ncM*vaYoUu8Nux&MX#kOj;MR;=$C9N^;=`TNV?`Qhsm3hL{3 z5C=b5J%WrcfluB8y?Oha9$;1w5BXicU=Ct#5Knr768!_OdW0HVf%t%)=p3vE>Js1w z02)>oCw)^89|p0QpU;H@JszZMyLlR#fEd(0jiqOZi6w}^yfiLB{+IqyK23ycu)$?8 z9juEc!YK$uf6yd{dRys$_@K-}4_A{v%WS{lVQB-VgM8Aw5A(D>7zOMgUI_NSe89s& zy^TgBZ*U zb#!+6&6^;Iy3M^HPR+sh+SUy-bEkErcK%Q0=tVxF!4%V#( zrv6u6L;sTapSt{q-kDA?(J%=xon<=q?;Nnpu&e!5%7Latfq&}a55BJdr7gX`wDf0t z{#pM&-*^HpAeHIAQ$p`T8=-^HZs;(y6WR)hLQ&9{&{62C1N@Kr4S%zrW4s8Dc4FG)O|83JD{BQo57Wf(fP9=kO*6KI3lRsn#A@Xs{y zU9j6{0KlvF&Y?kJf6&n$Jm>&sfCC&ULckF~95@C_qzs${G=Z~#0dNj52P^?wz!B85 z2iQ-8fN&rhxCJBtDd6mw4deqwKq*iO)Bp`YE6@pa0|USa@E(`~=E1)58Q1`D00AID zAP@!!8-xcU1c5;$A+iu_UFgKxtTMcxgmvq-YdrG-z~aOld4>uF|;E1k&7~xlMDA<{?c9 zO*KsmO*hR5%@hrqW`pJj6o9fo`JqRl@=y(^9@HFa2X%u6L1UrG&>Uz9v=-V9w)6xP z4c&zPq@}0jp%tZ-2m7lrtre{^Z6IwdZ7OX(Z6$3BZ6ECfEru3HOQmC@J4`1_cbd+G z?h2hdT{v9=T@Kw-x@Nk5aO`}hBhb^+^V3VytAk_Dmfnj#iawS8F?}8VYx)oLEA&4Y z7#M^YWEsvdm@_yrgfJv9e%9z@j#+W`ck(jxdrJ2t#TQYkw-(r5qT+7_gyukd8g@r|o z;une(aSbnl{v&yjQu{yAZvfg8@V13Ox%leItjZKQ}ESnu$2-`ij zXKcM}OKe1T9(DzG6LuGNBzpmS3;Q_xHU|@jB!>=%BgYMn9F9hgF^)}6CQd0%Jx(Xi zSk3~@HqIH&A6z_KN?hh#zFetX&$))VzHl>eOK}@;yKvv;F6Hj!Ug3fAi1X<2IP=8w zl=1ZQeCDO+mF6|!_2Nz8t>Jygi{s%p{^EswCzlnIttNJtT7_2POBUq@`@65~W(DzDNs7n@Hb~u999j z#&!&TEbv&#vB~4~$2E?7AAfxOy$p?vnv91`fy|gJjqFKTPua(^ALQudPRaSnmB`J= zv&!qphsjsTV-)xm&MU+zv?^>XN+{YXW-1OUQctLz@HtU-VqS?`>6}uWQoGWwvW&8` za)I)s3Y&_d3R2~z3SL!K)m61n^`jb(nweUnT8|p#r25IAleH(;)g{!gsXtPm(csmv 
z&`8x7Iz@L%_Z0F}=c%8kPo54r-EewGQ(n_s^SS01Eom)Rtun2ZGh%0)&OAAT))v*i zrd_O!h98AH!As!Sv*Kr6&OSZ6rgL1!OQ%|ATla)+kZ!Z?p5AG_7`@l}P<;Nt2frAyJ51~2nm zMqGY=nP91FnQl36b<8T%s>hnc+QIsn^`4EsO_mM%isF@+D{aBo=WXs? z;l1x;<5TC$=zGn#&5zs9+po|6h<}9thXC1tgaCBl>A>tjT##wd(_l#O)!>&Qyr7dk z8hR}BPADc!JFFm#7;YWj7{L|c7cm+s6PXmbcEj*Sc@%Awb5u|C(db*zm>8XyC$W&& z>#;pZapY~}=bJ`1D{nF1^1k&p?nGQx-1m5!`1ac(w{PBFxnp#vDuF#AC}Ad1GqLC{ z?Oo5i?~+uK@{*{@F3E3FPNd|dkW-yg-=ry}Jxtrb=XUR1`pNXd4EhYejOk2xW@Q#v z){U%{`)2oB9*90je(*gTkv*KFnp2d^oEw^pet7=j%RGs^j68C_NB&fSZb99n!;g|4 z?LBsRJYIOVu(n8~D5dCEv1jqeCq_?NN~B71O6g0(N>|IQ%LdCe%AY+w^fcuurNXZQ zU3sap@0t3uXU`8mPp_h>3awhNcBp<|qhIr~R=)O09e-WQ3*bfQi?8+9>Zcn_8+sZw z8f%**n;tduG$*$}S|VF;tsbrDm$olIw4H0~Y1eFT?ojBc=sen4@QUwMdKYuo?XG>) z4b+d<0k1c^-Md$M5Iys~S9&M=F800aH|-x9FdXO|)EPt#of+yJK0Vy_M&nKE$jOnG zQMJ+Lw`y;j-<^EdGNwNE^8Kmz?H{y0bdAHuyC?J~1}04=N2V@JeVDeI{y5_>gZ}99 z@yo2wEPgI*jx>LBfo>srk#jL;>F`p?^6}+5^hxw9j6P-*dl@^w;=Ho)Dfkol^X*l( z)toih+OsdpUpm(f*WZ7&|N40&V1u-ou*J3YX#3c9)6UtQQQQ^WXM7NTe>e4;(6@^3 zs^5ElSo~Nb_!3Ba$wVPyDgTHKkt&zgX4CIHDJUs!UO7qHE6NI0eb&=`{3~}VgEJ2#YodfcNhv018BG)P%g-R9{>Y+ zrvn`mF#Jgkp@GuU(K9eIF|&XK&71%Y1PZ00h0@U-EaV|E;Cq0Ui;nxKk`6u3MJEQa zKwjlrnNJwSb?dwNE`7jBs9X<& zvbF(TGG`Z8H+K(Dui%i-u<(e;8}YaABqZKVO3u3fAUh}bVP1YoX<7Nxippot8ycIM zTUuYXwfFS)^$!dV4Zj(mn4FrP`8Yd=!LEGzyt?*f{VRU=+xH)YJ>t(_2XcXX1b>M2 zmt_AY7Z)fO4J|E{mf=7y2u(N`p}&`%AEY$~6x<&@{gTl!gXM2ZciE=;^>f&j@;3^z@9( zjK2f(KLYFTzB@BgFAzo{QV_doncmw%fs$N!_tzfG4D|Iy{&rpw9y=<;vV<mMO?ulFlpa!Ao`F;zGw@)%G2s=o&2Hced;mOz~_6O zd~eoX3f3snMB5VW@#kdox(-G38sY71eFYfKp;}+MSiNS@TVD80s#Fr^ZayS z#nUhCq0{x%i~UYc&OBp*fq`QEtV)LEg(f0HR#q0KN}_BhRR=H=zb29fg{rMjlpA9l@-m zSEji{yl*|TX!I(m9k!)J(jXSuwBYuEL~WS}8_Ow07cTp`xs5OtbJesD;%QQHn^APM zGmm1IV7u-`{HRT6Fy$DumXe3`L_%Vxir80)xw!2yix(5Vb*mFDiImu2)kBIXJ8cE2p_=@1_v?`CV9>*jb zoUDqWOAXQxnoH9xcD~n}4{TM|iQ#tnTe01}a1iS3a7oeoGV-R*&n}w{5N--g(uPie3>`fHsXvSQ=+}xPAB5`x$@VG4B zka&P#ZG1O^OB+TunQ)no6^SS~kH1jFT6lTEGdyFdCkyg86{lJ0a~S>TJ%h8sHDW=( zqX5o2B@k;aS*a*X5?Y&Dr7&fg`YKs%(Uf`Q(;_hkHwr 
zeSPwka=io-u5voTBg`#~s*vpvOAB+hQKf}>Q72L})8+d6Q(DdgzG#XzW@A+59^hHu$bT zYO#E=hElCQts>dq+#{DcBFtc9C}eFd1ePlvf0qAPHH;^ss)x$_!B5L%MgIDj&bU0F z;)qx<^7iF)zSU)Gdokz&y}8}DB4+9bMB_eiulb?YDRf;@?4g-v?=1b3VW(d`?jJ2T zyf9Rn{rCm_1qk-$nXF{l-q51CaDp#xyDqrcdmrHIlzsZuI!N{Myu(JAk{RpWfMGGM z*QLZ<#_on%E{zv{oNs&XwarK$J zIc7C4*=o{l$3tVJ5fk#EHOvGy(+jWCT;o>f4Dy9aow6f7=~_Lwcbl6n)`zT4$$+~q zGg9XdwGJRy$roZwv37{Bzxt>`rwboTw0!%Df1|*fQ$L`fZ=8GH;xN|(xlNUq&~?SR zB0?)Mlq4SEZGboL^HsN++zOpygS)5~#DXz?ojYLBNG`gkFR4tSFux}LyyV%}G zhdxOXFvIiRsV(=BvdFrtC}vx_RGcB~&3f|!M=;_Z9A{tmQ+$~nwRr|k&1#OlNrn;I za1|K;%~@Os*%Y6XhJm+6nChQgv6__MOr>7EEljT0d=k?pH2=uO?WT5P-5FvY9)Ur1 zqu9~~$d~YZMF}BcdGcAsl5$s=nm3KXD z&+BMzNK&vq$WqiS z|GBT#Tx`UzisxvSHB(c-OWJz6bZje`dzorJwt}EUcq4yBwiI*B*CoLZdRF|B=tQS1 zUT@IOCLz?`f7&0JLgc|(>Nev^3gjEa8jL?b z*`N4`$}m$aC-z~p8rjseA&#U~S3XDA8J^jcu9z2Ok-czArx6~tkv?aR-3Jo7xT2-- z*5y&yMfD!4_7SW<<%;89(m-~8j6}=~P&tW# zm>9i*C9SEgCJ_cxgQ$Y}Hd>25kvN4bd*QAI7e|zsi?bl)B+x87Q$Nl)Die__r{JFE z$t@hu?Y?HDaMdDa=g!M7K{`g*o!xX-7NC!U^C`TP;%Gix5h024xKj^PBp6XEulf4M zkLFhkbDz^bH|X&4hjT5KZG;}n;*gD&eU(lZ9LFvQ+_332Pu>UEsLBxm#12gO-D7ihE2P5G_=IgkKGuDs z<=FV8cM)HTXIkDv3?e}bMU^AV6R(Oip}5ITrNoQ=;Vi+=m*;0A`;|IIzAdV+h-(HX z``?O{C(96v6_6M^;)o$0@v)`gR=F4d-hjwH5I)MO`QRP9y0a?ei{*jY|GT42)@*bP zk>^=NWdzR!&?V}q<1R>SJmg^4U$7kWRM`qcI{n7=8*d!5?*gOfJ=y8X|DV0ux6`5a zW-P{C(>r@CzS>NFl5^_nx!k!v>}$|lXAfE04DACA6|GX=5T1;1>${@Q8W|r43|?}KaX&z zzTo7pl!VYS5(;%?Cs}6uz_MRJw*H@}=-*V1{P)ely3ZSgUcA{1nHNgfdaunvKKhar zd#Ci_cR|JCK(%_~GSB8SCc(|;1LI!vxp9{iYn9 zv=MXKr5`6YJRwK>?=%w9uFz*E{+b1(Wisyo6xx=Sgm-hYrY>B)G%{lMlAoUdelg>B z=8ULj%h+5>a{Dqh4&R)8PLx+g(rz4DipR=&+)cME#3=Gd zMyXA;8Gp!~p9NWNo}i1(n;Ay5CX(~k`s+x*!ajI~=cfRbv3nnodQ!6we4Iz_17%@W z+I9N??NeNMA2`4rRf)s+Z-Y1LQbh6eRe_OCPwnsf$k#|@&&k@>x_FjFVA67qjl(3w zio3g+Um&{$k)I~jP$Xghec=1OH(gpdWIt+dhst~vKi}sl0gr=7x7x!+%G#a z(5v4rONjX)i__;$eeqQG-?Pm9_)F!b-AI zJRQT8l7Osj-|B)S2G+pLfgrdI5}k+KbuMCsZ`>>22P!n%0WxR?(a0tqKhmW|| zk7$bR2*i#ZlIsciQ1L2f&ii4-&!>|vInpNE91ry)j93J0p^^2zltjv7 
znm@Lp=au`Vl=CyHc_A|&{1*L=-ucv)94Hq|uC=1I=W}824dyxePWCfmj)Tg?g5KdF zChuU?H)pW!#BeN+rekk~u`o$lvsh1h)-|4I_E-SoGmi{6tw-_`BM6C+I)!RN-QEX0 z+u+yh=7O*>VHE4_rRh~%_1<*u$(*-0%D1~01O3(7Rb+=>4SNh-%j1P$k+_R&j$>GG z)N}M8X=7E0CH@T2%HtDe@mmIjZr<=`Yvv)WMeRwS!}wRjSu<(b(a)7<7K=EM=#1rt z!Q>EoL!!IFGxQn98JCRZ*`<-#tPIPY9NCKKM>Lkb5?8Ksi2Bx*Aie!RdZYJ&t#D`e zaBxm<@+dq$?i3*xTE$j)qc2>!JK!Gk7|qapk4SY{YtH(sz1S1cXWzqp&54p18cV%m z596N`t1yM7A=88xBdaFmZ^)z5-#)$>^;vx&tGyAa{2I3ONckqmdV4aqOE_X~5L{N( zu|yk9h-OWX_1jt9t6^0}45NDq!I^|cM=CiFxiwv^2OzP}#kG&;SepW|OYFu~?k(g_pQ-rgl*zQQ*ql+=M zN@GpsRn77kmnj~zz8=63{CX8tk{gnC$V5=4QX(q=5HV4O5aq!E>Ox$`=Dfn!Z4P2* zYhBWHZ#AbWECtNgGikI=ivE0e@%)7Th9JP8+@gM}PLa}}<&Re+U??q{`+##gu#x5I z$o}5n?DlIW?a>+Q%)DmdjfcoESMQi~UdFdg59m)`*5rYcx##T~@MBwh_{71bBS|v` zQ%!Gg?9%XDueO=wm1@1wlG#XH`klvA+ILP8}%gE ze>K~swIfd5hjyf~j|e-7EV)4q?Sxmacbx=VWpTl)P$)3e+r4seZuCn{v($u^HX&T} zQ(#NPW2P_mPfiH|pt*~?>qS8FqtL~%JVbGPQ9n-#_*8rV|x({`+#?cuqH}?QmT)?aG#~yx#o(`w`uXV9G2Sw))D-MOPpJ- zlAWz)J019VEd4E#7qVL>^s(?|+*+DVKZ5m#@YB}j1cb7gg3YR1YvF*kv%BgO+dcc? 
z9Dw>A^8kpg1ZM_I>QUkdW_j~Y^$5D{X;XpYq}=y{j;b)1EEd_co#U*Xw-j@Z`Wag5 zYR(LzUhD&8o(+N;afPJn>8LVOD~(S~`Vo@hWn;a!J2zaq6vS3yuaCL-U!?#+S|&f6LcAA zW0lBupq8(Y5k$H`B;K2#L`-^?^6Ioy#(JHKomUMFu*0ivJulY2Fj6FCjJ3X)+HsAO zV;VlIPC$HsyPy*1P`&l12qLsL?iMs6JQ$+QeKclNtO}NzF$WD5RsW=EiSeEx0QO#XLDO-HjdvZ z)WF5LdPs28Uu1sJvla+vCadlP4?b?de(VEP;Lc2r9k9nil%FAI?j#}zM~e0V6T6p+ zvwBpfz%uexVoYhYGv#@wQI+?N#Jk_;PpQ5-%`#DNRegxXlaP|gZ9IB(mt2`q(o-z% z_il0riEc&g8g+>vKi)u6B)uqSC^_3Tig35|J3O=IBBKagvd7&cCt1t0-bgxSE}Rg( z*T)%I%&~TP(2)brK<1tx%;VmbV|;^lN!@mF^NT87=Vxw+^bH8lo0c2gJ1(6v;$~Lr zhhgoo>awEvf;xYR9DWFHG4YL~zQ;t zt%=L+b)$7eNff!LXaMyKHw~)&n=j%i3U?m$Q)w&#_G1uqz9t?o#=0WALG$&AOjX+l zW)fP#S+u|lKoI41WbytOE|F@5U*GltMWvZ9W8lm~Fd@3*!Tq0sQoQ1TO@4V{4Q}P^ z^!E23FA9%g^7>#NRxeGQ9yBE0j_FBceSLp7oBSD+bqYCbM>N_890icSE{nu2#g4;w z{q_M~)TY$|UkP3SH4KG*wEQByduML`b(aEOvG0|+eSQ1;=3~C1S7oqEyh@$gP6|>j zk8*~;GOjeo_EO(3Ge%3}gsDukD`U3{8N72pXlY{RhiaC#b>w@0HsqCgidgYBYNf_n zS*Y9uUe{&q1L(VVkeJKS#tx}zejit~RyKQ^W8Aj0O zfrsz)(Q&CLT&Ar*HS{qE@~fP53E3?j3$Z5)fF3yR^Z==sDvRrS92~Y1tw{V6KXY@L_RE>yP3b-#_gU4%0-XEGHwP#l2swrj||S-MDjgi*9Gp=*e8Zy&fm zMjtzP+C6DA`Z$(N50TriVmBPHSH&&o?bg6Jt(s;e@#b@qH2fHF?GqFaB{<-udy(_* zgb(;1Yg>rgF-n!(Ot9xK+E#O)nenpGB4?95xj4;ZhWtxn4JU+eG4UoZ*P+ zHgoKI$HT-OT>Qn!PVIadM~OMP`@$odciNk?W_3r7f1B{k7}D!}@=$@sx#%>d3Wd%_ zFjL_XMVEX4v8& zU8N2cHQOY>UG{-FJ?q^5Iarcn^``t1)(sVU^@4fh+|`duZyUnm-}N*FeUhnqK5*|C z^q0urbiDD`vbJulhdy{wOO+@l)2M7%0rbsu4h?O(le7TgnilvoSMm7 z)S2boT*ceVpmD}1QsnBsWZh(bH`N>?eRe{jxh9OjrhcIzt-Nq`)gVF8`C?9_L=c}P zdszmZ$IQ2I5`vhHRoudmLTsr+U2HS#1?XiK$5(lpB3mVG4R?wzO-d&Pu9^w#$fKhit4YPAJLk$C5otZ*{PG@0}5gRM! 
z-8%^a7FWn7_;-mI`>HZ!J@#kM>}^eDpm9U@jZ@!n(B3qFzeWjZb46R^W9Kn-Y2VHH z#yp;^rJtMht#e5dM60H--m~O+Ex}9{r6i!xkL?7u)X*rRCt}XuVXC>IYsd1` zwsz&tl8~Q&t%K^eOSb*>(xc3`MqUO~4aF`ZUp%J{aDG8F6>WZ~^UC*4D5@M}>oY$U zRil8twsaonAa?xZR=VW$r7J4O7B-GDLMXB`Oe;mna8|0I-TV^ByWf3`Y1F`R+WeaD z{IV*Q!&gSP3+^7yXj9uo#G!nmqQI4@0&H%rkKda>I>R36Cm;6UUX0_kb^Y$^^iie$ zz=(LKPi1~AA!O#!Xz9@>SSomR|CgIW9!F zwiBZ7IYwDXejbXttJ|aiu`jQ+t<(E*GPNQx& zb8wlu;&yucZx99+Qg^ZTU8`c>Imvcmu!T6KT4P?4ZrncBZaXkN*1;ye|l)Sn{71fmcPg+U-6Mu zhX<13>=r&_OFzY_jNnO)C@Qt+$Gpxy@ZP@)+y#!Cu{=77-u6?DE*CF*`CXbe7}36n8IEBU_x({$^Ni@zm&Kqz3a(!3l)PB4qOGWrT^o@|Mt-TDR=q_+F+8w`v4)0AfC-Gnz z`V=`Lyf4kcHrsQmygdW!+tst>l8I;TDcQ%;5uf1(v9|@(5p2;{%OYxJ#i|14%qXvL zcOJRwCosg^zV+J24RwOb4Z6OE_W}21*7apCVtiFMt}@Am1m+iwr8UO9i| zXtEzOI(pn_l@qi*g!{y9OxWE4M`EiP4t4j_`yZa)b)rIjStpk4=2Z-R|Me^b z`V;kgIa6QT=?dDbfuPv&B0e9OW%JZgesKp>W79+XPH1?};epXRgD)90*3`b&{jYrK zXs}ZT$M(P(P~QGD#Ew1`_m4Av0~@-NPnvFpRHZ~%4B&p=ewjZPWG`JQes2;&Ipfj3 zl!i5vnYoJFqI5lbx-6Qr^mW>@kg3!!{qWs$4TXK&vhnz_f#rAvtH0AktVG0)F0T1= zPh_Ufb1&=ZV3Q+(ublG@+#K*3N(fPM4F1T$+Ip9gs1TBVN99fHAqjC~9BFyz{ssRZ z$Cvap@56G%`;K_0C7ek6yeY$r$H6yYV1{)ZVz3U7-V!HWyy~Sb`wIW4Re(Q-$I(G| zTwl^-ZkvEPw=kmmE_JxYqRW=Of8^#(SX(ST@d<7Q8&f{;1UKKGZ=SqWyuIXNve1?VpZp^s9>UeX^l0z}1{RnI;#|$(nU0 zb)2n6t!C8hj?HNNh?~L{S&s1{;&JlO6L2*r+6AnW@ekqgvp5I7A>~9G7}|pg%j&9o`t;`>|Xv@KxOvEar7q+BAB&U*wgDG zIs9^it`hxztlV?(-@Wtc8{_QFxvMnwmuxhz#Q!iS`yja%QM*LJ@V%oqH2Tl{vZS7-ZUHHmW*R2G{RGY&^wLOo_) zC?qXx?&|mJ(#piI&Fr3j*O!UUZB1=ZOsFCX(jj17ZJ|#Dq$p{JxjxH?X(Uy$998bujV^Lb0C};oqbb=J|XEkwypg-Xo z?TL}May3T3EL?V;HIX82Db`dJF6qg!($Ch+XQ;H%W*H#fPw_&?JJI2kT#(79h!%!B zKfvX&OY@5l1Ghe#JRzPO_&P1o-j-#2Jn0In*IDey^DqLUhN# zIg#D&+B{!&BaR>#J!9bsGn69})|0h9wY6F=C&k)FbEn5m9{DYY3!gI?d4#`Ox2Z{D zkG}C<`-~U78MWnEIjETCXT4q{cc(90U3YuSZpn6AqD5*>V|?KuI*$7b*K3-i+@-N2 zAI`!ukxdM@55$9WAU(i&lWajL@WbG9Z)xfK9yvq5`%B2T#-OGwpk1x`d(JA&x5)H@ z<2v`_kB+_3zD~>~X&}4Lq5M$%Uk<5nF;sY>*tG=8OxAo7JlGyx80EIj8fPh}N=d0Z zmdTkneF7@e>FAt8z3oxV?%&2T7M@n}tF-wNA-w{2NV(InN@9K-eM&uuU3t?cbPWJ`2=L*g_h 
zSU)^rt`#{9Qz2+417x8|vKhGHQ-kNY4a0nkxG>$bZF%Bt&HQkg{a4mhMxXNv)OU#t zi3yUSUsHwif1#*M-$+2~W(9u*o(HQ?%HE8J^>M0|rFlxEyF0mz%cZgUU|1X845iDZ zmeT1Oy$;X)z64g2LMhgg2J2BGMv;ub^>5A(B83uL&wfC8oY6ER2BKeynQOYJob|zmUCSEH z$#7V7aJU+&fA{vg`kO*l*l&khzl$3H(ML(v(VWB`#n&8kL>S@J%qQGHGj4asJnBl< z5+Oaj!m}l4e$WF!f4o)w!Fe0OgtHZqXeh2Vi3|=tx+nPXo7=HMV`zCzv$n35lZ_iW zHiulh%09fD7nkd`IQc>InRxVxQmJsmi*cExdeB~hE=#u-xCeGdkDyBotkJ=eNWMk` zjtXtbF$UEs&DETCiT3s(ku-VCky)Bu{Xfe-LKsE50&vosx%eFu(BF9i`k75`6`1~3 zVwB{&Ct7xyubP}?7CdWw1?@w!Yaz|b63JcW6b?2?;s1tj_zvax8ud`=zH8uuL* zuMPSiw-Z3SG4;8vEoCdLrO-jqZ%8tz(?Oz?LeE@gSJRZr_ zrAZyV(b~In=Y~09`L2TE_n#}xBz^w*(zt8mFlDUqir``Ni!q-<*=?_5(9f`sQJC;F zZPwT~R4CCC&pjNn!{zb*Dy4ST^P13-bddqFc9| z@iy4pzA<)%bj;h>`OER4Hi}Hih>UCA5$Az}OXIsc-m}bl}C_)6-@l>>-(U&NX!39GB*ihuF z;{y6KilL;pI=(rpEF~?wI=_Zfba-ag=xp19luyV=F<=!tAfPqFFO zR-Fo!t@;*xcr2hL!(mX+G}O)bCRclhPny1WhgWp*p*N?B=|8&dFAoVS;%qBf^y&+}@{;;UA zVsK}Eg5XNL>Wr%c@5OBn-XNT8Gz&g6Nfn&Yo{9|Abbb2LV^(kYg~Tp1a2|DQ+okEjnddQC?&``yMK?L) z76l5ue_f)aQcpw&5od6y%Xo8eKBPhQf-|_sBtz=(GAtUreL%-g4973`I35z{qW_=}4OiPPCCqDdK6ufl7{JPXzOmkN z?Q>eIJ5*YYq}UX%tFCkz$eEXSAiF&k-vawp2(eOLE#L^*rUb(&xGc10@&l z8gJFf)k?VDJ9Uu@Dy!v9dhxY?m=;QiLx~~LO*!Ef82~^Lo4p_tHEDqw%d5;u4eZdk_EE7ZCx^fjrMnv$n&Z3 zzeI2wgcI+Z#y(~Z63!5FpRrwEto9S{c-6?pksBTuB~{#Dzx|1lkC_=!05U`sZYrGv zCP9mSgP3B1SKoBUHS8I?D$T7UogT2gt|nX>_CIzsNi$q@t&ec**-I1iie8+~j!iGs~Hz30@3v-R*q4c%8rFUQ+v zeb*TOo?(+?s}*Y|&{Nc)-oI$>n0KwEsMjtnPK8FNZSttMrYri0ooA-ewF?*M&7x0% zcG9xxWg@{9?>bp|Nj9B-G14W~kIC7E=}4)T`y|&~R}e9gqzTRs`+zSc{2Xq1@FRFC zn@DA9c&uEbsl5;2C38?E zr^#ObE8hS-s^y_FPF2>%lMKigafVGUW^OT(kE)-2&YD)QI*gfoRJEOVVIj=td);z+ zN`6jJX|xPJr>e?f(B0j$ba_~HLc@5)pf$)g?*(w=DT*KNRre7}xQ7FGwbi!_NJ8hN zCqre|LrvtJ+v0@h*>u0a=RWD&=waGglj|>)Gv7G`KQ^Fp$r}cs(B(xf&6M0YlxNgD zA_2)RKYD4Y*JHY3E1hXbY-!H3EMK?#3ezWJ&YNG#CyxUMcfZvBb(a&o7}4#8`cxAw zz7N!~peV=hbv1y4#WHpjb{OgQ7LIj*ABtAr2N*XCMRQHO`ADwi%lY!wU(O*vNIg_o zR?{Y^6sp8KtdV)w2&Tk6@XDhJ(Yu;_6o=igi%s#>5}0_kRsMXwDVM7~$3Dq9Ojrlr zVXYPN4g!$k3E=+LeJvvB_*@e%T1SY1tO&yY<%-!lr5f~Mi;|Wb{rk*;2&plWk>gBA 
z(pGiNXlC!~=*PK2m+H*gz^R8Eml2x(n@Jjo5Fqn+P|d2qdvXMId*pf-q3hlR^`!LX z2B@ofod1A%JW#neg3P+$-q7zBT*wOZx z%~?F`b$(bYUci!=q|oNITQ>I`Rp9f&0a7ot_VB^=Pb;6hhu5@X5^>U)F*2Vw;nYMk z^3F0FnXQc2+iN4>FW-3es@=kzk*t$rl%aY~I9FMR<%7q%aq5v?qn|rVSJcToGgM{< zJi-w?LL0j~#*r57R5!YEvDuV#?7n_u{IGY#Y*~(AtXo!euSb?)p(AB`JOh9i!~gYC zCn6h#)7M4eo^?qm7#g8-;zm9W#&^|6KXZ$U{Y?SAdww&YF4cw$ZhlsY~AH4)lK!4<(C2Fpu zr!6-~qIzI+ONeG-U0=SlQO$_YGzQ(SB2@E9&-;-5&z3&@-s??K6GE9mZ-2sZ^Mnvk zkVmn-T@uq;a2)%DIq?>{YW?llgKzf2Ldy?K?50<-ORG!h0fD!iZF+YuOFZGd$q_OM zN2h87Qy+M&i1k?0sno2J2u#NDGscULY`z*ScXK4(80@z9zo{()UYEff#YG=7t4+lr zPOeL}Hl~RuEv_(HyDC*xXKuM>FqoCmz>g_})Dh)u24jN8OlIEwAJ*PGs;Tea77YT@ zMVcrmq4!=yYEYyJNKu-UsB{pJ4gv|Hfb=FHAVdX4q(o|H5_(6ZL+GG_Kp+7O!4&WQ zp8M{2zjMzy_q{X5{UZ!U2s>-9z4m8)KJzo@+yyvz&yUUQjVS&UGIt__dwA6h4g4 zVe1!qeD0`|j`oNjU?6f10A##QnUuev6liTmueq7?uL;#e^T@&}8?W;W5pP?Y7IWFeW&0=+5q(hHJuq*?xvllr)H72Ji$%MxzPc^$fgc=q%=y#m zlt-^fS*l58KXGDkZRZ8pbzEpe+)$)2SV ziFT9WF?Hh>({lU!a!ruDl}A=unh&UVnpp)j=iXY<7;yi~D~PIqM+23wS9V1HK*B2im z&`96YaFtC}W*n5rGywTc7;po!zW>uJ5T;Jd!X2%bAkJ3f!F_ztcv=7Vhe8P3FLC`+ zVi?B}qv8pNrV6d=5}=Z48jRK5W2}+I%rVK&(WE7r&`%I8VOk5#mU@?^FPhbwT~$@a zbKh9-yUpwpe4XvPH&V!^jzRr z=&bsz=KHjc@()f%;hy1q=B(eEv!xzpo;5PE)|WZMD7cnx0J`^I;^fU1NpP-c7F<-< zlf+&~l4hM(iNGZJZE(1-iEzJX?t@gnBn8yI{q)$S^u}|w*TsQ?0^5ApTP!Il-R$x zzSV=~CQ^T!E)qR&bxeP^N_|=P^yh=|%g^7icP(BT#{$JTy?^i%gbxnUts%C+rCcL~ z+biNpSW3EJZfi-VY&&|dJwylUszu+&g?B>Y0iu83Dp-5>EisHcJG3$7Yl%_e-MEYI z%swiJ)(J~_+@-JE`^3WD2PX>i#+Uqrryw{QFKX{5@taPqwFkG}uwSwWD}bnvMkhRc zlBpweV%JAy#pU<1vpYu^4QT3oK)-`t9P7(HmI@3|$Ppe9ATIIv)d_zd+@X2H{?nCE zLTUdCdIU)q;L6|d?AeubTo35xm%Ak+W4e6R+fQCWzRV;{wlMDEN|=Dz`;DevD|cJx zQIRmpE+x6$07WZw=Mb7#SHE2e+_;t z{)WBgpWsCWngMHhe?f;ZINx8;ZV#}lPK`CfZ|0Jpk_7<19+nH>bYqYk7Kw28UAi{> zKw3bkpRkmfjB3;8D|${QsvaH}@S8(4Wd#mAOxjm3#w6kE3EsetmIM2WU|-zKCI`)s zZKHMRS1P?-t9|mtXI1Z;z2krT!)dW!*ZY|ubBp-ZR24~v<3NLGejwH#QYzIT804BK zVJl@+q~GOID|K{cRJyjhht5H$okOadDKoPZ)Zt@NYjgGEsl`S>cK}egVfpa@3CjkZ z24A-%t4#b(c@>gWRWtS)6~-}$?^1o(RkH7yEtqXAFP^OPHH^N?07NNyhb~gUXmzck 
zP-HIZ?sa0lFCKjtznUN)aA6<@`(vipG-!L0(Z6AL$TP#Og8zB$l~?B@(~>Vx@BpFM zdT!&O3(6SnQphvK)%(PtsZ`r+wXG>@lTDf7k-Ny_P9<7tl|ImuA3WquA4G#4HUn6_ zT*Q;gXkE%HvVQm)dt#x;CHm#6!(SWx#s!mFZM)6fP7QNzr#YJaW)w%F3$L5Sx82EBf;_s@nqN%Y>d z;BEdg8?ZL>b|NHVNv^*%q%e0vFm!>=F}M6xoQO4uO=N&S7B(~iO+w%-iot|lB2Z@m zMW8Qxnv2$K!#M4T*l1x`Y`)$3U#&UjT%8>@0G28yC)eYCxxzEc2BmT~7#JZvC#6zX z8(1%ww8cn*urZaPvWSd6rNzddrJ%k=-8ToUS8>CgloC}N6?0AU4s&iqw@EuKAuO31 z1z@0q2-J8d;yEwkqG|>P(yJ@eilbC~nQho^bhZWyGj^&B1&E^L<*hkd7DtV5_sMcM zaencoG8su=!T4KSmP9O`cB5}ArCsQ<`X8?Cxw$!%%Hoa}A*U?u^6qzd#iJyS`B>7E zd}bO_fB{?uApn1rfpPptfRMaF=&N9o`}3X*5w2K%ueQp4Rlp+@PjcBYc&4ut;s8L+WJ+pLG*Ml zHrunNFY$vw__C;U)~7HZ5M}Qj`XWH2g`%Ln$Rro78Hk4k=K9g5pOieTD>9h#-in3X zGoIGk))#ckft(4CZ=997T;kynuFeKrLs8;5KB^Paq|i0a5&`aS9Y{QOTy$3UExMxn z$Jfy}No+@LVF_!^c8(jy%s*_A%UsoH~C|Pjt zRSqX&;kXUjS$!Pcnq!@ZIouH>aAB5>`8-jr%bce{JdC2}Y}M`VkQ_*cWWNcxCp4}@ zir81lQ>w1eOw96a4^1Y=o~VuvYghF7ii_#x#dbEH4~Bs;YYBpUyU%pyiGu;VPgOL^s#f8VfMc1;3&+rDtxTA+(0hRVG7H5 zYhQ@@3=IVW8$}%Yp@Ds3&-xqsIF9dx8K~J8a;WC0bqFQ-9xpXh-(_8o{xOCf01^^P zr15|%Xq<(JGM?4G{*Kp?qWP7&vT-=tPjyUMOT!o^qlXu#A2e&v1w-Q?pFt4Z5gcPv z+ga2n0y$AM&;x&&2K0)sDC){82gDc6W>1vpgw`n}%Bx-R?N@Pmzh6oJ&WTM;PWu^O z0}6z3q?C>Z{&YI8nCQX(6Z#d+0|U??+}&c8?zs{*i?kC=e{BH&QnB7pN6?en z2LC&ey(O--GEf){gJ}4T*ek}VSQ4}IYIiiTOG6*Fn{pTq-KQmtLR2Cn0;EOLegw9J zKR@iL)cj0#2ENB&yA|Dwz^nq0@ujWW!$-0qiwi%!7L~f9s+KGD*(Czm_?+$|_JhC5 zOu^ThR|H?KL<{2@>|{lZe-|yhxy0;{Ru!nRXM9FaD;04Mv;||1W&w_xLS1m-gIsPm z!lUotu4Yvyoh#skI%8tqKh(NlzbDJ8c%AnA@Z}AoMYOAoOe1?dv{3=;KsraA{1e_8 zfEn-2AtJh;y#8EuVt1kpeNl~sj-*)EcQms+{${wNU-p{bf!nYSWOOXDmcjh1gC6Jd zbS;fX`wOH86H`!`F_5vg6V%n(;lChtctM8F?xaKs zzn5D}OV+~#G5P)P(w{yKiI#w9v%SvTlR535fp{So!Bl1m{DCbohNMJHIOqhks~Re^ zv^o5k>b;t!QWhRQ_gJ#UyJqNA&Q;40gt>Xg!lkH&45U(V2-uK|@Mrwi(+*)D2p?J7 z-|ubvR9VTv-GaB?cNMd{_lCDqZ?iUKU*ZTFwW3j_Spx1>^!JQtO<)r}{tNnZYvxZ9 zNf|!WLvNREa^NjnP zeMv2zrMrSZBAu|oreNw_dL*2@vm2<~daUbYBJwfdWwT{x+;#FWQH~@9TLCwOcP~QI zf$SSAQ%$t9!aD_g^e%j)J~zg4UEIuEs-Y!;|GLBqM-TVY^EEd*wx?-NsL7}51a;gh zRsq;svekB%CIFR{xPL%Ur?6*pQy?xR$@= 
z%6(d%Z3xaPV^PM6l1;t%D>Fh^QUX7h`o3`0%=e|Tz9)TaMN7L||6Bxla8hz9UY;}< znznhLeS%L07tjf*g_F4HSoD0CGIGrHvu1npm)wUq8O|k- zYY%cp7=Bu=XNG`lC9s}&()tM&97*DY&GL>{?clZZF#hF%(qq9_dLPGpko?i2b#H_J zqi%#-j*7qAoy_@E+7r%XG7MPK#nhR00{bxYk zy{-V&=JX4BP?hKcTl0@LQ=)SvtFSI8&JxKK5j5Vn!bT}qGY{y$`cz(gvZnEhv3>i; zwnE9R=0dN12D^;H!&No`HDCy89^{2atW-=7G}c)KMFaRS?phsUJDLdb(JP`Y~`@2B*Hh z0H%kmHQWO>sN<0))Kt68$JLb1$RjvPbroxqfd>0V3?5?uIfm4KS+V~oF0V)~;un%( zG#8OYH^mPh-j}z+m*1-(8YZJT{L_^~Z|lUE#A90Hj4RdgU6eWfRr{K{u!^(YQm-H& znaCp9&RHtb8{l(8wf264S#C4#?Bgr(O(kc6QC#?6qo_)Q_wupFhxm?j5E0f*ld8qK zDsn*4Hxv*+=Q{%weV>wbOwKg!en&3$P|c1u<|wiL@b{n?p0!nh2=In{8qgZy?KwFA ziz+_Z-k*ql;=W{;M}6oT6q#9H^J8%EtD!+?=&2Bp-QNf5K+9QRllMad@kmRW67c^3 zNaPtJgcLzc*??oq0hn&VXRG^aD=dG#rO{q^+8X=4IjdoaE^2|9zW0@c7pa9d*j4V2 zjUGMEsS@cdbqLak?1ZFtNaFJc;QW0XIt(_G<&7LhRZyR^o0i!-OxC|jThe%CE(c{O z;RTNrX;|kep8tYe{jZks224ES3sr!84G+yIZQwmvuNEiWxzKMRY4&I>^0_;qX_=(d z=G`CsY_j7zB@X^b4o>LC*(H(9f%$ce2!pfla4dYf_w`7$LiC1N@eZTlR?GOC0YUJS z{|bb<%d38M)^>8mAH(oy7NGp=Tx30AIRX0{sTMwq&1dV z5@7QEr!S-~v2p!?n}}MsOC?LK#Tb!9dY^nv`MBVn-w^^7>F!^eo3gbc+voL70J^M9 zfSyBOE?ul7?Kdyv7=kDfzBdK*lD~%OYBdp0HoO|=)rLX{lWQr~Ez=j4tNR8pZi|}2 z16z?`3Z3m>a2kHJTF_&=;d`k#Va9uZy_yJ+Ic--MF}p(e{2>YHDh_ zrrU2p4zdGaV#7pcMt@TsPbraY#Pw1ovWBEIIH#uPuQR9TWONYuI8AR7O zbv~gLA#2r;vwj_7)Zw`f)rO{{P;{c5l~1Oop}-wa6}c~a*E3`U1{_weM8_W^-GDe3 zL4K68S-$sn?cltR9n^PD_2S}CVHzS=7@0nDOs%S_Y7<cr7Rp0xk@vRO8eAux+fZ_pXc*TFK^Dl^(sz~O*%QfT`9yc}3 zDc_(7<=kw^oBVloi~nR9X|a#EMX98eQq?f9uV}zh3!g_aLbyEZN%F_K`)_E7kqky{ zd$xq)J{9OIb#=!*_M%;b6J^KAg&sgObhQNrUWUs39k9m*6#l`uB5LPjK-6FkeFkp= zq?bj8l7~^Hrk7C2QaZ;rVwGUFi7hL}uF>s5Jv4Es9PrZp2 zBAToPJhOwcA{!%*S?oi8wlqzjE0a4Tb4L=HptmOeG*b^E3 z$nT4+TJ7tQuk``_F}o+@8v=;8J}V&n)!dEPv{D!E{LiaQ*Ytfw&()tD6yawYXGrfy zzl1-dN>Iu=IAPybFLXen&wLY`+iU0ev}&+n8fvBLSH9{pdc*w8=hq+nMiY!*p6a@s z2+@qsz#ELnJSp}>3NAY!Zk0isCZ>#-IAFVFI_+X&O_`NJE}MG0atH1g*YKpmT!oe6 z!i?H0-!K3a3p(Z1pV2=3S2ueKevgyQ0x7<;FhKxEGt*TDdYKbZI4uI081B94xUB3w zX?~mBj&m~dk8Y7jdsw(4DcAd)uDLJ>*JT)LvBG>$uw=$~f^HbXc 
zL-J3WzCUy{wa;;XUxikpp9;xt0!h0|8s3pOE zLzAM?FDL@hG!B3mT!aClRrNejO`Iz&ix$LH5m1FIjc~q8?ytsJFW}lLRnKRsb%Vt< z#0vUFhx_Z8Dek~1B8i^ncnF#6s1wGC7ppBYpKVxnncTS`n_%kOMrsJmN}$<Kk~z3;PYM&w@glbIdNcB&ZC(;uAn_HJ``^w`?9b*oDd4t54)`c7$R9cD>y)jx{AF4%}Cxj)2Z&vPnYW2-axQwkU- z-2os*%|8ObgrF+H^F2<2p8l2Zx%RYiGMSWDXwLv9K&y;1gVF-dR^QLgL+|bIURC$I z@T*GU?Ax%}xxlm1CN!rs`J+h%Dve_xST5jXnE13EzIKJSAHuqiJ!qHv1uu{LmPKNxHcGP1?6}6$DtxFCBkuIE7pu}A6>fOeJ`*419Egk zr#8z>y zJ+mLb2x&jUzBGO57GeUA{nQ3}vWf*T1p0@#tpWCDe26Y|9PG?20Qc(D`|*XerC)MQ z(E5H@!#N>WmgiV;#o=9s)CCrXE}0*QZx%%H{Jqmn@WLFy69r%oy^Mw9P`rG3V9nBe zGtC}sg8M37?>kriVfCcZWgbsSLpzC0^E-q}U=pf`8sam*Aq#T`5jkgrAG~*2Hru?m zc5g?yc6VB;bL)feZpJ)d-ov(zSY-BtcJrmjt?=MKBtAmb-4pHqIlDiqeDkPBzE{%P z&L-lu8^aBx#Q100qx$txMGN3uKF{>xW!u!QILtl%I_rGl#>FTgc)d%eMs!?UbQ%Ia zfShv{4zmvk^%fN}VN5I4ObF03m88RGGXz4vVL|X~&^lH={|exPA;hA{Ds1-7fCIj0 z;eu6`^0Fzz$ne?=zU!G6H#IKjfhVwV%~h09Cn619hr(K-n@&256?gbVPvDq*bDp-P zzVa1qahdIdQd@aB|LMUoMK@-5uO7qhs{vL(vcHCM;mYs)K~}>JlOtx`B=hFf19c0y zs~GK$HqS)Hz2=O&I1O9Ti(8&nj@i#lP;<&F7Tb#nMC4E0T| zaSv6PqS%7=(zOs)kSj~RxS$)F+4|{S|0H00;R^665mzF%uH!NWz-PR(eKLu<{B6$e zelNd_UdUE9XB)ihJjQJ%cH|hoe3B+X-94ate2BN4A=vd6$R_t&R`w z{s%Ei1K%d@9VldYI2-;2*(rzyeJ-+^5No3;R9KFyP_DRpdY+te_*4(<34FcT-Q_ypM^#k6rrEAKC2GRS?86c&yNI={uNlXmi73a`;`0f)luz2=GT1lWYq*Avb3_xrj~{Z^NmqmGd@Db?7i`&vrtK#g zUou<2Al8`%3Q){s|Aim`8DEYc5x6ZM=EVb<&-uA#yBDz0H^{wZ+*|wm8BE)H&j81W z^K+*1CU9^_1tAP(s)4skj2=Ow%HLMe0)z)FpZ&qmg!r5$cPZ{nle%SWe(Pv+K z&6DRem!tvf!yNR3G5H(tPM}P^Nxk$1$K_KyRAu0CWCByA7|0eW$xR#3RdwqZK zgNQ%-uVQY?Gb++j9)ORp$u>P)3*2;q)!A5GjM{(|kuFn*7k?wzi)Q%J$ma}d$^-ou z=!U5K+5;yI&pV7Y%^K~_S73buqXt0s-{K?l1^O=&`02Ek~q@9P%jLP|WsCECzTuU@N za6A95-JhXPg{IRdw^u$y$4TH zy`tWG&#W@g_0yt$DBX)7B=@$c;hp~L){NJXz!-?~HZzEY4mOALY%7!}dJ)3|Dd&Ti z{I3nvO#O1=-J1Yn>)jc+Cp_`$>KU{b#Au>*g`n9eQ)P(Y7Yi6N;kFD-Z?f)jp zkwmj(Dd{zm5dK4(X$_qQ!QDN;WZWW-ZLXeEPlRz3W8S1aPZG_8FA#E6)5;x3Z$f>V zn}c>$D>G!39)j%O0m^rkHIKxPMAtP$&jFh+7--#=5u;2Rng{PkZ_5d%5g+p(NRc|h zR)s$yK9xyWhzEPhiS4B79sRYxp!1rVua)b59c5nMyN-NP1?7y0GQkHVhFSn>yO?sb z*=o@!uVQE7+V(Gu*UZK 
zmPP5Z3E3(aPRBEbMl?;v=(`akRAx9y4>rk{iw2_ie8&1R6;bUxM3ZmRlFUyk3i0OV zeBhQniIF8Eb$VLqC0@)kfUW=rNEddg#wR^U5<3tl%BrDn0L`1ODA8i=WY2CbAZg`V z3rRe2eVonTTv z1Fx{Ql|ltX4l1hX=Txp|RB5eMdZNFnuQxhua=hHgGm-#D(70F#_P4#DdF3zY7R#R1 zE(B<%SAcp~zO-@`1|$oNA+G>u-{B@~cm?{^FL`wy^*c)#$MzxzhqvkK5j|&2Sj(s} z)YZa#Yom*q`Nw+gbFCggg0$;^8v0KF zlvvS;nohvh zYjr>r56dK2|C*($5pBu`4-ONj8;Syz?6M}NhQHK016`#mCYs!ZrnajVdkokw_>+x+ zy;U6Ah?s+`?3UT&I9E@1Z>Fs)P%+WuP$Dl9wfK6GJn@d{yImR!+hA`jT)RCGxar;# zJ*&yG(;)}vqOVQA$ETOOi+!%2E?n`uB38_-ULb9<<#e-UC}S{n`0V7w{t^gc1b0O) z(sn_b*kcv6ZPpi=ROLKx7Rxm{OMBafH$~oUw-)`xJDQ+(QRL+fhs!+tL2yNwGLRiW z{(^ig7FIV6>q&alo_iB{i&ohi!S8!ym0U0bA!D+H4=h;($B~=&KZpjoG2G{9>lX_M z0IrihY#huShi(?^<@MbD@}eu45ToRObzC$cG1XkGM_MNSVMRMz zdTR7hCmd%#f@V;gdk*(mjV~-uB-@n-N=^zY=6Q(ax_zE@Pj-wJaxt3>(71p8z9mCA z3FszTkgsnZBwvNpZ()_zX@EPO59(CJ5;f?WH!1SOXdmmpx1W^9CFr9_hTLVdXXi2- z%J(gZ(yMzwV+TR&fq7GCeNbiQR~X{J78re8P*k$(;k!boS$xx5Ewj z7v?w}i@YN!8x`&f&7b-i(_opD0`%HjIB=Ri==3K6@zG4Uce?=fnGdMSy?y7*<$4V9 zos*7d@WPIrEi?k==#h;?J0yxOBGpa?AV$<(Tuv;0{ z*&FWJ*Y>}X?3B7L2GlFdWOt<|mw6s-5`cIVm$1FsWY<#(UD#X$_oTVrnby7@PIsj9 z`;$dnut-z3S_bn(*mn^1sV;Z41unlIo~X&tr*LLT?Q7zOmQ4#v z9+ahgtKFJj_JVPlZMp%gUZ{w~C1e*QNg0D8Ul=!M;$ST-Nv*rQ8C=YHh95_7v<|I> z*Lrpxa75El^2xL~WM7{lstjYUw&ye4&7VRl2oQZ1bir1!p6tMRVrHtzk`masY@w#&150QB|nUfp>`Q>KTRPhu@pX(+t^74-X`yid~y z*x00@dd3rBQh3TvJ_`3WH>P5+^pcc2?PyPl1`{+9!21+fbVdPCPglf8F>;RcYJPcF z#8$}j+U63hGA!WLDQoXQyzV{A;vS@pTuOV5;T9bRn6~9f?e0LdtUon()nMk;%cCbn9TRC| zl=2W3uyx?j8y4M5`22LZD|kMZh$6h4S?#0B;+(5@oyQ3a2Vkp~7W7MRTuhDEXn8~) zV@e%ZF34Cq_p|^9=|ACOC+%f(Mz1Cot-?UnXNL*{o-Aq2{o8`Fu$(a3OZAYfeleF#X@y9Yh6&8Gf z{%YKNBN8M+fy$j%x?agnJLGlf9q1&MSR$D1eYxRPe23@5(Qr;O`7cO!2h=+QQ=-lz zaOM_im~-*?x&ClCH;gahgTociw&py7N%ez}bWfD;2XE}Y-^>JtMkpTjG*99Q6-8aj z4{J@_bH(4L_PaMXwcw5?`-IOAZm&By7_Y^hyN(=&1K^V}Fq#tBRN?KMcVuWY?rXW< zI}5QsWR6^Qed+L9@K!RzTwis&)vcXF&|cM!u`Dge(4{x*bimR7{gWkvz+5mcKud%6T21YcpR6YP0*fkf zy(FW`si(}`J?znsw$JHC=}e^;SWkkrv*@y~2dY}_2Y;4lRsd?~|Ihuj^dk+S%`T#?7wrS)gn 
zm1=LSR#R2liH#&jYCoU)3m6^oDGVGj(x1n7yOd`f<1d#t^SstH{0xBJ-L5=rm648K zqrcBH1kQ$_1ik?Gy&#ZQFE&!j$ZBuAs7z6$t1W&ySeGB-E77@H@*Y~In_9|G+_1~g zO2FS~E}W7GO?OKJ5@<$_0or$#HF}__LvNQXS{N&VF7AxDX3C-d|-}BqMxnB0q*1q~9>&8cA z$KBGFGw8{pH(+g(HvC#{cT71r~kOm~^!pKJCXX5|@wPb)dO zovnxtsbnc0&kX*NVFrSIk67&cqX2}jv%V|Z_{(V*a@I#!stvI-;mV7cn^VR&(oHbs zpiphQjRP`g{>DFc0O@cpT}iSZv2@cJYK80NP^;4PoB7e(>J*N7zJVPpQ#PKsAt(Go z(8j7!+>-H17-a1YxbbW825?)q7}I|Bt>+AQ6}hjZ2O{9qz~KvxJE$>Z&(j+*e@=U-Osi$orEmd0{Cj{}8TxMi*Lb+6K)HBXlZS(Z@pmQZ0@ui}Pqo_X z%BvDuyV$>=Bq$?%tp<95%u+>G$A2wfcU)$1dA4`*_^Dg!P_z2*$l1GbGyr$!5*;6i zB14oQSw^^StnPLBl5XkLJKS^p6`tQ(T*x7^Xz@W7;hPkgZMgla-aZGKY@ofU!MYGtwl*N_c!HoTlQKhwk-mZj(y=yV_4 z0#TSLnYt`g44jrK<4e6nT*NbyjfkFsA55g7>S4E7_1%Gr=)USS>AEjBS$)Y zv5VjLw`MFv2|$O+R?q$gc>z-|z+}Ni=EoCp$OLVUQvDY*u?6vFT!F(Qx=JQxX}h;!4Ngp(y)Bq`|-*T7A743~@UHM~OH z@lW3Tz!8zFrI9*arp9!D+tlTTtQ#H?L!HW+INw8~b^1gy_Z=#;P)rp527A&8n3c@o z7ex5nR2&@oVNoS!D|vEuEYCEH3B%`D5HVzvRp4y52l(Hr^?s%fukfc|?H=D`;SPK{ zoWgd!7CZb53a}@-AXOnTS_D<1)!BqkO$Vh_;|(C)3Dr82#}OXiN5^ws=L|8!ENO_p z@TS0q688+=rT&lHq^%Er-oHaELc|_1Xg4jt-^kCjK&!ss;rL~8;p-sliwE#mQa#Nk z=izz?23;wl?`C-S0V~-K_iTT%-Q4}kl9z+?dhp>GT|5(1nMvP3kS=z|AR0ho=^dsT zDp%*9^`B@ymlFLG^EFGSRFQ5#%SZ911JibNg{V_{Z+Zd{+qz^@5c2`+#2{nNUH;%- zl#0-)MU6M?--gL~lyWlAAf*Qr7x1fVzYOUC*@cNF8Jw3(#z^x5$^+CA9kc1E7EQ1`G%kQlk~{)o7251!xyX#L>WGr|Q>7OOZK?a+sP^ z>;?)Iu{*cjPP5-oBXSinO9882{Yn#qKNUg z{;BOUo|AE7EERiN3pD}k{fid~7PIbt{eRiuK#{iBy@EvIK2I9>y0#hZT*uDKL(7 z#Z?e|Oo4v<3fVAKu((Bi_IuIM7qEkSzEun#69Ilb+1HrXpO^8Z(mm^hrW!HSm`4;zJ|b@0fZ?oVpDTiuI+L)lLybYj%6@e!X{zn88ZS{LW+zvcGpv zcH*pWX|v-1HPggvQ=LBdri6y=pQz1~c%aaQe%6^w=#bk%Us7YgoYE15e4^> z$EZjw?C&9y>`biO4!>I(&)i!JTo9R|x-}}cWcFaCF7LV1fA=2PeZV`8JM%|E$d#;Yxq7BO5kK*0-P%Rls z%Qr7|=h=TSIj;dybY5nI@e}h22+C)`&wcfBC?XzQ7lx8}N@gij8O|phjg&(c1pR84 zS62EHe(|mU1eHud{(R;xr{MEkgmx%q zbDjIoN*mjkjMR3u$4&Rf8e4iFxjEyU2QGQIB|p9h8lboZ(HzEFU<8Dw8mggs)5pa@ z+|5$~ejnm?G(c_bqCr|*lU0qZhjD2klO<+w!by|UHFxIL zzMlO; zFs{1o`iTJ^Ty4*I 
zOZMB=(Y(aSiY00Iqpl662TcGIQT5e@q62N)yrxHWPA_hMn_L>{zg3kwyz}J%`y()roy^%r2v1eCx`s=> zM*E5F%#YZu|PrqHey@#npKD*gXxj*|3fv0SF2`3wpEW}~MXL)w|; zKS1Dg1oyvT3P4W^-v2~BrQHsHc6Y-rPA!QXA9G`W*Xna|rb%G{)1m4rQ z*ILiB81@GeIuy1Myl~H(#PKzmg!FNzZ~h9{7opyM<@V6gqy;AE)fIQIRl3-1BcD?^ zmASs$+YVjJkhUMnXl-&#Di)41yw8*#Hhup^9oM&0YvYzHNmtl{0A*3!7L}>8EK&4n zIkCt;Mt3pl-*$6g{l{Gc9z9TjH@!hD_rt67VNfY&bArosN_Xb!p(YF%>+M^dCJRe4 zktT=xi&>d8q6j`p+rO!k(DifJ&2ncVm;JPwoN~`ilwyS6fqz0yXm})juTsP849z^OM)@z6+VhDESnbS(Ar^JeHe*)`ta2kvasRHUZpZc^pGp zhd^{lMo$OJmxPu5aSP3+@*+no`2&T$cT1h~AnGox=fq?Y>IU5Z9`Nr|s%R2l!(R|Z ztQ}bizmGUoK|Eh2&;kJQ4Ij`S_6^TxkEkNi*YI{dSiRgvfEO0?;g($jQzUJ7Z^(Cs z%|h}`;A{jUzV4f{C$F;0`zJM`@=a&t{P5jF%H#CY1t%Pf8QtjUG`J=J!7cc`CsyKZ zP3=z61Z{MZ0tRHYW*yN9{u%TU*AZMxI^v|l^Hk7ky8QbCz5Bby-Q}Cy=ZB~$1431M6(U500t&fc+p?P z%hMoR{i1Z|d82045no5gYtJt{F5^f~;fse32HpV<$87}|{HQ?AHzHoZKdY0uWq^dR zfuR?92AxEf+bFiUThe3692S#fmsQ5*9y0RdJ+{HE7q81?{wp(Ec7~#I$C>l4(%{>f z1V$0Tj6gkbnt%{6xeKuIus>qcRBoI_vKpJSWl@BJYi)f~q!yDz`u_SvOz-WO%530s zj%0e`ghW3WOr3*YI1a~kN6fAev`nR{3m5j^&jh%3lx#?r1t`lzj9zzLDIeu@cVx<= zRMz3<5>~>ZU$O!QbgW*2t3#a-R}SLC`YP9i|ews<~~WjYJD_t zqQ@%KeG2cn?RigTu;XmBLB>W#j~uv9b{%O3_xwUj^nZKmw%N3Qnca|Qx3MR==XRQd z^~c7P`qL|_zJ_JooAuLx%zsNw+UXa8x0-*&(Ln2FP{K`H>6p56(#!uUq5pq>rbse=b&!q!x;IS~2Y;u>Yi zj*u#v5_n0vF7{hn?5(m?J<*3~F0?yx5e?`jE`1m$sluk9?7s~rk4;mO9%;W!=O?Ka z+C^G%T4ydVjwpEx7r4{@^g{}BGf|{$qKZr%=i7@0ts};@{s~TeAhH$UAX0-l06G)* zB4h~nh#Hj@iiQlaX@DF1@#TGW-!wJYr68*;IzH)P+9pl7@Q4)6boQH?g9eW{{uiTm9918m zRDwFGXNxZf_QqX%DXajZiDmc<*iz+FnF9QR0b;{4z5^~rZmw#4wcJ$v30u`^W3tV} zJkOBxY}=OrAv6=!|AJVK2!VjN+_vyh;}r{w$dL7B39^|VV@m`73oV-uX*}a6Ea!e7 zhXRhep0E+>dHft!17EZb0r+E41qbaS>qVrBwA5s4tt%SuN&X+(vh?2~MkHBEMBOb7 z^^ki}_7lfraH9?!wbnoXUf6rOCe-u0^Jcm5~y|2c48{;n9cj?2(yo1*Xj}2#&9}wU#;A@Rg)*3&- zI-)afq?eD14jH*@i$Vr?^K0SGsI|EmZ|QIwvV;sGF#`snl55JJyBIPJY#OKbWW!^U zF)}gB(G7RU=RlSWBY=HiEq>04l2Oy4=nG?_ju1bzo~!t%zdq$1oD=1@Rb&hs94BOZ z+)X`oeEz&?BF+F7PE??>n6B%n7!(;3f8gfqC&=y}s2dwsBySQXoebX?-_sg;P&ju| zAYgTr{6E3dV! 
zq)vSE95$8Atuqqf>2zKz>w9Pok4k@*OUyW~@lgOdgr8dlHq zSh}-@S*01{MqRv1)-Kj!K~k!wK4mUf0^nkuXa<{Qj$=Z5RtJLKQ&4);qj7fRSl&(y zj`qX8aj3oc$fb~qeShCR-bjH9Y88-Lt0zLd_XR_Jdv)e-Ur(=}5+TgaU`~4K;o6(V zQ<3(C0h^hVt+gtxCDAJo->S7Zc^Z#f!>OrT-s-FL6Ur%T(Mn+?TI;L^+RRH$Iy~ll zP76pgHk}zMK4uSDvAws1Ll97>^&IeF-Vu^-62LgRPn8B1evT_!HC0K|8*u2eP|m4L z2R4=hlMqR-;^Qw?HBOhXL?hlJeGn(a0dsMj$cOX05v6G}lYF+)t?3Q9|HWLFgMCry z>&$@P1PLP!8ZdjN^mLVJ)+W<8j?XO49Jy4H5zkWi7fvAs5ikoF zoE9C6_elC_IT#MP11GrT0l+u8nuog${38;VOg9C$#`Wf^Q&n4eD{v5HY6{-ibFpu* zk=>W9SLg^l9QWG~_^s(K0M(_ikD?4$SiL)*RC8r;M}Cyp@36DBu{h&9!0XHRE$H0? zi?8@ zrkO8;(e288T+7g3??w0s^6u|IKOl~YC`c`1iVGSt4sPb7a77zs>IOEqp7>I``NXZW z4L=STW4Jb9$l}L+%Uqh4G60#swskew1|H>woXSDCO(auW)5j z{Mq@KS*2SqXh$Ba4Rq2eUN`4;{=Ch!vq5B1Cf~Kg(vh5ic<$DcHT>P{dsK@+4TQyw zfnQ3)j&9?m0wh-KowBUYO>vjjyoxy*_3&wB|L0e}CnoZhsU-H8rYG%YcZm$jv%R%; zSSSrDn9M~;DJXq-%-X|9_r&RN3;Fsl`sdiIloSYHXQtFU=ietHKBBGPs|HA)l__dG z`j~$G66M|^=^e1gbl+GzIh~Aoi791nCRrRO<+RZqT{jbva*_Tr5&u0 zh4@ZkY)-9p_BUlBqsz|i5rny&UuDm1nZKODr79cHqRBf`Vk3Qr^gOdq$JT zC%=~*Sq@p-r?WCKd6X$=lTo2N7u0_bTnNf@q#)uwuU1sYlBwZ^H8)p_rj`fpD8|sU z-*!4Z#A6u^ zBS*fa>lPVf+*u|x9IWeYr=}hr3pa3! 
z=Nkh(qKp5SC_86|79m`Wt7C>2p_(rVo9VxImEi827ELE4PC8MA95hk83YqE&W$2k| zFx@+*wM7{MBJA!IkmAH60T^*fE9MiF>VwR~eZGQ3+|ujf5VY4-MZ>lh4u4mr>krq_ zg>Y*o5Lt}e1+I*2vHL37wzBh-^r4P2M0n*S5fip~32 z&7d56Hav5Wh{Uvj&s#50jwL0| zkbJ`K=*oLflN!sfN_=erA#f_!slD6ni9K3MBw) z07O`N$rIEER}r;jgHdrvm%K5$cS5Z1Ql$gO)cwnQlaJzWpY%0kxd*pIc0UdP(UbK?;AjH`uMy{%q!Sipbi%Z%qN+;ao zlLkj*n0$(1bh@bybpH!&?Mz+p)PO+{YVN^Cx@d-relh^E_zMJ7QxYuLzb-!f zU(OfNTRG8c|7RtD1$Qig8Rv3QF_*#v+gb-&fauXiCg3A)nhEKeuXYYzZSE@SbjhB=rf{0+PUNK2vC zfF{o~3ZarVWdoHm!P?CauXJm1IEQOsWy}MekS>9uR%XTaZ!0GE_)vT9r9C~R#{%97 zq`JGOGr3l_p*O-jEfBM*s?J8L`-^K7QPJ~fl;c0Tzht3+eW8F?L63W%zkyepX$Z$| zn;)pRD9Vv(+LEKyZo;o1_4M2FHw$caLEGu`ATbUcKin|4Wxs#AWEp z2PJ;eC)v@J6u0QMLJEzTk$B9^a)Ti;^3_aAYcpe))CU0Rl|GudpM2{qY`T5v-CuAU z{#Rh;FE|f%kb?Q!>@KB4+z|^WuhvLEkddUlty4lL0Yo`MPH>k8$XZ*Du$bQa7<44} zBMXS)6UgyM=1(#{9)9^hUZfyoG5}7X!&s-E%16~>S6G3x$iw?eBl@-0lQhdOTg2QJ>~^_5se;V=R=7%Q5!5V++qAbU?g?%j$dAc8 zyEZ}WwHp(g8SZPBCbKnud}?oX&(+IGi94PViZ)p3pss4Je9~)QDNPo%H?Dl|-2zm; z^wwt+BAdzl!$L^Mj@c^z6vb&^{jr2tw&TMHG0Qhf5K5cY(-qWYgywQheXT1c1s%b^E*KtjdnRCh3CmaWkSg zsvbqjx~9$erNE1W%enkUM-|8(1=Res%u{e%D`Liz*0q^qd8E#** zKe&N#^R^f`t98qb>iO@WpZ^L|>kr}A|B3S{HW7_TjHfmM6zU9z{QRsI#|s^1xKC!kY-KRf|X zxk|ObP@8*Lb%Tfd3-_RzUgwP)yvTc6&dvwX&GCpM=K-T07AYn*~5^z z!VgXYT$^_fL7kGthoH|7D0WFqI4Yt%`woDPc%KNgpSLjs$oM}39S<_Rn-E*t(C^{- zi0@1wuS3u+Pl`hj-EGn#qmo5CY|ollL1b@KFFH ztO4{}$@+~Dyrui8yMCnH*oREh{UBIcKT0U*&&T^=dH;>0cVUox%H!a)@uZRkUCW$4 zzf<_#B#i=wBkiEF82#J}n_X8yv?@m$c7KN`>puyz{=08TlfpIfAe<8QWX0&h<}56P zu{nCBPu!V%+{mbIVdC=ZeCOi3GurO9rl4a!y?5Tyx%??dKPciq#ry}w_O~brpfqSq zb2t6OqxwTodXMt`1cmCFCIwwE7;Jb(DopTlg$sPc!$$otY$e6=7XPnTUp}*UJw}{jd0H?);oys@wed zb0_|(UqR=8Uj6Hs{*+GhQP}-iFBv$G^4COd|M7p6cz_$g10WU8*a3nf)hK{2Suk&2 zbsr!x`|&8Lwi&()Aoaq@T2QH3@a`J`n&JHn*p`t_>|OaA9^@CS(%<@_Y(`mu_b!kx z04Q`#5+neXn?%<6;Sf=|1-bhT@zVPRRAqW&*9s9#z5_fTacjTmZ@nk7U4(FXgBTw< z)=ma?yd>TdUB2*qeiJ>bttgy;=ZJckk6pzVeup z#tIp<-CZm^9EEGm16B26JEa5@FUR=N=gVv_r1Z01vPtX*eTW7mT4GigD@Y@g*6H*< 
zJ?GnC=T)&tIjdTCutvW=S_7H6f_7wU2zTOKi~Bsu(SOD95l|Ameu6vVtuLKKhH>5F z53!m`eXk9iO>UjlIq~bz0}#*1!&0@`Q4po_pFh-p>Ren4GIupKzm5p@Rqc7X zdU6N(Vavus4SzhrrogiDvYWYcEjC^(&SVhBJ$J_K=$Y#ms-LsAF`q>@58byeZh(js z-0sb1tIodl++eWl{c&(ZY zns~-`XJc_xML7-=kY)~xJ|_Z^Vn5%7PyKwNvBNCvfvH%Eyi9b~yz^vPML1-oFo{V` zIPaP?x&yMmVk4Wb(f=^r&(mh64{SwK=xV&UrW*=rYRTh)WQm?CI{Nrb^Vg9125!$M zIB>Jexuos!9`8;L14EHS$-#-a-0Y82%{*DwSy^U^XY1>-Udv*XSWE;uvrEF@QnuN- z{cu6F_b8agR@lrJ+ofF|{*@Ba%qT!fyvv~bjP8+Uxfg$iwG2E+yw=A0R<&9eq#Sd$ zI3J@*iGXYp3^?T=%_elYQG??bjC{zyOlKc-iZ=kTNL zm_UUib+Kwg!gHvh^V}*nk9}d0ADc@>+fKAYYM*ZvK71k*!NoODHv~&;0Mp#hueNHf zobym;+B}zdpijKgP@`sDdn&eSloc z(YR#FuZ(NP3j9G=9z69BD8HXzP4Cd*Ok;7 z;ni3>iDMm+0B|Jng;1hpk z&El7w(99D#ohtm+vP*>+w~kYcuT#v zn|?2~FX2G3PWXgz#9wRmi2N%nW$x3$)>XC}V=EaLYDBe7 z8n_DaGNBY1)6T{Q-!o4?(kCC=gMux^7^i_2cq=q0}d}T@3c3L zX&G8^vfHsBpCN+*3hkUH=Zw$3beNY*2%ErH-@Nfc7N|TA5OTzxaj1Y3~>8%49BHfpAby^WHVWi)QRYbohwQRafjV&fipT5fZ1?`tHqbge! zcJF9BRU;#?=ghH-D6I4uUf-b*SSG>QGy`zclOD$$XlEyNP=M^ivO91+SxVYG9n_7yLi zRt59F9Jw=hue0-p?tRe{JiaHsSueuTR5KO<#2eE_+AWgKXV5EbH-#J%=r6PlD4&_U zaOZ*N>?2u+exF84?#2UwmEHZ(=8L5$#1$*%7<5xR@187!&e0f$vRaBS$3V2NajX%- zX;RC26`4*wtb8_=>InFpX^|s;z74?{q;M_Y>ZVonH%38*^3F?} zADo?z91EYEAg{(q`cm=x*tZ#a1EpD~7wgpAo^8vzrurtPv4n8*Z(LE)FOq+$Fua-G zlpa_k(gF{2UkeOUlaEN}8y;->fK1Vy=2mPzD$)}h;o8pH`h=s=K3-Ei)FuS=iThgZlbpHp|-5{+372 z0eF7cP;y+&8lrngNU7ERR;0hMrv+p3r07)thLqt z59bW`@2RXIbJ|I<)TWm)n1H+vow+ALrB_eA zjqczWp=wSKLi4w*(b}D{yz+2-y!rV^t8FxW*ZaJfBeG#8?b0i05C95fjbuudZA!9u zWiyeVRJUN|!*iVa*_nJ9;(%y0c?Ri5fX#>y6cxbV8aSdQAKn0Yyo^F1E^qR&a$bMJ z?LlE2C)io=u+!Dj^Gpb^OJE%HYUjW`51^IE#H%cQt0ZdPKwhpTh)i>8wY5%J`l^Z*KaDO5nK5dL!#YSRO2zXpoMbtdB^;oDzClo$?tVN@959cN8sY zvzZ8E?e*AdXGJQBhl`l^>88ts)~Fa^Y?!;}8dW`sWs6;2A|BrS><3D&;6?{ev0l?f%Z80{1#Shbg=0FeY0DjAjfkz(2-NfR`WNxt|{}w_jYV;A&==_sInaDVna>oypRHU__!f?aZ*ic>BosT@ciNF`Uj5{&Rb2O z_t@o11*e|Q?^dk&2`Ff#EJnQE>^+kc+?qS8YxiEY6D@@Z#^8wO^wXQdsWi};!}Ufx zMrA}5GL^OQkr5(`1;HazA=2bn^DRM9k-o)Ow@v%1yVD`UEEV=X_5(;G0B=Q`Gpn}2 
zx#A2(C+s$Ho7S|%_wQ*1l@vdsjH8s@eH2y*=`ZWySL+uX)acKjDJlz#DH?ApzMUDN z!33h@of}R32|3Bh3-A}@RVda>kLm4?G%Abi#-BIPMv9m#7~2+XZCd<`N=oPa??{Tr z8?;zP!V77KDZ^~X2E^#zfy7xYcm|mJk5Z98xFy@#@XpHLxGKQ0AVM6OM28dURbG!9*1ke&NgAJPB%ohq1!z|UiRyty7u zS=pKnIbA;2i@q@;8FkK$ire>NL0Lq2Wf&|FpuMNu64F)*Z6`~V8QoSJaj>wtCdAP8 zAt(N(EoX{fnIz1XJ?8cp;*WfmhIE1{y*In-J^o2@M{zsLAt?SNx`Fwt5_77C#PF=D zK*ySk1!ASEc#Axs=UF41T$=3St)|AB#!ZmL^|i#)nRv{MUy}+GtoV{y*Ad}wd?h0ggW{-)J%gYPuRYBGufKAyBxR*M%jq+sbY0r*Pllf(!Bl4p zoT%mN?P}oz^3^lXuM}+SUzRG3+usM@0_-R~FHWX|BZO7|jzH(_8%Lkt;G}U)4O}~k zTUc@$6R}9X8+kqPQCy*xaw<>h$1{Sq>x;XOqsz`n%TY#H@92zfY$F5IExgH(21NME z{UcPqJM-qAt?Qyy@@gAHCYI)iID)4IspRW4{oPa0u zbKoPyl1ABE^n#VB<#=_LG0GK_#Pq4#?3X@d8cU3q3FU5+5R~7#?9iB*l-*!Ep|or} z#v*O8X8M~)Ned?-l2k?fXdp;^4{sc8Hlvo#+On{Fx3bKmXN2Iud&$4!Zq!L`7oLd_ zJSGBsTJAgo*RGSUF<>Joyk+fNT@imm_u?(TMMw4{^ZHUW*fP6Fv?@VAUm}7FJ!Pi3{9- z7cQ9>+#+;GRvt?($@RT1*sb`H_l2?q4(Pg^Sm@2_`FzBi16~>=5?1lqkkX?u(PrLm z7dz=ATZOzuDy-9W+&E>GMql}&9L*oih7axb1w!e|G=tF`Q`}!3RaaI9%oK!7Uf@Z%1>-&H!qOb! zFe(T0GWNnDfy$Rxp3r_z=?l-Qd_lXE++?!$f~u&DPxtkmBlX7{vW1Nq~#T8n%G!)WPPx)ao|vmIX;zJJP+c7+Z$zbTR4kSa3-|okcGy-F0J9-qp1_S zBX{0i4!WYA@dP6sXP>f&x@(_A2*eI}I#&e@Y+#2oyDYz`f*&7es#MgNGMBXDLShr zVWwcqp;Y|dvV)*u@c9R=)Mwsqu?Ne!@NO5H?Zgdxb2el%2W)ssIOMukET15E$v2pA ztG4p62h3hQeTC7C_N{2jXep5z&;L@Q?+E%Ufu3uGM$kq0HqhU)lgB^4h-kzp=PA4%L(3$F)8_$-4-7*+YbVHM1 znqL+QB6jqaMp6uhmpq_}d-N7F>My$P55W!l7g6OnVsu50-O=SjzlDSK_v&vS1w6{O z;9lc}ErCC+S${IX_?-rF#E#khtK3zVnE|)u9&jBes5nex zf)sf1r7O^9fh|I^$#5Qvc=P0*hYdTEMdr$#Vk!86db_&=j`a*Z;%jWZ?9_KUAT)K> zXP@fL&D;ZtYN*@ekf(etUlo#E%Q36*wc*_BP9mckaIQ1q)T?ncS*WX}bi zHitCQ)i!G^I8+6X#vh^>}n8Ox7;X&m-#G6u1fuUN6gZ7-nvld`n2&%3ckZ7(&t5Wuzv)57bUHolrTI zbxF^k>g|NeY&Cy@m%6bSPNXI6=%^gEwb}Gd4?XeXc}?r9$6Y^|>Xp5EK4sEH#ldwQ zAYoY@-tv@HQKGzkH6|`**oNdN=Q(AomlWQHUIjb?59O5K^9z_}FlCX%XaqfCJewTX zY&>hsc>qMxI+*dyq#DH1Ly5UX%(?`Syc6k2-Gd+Wh~Pzfz{Bc*E6@#e;rRnsfZ{jD zseT6@`%eyz05F`Vh!Yd8fivh8$*XRaY^Sj^MbuJb6VDP@sC=Ajxn4eq9cXxyRU;Up z^~wCv4In7&!8YA^Gfoy)BDwaH&PBI)4inLkZ3}+d10W 
zKy{D4o#&X?c=pvd`;XsyX5Q9dwqJoOTeFQs&m5eTP=B!l$aBM1X;t`f_tR;MZ#h+_ zVclF}q*%Gjh06?2jai01eDT~B#_CgRM2*@-f;{yl zCTpH|FjYk;${vfJixMH>tmkohVHi3UlQd5C_FKY_<7$lLf$cC}yol+^ia?>%aG@>+ zQ@n&?bV`-QMt+Jsvz&IuxvJvQV_iqfE}_>eysxMl+rD;i9+=6&_zy0+)~cfbUYb{2 zPDaXi(^PaBFk*(CN>umUedhhlvRT_^(lhEHGxF4p?lhkV%9Z#M- z>EMke9S4CtG1&AmZRPq=jr!?STR5%k)z}Wb?qK#bTuGe`%d-8ZncSmCx^Gr#WmrP4 z?b~Z7MG9h5k)yYX2hjm7GnuMc=WN=9ri4eok4!0N58 z`B}(W%-Kon8Rzv#Yo5<%=OA~#-x71%==m|bmyOuG*@oKSzzZSsVYbPm&gj8;k{n(yCz?l7pYlG_ZRzXa+WjkJOovitNU9_3tUoxhY;1e6Jlp zH6Dbfy0KzhjIF05GaybcPjPILdq_Ff5|zo7-hC3MY{#QhEUta-D`4f)H;#bIF!5L^ z*a+85O(}Nc%W_R;b?I((5x_FupW)xtx1k5r>VO}QgQb#(WQ0cjs%Dj*W~egST4#oENVGpP=4pbNS+G6 z*UtC7z0VcLA)0f!jFdKX@`RK3yF#$Z?0V%a z{H@ar;yC%l7?CgbeyvQxPWjYZo*U$rSJN2 zNPqVVHm6OrX;6Dq$3xalzF{-=nCh!>o$RhLfbGg{9ehRtO1GqNdPJCb@3Z)zjeu&z z^-ap0BNBbO-XZeVbKtL2=x@u${I#G^^N=x?x#?;Po`_dIbb|`pUr?**xy!rH)v-wS ziKp~RFa49 zzqEJqW=aB&`e;Z|gyB>Cj5s~(@sv9zUw~#S)RAW` zle+hXNONs&FT*s!Z`=g^9G|_a?ABOd{RV1vOHB3Dxz?s<@YH=7X<^4-IpsW^b1(U6 z*`9|id_%q&dpvuF#NU|IiV0yoo@A4$qV<{UkrVgr>gB$ECAeBD_(kj#l#}k!U-0m3qUSCqu1bCu8>A! 
zFz&b?U7)8FRjdu`)M~q9%;)Pgyy0#7MY8`kJd}$$kMM8myaaXJjeErhD{?B(l^W@z z+9PQ^bV}4RXR~`3ZNvHOwDSQ_?HXOmAEm$LWOslBZHx$3~Rm z3muzxAc1+2XDf~N5agUdNT2fsve$i@d4F{6KRo_>se4lR?_~K60g@N*cjRki%b6bN zWG;YlAm81)58QPr10VGVIdjSd1UkJ*it9YVxuz$~*rv_zTGg{7Jv%P!d~FodoQ3Ug zb$GXOF+XSOH2=bK?m^oSB^Le?lZxtU&1|oe=+fq==q^3HE-G`oHaf~8#rBby59kvz zvitk05y#W^>GvPdW$!1E3u%~9!}J@zZTE5l_Y3*Zy-OQ*(Ocx=0xK?XQS5_J5tE{_ z&=6V_MVzsp-3*G(kd)SjaXwq@{N#mxVq%F3kTSdB1LAy;Cd-n0zh&UT zQm2qz52RisE55w@Nzqpq*8b#_SWRmodCe5W<347qN-}j7&aXG`<5FygK$GEuwcDLX z%XIfRik>;nUYihGf`a8tG6~uHBQ-%;rhrsA;qloK^uXF`9HTQ3Xq22O0S+VIp3^Z`3i;rhoG6c*>*gZgejgahSAA^WktoUh)Z9N-Mq=7bMG367KEfH*x*zYxCny=_`sb;l#Ram4Z7(06D=jd8ipPK z`w=HqfwSiMJoV{!nmXzY7TGrB%{1u`7mG#HB0NMACx}37DaIU&Iyg3k_!f_3Bt@a- zl9v%X=YXt?$~5vl5%vx!gx8!&HEd2pjG@Yfo>Xi)Z;UOxQy+gJ7PiRBx?RXWL)9!o z&{1Hk#_jGT>|~>Dr&fBtdxqD}hD)wZ?X`6;%J177NAOeX>Zb}S8e>D%muiU`s@FaW zo?VjKkWAc^T2p#I>8jlhbPRu&ar>OIWpf24VNLfL*F}ek8WZ~^PIINdZp|Kf-XnOJ zG+~-!=vK&f+w2gK`ezTfHy#b!D@Z&m|Ul3 z?}c!jX3?J;o79<$^F?5IJzyPJ`@JOX4@J3#9{|Z)NJ9x{*D6!{)>*72t>R-Scifa)ef3_Lhv~j`Ou#k`WO;?)()`>_vOjgb+eJn2 z6=>kjgn2S$(Z4Bona(X&2S_{})oHm&bUoR2@uFS0Uf9jtR?9=sbwHty%MJcHC_fI$ zm)|o9zky-xvi%mQ#Sg&E?UCe@-gh@T4?$A^P|e$toNEcd#>}DSe`v#gRlNMmIndv! 
zYWwGI{(kRW|ITCoj{$G~LA9R0m6LxCpl{%ZV#q(m=8weqcRa-ZUvZ#=KPxr|D*%r= zj(;|?Rg=l{JRa2uKgA!$i_>1|@_OP#Z6yC`Xr>~9A6Db2n9=F=wWIf1Bd;fb7&^tg zvkFwXQe$O1vV2v3E1*g9@{6OX*gu)UJ06 zKwQXYMb^du?lZbWkZ=JQ=$g>~I~Q9;jD{h}2*g~;A&6%YHJoopJRrjM-aOFVAYOux zmy(0PWFrG85HipB@p|MsbHwHWfGSPcquMVAh)mKEA%`HbQz4VU{($`Vh$eqhE%e*VL(u$i#vurghW^)$xer0Q#i-2nZjhKRq94hi z4Y(4j6_MzF%NFqae*1(oisZ@kt?P%N@5{j05!S#fZ7`sT66(7?A_|eGhuPaDAUCgk zEWrHc34c3sBv-(kE$))lM84B)TLbPeVCR^~Pnei$f1DBE34a(mBrBXq27;vK^sWi24lub83Grd_N4`pHutis{KoS_b;_-)31^L z0z!(=ksyu%6jFCd)nq@&vWW*@>*-dNM5{ftmt)y$5FMsFE-5C-Ns`heA4K) z65;Xq2J65!(^(ju@ZLoftzM6t8_aHZ=~`i()BjuWM5E^$^;H@?I6FGTF|v@_neZ+T(Pk z_helb|GK}%m$$nW>-K0=sK9;2=7)lk+3?Y zi+Ol(t9uV=`?YjQNyx&2zL}c_o0Vkk8Rsy8wA=c%AF`s?GhW71YG&r3gIlw1dF86jTzVW)4=Rq^ z_qe;si3IyEZRRZ(cob%HDd=X1c*Kn>FL=Vt54;E!atKSp{Wb(#z|Hg!^yO9D$QYvW z`mr3kPLAeRa7@9a&d6`1Ydd8%aA(l^AR_BN_k$;6&D~+xGKZ( zqe1y~LNm$QE+63ypT9f@EqAvQbZ)CLeu1r*6T~h9k$T?v-CB6gUV#F4yTgl|$$gRU z(1+ek-ox&6t?7(PGd6V1Z0{e|B=RNfiX5d!!s3Xn=N(>t;*9|v#lv-Ak)BKh>9?M2 zxYzDn$)Vwjv(FNCUzudTp`C0JzH@6;dRaxG86L2x-Bfoo{*t9! zXmDg#`DN6c)1@{Y-aU&-FX%L}Uc;;@O$?VE;n+GN$CKNbiCNGuu-f%1G)sFXppN2HGA zEe?AdTMV6rfv5Yx1S#yfBni9Y%ebfUMBDGlyV0>Ev^oZHORLEx~Apj-XuDA@g+VLif!a@}>MiVb^*6VC{)hTSa^7PosC@=Of)LQJf^E zu?`rghDgkAf_dH{=;fwDR0FDbV2uwoRLE0S@ya$vKSi01PWK5n#Wpg%9Ez5`j;c>k zBIsmH5e{_1*6dcd#?0BBH*M#*4E#vbEFMF;(9*Qjg2~o=Pm|G&I=7FvUGiZ{H(zD641a6cW2#8qhRA|0>|b$fYmtKra|?o%$k2kD&xIC728( z(B`t>YfR9p24HuFR{*w0ujpp8TkNJhmGG*rN~*Di2|xynKoCF#7>;&O6`wej1ki>Z zA#xEMaC2@A=lV^2@udylANDxTk+4r<9X2P9P?fuzGez)77F;_J-!ntYh@>M@$1(+J zt~I#i*Ve;{ba9bgn&V0{8t)bOEXkK}ld+S~P+xSJmhgi0x91^OWII?NdgQU~uk1Jw zsImdowSg{YQcGuCzC$xN8_lqN<|d*ldjmXc>wb*#TVjEbEw8V2(0y2i1Ml5VmbQ7- zbNAgj^Sh$IMt?~vHqH_Yv1ck0$I0H7Z6GDQSLOoqWok0YX~HTK5WleN?fHHEr}hHw3Tom-Wmz5*V*GeN?L2? zwXdlHrt)rs6!8)X(;~#-x^g{~Y`QOQB9d`~2s`I_eEXu9k2G)6zHi8O&Xdjg2P>J^ z3L~Aq?HVV$afp3tBMxC8-&d9^5b23c1a>UYxR_TD$ch82d70w);~>t`)-_)BKx41l zbk9bejKyZtUn(5M7vS(mwbb6ahKSY3!_*wCosrAkbt)TGj1d$}O+ZBT27Y!1+JUW? 
z^Hj#I!oLF|{5DW0n99w@d$kHYA-&#X;H4Pu^}Mu{tgjD!U4iiX&cC8_`4Ge=<6`JJ zvV#9N-I7Y6!&dq<@EM9vF$|kkr=34ofpx>~MK>}i672oj?90+#jlPG)8NXUmY5HJb zo$+NYTUYGi7nd8i&g$Os{|6Fg0^yNg5pcrjXElQrjuPCgcKTRXoY4l+aVlUjpwPvX zuNw%|Q)nA#Wyr+b%SyI;KNJ$j!_>q7*m;|#wYB~OgOO2ZQS%wW^;ISv<@+krr}py5 z=P<1zLGWq`;RQ;r(u#Lk_KA zTzGsUyA!+3439mh6oc3VERPh&W`NDTv?t;Yf6?!WzSdB)NMynEd}|eL=Nf|^g ziId`VffX*^awD;R$75Mzj>?Bs^6#pm){cDL(1+qey}!U26c8-dUx?5F-W8J!dWE;0 zhyb;=J*|4|=N;rbM|Dai+(k4h zC;e;2#kZIiuvc{;YH9f?>1o;K0#~xCALaXoT*{|8@;~IO{PsR{zf*4gk679Nc@4_X zL=T+uC+Hbrm@cZ!OX^|JdZ5nW1bV8iIm0b@w|Lz3nRBZfX1oFFd2v?T$5R702mg*9 zyUEsl30luQkxm(zuxQqCq2cZOdR5l)JFWL~U(cY8v|o_k zy$J>pAK|E4;w(L-axM*>|D4n`%_4Fhvg_AGuuFIciQ7eLAcgR|jmSWtlz+^2n*{9` zKHnIqRc2=A!6-5b5q;sjLsLbkx(S8=(Y_|TZDG_|IVomb^|}3_OY-cmt3GW5wNM2O znSsbcF3%FOzIRMNG+m@SGG8Z~s5j{X&&8})Zy#ftBU2EDr|E)XDpEhdCDMv%Z=%h= zvs-_A<8++=;OqLmBF-N7R<}1{LaL&3dFRp-$5hJ%ge_$OmjOa~{a4EIKZo$|`>uwb zBdjz|9W5pg-~)j_#rD@(Nw>?due|-z?F*sk{Wyq>Q7gz5g1fPh^;*Gv=J6`uh(8(wK zAc~<_V;U?JE%wR4`Rx=K7gdcfto@>IS)82DmFO0@K78Y#-JYi^{}P#EB~8q!j)1|g zpcpQnN0qDw7Y~e*VxptI&z3S|kJPU;r!nD1Ux{xI)QMz{FX@IJf&|yz>oqDAlzFxf zrNkO3^ie5eV{=4=cy{aI9U|IX&u*zFT-Y2Yjf>Rpb5W6E4{i}v7Y&Al8rZ%KAiBzf znQJmGuMVatvu;_EO(tM=(~?DvcTOk!O|8DN4z|F=KCQw#9-iOPS!OQGzk z^TNe`f-<~^6I30`;@1V;q@o#VCU{?DUc7syagH*sPs><~6pi1Qk?MJCOc(6U5lS`^ zNx6~Nzw_ZQJqCk@nn&@Ik9NmL|0o)0^=24;>PAaqpje{dktHj9?W2qvzT%|$$ipjUcZO$7QA`1HySLCCt53}Yr{an7cYONC}X zOS<%%c>_2Cu$a@XQkpifR)frnI7_jEuHbW5TAc5>nXQU?ey7L5=M>w45`?7&NtbgtK@Gyi&UMj@b1SjG#nUsUhRLpT5Gp$ zp|sJWHRr{tS>Tc2FGoKErqrVmYm&KD5ogynbJRp1KRLc3y=$uOB<|(yFS6_Ig6I)~ zcS29?ZMugp4_grRm(ViavZN6?XcN2?(;RskgRsSnt6`0tf-Hudf@#(oO(yL2&PKF8 zpkFbb8GLz*X%Sl-XRVf$pX`ko3Dn_`yPS7J4_K-TlK<(<{wsX{9+vAjLh!e9rcH+e z9oIKjubMkaq+l4MKfFXsUR{zY`>2}2P@??g^Gf^a55CezLy$kxk`i@D$tZwF%!ND! 
zH0vpRy30nAqL}Hce5X-aSt+V9QELR>iwtYEcPYse@b28Pu^XTBPB{eqHD+Ddf&|m( zA?OT>{2uxpPX5|VaykTM!}p|!lv|5`m7^7P^pg|-t znGEzN{&4_*!%Pc*Go71Od~8H~`YtMu??RGEn6N zA^}^Mza-TL|K$`SyPsYHT7CoAbuI87u#BF8|D5jsvz4z>jwD>}TfLEExm)9%inrPt z&wKMY0`hT7-e~G1>mB_g<OMY`&l6uU%T=Gh<@!O zcVcvTLi}5B?e`d2w!I@EEAM8upWb_(9a7?fmL$3%?S#Sv@J7-ds*WOdPy*Fb4WdyC zYQoZQ7KzoN>%nE7Eo#iBb#1*E_}TW)qU$PzK<8|qGDFP0Al^Ku#U8E zKbedC-$4NO?vOWcS%S>3y_5O`s-Wc_|2NN=9~G}%sW5Nx%aN* zoOAEl`#k&E&!*op|IsrdtPGX`BCL^c+zKGVItPfbF6Km1>mtb~w@wmsx~X$$GHMR+ z{?Qf_|1X~A_pLqD8s3P2o!8GVhyR9C@8o(!>3X<$lfZo9OEcC4CAqwj2p)CLk z`HbD@#($BZ*#f=>HhYr_Fr|`EcYl`R{%807pVSBWYZovh+tn&0-d;BA{Ybrwi$~d; z?~ur4Z6&iMGV?|ZQQ-4*dz6)bwL8ElGG3eBX;Dlv`ESw_&Q;o~bnd-U7)6bL2$Y$^PDphNI)yM`O z3U`){7?ZDv)-5VaGQc7~se1;7iX=pV_W3%+JQ1by_Yi%RduTuGFxh_sCnTxl!Zn0! z5X6+0g^N7VJjtR#7(TLLR>@EDHt_pQV3?$3pj|?QyCk`N`cPCc6mSLeNDM<;j9fbKFK8aYeNqaCY1uPK0huY=GU6M9G=H^KX$YO`ts z?Z%J1dA7?4ZKRHgL`F1!=!0bL?_AgCoX60S(;bpW7dx~ zcXV{-W9HJyNyIBPfVR@zZ~BI{hpk66gHDHY^|B3|HLgYC!*~Qf7PY(+2V2ZrBe!{T z_@OS#0nkhKU1I1x#HYBEgyE{8Xczr6+aG%zqRhXtt4BpBoBQ824yZbCeedBr8WI^~ zA7WM&m?JZ_T`kegZGq0m{VpOgnP;otO(DOrs>Z>e9xOp8};ANH0gOp{4o(ur{l(CD{87qlxWO*8f+b}lz_+T!tj)qQTmi-}`n zqer!c6GDZAVv5f5fj~1!U4UM(W*v%=B8@5N_8rK}%R?u;vB0tMC(sS?)-*h0@#f}j z>$c_GF~65#hQV}TxSWGpO4GIHVhxn0NCAGt`QvOA7LV^WT&a_2YP}a4Cy-*PzN*7z zZZp<^ubefLFko|Sg7i0f_$##IYzQ^F6+wz}D<9Pjx7)&Q|#h;|i_e9OL26aUVT>Jh>#|94~4ZdxT&wUeJ@(XFl7&eqzRnVXvdXL8hu4;Gp*q#d?ud zNT1!1b>OR~Pzoz+xmVMh7!S)M`&nsyKHpNw^&7T3=0F8_vku&?OI)w}=1%h(u+_@E z(w6-`n~inTVf|o{RlXXg=nPFxv9^o+#pW)%AV7U;{QXUdj%?#lujGrpV+&Lc0B**$ zr~WPz^>;QYJ-^Cg`NKHNpX(D)@C3bhFwWh(Uw-7Gq^Xnvo}IUPb-OL55V5_D$zERI5dlRK49qCiRGO{v8{qOVJkFQz+hmNF)c1Eb> zqE+APsCQ7^Bj>JbK6er^AIdTEHiHkzG4SvzLHcxgbuRQmE;enCU#X50$Uq52W9_F= zamHg54f4tximZ#Cthv6YXBFSE6w9~C4#6vD-q9zJ;!U8p7O{t5tZx>kRSG+D`f8lb zqh4vUyYsY!Kad*nZqdHn3Ns-ZY*FZU^asghrcD-Y8~%!Q+FUf_VVwc%L=8ayYjF&# zBI$wi%(bg1d@d!4XLE?t8c~(9vcyf$jvJFLjfiL;0`SwH4b9*mDIE- zj+hF-+gmbnyz-Q!-@^&I&F4LOJzr3f{Gz50%L;dCrBjlPc67fsa;xDLA|52fD)Rb;{!-TKjK6JlMJH)nND 
z%QbNo%mXrvDd)^gXYcmp1s%f40}K)E5Z)12bK*)_hr{*aAv$~R(Ps{8*CGRUL3hm1 zgXpi3wSc+JanJJZS2+1GdY<<*4=WqUkF5~hs~?n$L*`{TDWigUQiqmg5vDg&?s|Pp zj#oPk*y*>HKb~Lrb;8f>a%h!@~wkn2$d=EPLtqa=iitos$DBUi8 z*d0@0o=roYWWl`ndSgBkdc?|YK0{IQMu-%xM@C2csUCrndHPmdY5FjMoq1UXGvScuO`hB8E5F+1PoC zumhP;MxKu?4UJ0GD&sfy1+3tXlE|9V&GcwxFQ7&GOLv}3N^b*hpez^9X-c_EK zGeGXYx%dZ{o_ATIgR)&a#m*b~>(lG=YDeEyeGSY75ErBaiUICiVRzd1vPpfbORyMA z{uO^OK9z*d@|3H3@cf+1;;1S(0t4=(oj^SV*r2z9vg`<&!ZQ~-T8}=pZtlC9pt%<> zoN|0xen+i|)@)P|X_Ai=E&dYg7-xDs<0MN2LkEJ)p^N!M?8n(t3nuG!%#tB=Elrn1 znssk-$T>C)w0$vO@RxpvcDqnnPY+>*FdJKUrRvLHYDrsxeluyUva=OII-7?U+l?Xm z-0atm0;iVFLqLcNEk{NYFR7BDlXHC+?dNUuOC%+aOzB3cEGXdA8>D7#YA6(#$=P{b zSm^e3EmQYsb?!{j7E`q;dO@Bg>I_e!A7<{HK@k;udZ!9iLyaEQ=&80h3wX=P-)6BF z4uC9Y5{6c;cHzt6A8I?=Kh&Kl(%5HVg=gtvOH0hRDG%-)k>P=G63Qm~AB9}Hhop(a z^hETfTd1{rIkhQ8wO-k6$FEe;hz-KXI?Sq>={2CvYz#;abYP1wFI=zYNYT-zj(9@% z>W*Q5R`bX4ir8!xNrs1SS44^6&V%30p5gzhRq_jS_;)sYy?-~eHv@B@fFFdZbj)-p z>AKIWjLRq>3u{Eq3dfez97<4sF|E6*y(q7C{1*fE}oeBC6>s(+oyceu=w%OW{zaDwHb%oCNi z7FGUnmj%=Qp1Y9tYm+RcFMGARAhz!4k}`o@yKb5*e`&zB*nO{ta)g-!%5thXpK{(@C zHT{6Ibe{-18W|7OG_)Fp*XQ3s8`)UDO$`H&&` zMmB;2WX}&uLXr3#_g&E5`W>~;pEf9Cn?9l{T_&m>#iIx1SsJshU%AZ}!*%$TO08ep zdD|<~o}uG}XB6EIj1c^GaMBLXkYG_TK&Mb{R@A2zwb+yu#g}^Zr={SN%?ENyV_jWS zw8q3+J-hA1ro~NOycEa-U}8uMkoSpoVwR3lKCOKEV;{UwB=tz#-JMIQIVHFlY%40P z$aR_lkJz?DysSTy9-XyZz&000h0G|4$mTMwcKO|62!|;;PD-;bVvcrih3P*y!FoTl zDgJo_pNG#A<(Ym(qTnPtQd!j5EyHbzSS&qvedMaj6Yd4SL`LzN2;;MwN@tBP??(*1 zJ(#>0#H21~)kiuXA>@^fv!mkhFo@tXYD(*ydd zl;B^DWtPlTFxaWkSTu7ge<5HmHg88)EfP@KsN^9TUZ2xxYs(OkZ{do_6%Qc|;mQJS z%J{#$oaeBT;&>7d)Jho;%afU}4CEhe`D{G0gjiKn+}pd(*uf>QXe71g-5Q|A3fhwn zB*6Luti-MDUM#^k8&1*>^zhwQ4+dbQ1i2m;`1u{|1hVy>be&-+{Z#MR^8jk`-Ha^Z z0^ANb4L!#xFIXP$!ZV+DZ!s>IfyOxwKsWQ>0C~NtzD&T_TpY9-)^u`beRFE?xVpNG zqRxz1(}T_RPzEWr@87rlWkO9MQ|;TPLmLo~pTKGATRGU|Uh_@ETrt5AeT| zcR{p;yr)&;g;^A#5b>^ZAW*=hT|#U5qD{#xB1JFD4`bVI({~W=q0!E0k*E7+A1A(a zas5cnq&dOJ(Y#_`ngYS6XaG44(j##E_W*hj0AOXGrs{5We5X=H@Ulu-JFt#h8ri{+#+Lz6W$DUeVmXQw0}~$ 
zcMWud)sXDRT^GMCw?ZkF1aHY;Z!{~ZR2kJ6XNc(#5r*oW*SF&D*Ad1)gf05-S@1Cq)!;(%0qwQk^z0RmP(>3Wp% zfTm4t2Ew?9))hG^ojc=OziZUV@LHWpV7#bPi(+5n2U?ls@vBeWybN*$ePnI;;_6H*b>^S|;fmjj)(2u62 zPNhHFk+$b5@eCTyExRe_=Jbe!q_f;PjS%vS!Ixk}Z+47Lq+7gppxNp_G(Dku9OJm9=b>Ep7H9 zB!-ZE22GRMerNPqo?m?bPoMAqdH%20^M5^e-7}Xn=RWs2*M08mT+919w2!nAh{yDR z$pHukgPVv^i+6rI-I%{~#~_GvdnfiqKwt6LUs- zi{MzLrB+Fv_gk*@CqSi)Q|H<4%o6n@X`|dHObiU1Ev+mLm>e`*9SFCj%b7E$8F(Pb z*Y9kgm65)ElH<6f6{yD z^vRzzy`KVsN39G%ofF`ZaJskzdP2|!dcVX&Ts=WM8?FIb(a+7>574!M)^YW6aRW3N z(7Swn{Q=Fr1JFB8{)cuv{{!vfa`LY>U0gi>qJNGBTnTQx-#h5cX_v6oKmIE}d`|}h zeytu7;FHrU(AWxm3()!I<8MZ%IRX93@8ls1K=T3my*C)qAM}J*u(36u>Gg@OK?Z=A zkPrkhaJsqdHv{x$KyUXueTdFSdby5=_W=_?1Ku+n^*(EI6wu&W2Dd5Watd>wxPcT8vy+&$ma;X z5A^#^pYt-Hm#@|d47Q^889Z}1ayB5VL+d953-~8VYQ!b?)wb^4aNnJa`&_TqyO;4GiUYb?bGW&ICFY+-qYKx zbw6!HFW(O6_Mng>e_o3_3nm*V2Rvbf&YZQR^9S^eQQ(w|DSb|ZJ}}Bc2O$G!KO_zX zgYzWh51oR%+h?n0!CyarIRLppfsiNU4(Z9k>ID6Do&M^t z?obFQr~Fm=^V;2z$Lg;kt5^6y6W|WMkQsCu{2dHvcW}+0ce8=MfD-S&{{HjsuAr}{ z!AbvZO_@0NpJmL3%=?**nGZ1UWE5r8Vbo$g2tIYesl>RKQRB~k{av4Ff~kvXl<5Q0 z2vfhacU0h?HU4Z5GKWS$AAfSi9dPVFjE+$nw5rOe&$th?587rFVibjTGpc|)=`kX} zy;VW!f9z}UUnBmTmw)h`O_7a(Z6lj5+m2s*ut9c%>R+wUIVBGL&5NJ?I{6p34F2NM zpU?Bp`~UrmH{=Ftnf+Q5ei>c{Z-qC&U&CwRRggFw1Ahj82d|^kf877T>a+3u>uy%7 zPtFI7%KNW6f8Ngpl+f=XaV5n)8W2R6{q#Bzv?R1D(_uny{w@o60YMsZKyJ{*t~k9G1i1hi^n3z>_z8cP zNi2eCe*=Q*I$eVULw?p{q<=9%><~A2Q$(PxkOZ^?j7SMmgR~%B$PhAxETE&1J>&%V z>;>j&AQTFrpbJn8bQP?QNl+@33FSZ!p(3ahs)A~v2IwX97V3mPK!aeej6*XJ4kAD# z7!1Y& z&tVO)*RU>FKMV_-f-S;U7~l+?3<3;W7^E2#81^#gF_Q}? 
z`wWE)l?)9GZy7!?j4;eFe1k)94!96}8+FWdlb0e66Vzyslt@Hluf{64%GUJagf z4}1hZ3;)5$%*f9u&bS-QS7SzNMpwoF#z@9%jH!$d87mo^8G9J9j5tOr6BpBFCOIZe zCKDz*CQqhNrWmGVraYz!rWWw-j586KnVE%{Wti2$yJydQiaDJ58gn{x3G)l)F6J@j zZ!9b`NRR z9NRfGI1Y1obDZb6!|{aU6$h5%2PYq=ET;kIan4}Q>zw(VFF5--m$f9a+!U!284_g_l@&cK z8Y-G0+9Eo;Nnn%KCf7}In@TqgY-ZRjyV+`U_~x9=?>2wivUQ8mmNQ%KZfV*wyH#kb z?$%RVZ*Q&NIwi(0rX%Jpc3TW1HZ3kFt|xw4{Em3DIBwhKZN}Szw`Ff@-$vRlv)y`o z)b^6?LlW!~dnG(1Zc8*u;3UN)%_UKi1(E|&Y*Krryrfd3TBW{A%ShWw$4Xa8f0YrI zF_AeZQy}wc2iFe7j({Eacf8-pymRkPpPlJDJ7pPURb{;A63-JHAic8BaP*o~DJl0PUPC0`{!uOO-5pm0;6Re`Fgs(4!Qf#TpEzCEUU zqV`npSyYl$a#c!Gdaul-d_Wnk{7f0IBB$c6lA-cZm0#6dHCDAzm7=Du7N}ONHl;49 zenS18df#4wy_S2g?R}-eq@k~Y)~MC^p{b^MR9o$ z6IOeyP*$x+_>VXrd3=O$RR3tg(Lw7S*1^_|HrzJHZ64Wtx7}}>Xggx3U>9Nc)?U=! z+y0pYi-V0rp2N3ehR2eQjUQJ%9&^0cQN|JJ_}WR->9kXwGqviIfoE6P{yUZKXh#l5+zcs2Pd`I_ssH}QMo@5Ixtdt7f%P)o?T z!FZkgYzye)n^?)Gw$bJFW%mE_D6_LSh1kvj+PJi9A-_r_gv zs#ofVH2t)adzVeChs}@FA3b`!`Ef!4LqTxCRN?W$&Z7NA&x&^!XO{?-TzvvP z34SvD^u*KNQnS*=vb|-+F^ub;o!`=;ux+S|%^s_!b=RNE@r)!HjN)H|MaYIIh2X?N9kBf1-U_V>JeZ}R@_ zheIE_daZju_8spV`RMlXYyatf{6NS6Y4H3frcZH0JVVK!H-ElAymPo@L~W!FyC3^* z^vLMonCsZgm!L1?@k8I`{&DGQcSXXio4=SHX|!LJZ2vf1RL&egt_aE3FbR&Oo4=5NWif zEfB;~4MCJZ8jX}rqfs6JUB4fKUIhHK3F)=zHfWYSo$4H)r+@ul2kiyK%L=;=i-N

    G+O~(8Pwy~`O920Nr3-_)dK3$Ykl$+Xmg~S{J5|>dk%B4SM{a!wDc;eI0 zo23rvw^LoACfhvrTC*!5p$$>>i6u!i=;URqVS*d(^qy<7G{|cV|6bla;;>WXkt@MZ z$_w_53uEe}{p0dq+Z>+lUt})9Wnw*BxChc+VsNG8O*zOal5#@^A4!H#pYDq$ptTO; zx$llg+*j|%ob(%vAGFnYQKCzOVh&M~(O=_<$~1^=PKyRzJB;o4q3M4aO^`;a)1X5- z5+&#o-4{4+qT52hC-w61MFcLR2SaQT!@V9Z?V#{tn9Qp<`Y)JROXv0j&r7Onj_0Ku^ea%DOx|s&u$|J^tH^TEGdU-=$FDyfc4FzDdZpsNtd}sdbFqUZ_rW~k}kvF0zCo3W~%z!_Z1xXFK_fRY+?DZAtV4!!IAae z9~}1v^y?m!0bRB>U{(U~TOc5cK>Og_x>oYmQETCZr{FcTC%&e?hL_RHL;J!ht2dzE z1j!PM_%c-wq{+v-Bg2%v#Ow4ceievLG-&k(|GyZ77_EoCrt5UkAWl3MQIU1sd71oB z15VLIMJ6pLq2@?3KZbEx+Pevto2}6K?nme06tzgQZI>$cOl?WBiz8Y=nzF(8z=fKX1jJTpA=h0EEBn4eF5|5=p=MSElCHaRV6i(nPN9QBz| z*EC9sH)_^#P9j98TP}sVfA?D?U;}Cb2EU5oZc^V5FI#AX)h;?YhX%bH3eVrUuxxuC z-DpHQg6`7Bc~XTO5@?X}*|jCGg>NHb6$#|jj+Yi-27)<~H}w@Pn+b$G8gxHwq=USx zLX7G$(?o;3>(GS6G=eX&WJAq}Gn@TX33hj|Uw$Rto$?N^ds>=L+p_)w`>W~avhllI zuET;^&OoI4I3T-?Y0xSE6`2gWp78cC4Vv&vSf}j#KI%WJymrbNktNX;>JrmJgJyONf8YOt4n2W#m6)+CSj%8le$RL2dh| z>=b*p-jw@I#{KWy{NK5`zNlDlZf4_Bzqj^*w>_o#O1*^FfJf{jZZ_mT*Ndj_CqA6; z(itA*EAFvyKk;V#Dnpbs1cfHH*iNrNJMPCpwD%MwCCS}{%7Aq6%DsLz0BJqy`%14i zO&HP9`Ng&~Y0-989-<_X`CzZa2Ao#{o|hE)yn3}$7Rk9!we*thkia>SIt9LVvsq)&zA6uy(Fz5di_YBa4B4u9bbOm;9<- zWO1YaFsgyI<|Fn)HS<~~u=Rj)ZQcX4jPq**)6dqy9k_pU2wzvN_W$VvfYUxu^e7i6 zs-979*K(;AW}G&s867L?LT-3Jh3&z`$`Rg^dQLajhKb5|7Mm{zxz!LVE*k`^|KH zO-Ulc#h6^uf2@IbG6YfnoCZCMZ;SA^wel46TqD8O=`j|S<$7ISEsC$go*!e&cSVwJN<88pvpaFk^8zt>5cYoDreN8rM z2QE4Nk@si5K0jZ-qH;M_&nV<+Y2!TP^qB~_~pN`xX86%lJ0#1q>@f}4G zKF*TV#;(g~cQoUH2RASScCS0g&3@s1y&tZN5d)SDKM?Q!_QHHx7r1*M+uv9-|HBds zR>9v{Gdo!@U7&HgKWUBW4J6Atn%d7bc6AY7=jHxa?y4Z_PP}I{iYwa@ng6o7EhTpO z*zSx7LUzEG+(I4KLhlK4Wm)-2ck9<8C!sHK(BiLtrEiloXbY8}yo=~e+C>haJ0(Vj zHZ?jAL~`~;_EB!;AG3?ddEHZt!l=*U^C)SS37yibb`d++I5z5Abu%FXqLgv@{~LxrCm^-6P zMHKo@g9L~cjrAIx#kIS~97W$Yi|(H9&}p@-x<23Qzq@z;(zd(yn|vOZe%Y+Z7|0a1 zR+LxMAW^a(Q32OyeN~?rKeoV4w6)3*e15&p`h)K-Po&6QA?xd5hvS3}SiU%sB%#6l zoFYR=qof5>Y&!Hi>*a`Mhu>F-g&Ip@0akE9hdyi-6NkMBp$s14`6wZ9> z!IT4Yl4rJ;&6%kIzjV%!-pb0ZLPb4BS-!pZp=j0VL=vXVCvlODGe 
zlMmpyW69Q!^wcPTTKiA~TuagN3s=rX?xk3V}k zEVQ86ya$rgRg(K`sx;>=DK}{EvNFz%pWwWHjlJ{Q6RaG~0Du|Ni!CC~0%|yYXDLeye<|hHED9%ObFaXb^GQwdzTVrAM5;%}|BtlRPNf zftcUm(3Wyp<=7DQ&pCCh{MRY@t}TN#|btw{L{7p1{8d~H|*QGvgD^Z*-1G_ zi3KkJXU#}as|;1z5iu(ff6)|dRy%jAY5PD&5XbQbfpO#b@o35D2ZpDG43m|b#5Hd{gS-yY zbvf3_IddRQxPde|FpVI0+lt{*-?sLO0ZadKq=b*A$MZ*Gz|zmjH0_b@&awlRKF`C> zj_}BL*o%Cl){H!A47qpU7)stZUt3*DTBO`ewcwY5klcKvnd9?{>dKqh8En`mcgK=< z^28V&fF<3IVrAU?VU09I6RzX96Y6)8H{$)9GGjF>@8(Q9N%bf!_e*~8m9CC5R@g0l zA;%^P>A>ODzivENIHxnQYeq%1BI>cA zEQ9c!D7McAOqSf1_iNFO0}8E!<{`afFYA?X?FILNvsY)(0@pU587zG`DlAmlC)6v= z*+=a}#2fbb2Qkhk*7G?SktH&Lzf}MM{H>el{#KLY=tds}F}uiaXTEL3_`<2wi3cdD z5yJHdxr1q@7fz7Sm-TZD_RW!$V>r#O9Ocpd$ew(H+oHGjNbvwF0_Pu9G+*geT@r&b zdw9IzDfivr{Lf!+44TvyS%V`jnud8B< z^K6FyEkeSRWBXc@CVV4+qk5>Ka#G8}bVO$xQGHaW1dnc{?#2la96Sl0F(||Q%AAMC zHj(V)OTKY!@N!LTm#fjpU2-XVKJZ%hu~O9Jd=3#kr=AtrZAopz2u}2&g0XUKs@hV; z74L;U^J6MA?3gaS+#l!JH(@uFvd!`SjhziUUigXw`{Ou}a7DG&-q{X=#JUvMbGG|yFd5SkIwHj)HuGFoZ6{3x1tCR^BgtLw_;Dtv?AZtb9wW>b*1Daz1mBIu0(ePv_x3gx!BH+eT|nxM66_r zpIS+2*@qzH;Y-MJ7TjI~-|voeutk)Rob8`?YZso<*|tb7DVwT}O{-C;2~DZuh#$N@ z3_uFMMjpPd4UhFsc>Ydo0|f2p@xot&Ai=<{DgTTinV6H*$hNVLIE71v%YC@y>Z&7y z$WrPAGE0pgykyS0(Qd)VlMW;+wnEhxpZzP0!djR7da;cBROr*1?vo~;zYvjTn;PZsT1g%-* z6Uy+SVi3`I+F{~up7)WLpH(o6LR_*w{aCld*c%P~CF2{+c=YoaZnV^w6zjZwS#3>& z5-Kud60Z7RCSBMqrYk41w(1oiCkct2Dg|w! 
zjyQ`8!0KWypVD)w#wm(3*@tl$)Fhn(zujMy~s^1DKiYZRVzQDyIrz`LPL~d4frmxU(!l(Pnf1bBFGskuL3WU zUzfxUe2!SvUXO0=4)wPr^`uhnX3bAD?A_yEzg=KWk-a+wAJ zEWlO}xfoLbP=LTHfL?PJn5kzbGT{|o)(cgWgeri*<($!szDDA+ke=>-W1oG?E$-Ez zqu#)qqhpM`H+6;A31I_slc#)mz--j^Jjx6=>4@XLl+}Z>&U2cwwK|6Erg$1FWFsZL z?vj_5w6XP))hnuNq!X;&-Qu+F9ro(|){nR1A+~^|F!ts@- z@OnYw!|s>DBO5J4xzDV#POXjM{O+9U1MsQeoPPAGVgT1thyO-{=A`i3{Vxxw-OZjh zXmsMX8|kjlsIU{|t|DG4DDUaFQ+j^#Mb?$`(3USxtyk8;to;em5CgE5W0Y$)hLlwD zR;f+LsM4Dw{O9GSMSq|PN>y=v1RorVC;nmPfbiD4m}jcrm|SA0ncNdXBXNed8ynE08>7lgt~kN)3Qg%D_J2=rrNSwuWd+7~e$DDw_Aigz zPi#@9pq1UZXwT_$l}Yypr6!(+aYfhO(mP#hvAsEd-}?eaUgkhpnETo5s?+Un9&_-> z>7QrcasRwFd`yOX+xY^5pz#?!EmyK0=5$S1sgouDGf~bIgD1q}MZG;llVkHvojb}~ zpm3w}RGGbnvS!jQM~6o7ONx;4mp!t!nZ%)yBHK~h2DBJTqcb;#%dEvo?8DgtdwKh0 z#*CG2V+n72!H?peOmPJ~%I7TvJDrGqreM{K$4z+UX(m4hjTiFCmfU#aeZ*c*sYZx% zODc8y`JIe%Umme<%KA{@E2rCCqB<0^xJApnCp&RsdV0bzhE+V;?Deiv4fe}%9Ljqj z$YgI87yEjyh0FqPO&d(Elb6HXJb;x}CVw^^A4b`y}@hdW=zf zbhmC>gM(c+5cqGSvFJc17s~aGp0iji6Adb%Vt^b8a5nF2Lp)u%O-V;dK5!NDr$OB_ zaVrkbahZ*=_sQ`-sK9U9Yu1N#BhDr0;)~+{b7I>6XqLTOtHe!xuUG9Z)Q-qOX{7GF zo?Gmlno+MoO#`yjtU8?u3!d(62SS5wk%!8EUSB zV>l&%x)*1YG)55`)bEu_-4yOp*mU4nyMfn5&uJRq>amLhRO;T{6c? 
zn2A?gFfP;=_|Ih{C8c5Yo`n6HM={<@YI95I@>zv|@i{G2-Q7=vRj3->eGK#{;Sn@1 z`fClcg9brlIU4jv6hM7}D7q54vk!$Bv$(W!=w>h>+=3sOI%)Bq|0E*QXX}Lmr@}CI zsXYNc9P|1`i*xPWFGkk5#L@Od6NSq<(q7KIs%YUEb)gKAx(EXckdyx?w;@R8R$GQB)v!NI!j z;s=%P_T3y`o!CUBgM&8+zP5VvVzV1>3ly@dMk~Yx55rLm#FQ~-4#$YA4j_0N6P&L9 zGUAHNtzd8WU7fE>4c&Z;v*z4o1EN@8m7FLMqhzCY7ZWE(Ha26JkPdxw)qR(>l?$f` zivGsrnSz>~E;0KSmF~}nz=ISI-%DgRhfb^kf&ACO0$2dB#*1>}-fNN;(&W`(W)%)S zvvO|5s@|7!Qy_m9k5$zen;Tj!Gkwy8-k@{2yXqyy;)W{v5)I)|@z%GI_(MMV4LomW0)dA$82n#@%;6M2kKc*;u8-CK z_oCVU!E?o4lY`*X_>qK7Qr`-kcjV%R&PvT(@FCL@W)RLrv?NsMD3jS|F)cZ z^jcGelVOZh2qeUtB^iA@L=@AAj?NT7obnf?!!>b!@uS1D9n46x+022J;0hYlPo4un zRL>;BF-{X@Eb!(P~-x;AlF`p1lWEx8knQ75cC3<5sd31qc1HXPP6&@I0udZ)QdqW6<(X+szZ_0_d}U8zrv>91dU&P5 z*2u~_=>j7kYYTF1bLX`YAYil_bD<++i@!RvYfI&yatOUtF_?C&z?Z#IWF%t~TZYET zo7+H~1}?Xgk%v{I1Q-x&vT-Yd5kyAzKWwkA97m_hBIIYE|=)1?Exi^!Qd;%nD<=^-XuXNh3?F z<84I|*yVP%EN;bhZhUy1Kz|MagjWCpT%C>pkHstxF#(<9cV8S_%mtECWLp{CVC~EU zIY2T!r^!-StvkDq=|534+}B2}zh!#!PDsJ4C(Xq#7#dX=Zxmc~rgQNl+FOC)t0j2{ zF44OxgAecij!=+o$10TpELdG*cVx4c{g{33(UWY4CC=-5bUa%l`!V>w)%mLf0LvT- z4b|#b(Pi4hrD&h2USQ^VE>-@>upN*9-eKvvXG0Z1_t>G4wFx-uWe{O-3iO6Ks6#Y} zy$3XW2~$ok0q=vGcWb-o4wj~uS&K5Ua}`|zPX+C^olLwRGOy&KX@tA_90I}^#oLtYGP1p{!_(kFC3 z##>>F*AalSx;0f9$%jjSCsdSeC%iplf(AL8UaM+{e-tG(M-sy8Xu!LA*rELUzW$txkSE3U<)L3V=@Q`n`?{J4_qXs^sG z8AABtc=u5*q3;bD>`Sx39LV`OkHv2myv7-B%5oCE!8XS)#z5C~vo;`Xh-@Uak0{{8 zY`PQ4ffSyqo{arQu?gGMl)Uv^N3P?PqW;51@oW+Qae4Mc(>(U`4b|M2onZ(!io0~eG_4!@@uB+(o>eC$uRV(#Zc5BYVMvtm|gd^!HWZrY()#z?j9); zecQCMJLRIb*%!Mh0)D`uX~OQ)E9!B@$3TnmI1Njd%rME?aBz z9h;vT-}pP0*!OImiT5wuMQad%YjJD9@S-Q-tQp_B`G$+JjsCIqFrp8L&>MHwZzlR2 z#1*F@F4c+!Nfjq~a$V!rawbNL+FWM@o&rHL-b4Y;5%&LQXVC|GDvUG$NO~r)y}^|; z)ekT3v3e0vjq>#0{B11Xbj7qjh>}Ri>?bwmZqOrzQjmbQAi(1?wr1{Uz>7x}o zuE->Ozj4fDlizhi|D@~67o%9kj`qD7=0lwH@4q^_5U(nB8O=i$p%jgB9eYhO?;u>y zX1V)i{|zJ3GkMqekXF%>H#vJ>zn08=GEy?jU1UaaC46p88vGa-v^3;E?KyNT$Z6p7 zkbKZ+M2W(@FItOw0XIzOr`)NRC-xj7hT)c8q6a>Uf2dt3JBIc=#S@mX-$FaHHe+CK 
z-|2ud)H~&CoQbd+nJX&J%+yY2A(Zo|S;EsPQL*!=oe^O=t+(D(R@UKnmDay%((*Nw zVb4v_WgD$E7@3^COG1!$Qwqt350U(htp_t3>xFUOsHwyL_4kf_n(h<*mi{HpBfaUz z6FpIvCZ@q8_U2o2i%5S%e99aX-m`J5WEIKdW-*ebFUUgyoJFRVQq{85Z)r zyMd5)HN8*CX!AkRyj#*sD@kFls%vACx0bb?sW}lQH0VTx|KQNE!SJnS!Nh$tgs<+g zD;??!VZP(x)NOe5n17Sl`Ol)2cyvpICyCp*w#H02pQPH-K66(h(b_z@>&@6s{(DJ- zFO?HvKi*Qe8j?1myY>Tf^*|8~dKJ9v-$H|;oH--4iKhk|#RQlA=i0Wn&neu$IHxW& z=;wOfIO+SK3I9FGV#QBc_*tGil6p%dFOQ{>8 zBs~36Ws&e16M5k>=;-`&-^p+)OHO2$lVlglog|7ngh$8B>-0bH$$<5i6i$A`|ETQK zJX+dJl4PHGF1%SOW(zAz90rdjR8TS_;mBlqx5(C%*n-avQp=VRF)Ne7<%uSI^&~GT zy_732IsC@8U|#JpWnKP=!;7j zdwCV>Uq+msY0~t_md+u}^bG|+@_sD;U~y{@w8Yqry1%a=^EM~vSsH}v$KX>DxL3|N zkQ4FApJYJIMhs-)rwC}0JGF*VLhu=Gdv;8rZI3nj{d92tfp`(?M{`3`Dp|wYDRL$) zGMl8rsv<;{@}17atu)#Cz3I~#y&osH1TSBUZVsoc2iZT*4opfu)LuLePk4^sgzdPp z+$Lt#ceah+G;yD;aoW)f;@#38WIU`dT0Zq%QoEnU_fd}*`|f<}JRQ-GZq%SB_!$7W zIB#_V4N|%Ni!q<`pL&D7!qF|701HwVGCkJd1h61U_v<^h4_X2!NOtg6UV|SgiZdaX zNtr?N;^vR_)0>4{|1osrJ{Tl7(FjR}8i{G0k0+OO`dbYX`A@G-GyNFQJzn1OJ70Ni!g!6*IV15X5$Z zS`YX8VEjr+(hoD;`(48(TE{;r^m`KwFug;v;W4WOxth!u>nj%RT5BCH6 zy$KdpKLRFt!qm*y>LxDo-W3T48nn62VvWlkEUB+V>vQoI7L-S|z{A4c(=?z~H-S`c zboQb4_4=JnkP`(i7mVEOieL7`{M-4V@wvvakg=*x1XhCb2=g@qsSHf6-GC=|t3SQ^ zz5-V5wLh#=w3-X>YfAGPgu=gv+JGX4o$p~P7G|eN$5D>Nq&(uS5wYe4qCOen`@q0^ zUnZe9KkrA%m4E<=wsCs{N#AM>3CAzmgAM0 zH2rxHnYTy?x2dX3)KjtPA)ecthkP?{_aN9vTuv#<_(vZ>gpvgWmw&u7Td=?2DF<{c zIbe5j)xH~bAA&#tg1}vm}%PD-*GKNY*d}XmxaY?YFnZ0|txl`SdqMz(95c~w( z9zAOdQky70wR>+(sIO|*=DqoBuPu|l^;t+@69$Y3_LQo~&8Us|jn}9OWS|mW%4yUL z?bjL1NBd8IuSchca#~VHrx~LO2fu>MUrygoIwzPSAJL!&b|ra|VH|zNGLm z{|g8&g#-j=aAx#Y1)M_k>~eh2EEQF$qgr=;JhCld%bU~Qs6DcrwWb>^$q&X@7{wTC z*+_aQW}<5juxny3pcKL$c%i)(rRwoTvnjg?X;QZ(zqyC|(?gYiP*K5c^(Q zXP&tf_|DCB^T7bw+-xS+a8;9+E8cBwGLt?b8o2(@(&nI6Y(+(cAIb$UL6D%-0^>S< z>t15~2>RNYuLJ&<7d+GgN-hVdg^l)D%}|${m$i zj&8&Zguc9qI)h&zOY`ZCP!!({52UQbe8aq#uYaK&Cdy9;@{H)CIA<<;wx#&@2M__k zAtY1g`L-|#bx#m(^B6pdS zg&)2_J&ng(V)2W+?^h0arK}rr_ztRYES22?*aF{akbF@%4Z2zIZR4-dft+6pN)Q*w 
zhu;OVTkjZo)RUNIzSMK=bKHRXQ{+7X+43*LuRqq2)R7XSwoM69)GavA77Cimm$1=$ z*oWkoLnyd9jA*Y(wL_!NK2n~I?$U7=9xO*?iQ*Op)kk9Ic7lBhcxVvOGgAzR9xlpF zN;V~*2Js)GD9<^2kq|&2jlJBd-J$UI*20+ILW;&-0^&C@QMIVD_@~H&UaLJaRuZ2%xOG$4vhRVV zqffy*a?9Zv$vVES>~#RkE!FB)nY*H;vw?zTG2;Brol>Z6By+F^l;0i(d)FX?hZ%^b z9p-2@pMv0hRaX_}J>%dmiZ@52msxF$tO5GcmGcb8L#^1g3C_V^1NyEAQ@0Tij?sHb zIzHihcc5(Q<}i&#gHB(tPXclyZNAOVN4kHGd?{8nL`cHA<1AV%i2CI<+?+&z3r%&h zF*gePjGi-mEf|kn;=)G)IyCd_u&X9kD18JQ}>nmvUMLLI+eN9KwHL&CQ z@_Q^(YmXismlB$EtGHUF+&kSWb^NhX>ca=~>7kUF(6B0c`t+0cbGuW$Y{oIetmA@}PeT9lM%emOwI{Q*d9jLa-ZcpG#EaKN}zYN<+Kv!V8q|g%@A! zD{bSW{K_|$p{`VD(I7Rv4cYvuowsR`Mr$55_Jh+p+tK`U*cbx1d)IUvzf}7V7Rd$b zF62h4P;6s|2ug(%O7J90RjcJZh$wlpRNp}$c~&i<>Rw|!D1)K$v#w`moE0OxM1fjo z(LXr)p*0@mj!m~Uv|vW2bX4|`mBaIgOw#4Q$fN|RRfSxsAe4Eqe9C$_VC+s<4`fyg zJlw!{`p+OT5I2AxBCwn3nn5;w$0^n!pc$l^uA=~~Qv@xLIr7Lydg#tp4uxh-pooMy zyHQ_t6$3S7Q*DR(oM)&fmN&JFX#~2rK3@WFV1m)JYzD_ z!5a2mGvan?MB&{J+NMX|R;EtO^_=pxdAIQVf=fA1LFYQE%nCDrpK=eFwH`458)>LL z_{@h(?SLaoDJP7DfLlD-XQA~{gKP{TUFPI zCeu?Pohc7M22iA2Q!#O4qs=)VqM75dM{~9I=pb#&O>CDE1QfFLOErs4bIgQY&pYaU z+Sl~!!t_UG;V;m)HS5>!R$CK~0xFX8zXzOJg|Q$&+Sysk?L(0e(QbuOLxapbOYX}} zCiI(HsE=%n&g`4R7uA~4d`^YGYX(+x>iiUBVZ7ATWU>qY# z;k8E88@Pg2y%e+ioR*qx)w}cvY2oR;-3|`>OX>$qEZs)~q6VL}E9xQ8hn-!b#*SNa zCe|CUw)R`!^BAea~3xV{(uF`6AM=#7JppS?xJ z`pLvXD-HQ$bLzZ<(k;oxKohhI92;j5(dHjX0say$R2W@(w8ceN3}xIbj>vfx_d{Y; z*_0|?AF*A<|0u;tSw$VF#PiNIT;BsaUaPU@DN*GG++scm z-t1)8M?R?@cXKT^DgEGJPEwu4mF}?ok1*QWreF6va=yK#0oP-RM_<;FC)?G0AX|h7 z+$Vb4S9YDHYge!*A~e(9bByP^H)Z=EUCK$VEl$tg3d9~eZ@O5modwZwVuF95(y04aV~Im zbh)ol(~4m6%>tjQdg+>~VT|Q_zFw_I-)j_?gx+u*3wXkFwN&uOQNOqD33`SQDjPrf zvf3878C-#0UOk^6&^b6jM88})vTOSBM1+X{^3+F0&rPDoLcQ7pEgzjIG)*X&8R7D$ z9y~HOO!?43(e?@cPLCnZrqlPd0^8;_cW|Sps@aJpR8gZbBeM1+Rb-ONr$7WQv1Lw@ zR6d1+2KfQoH9=-h437uCf3D()gG0>W~a_k$q9{-hi%$KlWuV z$*lw<4it?0M`uMLcJErmU*2%{Tn3 zaP9tV&42`($os^#J4dbSPyeCu>F17M>qzW1{`9|7QbEq^nwTocNnHbdI{WvgW9ta0 zb#^KH_R~4?Gwt>Fb}9P<<%R|ucdAj&2Z12s&5?cGDi6SRQd`~yT96OUSErIh>s5jO 
zH8Kj}<_wDwM&MOIUS0Z!h@|5m)z2wLnk{Mt?Fbw_Wq9@$xnn(_$9kiEFiHn50;kn4 zUWxV61ivzTr*EYx-H-RAw(Jh4LB`T0vFMQ-m_^?VDX`&MC78CxyX2PhTO)aTBDWzW znss2kxv5(a&DFu5?*>WL$iBPrS- zMA|2>u;(Ri8pV+JU`v?4R#G?NVs3e7PYH#SuWAWPR8(Gl7bGzoCw0`Z{aq^@1m4V% zp>sZXbTbBq@f;Rh73aAVCE1i%AkHNd`|EomnCRmCN;{v8#0H%c$~PMjZY5W#Qp6>a z4$*cLb9Jt0*SCnExz8w@osuDM2|iPEw@$v5ELOU?QzK}J|JnwYw=|%xr4U#M7K*jc{(?JvE_4q*;sl?#;Y~#yvy!Ll)Y(mXMRZh@0OF^yo#+GB zlYFuqnrNnyKs3XAK}=GkO(_yh8LYWL#(R+*t|M%ltcwe8)#ShRs#y;GFds0sM?O<3c}}60(p)TN zKy=0RHR(vb*Y?C>*rr4+F01;h&!xuaM%Qjk(LqJoqo&3T6D^bsH<0ekpAi0XtuMei zDYLX=P6D4ZikPdxhhtf;kqs$1STPP7TCmk-TN$lvuhXUx(B`d@Z@?Ys8~9kA7XCcVAQb zw*T3X_CCz&wgN05&rh9lz7*k>$>2%7pScoLUGmgtEb}7^)m-95=68Xg@-+VV9i~$P zeTn1*BlXahUi9ab^|!ft_3z@x)&%e!J@5VQ9uxm!qqwyQt5_8_xhBXb*f-nGK%2kGMY(mCmE^XYUFY51#iL*>Dlme=b+U6Fc^}T`GAg3T zP-OreQ$AC7XhJL4<}S7@I`5{dt((=jWi-=m{%bchN@OYb&|V9Skv*ST>+#>upAO!{ zrPFjcs041S&0FCq%?8ct+dmPr&sNrvHumpUW2Fv}I%M80 zsf~>fVKz@-PGZ{`a;{@H`}rQamw!3m=AOjc(D~Pnqyn^4Z00e6F(;M5uDr;b)Zuzz zyaI@+C2S%3H;S5neqQhy?fye3MA0NTNFn9Xz$?=BDUUZ@hDykBUOyVOR~0yEoH_B6e<^j~LJRG&BV0sPi|KMB_e& zs?OCvZ4>xZ@_Kg8y{Cwj`sR|gzu&M~q`J=gby76m4R1jGpZ2aj9Lltf&qkX_v`*<@ zQc8tvsg%Ra#v(b@b`ZrBwBi!y}m2cb+P+h-}mix?H~Lx{PE2FKJW9~_wWAwezzD$*58QD0rkT|oOTIXJm2I& zU%PyrmzYU_wsGBXPMeuuGUd@;1rhKa*Wb8lA`|B6D0;ebh>EX8X0%5Dj&kQ_a8fSe zu5@IyNfj5$H6FEbqI!BC2{fTg1|+{5xnv~!z9X#qR!FMLGsd?l*}`uNHpf&XtcY}o z3do^*BpUDVtR;F$1fnDxRk;=p>k6I%fT)bMTK{0QMI+IZ^Vlu!7iyEz^`3g%L&D_F z8)cPZ4>BGvSvuKP1W~qgwPp};BK0xbwJa&S-1@1@oJZVpSD2>8ZDV1Y#|Xg^QFn?JC;Qi*A9K zrkJS-HhKnu=jy`JC&rd|uT^M}(DMlMl5K9ku`l&trehq}i-`;bG|Dna4){xmY#^8s zN^Di1FX$QWoc4*CrVbDHSh;O^cgNS(s@qpS!CNW4cR8?^@m~n~zu*D4LK*ZIwJUMT zj99|q$l|=3_=o-0-l0hgL>dyK+Iy&-%b_;hJHv_7y5e3aC8jc0Dy2BDiJw<7MlM?` z5FImXGYhloo}}50OY$?T!_DWnJuR32Nu;A0C8hGB`BC5g#7)L-;n=jO#I&y4^_H$2 zDdT$2Cqnx&>#|gK@hvn0wOx)Kb9%FU%`Y;GR~<0?dGp$hqRDZ9OlMc<00^9V{Ciq_ zH~7Ku5>vp%Z3E={gefuJy8B}W_H1JsC^0gYbh@f6g^gHa>Z7%K`+VNKlVfy$G+a`oI2BZ&Iu-XpA? 
zBtw}*H1egb^Iw9OYlH{B>CzzFEI02(Dxg7@(A z=9nD)JbUgV}EvTD|t^8 zWStM1__N4r0G*qCL^WPV=v_^dn`xSwstoGe0%0}A!E6~Mbk+&M<3!I2qB7(SI@YC zJa}xkP4D8pZ!nghv%q@Z4?F@WR+l7poPB-wD%?;J1De>*HV%dPM38En#%irr>Cg;x zRV!N;LYI`_SZ3yaApWKjxghE{VUBuv|I6(IW@#hibJvN*-<}7z&wdmgIDwJJxg23$ zxuymK?$e!HZ`=12bd6)v?41(LMkBmhLktfSmc|jP{Q*+&C3lPy6f|ub7Lw_o@m

      KJPS?;6=bJ(!${ zB%l7i_i7u2qz$-?s zL}DmxP-18XkpVsA#SYn8wzx6!W)KH^3`jl^xg#NtI$ev#^eJ>vbVo8I=(=6-8n%38l6TmwOIyiXQouwNU)9zlU@@0 z)>>^c&@&WO1Ld~bxaGOfrm{D(EOgRPdnd-9-&S89@~%Q9`xfN_$OfI%gHe*B>&dk3 zp&msj>GXVp-^(7_@pkr|O$Yr?gY<4$#0koyHRHA(9C?W1G=nHT32rC=iR=eVF^ef- zP)NL_0wSN>48j$9GlM9Cf*x@JzIPcM5Q`_vFraTdwGqh-#EEeQ}vNTPJn5z}3~6E$x`hPG8F4p3OTn|%hh71<5)-%q0F)L} zoan(B#Ap``&`|{#12)G+e_F~A*Q_$LYKx6rSmz2H^?bN(P0VJmC~vHBiM5q=^Ly&B z$BCqU2PZc^D;(abiBji&$F?8sr5tIOWE0~dH)mNN=KyJ>^Rpb9$Ex#ubbjwW#(IFx z`_^px2DuxeEnd&`yK`!F!aQ?%e<;S_iw zcL#HX>vn|;@`X*dSFUh%V-8h3K2mf}ZPohS9_Z7m$NToF89(e6N!j9J<70+Oup9X4 z;2F>3vG`-Ka-Y$V45L7qIlI+ z>F4AhUcN+*M&YGY9K009F zh5aQa_}TwW2neP_c+nIIK!;g+p5EvCk{3{I%dlO>(V(}SfrR}v1Cr8Rd<|OkD2sxh zUo8J1Y)NX~DZ&1J0&_D*5gwaI572aeJwPyn34qie74~2DqKsxWyQKZMvq7Is%;FF` zlG#u?z4IC`JKp(%LEA%OTx&mhO!PVV8AuUxn^bqYbNo_NRCuL|fqT;+#{Jx&CupbG zRlhq~F&!`IaPFfj%usJk`(xH`>ZX}_c=u)ROmXmOEqY27IgFUb2Z6U^wKG}zg9IpR zOR@m1Gj0m+4vzlp377kr>KeLw1qYRwGKZDQ3U`oa3c{pY92hw z8nMZH#S*{qvN5`}A3j~B)%F0@+gf>5>dcL#a|`bZ3C%i1z}$j8 zoFARYNG#gXJ=juV$7p1rmyL{$d?)4)&ofWbjToCMY8!P!Gqw6I^87FH&We$PJZs$} z*v(_BMLSpO8Z&NU9iLxLI}vm~!Emf3T(5p@j@a5t`I9$R8@D2K^abeUKs8eWEdalW zY^x4>V5}Sevbbbo*pL;LJ!NY_MHRc=f!!mYv)o%qZauVnZn`dYAIG50*Klw?0*OyA zAvmA^LluzWA>=CN}b}^X@&6^p|H&oYb=)auizLGi*-_V)c&_Ti66<@SZi` zLJLbn5A|EKtb0IV`mI9nm}V&?YnwVHWZ77_sSoTfzT37n&d;zppRZRPf~eEgH8;EM z!7#MQ?toacMxC~n09t;*v*T{9-qQj&AYZ;U1#q4ze`?s5)E$6defbeMEj|tyoAo&b zn;`q^G#Lk7TAB0rV8Aq9S(@-?W>^2-H&myTDHa(wcZjdBI-5Apqcz7K9IN zmhRNF>j@YqYP{QT=i|N^L=YdVyaPfD9F`a9FRMX9QjY^Fx?xY@)3oD41)}b+@J6<} z)&&Nj(W^0=b>IbzdIfO@`gbpw7k}!H+zB-KzvKZ466)~BEPWciA~CmFEd}t$)J$-C z2B>I)oWg$}?K11+7t8_s|NRaUK%ugqcNG+AZ*}AvU^`tcs9dk7FqL>4mh0g#3B|oo$+j25?pD*RT(o%yhmhJyn&tZu5)`R1u&_0`#dnD-)G@~c{~4S>CfLk ze{1IT)J?3GplBV}LIzF@V3o~t+PaTf>)qI+z}iBy580g7WA{=h+_>MxI%tzpc*d#( z8cBy-Mi$14arZUZHRIRe7E$2Mjd7Q;@p-_x%cQSs=Pv92U++&)+Frze91X!5 xVIPSRf1SUcm(&g1o!6P?JmnbOB2|v#Ygf3p6tISC+NwiNHOGCPPzueo{0k`s+hPC! 
From 876615d2e21a2aeebc712443bb1f04826586c9b7 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Tue, 14 May 2019 12:43:02 -0700 Subject: [PATCH 16/31] cleanup --- Classification/.DS_Store | Bin 6148 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 Classification/.DS_Store diff --git a/Classification/.DS_Store b/Classification/.DS_Store deleted file mode 100644 index 0bfcf6fb600739ecf525172da4856012600e0ba9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKJx{|x41I=Fs>ITf@%{q;;Hb*LT*OREDuh&&p;Thb#^3JqM^U3FD+Ab)@8x`c zKIIK@3_zC0_6}G8n9~*Ut;^W-TzzC$5ow5GHM;ijy5Dx)ev~^EQ0@%(IHJWn2K*Tw z@qrEAu)8eV;0Zf?;qx5i)hzK$ddq`BiK$yNo(iM_sX!`_3jALMv~kkrH;%cc0;xbM z@S}i!9|~Qu2KJ8j>EL1`08xMA&G>Ay1hHsRVs5k*-(KVuedc)$>>Z8HxY3FEBcQsZr2@a9zzO~CBVzyn From 6e9526248589f6c76f96cf2c8e37651aa305917b Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Tue, 14 May 2019 12:59:21 -0700 Subject: [PATCH 17/31] Update Music Genre for 3.0 --- Classification/Audio/MusicGenre/README.md | 14 +++----- Classification/Audio/MusicGenre/build_csv.py | 17 +++------- .../MusicGenre/music_classification_auto.nml | 18 +++++----- .../Audio/MusicGenre/music_spectrogram.nml | 21 ++++++------ .../Audio/MusicGenre/music_vector_capsule.nml | 34 +++++++++++++++++++ .../MusicGenre/music_vector_capsule_auto.nml | 31 +++++++++++++++++ 6 files changed, 93 insertions(+), 42 deletions(-) create mode 100644 Classification/Audio/MusicGenre/music_vector_capsule.nml create mode 100644 Classification/Audio/MusicGenre/music_vector_capsule_auto.nml diff --git a/Classification/Audio/MusicGenre/README.md b/Classification/Audio/MusicGenre/README.md index e2b6d19..be542d9 100644 --- a/Classification/Audio/MusicGenre/README.md +++ b/Classification/Audio/MusicGenre/README.md @@ -3,13 +3,13 @@ These sample .nml files are for training a classification model using audio data # Data Data for this example is from the [Music Genres 
Dataset](http://opihi.cs.uvic.ca/sound/genres.tar.gz). The dataset features 100 audio samples from 10 music genres. -To run this example, first you will need to download and pre-process the raw data for the music classification task using the included ```build_genres.py``` script: +To run this example, first you will need to download and pre-process the raw data for the music classification task using the included ```build_csv.py``` script: ```bash -$ python build_genres.py +$ python build_csv.py ``` -If the script failes, make sure that you have installed all the package dependencies of this script which are listed at the top of the script: +If the script fails, make sure that you have installed all the package dependencies of this script which are listed at the top of the script: `tarfile, shutil, pathlib, requests, natsort, and random`. Missing packages can be installed using pip: ```bash @@ -18,11 +18,7 @@ $ pip install Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: ```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/Classification/Audio/music_classification_auto.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/Classification/Audio/training_data.csv" ; +$ neopulse train -p -f /full/path/to/music_classification_auto.nml ``` NOTE: Audio files are big! Be careful with your batch size, or you may get out of memory (OOM) errors. If that happens, reduce the batch size. @@ -33,6 +29,6 @@ For more information on using the AudioDataGenerator visit the [Data section] of # License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
+Tutorial materials are published under the MIT license. See LICENSE for commercial, academic, and personal use. You are welcome to modify these tutorial files. If citing please link to this repository. diff --git a/Classification/Audio/MusicGenre/build_csv.py b/Classification/Audio/MusicGenre/build_csv.py index 9a26c12..f9c1c66 100644 --- a/Classification/Audio/MusicGenre/build_csv.py +++ b/Classification/Audio/MusicGenre/build_csv.py @@ -4,6 +4,7 @@ from random import shuffle import requests + from natsort import humansorted @@ -43,8 +44,6 @@ def write_file(validation_split): # Sort the genres alphabetically. genres = humansorted([str(p) for p in Path('genres').iterdir()]) - cwd = Path.cwd() - #'/DM-Dash/NeoPulse_Examples/Classification/Audio/MusicGenre' with open('label_names.txt', 'w') as of: of.write('Class,Label\n') for index, d in enumerate(genres): @@ -52,8 +51,7 @@ def write_file(validation_split): # Construct lines for the csv file in the form: # /path/to/audio/file.au,class_number # where class_number is the index of each genre class. - - csv_lines = humansorted([str(cwd) + "/" + str(p) + ',' + str(index) + '\n' for p in Path(d).iterdir()]) + csv_lines = humansorted([str(p) + ',' + str(index) + '\n' for p in Path(d).iterdir()]) # shuffle the list: shuffle(csv_lines) # calculate the index on which to split the list into training/validation @@ -68,22 +66,15 @@ def write_file(validation_split): shuffle(train) shuffle(valid) - # Write the training CSV file. + # Write the CSV file. with open('training_data.csv', 'w') as of: - of.write('Audio File,Genre\n') + of.write('Audio,Genre\n') for l in train: of.write(l) for l in valid: of.write(l) - # Write the querying CSV file. 
- with open('querying_data.csv', 'w') as of: - of.write('Audio\n') - for l in valid: - of.write(l.split(',')[0] + '\n') - - if __name__ == '__main__': # Download data if necessary diff --git a/Classification/Audio/MusicGenre/music_classification_auto.nml b/Classification/Audio/MusicGenre/music_classification_auto.nml index 59766f1..10aa51c 100644 --- a/Classification/Audio/MusicGenre/music_classification_auto.nml +++ b/Classification/Audio/MusicGenre/music_classification_auto.nml @@ -1,21 +1,21 @@ oracle("mode")="classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Audio/MusicGenre/training_data.csv" ; + bind = "training_data.csv" ; input: - x ~ from "Audio File" - -> audio: [maxlen = 1366, nbands = 96] - -> AudioDataGenerator: []; + x ~ from "Audio" + -> audio: [maxlen = 1366, nbands = 96] + -> AudioDataGenerator: [] ; output: - y ~ from "Genre" - -> flat: [10] - -> FlatDataGenerator: [] ; + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator: [] ; params: batch_size = 5, validation_split = 0.2 ; architecture: - input: x ~ audio: [maxlen = 1366, nbands = 96]; + input: x ~ audio: [maxlen = 1366, nbands = 96]; output: y ~ flat: [10] ; x -> auto -> y ; @@ -26,5 +26,5 @@ train: loss = auto, metrics = ['accuracy'] ; run: - epochs = 4 ; + epochs = 2 ; dashboard: ; diff --git a/Classification/Audio/MusicGenre/music_spectrogram.nml b/Classification/Audio/MusicGenre/music_spectrogram.nml index 91cbcd6..5474e82 100644 --- a/Classification/Audio/MusicGenre/music_spectrogram.nml +++ b/Classification/Audio/MusicGenre/music_spectrogram.nml @@ -1,19 +1,19 @@ source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Audio/MusicGenre/training_data.csv" ; + bind = "training_data.csv" ; input: - x ~ from "Audio File" - -> audio: [maxlen = 1366, nbands = 96] - -> AudioDataGenerator: [feature = 'spectrogram']; + x ~ from "Audio" + -> audio: [maxlen = 1366, nbands = 96] + -> AudioDataGenerator: [feature = 'spectrogram'] ; output: - y ~ from "Genre" - -> 
flat: [10] - -> FlatDataGenerator: [] ; + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator: [] ; params: batch_size = 5, - validation_split = 0.2; + validation_split = 0.2 ; architecture: - input: x ~ audio: [maxlen = 1366, nbands = 96]; + input: x ~ audio: [maxlen = 1366, nbands = 96] ; output: y ~ flat: [10] ; x -> Reshape: [[1366,96,1]] @@ -61,7 +61,6 @@ train: optimizer = Adam:[lr = 0.0001, beta_1 = 0.9, beta_2 = 0.999, epsilon = 0.00000001], loss = 'categorical_crossentropy', metrics = ['accuracy'] ; - run: - epochs = 4 ; + epochs = 2 ; dashboard: ; diff --git a/Classification/Audio/MusicGenre/music_vector_capsule.nml b/Classification/Audio/MusicGenre/music_vector_capsule.nml new file mode 100644 index 0000000..b8979a7 --- /dev/null +++ b/Classification/Audio/MusicGenre/music_vector_capsule.nml @@ -0,0 +1,34 @@ +source: + bind = "training_data.csv" ; + input: + audio ~ from "Audio" + -> audio: [maxlen = 1536, nbands = 24] + -> AudioDataGenerator: [] ; + output: + label ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[] ; + params: + batch_size = 32, + shuffle = True, + shuffle_init = True ; + +architecture: + input: audio ~ audio: [maxlen = 1536, nbands = 24] ; + output: label ~ flat: [10] ; + + audio -> Reshape: [[1536, 24, 1]] + -> Conv2D:[filters = 128, kernel_size = 9, strides = 1, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Vector:[capsule_dim = 8, channels = 32, kernel_size = [9,9],strides = [2,2], padding = 'valid', name = 'primarycap_conv2D'] + -> DigitCaps: [num_capsule = 10, capsule_dim = 16, routings = 3, name = 'digitcaps'] + -> ClassCaps:[num_capsule = 10] + -> label ; + +train: + compile: + optimizer = Adam:[lr = 0.0001], + loss = margin_loss, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Audio/MusicGenre/music_vector_capsule_auto.nml b/Classification/Audio/MusicGenre/music_vector_capsule_auto.nml new file mode 100644 index 0000000..62a5b37 --- /dev/null +++ 
b/Classification/Audio/MusicGenre/music_vector_capsule_auto.nml @@ -0,0 +1,31 @@ +oracle("mode") = "vector_capsule" + +source: + bind = "training_data.csv" ; + input: + audio ~ from "Audio" + -> audio: [maxlen = 1536, nbands = 24] + -> AudioDataGenerator: [] ; + output: + label ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[] ; + params: + batch_size = 32, + shuffle = True, + shuffle_init = True ; + +architecture: + input: audio ~ audio: [maxlen = 1536, nbands = 24] ; + output: label ~ flat: [10]; + + audio -> auto -> label ; + +train: + compile: + optimizer = Adam:[lr = 0.0001], + loss = margin_loss, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; From a9009e59eda990e4aad7b21533ba0b33d5f60a59 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Tue, 14 May 2019 15:56:39 -0700 Subject: [PATCH 18/31] DICOM files --- .../Dicom/IXIT1_BrainSex/.gitignore | 4 ++ .../Dicom/IXIT1_BrainSex/build_csv.py | 12 ++-- .../Dicom/IXIT1_BrainSex/dicom_sex.nml | 55 ++++++++----------- .../Dicom/IXIT1_BrainSex/dicom_sex_auto.nml | 46 ++++++---------- 4 files changed, 51 insertions(+), 66 deletions(-) create mode 100644 Classification/Dicom/IXIT1_BrainSex/.gitignore diff --git a/Classification/Dicom/IXIT1_BrainSex/.gitignore b/Classification/Dicom/IXIT1_BrainSex/.gitignore new file mode 100644 index 0000000..d32d485 --- /dev/null +++ b/Classification/Dicom/IXIT1_BrainSex/.gitignore @@ -0,0 +1,4 @@ +raw_data/ +images/ +training_data.csv +querying_data.csv diff --git a/Classification/Dicom/IXIT1_BrainSex/build_csv.py b/Classification/Dicom/IXIT1_BrainSex/build_csv.py index b0c9760..5eb7a5d 100644 --- a/Classification/Dicom/IXIT1_BrainSex/build_csv.py +++ b/Classification/Dicom/IXIT1_BrainSex/build_csv.py @@ -4,8 +4,9 @@ from pathlib import Path from random import shuffle -import pandas as pd import requests + +import pandas as pd from natsort import humansorted @@ -22,7 +23,7 @@ def download_data(): nii_f = 'IXI_T1.tar.gz' xls_f = 'IXI.xls' - + if not 
Path('raw_data/' + nii_f).is_file(): r = requests.get(nii_URL, stream=True) with open('raw_data/' + nii_f, 'wb') as f_z: @@ -58,7 +59,7 @@ def write_file(validation_split): sex_id = row['SEX_ID (1=m, 2=f)'] sex_id -= 1 if IXI_id in pdict: - csv_lines.append("{0},{1}\n".format(str(cwd) + "/images/" + pdict[IXI_id], sex_id)) + csv_lines.append("{0},{1}\n".format("images/" + pdict[IXI_id], sex_id)) shuffle(csv_lines) @@ -75,7 +76,6 @@ def write_file(validation_split): for l in valid: of.write(l) - # Write the querying CSV file. with open('querying_data.csv', 'w') as of: of.write('data\n') @@ -86,7 +86,7 @@ def write_file(validation_split): if __name__ == '__main__': # Download data if necessary - #download_data() + download_data() # Write files with 20% validation split - write_file(0.2) \ No newline at end of file + write_file(0.2) diff --git a/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml b/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml index 15e3d7a..d2f630d 100644 --- a/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml +++ b/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml @@ -1,11 +1,24 @@ -architecture: - input: - img ~ dicom: [shape = [64,64,64,1]]; +source: + bind = "training_data.csv" ; + input: + x ~ from "data" + -> dicom: [shape = [64, 64, 64, 1]] + -> DicomDataGenerator:[spacing=[2.0,2.0,2.0], normalise_zero_to_one = True, flip=True] ; output: - label ~ flat: [2]; + y ~ from "label" + -> flat: [2] + -> FlatDataGenerator:[] ; + params: + batch_size = 8, + shuffle = True, + shuffle_init = True, + repeat_per_load = 10 ; - img - -> Conv3D:[16, kernel_size=[3, 3, 3],strides = [2,2,2], padding = 'same',activation='relu'] +architecture: + input: x ~ dicom: [shape = [64,64,64,1]] ; + output: y ~ flat: [2] ; + + x -> Conv3D:[16, kernel_size=[3, 3, 3],strides = [2,2,2], padding = 'same',activation='relu'] -> Conv3D:[16, kernel_size=[3, 3, 3],strides = [2,2,2], padding = 'same',activation='relu'] -> MaxPooling3D:[pool_size=[2, 2, 2], padding = 'same'] -> 
Conv3D:[32, kernel_size=[3, 3, 3], padding = 'same',activation='relu'] @@ -14,35 +27,13 @@ architecture: -> Flatten:[] -> Dense:[256, activation='relu'] -> Dense:[2, activation='softmax'] - -> label; - -source: - bind = "/DM-Dash/Neopulse_Examples/Classification/Dicom/IXIT1_BrainSex/training_data.csv"; - input: - img ~ from "data" - -> dicom: [shape = [64, 64, 64, 1]] - -> DicomDataGenerator:[spacing=[2.0,2.0,2.0],normalise_zero_to_one = True,flip=True]; - output: - label ~ from "label" - -> flat: [2] - -> FlatDataGenerator:[]; - params: - batch_size = 8, - shuffle = True, - shuffle_init = True, - repeat_per_load=10; + -> y ; -train : +train: compile: optimizer = Adam:[lr = 0.0001], loss = categorical_crossentropy, - metrics = ['accuracy']; + metrics = ['accuracy'] ; run: - epochs = 20; + epochs = 2 ; dashboard: ; - - - - - - diff --git a/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml b/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml index b432433..acf4b0e 100644 --- a/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml +++ b/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml @@ -1,42 +1,32 @@ oracle("mode")="classification" -architecture: - input: - img ~ dicom: [shape = [64,64,64,1]]; - output: - label ~ flat: [2]; - - img - -> auto - -> label; - source: - bind = "/DM-Dash/Neopulse_Examples/Classification/Dicom/IXIT1_BrainSex/training_data.csv"; + bind = "training_data.csv" ; input: - img ~ from "data" - -> dicom: [shape = [64, 64, 64, 1]] - -> DicomDataGenerator:[spacing=[2.0,2.0,2.0],normalise_zero_to_one = True,flip=True]; + x ~ from "data" + -> dicom: [shape = [64, 64, 64, 1]] + -> DicomDataGenerator:[spacing=[2.0,2.0,2.0],normalise_zero_to_one = True,flip=True] ; output: - label ~ from "label" - -> flat: [2] - -> FlatDataGenerator:[]; + y ~ from "label" + -> flat: [2] + -> FlatDataGenerator:[] ; params: - batch_size = 8, + batch_size = 5, shuffle = True, shuffle_init = True, - repeat_per_load=10; + repeat_per_load = 10 ; + 
+architecture: + input: x ~ dicom: [shape = [64,64,64,1]] ; + output: y ~ flat: [2] ; + + x -> auto -> y ; -train : +train: compile: optimizer = Adam:[lr = 0.0001], loss = categorical_crossentropy, - metrics = ['accuracy']; + metrics = ['accuracy'] ; run: - epochs = 20; + epochs = 2 ; dashboard: ; - - - - - - From 88bed184457f1db399d9daccb4b41f9b0cdc0615 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Tue, 14 May 2019 15:57:23 -0700 Subject: [PATCH 19/31] CIFAR MNIST Sentiment and Video update to 3.0 --- Classification/Image/CIFAR10/build_csv.py | 17 +--- .../Image/CIFAR10/cifar10_call_auto.nml | 79 +++++++++--------- .../Image/CIFAR10/cifar10_choice_auto.nml | 79 +++++++++--------- .../Image/CIFAR10/cifar10_dist_auto.nml | 79 +++++++++--------- .../Image/CIFAR10/cifar10_full_auto.nml | 43 +++++----- Classification/Image/CIFAR100/build_csv.py | 18 +---- .../Image/CIFAR100/cifar100_call_auto.nml | 80 +++++++++--------- .../Image/CIFAR100/cifar100_choice_auto.nml | 80 +++++++++--------- .../Image/CIFAR100/cifar100_dist_auto.nml | 80 +++++++++--------- .../Image/CIFAR100/cifar100_full_auto.nml | 46 +++++------ Classification/Image/MNIST/build_csv.py | 19 ++--- .../Image/MNIST/mnist_call_auto.nml | 78 +++++++++--------- .../Image/MNIST/mnist_choice_auto.nml | 78 +++++++++--------- .../Image/MNIST/mnist_dist_auto.nml | 78 +++++++++--------- .../Image/MNIST/mnist_full_auto.nml | 44 +++++----- .../Image/MNIST/mnist_matrix_capsule.nml | 34 ++++++++ .../Image/MNIST/mnist_matrix_capsule_auto.nml | 31 +++++++ .../Image/MNIST/mnist_vector_capsule.nml | 33 ++++++++ .../Image/MNIST/mnist_vector_capsule_auto.nml | 31 +++++++ .../Text/Sentiment/sentiment_call_auto.nml | 2 +- .../Text/Sentiment/sentiment_choice_auto.nml | 17 ++-- .../Text/Sentiment/sentiment_dist_auto.nml | 5 +- .../Text/Sentiment/sentiment_full_auto.nml | 2 +- .../Text/Sentiment/sentiment_multi-GPU.nml | 2 +- Classification/Video/HumanAction/build_csv.py | 3 +- .../Video/HumanAction/video_class.nml | 81 
+++++++++---------- .../Video/HumanAction/video_class_auto.nml | 42 +++++----- 27 files changed, 639 insertions(+), 542 deletions(-) create mode 100644 Classification/Image/MNIST/mnist_matrix_capsule.nml create mode 100644 Classification/Image/MNIST/mnist_matrix_capsule_auto.nml create mode 100644 Classification/Image/MNIST/mnist_vector_capsule.nml create mode 100644 Classification/Image/MNIST/mnist_vector_capsule_auto.nml diff --git a/Classification/Image/CIFAR10/build_csv.py b/Classification/Image/CIFAR10/build_csv.py index 60f2228..284dfc6 100644 --- a/Classification/Image/CIFAR10/build_csv.py +++ b/Classification/Image/CIFAR10/build_csv.py @@ -3,8 +3,9 @@ import tarfile from pathlib import Path -import numpy as np import requests + +import numpy as np from imageio import imwrite from natsort import humansorted @@ -90,7 +91,6 @@ def write_data(): for index, label in enumerate(names[b'label_names']): of.write(str(index) + ',' + str(label) + '\n') - # write training csv with open('training_data.csv', 'w') as of: of.write('Image,Class\n') count = 0 @@ -99,20 +99,9 @@ def write_data(): for ind, image in enumerate(image_list): file_path = image_path + str(count) + '.png' imwrite(file_path, image) - of.write(str(Path(file_path).resolve()) + ',' + str(labels[ind]) + '\n') + of.write(str(Path(file_path)) + ',' + str(labels[ind]) + '\n') count += 1 - # write querying csv - with open('querying_data.csv', 'w') as of: - of.write('Image\n') - count = 0 - for file_name in data_files: - image_list, labels = load_data(file_name) - for ind, image in enumerate(image_list): - file_path = image_path + str(count) + '.png' - imwrite(file_path, image) - of.write(str(Path(file_path).resolve()) + '\n') - count += 1 if __name__ == '__main__': diff --git a/Classification/Image/CIFAR10/cifar10_call_auto.nml b/Classification/Image/CIFAR10/cifar10_call_auto.nml index 4ec42ae..812b2af 100644 --- a/Classification/Image/CIFAR10/cifar10_call_auto.nml +++ 
b/Classification/Image/CIFAR10/cifar10_call_auto.nml @@ -1,47 +1,48 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR10/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [10] ; - x -> Conv2D: [32, [3,3]] - -> Activation: ['relu'] - -> Conv2D: [32, [3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: auto - -> Conv2D: [64, [3,3]] - -> Activation: ['relu'] - -> Conv2D: [64, [3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: auto - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: auto - -> Dense: [10] - -> Activation: ['softmax'] - -> y ; + x -> Conv2D: [32, [3,3]] + -> Activation: ['relu'] + -> Conv2D: [32, [3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: auto + -> Conv2D: [64, [3,3]] + -> Activation: ['relu'] + -> Conv2D: [64, [3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: auto + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: auto + -> Dense: [10] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 
'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR10/cifar10_choice_auto.nml b/Classification/Image/CIFAR10/cifar10_choice_auto.nml index 60cd1bc..ac268c4 100644 --- a/Classification/Image/CIFAR10/cifar10_choice_auto.nml +++ b/Classification/Image/CIFAR10/cifar10_choice_auto.nml @@ -1,47 +1,48 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR10/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [10] ; - x -> Conv2D: [32,[3,3]] - -> Activation: ['relu'] - -> Conv2D: [32,[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop1")] - -> Conv2D: [64,[3,3]] - -> Activation: ['relu'] - -> Conv2D: [64,[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop2")] - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop3")] - -> Dense: [10] - -> Activation: ['softmax'] - -> y ; + x -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [auto(0.25 ? 
0.75 | name = "Drop1")] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop2")] + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop3")] + -> Dense: [10] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR10/cifar10_dist_auto.nml b/Classification/Image/CIFAR10/cifar10_dist_auto.nml index 5102e71..5bb8cdc 100644 --- a/Classification/Image/CIFAR10/cifar10_dist_auto.nml +++ b/Classification/Image/CIFAR10/cifar10_dist_auto.nml @@ -1,47 +1,48 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR10/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [10] ; - x -> Convolution2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_1"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> Conv2D: [nb_filter=auto(dist = "uniform", 
low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_2"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [0.25] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_3"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_4"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [0.25] - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: [0.5] - -> Dense: [10] - -> Activation: ['softmax'] - -> y ; + x -> Convolution2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_1"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_2"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [0.25] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_3"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_4"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [0.25] + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: [0.5] + -> Dense: [10] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR10/cifar10_full_auto.nml 
b/Classification/Image/CIFAR10/cifar10_full_auto.nml index 3dfbfd2..5e83063 100644 --- a/Classification/Image/CIFAR10/cifar10_full_auto.nml +++ b/Classification/Image/CIFAR10/cifar10_full_auto.nml @@ -1,29 +1,30 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR10/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [10] ; - x -> auto -> y ; + x -> auto -> y ; train: - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR100/build_csv.py b/Classification/Image/CIFAR100/build_csv.py index 47d47c4..7ee43ba 100644 --- a/Classification/Image/CIFAR100/build_csv.py +++ b/Classification/Image/CIFAR100/build_csv.py @@ -3,8 +3,9 @@ import tarfile from pathlib import Path -import numpy as np import requests + +import numpy as np from imageio import imwrite from natsort import humansorted @@ -82,7 +83,6 @@ def write_data(): for index, label in enumerate(names[b'fine_label_names']): of.write(str(index) + ',' + str(label) + '\n') - # writing training csv file with open('training_data.csv', 'w') as of: of.write('Image,Class\n') count = 0 @@ -91,19 +91,7 @@ def 
write_data(): for ind, image in enumerate(image_list): file_path = image_path + str(count) + '.png' imwrite(file_path, image) - of.write(str(Path(file_path).resolve()) + ',' + str(labels[ind]) + '\n') - count += 1 - - # writing querying csv file - with open('querying_data.csv', 'w') as of: - of.write('Image\n') - count = 0 - for file_name in data_files: - image_list, labels = load_data(file_name) - for ind, image in enumerate(image_list): - file_path = image_path + str(count) + '.png' - imwrite(file_path, image) - of.write(str(Path(file_path).resolve()) + '\n') + of.write(str(Path(file_path)) + ',' + str(labels[ind]) + '\n') count += 1 diff --git a/Classification/Image/CIFAR100/cifar100_call_auto.nml b/Classification/Image/CIFAR100/cifar100_call_auto.nml index 4e5d234..a9c65f5 100644 --- a/Classification/Image/CIFAR100/cifar100_call_auto.nml +++ b/Classification/Image/CIFAR100/cifar100_call_auto.nml @@ -1,48 +1,48 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR100/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class" - -> flat: [100] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [100] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; - output: y ~ flat: [100] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [100] ; - x -> Conv2D: [32, [3,3]] - -> Activation: ['relu'] - -> Conv2D: [32, [3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: auto - -> Conv2D: [64, [3,3]] - -> Activation: ['relu'] - -> 
Conv2D: [64, [3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: auto - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: auto - -> Dense: [100] - -> Activation: ['softmax'] - -> y ; + x -> Conv2D: [32, [3,3]] + -> Activation: ['relu'] + -> Conv2D: [32, [3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: auto + -> Conv2D: [64, [3,3]] + -> Activation: ['relu'] + -> Conv2D: [64, [3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: auto + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: auto + -> Dense: [100] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 8 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR100/cifar100_choice_auto.nml b/Classification/Image/CIFAR100/cifar100_choice_auto.nml index 80cb05d..dfaedec 100644 --- a/Classification/Image/CIFAR100/cifar100_choice_auto.nml +++ b/Classification/Image/CIFAR100/cifar100_choice_auto.nml @@ -1,48 +1,48 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR100/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class" - -> flat: [100] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [100] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; 
- output: y ~ flat: [100] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [100] ; - x -> Conv2D: [32,[3,3]] - -> Activation: ['relu'] - -> Conv2D: [32,[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop1")] - -> Conv2D: [64,[3,3]] - -> Activation: ['relu'] - -> Conv2D: [64,[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop2")] - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop3")] - -> Dense: [100] - -> Activation: ['softmax'] - -> y ; + x -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop1")] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop2")] + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: [auto(0.25 ? 
0.75 | name = "Drop3")] + -> Dense: [100] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR100/cifar100_dist_auto.nml b/Classification/Image/CIFAR100/cifar100_dist_auto.nml index c2e0ca4..d2bcf54 100644 --- a/Classification/Image/CIFAR100/cifar100_dist_auto.nml +++ b/Classification/Image/CIFAR100/cifar100_dist_auto.nml @@ -1,48 +1,48 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR100/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class" - -> flat: [100] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [100] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels=3] ; - output: y ~ flat: [100] ; + input: x ~ image: [shape=[32, 32], channels=3] ; + output: y ~ flat: [100] ; - x -> Convolution2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_1"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_2"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [0.25] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, 
name="Hyperparameter_3"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_4"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [0.25] - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: [0.5] - -> Dense: [100] - -> Activation: ['softmax'] - -> y ; + x -> Convolution2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_1"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_2"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [0.25] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_3"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_4"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [0.25] + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: [0.5] + -> Dense: [100] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/CIFAR100/cifar100_full_auto.nml b/Classification/Image/CIFAR100/cifar100_full_auto.nml index 2052092..58debfd 100644 --- a/Classification/Image/CIFAR100/cifar100_full_auto.nml +++ b/Classification/Image/CIFAR100/cifar100_full_auto.nml @@ -1,32 +1,30 @@ oracle("mode") = "classification" source: - bind = 
"/DM-Dash/NeoPulse_Examples/Classification/Image/CIFAR100/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[32, 32], channels=3] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Class" - -> flat: [100] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; - + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[32, 32], channels=3] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Class" + -> flat: [100] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[32, 32], channels= 3] ; - output: y ~ flat: [100] ; - - x -> auto -> y ; + input: x ~ image: [shape=[32, 32], channels= 3] ; + output: y ~ flat: [100] ; + x -> auto -> y ; train: - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy'] ; + run: + epochs = 4 ; + dashboard: ; diff --git a/Classification/Image/MNIST/build_csv.py b/Classification/Image/MNIST/build_csv.py index 9e8f637..68250c9 100644 --- a/Classification/Image/MNIST/build_csv.py +++ b/Classification/Image/MNIST/build_csv.py @@ -2,8 +2,9 @@ import shutil from pathlib import Path -import numpy as np import requests + +import numpy as np from imageio import imwrite from mnist import MNIST @@ -54,30 +55,20 @@ def write_csv_file(): Path('images').mkdir(parents=True, exist_ok=True) - # writing training csv with open('training_data.csv', 'w') as of: - of.write('image,label\n') + of.write('Image,Label\n') for index, image in enumerate(train_images): img_file = 'images/mnist_train_' + str(index) + '.png' imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') + of.write(str(Path(img_file)) + ',' + str(train_labels[index]) + '\n') for index, image in enumerate(test_images): img_file 
= 'images/mnist_test_' + str(index) + '.png' imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + of.write(str(Path(img_file)) + ',' + str(test_labels[index]) + '\n') - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('image\n') - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - if __name__ == '__main__': # Download data if necessary diff --git a/Classification/Image/MNIST/mnist_call_auto.nml b/Classification/Image/MNIST/mnist_call_auto.nml index fe12a3c..e52b2d0 100644 --- a/Classification/Image/MNIST/mnist_call_auto.nml +++ b/Classification/Image/MNIST/mnist_call_auto.nml @@ -1,47 +1,47 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[28, 28], channels=1] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Label"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[28, 28], channels=1] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Label"-> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[28, 28], channels=1] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[28, 28], channels=1] ; + output: y ~ flat: [10] ; - x -> Conv2D: [32, [3,3]] - -> Activation: ['relu'] - -> Conv2D: [32, [3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=2] - -> Dropout: auto - -> Conv2D: [64, [3,3]] - -> Activation: ['relu'] - -> Conv2D: [64, [3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=2] - -> Dropout: auto - -> Flatten: [] - -> Dense: [512] - 
-> Activation: ['relu'] - -> Dropout: auto - -> Dense: [10] - -> Activation: ['softmax'] - -> y ; + x -> Conv2D: [32, [3,3]] + -> Activation: ['relu'] + -> Conv2D: [32, [3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=2] + -> Dropout: auto + -> Conv2D: [64, [3,3]] + -> Activation: ['relu'] + -> Conv2D: [64, [3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=2] + -> Dropout: auto + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: auto + -> Dense: [10] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_choice_auto.nml b/Classification/Image/MNIST/mnist_choice_auto.nml index d046f28..57fb2f8 100644 --- a/Classification/Image/MNIST/mnist_choice_auto.nml +++ b/Classification/Image/MNIST/mnist_choice_auto.nml @@ -1,47 +1,47 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[28, 28], channels=1] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Label"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[28, 28], channels=1] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Label"-> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[28, 28], channels=1] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[28, 28], channels=1] ; + output: y ~ flat: [10] ; - x -> Conv2D: [32,[3,3]] - -> Activation: ['relu'] - -> Conv2D: 
[32,[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=2] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop")] - -> Conv2D: [64,[3,3]] - -> Activation: ['relu'] - -> Conv2D: [64,[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=2] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop")] - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: [auto(0.25 ? 0.75 | name = "Drop")] - -> Dense: [10] - -> Activation: ['softmax'] - -> y ; + x -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=2] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop")] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=2] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop")] + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: [auto(0.25 ? 0.75 | name = "Drop")] + -> Dense: [10] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_dist_auto.nml b/Classification/Image/MNIST/mnist_dist_auto.nml index 96d11c1..4b8aea0 100644 --- a/Classification/Image/MNIST/mnist_dist_auto.nml +++ b/Classification/Image/MNIST/mnist_dist_auto.nml @@ -1,47 +1,47 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[28, 28], channels=1] - -> ImageDataGenerator: [rescale= 0.003921568627451]; - output: - y ~ from "Label"-> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: 
[shape=[28, 28], channels=1] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Label"-> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[28, 28], channels=1] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[28, 28], channels=1] ; + output: y ~ flat: [10] ; - x -> Convolution2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_1"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_2"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [0.25] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_3"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_4"), kernel_size=[3,3]] - -> Activation: ['relu'] - -> MaxPooling2D: [pool_size=3] - -> Dropout: [0.25] - -> Flatten: [] - -> Dense: [512] - -> Activation: ['relu'] - -> Dropout: [0.5] - -> Dense: [10] - -> Activation: ['softmax'] - -> y ; + x -> Convolution2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_1"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_2"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [0.25] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_3"), kernel_size=[3,3]] + -> Activation: ['relu'] + -> Conv2D: [nb_filter=auto(dist = "uniform", low = 10, high = 100, cast="int" | count = 10, name="Hyperparameter_4"), kernel_size=[3,3]] + -> 
Activation: ['relu'] + -> MaxPooling2D: [pool_size=3] + -> Dropout: [0.25] + -> Flatten: [] + -> Dense: [512] + -> Activation: ['relu'] + -> Dropout: [0.5] + -> Dense: [10] + -> Activation: ['softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_full_auto.nml b/Classification/Image/MNIST/mnist_full_auto.nml index b4dd9c6..612cb4b 100644 --- a/Classification/Image/MNIST/mnist_full_auto.nml +++ b/Classification/Image/MNIST/mnist_full_auto.nml @@ -1,30 +1,30 @@ oracle("mode") = "classification" source: - bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape=[28, 28], channels=1] - -> ImageDataGenerator: [rescale= 0.003921568627451] ; - output: - y ~ from "Label" - -> flat: [10] - -> FlatDataGenerator: [] ; - params: - number_validation = 10000, - batch_size = 32 ; + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape=[28, 28], channels=1] + -> ImageDataGenerator: [rescale= 0.003921568627451] ; + output: + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator: [] ; + params: + number_validation = 10000, + batch_size = 32 ; architecture: - input: x ~ image: [shape=[28, 28], channels=1] ; - output: y ~ flat: [10] ; + input: x ~ image: [shape=[28, 28], channels=1] ; + output: y ~ flat: [10] ; - x -> auto -> y ; + x -> auto -> y ; train: - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_matrix_capsule.nml b/Classification/Image/MNIST/mnist_matrix_capsule.nml new file mode 
100644 index 0000000..5f99457 --- /dev/null +++ b/Classification/Image/MNIST/mnist_matrix_capsule.nml @@ -0,0 +1,34 @@ +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098] ; + output: + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[] ; + params: + batch_size = 64, + shuffle = True, + shuffle_init = True ; + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ~ flat: [10] ; + + x -> Conv2D: [filters = 32, kernel_size = 5, strides = 2, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Matrix: [] + -> ConvCaps:[channels = 32, kernel_size = 3, strides = 2, routings = 3] + -> ConvCaps:[channels = 32, kernel_size = 3, strides = 1, routings = 3] + -> ClassCaps:[num_capsule = 10, routings = 3] + -> y ; + +train: + compile: + optimizer = Adam:[lr = 0.001], + loss = margin_loss, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_matrix_capsule_auto.nml b/Classification/Image/MNIST/mnist_matrix_capsule_auto.nml new file mode 100644 index 0000000..82d23c3 --- /dev/null +++ b/Classification/Image/MNIST/mnist_matrix_capsule_auto.nml @@ -0,0 +1,31 @@ +oracle("mode") = "matrix_capsule" + +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098] ; + output: + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[] ; + params: + batch_size = 64, + shuffle = True, + shuffle_init = True ; + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ~ flat: [10] ; + + x -> auto -> y ; + +train: + compile: + optimizer = Adam:[lr = 0.001], + loss = margin_loss, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_vector_capsule.nml 
b/Classification/Image/MNIST/mnist_vector_capsule.nml new file mode 100644 index 0000000..ed13305 --- /dev/null +++ b/Classification/Image/MNIST/mnist_vector_capsule.nml @@ -0,0 +1,33 @@ +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098] ; + output: + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[] ; + params: + batch_size = 64, + shuffle = True, + shuffle_init = True ; + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ~ flat: [10] ; + + x -> Conv2D:[filters = 256, kernel_size = 9, strides = 1, padding = 'valid', activation = 'relu', name = 'conv1'] + -> PrimaryCaps_Vector:[capsule_dim = 8, channels = 32, kernel_size = [9,9],strides = [2,2], padding = 'valid', name = 'primarycap_conv2D'] + -> DigitCaps: [num_capsule = 10, capsule_dim = 16, routings = 3, name = 'digitcaps'] + -> ClassCaps:[num_capsule = 10] + -> y ; + +train: + compile: + optimizer = Adam:[lr = 0.001], + loss = margin_loss, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_vector_capsule_auto.nml b/Classification/Image/MNIST/mnist_vector_capsule_auto.nml new file mode 100644 index 0000000..6e413e7 --- /dev/null +++ b/Classification/Image/MNIST/mnist_vector_capsule_auto.nml @@ -0,0 +1,31 @@ +oracle("mode") = "vector_capsule" + +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator:[rescale = 0.00392156862745098] ; + output: + y ~ from "Label" + -> flat: [10] + -> FlatDataGenerator:[] ; + params: + batch_size = 64, + shuffle = True, + shuffle_init = True ; + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ~ flat: [10] ; + + x -> auto -> y ; + +train: + compile: + optimizer = Adam:[lr = 0.001], + loss = margin_loss, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + 
dashboard: ; diff --git a/Classification/Text/Sentiment/sentiment_call_auto.nml b/Classification/Text/Sentiment/sentiment_call_auto.nml index 8833e00..520a207 100644 --- a/Classification/Text/Sentiment/sentiment_call_auto.nml +++ b/Classification/Text/Sentiment/sentiment_call_auto.nml @@ -34,5 +34,5 @@ train: loss = 'categorical_crossentropy', metrics = ['accuracy'] ; run: - epochs = 4 ; + epochs = 2 ; dashboard: ; diff --git a/Classification/Text/Sentiment/sentiment_choice_auto.nml b/Classification/Text/Sentiment/sentiment_choice_auto.nml index c9e947b..05d952d 100644 --- a/Classification/Text/Sentiment/sentiment_choice_auto.nml +++ b/Classification/Text/Sentiment/sentiment_choice_auto.nml @@ -26,13 +26,14 @@ architecture: -> Convolution1D: [64, 4] -> MaxPooling1D: [pool_size=4] -> LSTM: [128] - -> Dense: [2, activation='softmax'] -> y ; + -> Dense: [2, activation='softmax'] + -> y ; train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: ; + compile: + optimizer = 'rmsprop', + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Text/Sentiment/sentiment_dist_auto.nml b/Classification/Text/Sentiment/sentiment_dist_auto.nml index 8222f43..557c39b 100644 --- a/Classification/Text/Sentiment/sentiment_dist_auto.nml +++ b/Classification/Text/Sentiment/sentiment_dist_auto.nml @@ -25,7 +25,8 @@ architecture: -> Convolution1D: [64, 4] -> MaxPooling1D: [pool_size=4] -> LSTM: [128] - -> Dense: [2, activation = 'softmax'] -> y ; + -> Dense: [2, activation = 'softmax'] + -> y ; train: compile: @@ -33,5 +34,5 @@ train: loss = 'categorical_crossentropy', metrics = ['accuracy'] ; run: - epochs = 4 ; + epochs = 2 ; dashboard: ; diff --git a/Classification/Text/Sentiment/sentiment_full_auto.nml b/Classification/Text/Sentiment/sentiment_full_auto.nml index 5187b03..2d1cd5c 100644 --- 
a/Classification/Text/Sentiment/sentiment_full_auto.nml +++ b/Classification/Text/Sentiment/sentiment_full_auto.nml @@ -26,5 +26,5 @@ train: loss = auto, metrics = ['accuracy'] ; run: - epochs = 4 ; + epochs = 2 ; dashboard: ; diff --git a/Classification/Text/Sentiment/sentiment_multi-GPU.nml b/Classification/Text/Sentiment/sentiment_multi-GPU.nml index b62388b..2b6d5f2 100644 --- a/Classification/Text/Sentiment/sentiment_multi-GPU.nml +++ b/Classification/Text/Sentiment/sentiment_multi-GPU.nml @@ -34,5 +34,5 @@ train Ngpu 2: loss = 'categorical_crossentropy', metrics = ['accuracy'] ; run: - epochs = 4 ; + epochs = 2 ; dashboard: ; diff --git a/Classification/Video/HumanAction/build_csv.py b/Classification/Video/HumanAction/build_csv.py index 5e6cf9b..cc9f04b 100644 --- a/Classification/Video/HumanAction/build_csv.py +++ b/Classification/Video/HumanAction/build_csv.py @@ -4,6 +4,7 @@ from zipfile import ZipFile import requests + from natsort import humansorted @@ -47,7 +48,7 @@ def build_list(data_path, validation_split): for c, p in enumerate(class_paths): line_list = [] for f in Path(p).iterdir(): - line_list.append(str(f.absolute()) + ',' + str(c) + '\n') + line_list.append(str(f) + ',' + str(c) + '\n') shuffle(line_list) split_index = int(validation_split * len(line_list)) diff --git a/Classification/Video/HumanAction/video_class.nml b/Classification/Video/HumanAction/video_class.nml index 97b7a4e..75efed9 100644 --- a/Classification/Video/HumanAction/video_class.nml +++ b/Classification/Video/HumanAction/video_class.nml @@ -1,48 +1,45 @@ source: -bind = "/DM-Dash/Neopulse_Examples/Classification/Video/HumanAction/training_data.csv" ; -input: - x ~ from "Video" - -> video: [shape=[80, 80], channels=3, seqlength=32] - -> ImageDataGenerator: []; -output: - y ~ from "Class" - -> flat: [6] - -> FlatDataGenerator: [] ; -params: - number_validation = 119, - batch_size = 2; - + bind = "training_data.csv" ; + input: + x ~ from "Video" + -> video: [shape=[80, 80], 
channels=3, seqlength=32] + -> ImageDataGenerator: [] ; + output: + y ~ from "Class" + -> flat: [6] + -> FlatDataGenerator: [] ; + params: + number_validation = 119, + batch_size = 2 ; architecture: - input: x ~ video: [shape=[80, 80], channels=3, seqlength=32] ; - output: y ~ flat: [6] ; + input: x ~ video: [shape=[80, 80], channels=3, seqlength=32] ; + output: y ~ flat: [6] ; - x -> TimeDistributed: [Conv2D: [32, [3,3], kernel_initializer="he_normal", activation='relu'], input_shape=[32, 80, 80, 3]] - -> TimeDistributed: [Conv2D: [32, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [MaxPooling2D: []] - -> TimeDistributed: [Conv2D: [48, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [Conv2D: [48, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [MaxPooling2D: []] - -> TimeDistributed: [Conv2D: [64, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [Conv2D: [64, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [MaxPooling2D: []] - -> TimeDistributed: [Conv2D: [128, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [Conv2D: [128, [3,3], kernel_initializer="he_normal", activation='relu']] - -> TimeDistributed: [Flatten: []] - -> LSTM: [256, return_sequences=True] - -> Flatten:[] - -> Dense: [512, activation='relu'] - -> Dropout: [0.5] - -> Dense: [6, activation='softmax'] - -> y; + x -> TimeDistributed: [Conv2D: [32, [3,3], kernel_initializer="he_normal", activation='relu'], input_shape=[32, 80, 80, 3]] + -> TimeDistributed: [Conv2D: [32, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [MaxPooling2D: []] + -> TimeDistributed: [Conv2D: [48, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [Conv2D: [48, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [MaxPooling2D: []] + -> 
TimeDistributed: [Conv2D: [64, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [Conv2D: [64, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [MaxPooling2D: []] + -> TimeDistributed: [Conv2D: [128, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [Conv2D: [128, [3,3], kernel_initializer="he_normal", activation='relu']] + -> TimeDistributed: [Flatten: []] + -> LSTM: [256, return_sequences=True] + -> Flatten:[] + -> Dense: [512, activation='relu'] + -> Dropout: [0.5] + -> Dense: [1, activation='softmax'] + -> y; train: - compile: - optimizer = SGD: [lr=0.0001, momentum=0.9], - loss = 'binary_crossentropy', - metrics = ['accuracy'] ; - - run: - epochs = 4; - - dashboard: ; + compile: + optimizer = SGD: [lr=0.0001, momentum=0.9], + loss = 'binary_crossentropy', + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; diff --git a/Classification/Video/HumanAction/video_class_auto.nml b/Classification/Video/HumanAction/video_class_auto.nml index 8fa76ad..5b2ae98 100644 --- a/Classification/Video/HumanAction/video_class_auto.nml +++ b/Classification/Video/HumanAction/video_class_auto.nml @@ -1,32 +1,30 @@ oracle("mode")= "classification" source: - bind = "/DM-Dash/Neopulse_Examples/Classification/Video/HumanAction/training_data.csv" ; - input: - x ~ from "Video" - -> video: [shape=[80, 80], channels=3, seqlength=32] - -> ImageDataGenerator: []; - output: - y ~ from "Class" - -> flat: [6] - -> FlatDataGenerator: [] ; - params: - number_validation = 119, - batch_size = 2; + bind = "training_data.csv" ; + input: + x ~ from "Video" + -> video: [shape=[80, 80], channels=3, seqlength=32] + -> ImageDataGenerator: [] ; + output: + y ~ from "Class" + -> flat: [6] + -> FlatDataGenerator: [] ; + params: + number_validation = 119, + batch_size = 2 ; architecture: - input: x ~ video: [shape=[80, 80], channels=3, seqlength=32] ; + input: x ~ video: [shape=[80, 80], channels=3, 
seqlength=32] ; output: y ~ flat: [6] ; x -> auto -> y; train: - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy'] ; - - run: - epochs = 4; - - dashboard: ; + compile: + optimizer = auto, + loss = auto, + metrics = ['accuracy'] ; + run: + epochs = 2 ; + dashboard: ; From 7c2b235408c6d78fbcc0024337e39af45480cebb Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 08:21:30 -0700 Subject: [PATCH 20/31] Typo in final layer --- Classification/Video/HumanAction/video_class.nml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Classification/Video/HumanAction/video_class.nml b/Classification/Video/HumanAction/video_class.nml index 75efed9..4bff29d 100644 --- a/Classification/Video/HumanAction/video_class.nml +++ b/Classification/Video/HumanAction/video_class.nml @@ -32,7 +32,7 @@ architecture: -> Flatten:[] -> Dense: [512, activation='relu'] -> Dropout: [0.5] - -> Dense: [1, activation='softmax'] + -> Dense: [6, activation='softmax'] -> y; train: From 54c0014dd7d28b44c8e7cfaae7d2e7c52a96c83c Mon Sep 17 00:00:00 2001 From: Hongye Yang Date: Wed, 15 May 2019 11:17:39 -0700 Subject: [PATCH 21/31] add spectral examples --- SpectralOptimization/image/MNIST/README.md | 37 ++++++++ SpectralOptimization/image/MNIST/build_csv.py | 93 +++++++++++++++++++ .../image/MNIST/img_spectral.nml | 59 ++++++++++++ 3 files changed, 189 insertions(+) create mode 100644 SpectralOptimization/image/MNIST/README.md create mode 100644 SpectralOptimization/image/MNIST/build_csv.py create mode 100644 SpectralOptimization/image/MNIST/img_spectral.nml diff --git a/SpectralOptimization/image/MNIST/README.md b/SpectralOptimization/image/MNIST/README.md new file mode 100644 index 0000000..1de6c35 --- /dev/null +++ b/SpectralOptimization/image/MNIST/README.md @@ -0,0 +1,37 @@ +# Introduction +The sample .nml file is an application of Spectral Optimization in training a CNN image classification model using image data in [NeoPulse™ AI 
Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). + +# Data +The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ +To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: + +```bash +$ python build_csv.py +``` + +If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. + +Missing packages can be installed using pip: +```bash +$ pip install +``` + +Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: +```bash +$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/SpectralOptimization/image/MNIST/img_spectral.nml +``` +The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: +```bash +bind = "/DM-Dash/NeoPulse_Examples/SpectralOptimization/image/MNIST/training_data.csv" ; +``` + +# Tutorial Files +**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. + +# Tutorial Videos and Guides +Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) + +For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) + +# License +Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
diff --git a/SpectralOptimization/image/MNIST/build_csv.py b/SpectralOptimization/image/MNIST/build_csv.py new file mode 100644 index 0000000..4799ebd --- /dev/null +++ b/SpectralOptimization/image/MNIST/build_csv.py @@ -0,0 +1,93 @@ +import gzip +import shutil +from pathlib import Path + +import numpy as np +import requests +from imageio import imwrite +from mnist import MNIST + + +def download_data(): + ''' + Check if raw MNIST data is present. If not, download MNIST data from the official site. + ''' + + Path('raw_data').mkdir(parents=True, exist_ok=True) + + URL = 'http://yann.lecun.com/exdb/mnist/' + file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] + for f in file_list: + if not Path('raw_data/' + f.replace('.gz', '')).is_file(): + r = requests.get(URL + f, stream=True) + with open('raw_data/' + f, 'wb') as f_z: + shutil.copyfileobj(r.raw, f_z) + with gzip.open('raw_data/' + f, 'rb') as f_z: + with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: + shutil.copyfileobj(f_z, f_u) + + +def convert_images(raw): + ''' + Convert images from the MNIST format and return a 4-dim array with + shape: [number_of_images_per_batch, height, width, channel] + The pixel values are integers between 0 and 255. + There are 10000, 28x28 1 channel images per batch, in row major order. + ''' + + return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') + + +def write_csv_file(): + ''' + Save images as PNG files (lossless). + Write absolute path to image files and class label to training_data.csv + training_data.csv should be of length 70001, with the first line containing the header. + The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
+ ''' + + mndata = MNIST('raw_data') + train_img, train_labels = mndata.load_training() + train_images = convert_images(train_img) + test_img, test_labels = mndata.load_testing() + test_images = convert_images(test_img) + + Path('images').mkdir(parents=True, exist_ok=True) + + # writing training csv + with open('training_data.csv', 'w') as of: + of.write('Image,Label\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') + + + # writing querying csv + with open('querying_data.csv', 'w') as of: + of.write('Image\n') + + for index, image in enumerate(train_images): + img_file = 'images/mnist_train_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + + for index, image in enumerate(test_images): + img_file = 'images/mnist_test_' + str(index) + '.png' + imwrite(img_file, image) + of.write(str(Path(img_file).resolve()) + '\n') + + +if __name__ == '__main__': + + # Download data if necessary + download_data() + + # Write the data to PNG files, and create a csv file for NeoPulse AI Studio + write_csv_file() diff --git a/SpectralOptimization/image/MNIST/img_spectral.nml b/SpectralOptimization/image/MNIST/img_spectral.nml new file mode 100644 index 0000000..c621018 --- /dev/null +++ b/SpectralOptimization/image/MNIST/img_spectral.nml @@ -0,0 +1,59 @@ +oracle("mode") = "spectral_opt" + +source: + bind = "/DM-Dash/Neopulse_Examples/SpectralOptimization/image/MNIST/training_data.csv"; + input: + x ~ from "Image" + -> image: [shape = [28,28], channels = 1] + -> ImageDataGenerator: [rescale=0.00392156862745098]; + + output: + y ~ from "Label" -> flat: [10] -> 
FlatDataGenerator: [] ; + + params: + shuffle = True, + shuffle_init = True; + +architecture: + input: + x1 ~ image: [shape = [28,28], channels = 1]; + output: + y1 ~ flat: [10]; + + x1 -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [32,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=2] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> Conv2D: [64,[3,3]] + -> Activation: ['relu'] + -> MaxPooling2D: [pool_size=2] + -> Flatten:[] + -> Dense: [10] + -> Activation: ['softmax'] + -> y1; + + +train: + compile: + opt_options = ['sgd', 'adam', 'adamax'], + lr_options = [0.03, 0.01, 0.003, 0.001, 0.0003], + momentum_options = [0.99, 0.9, 0.0], + decay_options = [0.0001, 0.0], + batch_options = [32,64,128], + loss = 'categorical_crossentropy', + metrics = ['accuracy'] ; + + run: + epochs = 5; + +dashboard: ; + + + + + + + From 20c709e58cd017b27c4d31f39990f06ab35322e3 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 12:57:51 -0700 Subject: [PATCH 22/31] Move spectral optimization example to MNIST dataset. 
--- .../Image/MNIST/mnist_spectral.nml | 13 +-- SpectralOptimization/image/MNIST/README.md | 37 -------- SpectralOptimization/image/MNIST/build_csv.py | 93 ------------------- 3 files changed, 3 insertions(+), 140 deletions(-) rename SpectralOptimization/image/MNIST/img_spectral.nml => Classification/Image/MNIST/mnist_spectral.nml (90%) delete mode 100644 SpectralOptimization/image/MNIST/README.md delete mode 100644 SpectralOptimization/image/MNIST/build_csv.py diff --git a/SpectralOptimization/image/MNIST/img_spectral.nml b/Classification/Image/MNIST/mnist_spectral.nml similarity index 90% rename from SpectralOptimization/image/MNIST/img_spectral.nml rename to Classification/Image/MNIST/mnist_spectral.nml index c621018..1487e41 100644 --- a/SpectralOptimization/image/MNIST/img_spectral.nml +++ b/Classification/Image/MNIST/mnist_spectral.nml @@ -1,7 +1,7 @@ oracle("mode") = "spectral_opt" source: - bind = "/DM-Dash/Neopulse_Examples/SpectralOptimization/image/MNIST/training_data.csv"; + bind = "training_data.csv"; input: x ~ from "Image" -> image: [shape = [28,28], channels = 1] @@ -13,7 +13,7 @@ source: params: shuffle = True, shuffle_init = True; - + architecture: input: x1 ~ image: [shape = [28,28], channels = 1]; @@ -47,13 +47,6 @@ train: metrics = ['accuracy'] ; run: - epochs = 5; + epochs = 2; dashboard: ; - - - - - - - diff --git a/SpectralOptimization/image/MNIST/README.md b/SpectralOptimization/image/MNIST/README.md deleted file mode 100644 index 1de6c35..0000000 --- a/SpectralOptimization/image/MNIST/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# Introduction -The sample .nml file is an application of Spectral Optimization in training a CNN image classification model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). 
- -# Data -The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ -To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: - -```bash -$ python build_csv.py -``` - -If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. - -Missing packages can be installed using pip: -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/SpectralOptimization/image/MNIST/img_spectral.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/SpectralOptimization/image/MNIST/training_data.csv" ; -``` - -# Tutorial Files -**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) - -For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. 
diff --git a/SpectralOptimization/image/MNIST/build_csv.py b/SpectralOptimization/image/MNIST/build_csv.py deleted file mode 100644 index 4799ebd..0000000 --- a/SpectralOptimization/image/MNIST/build_csv.py +++ /dev/null @@ -1,93 +0,0 @@ -import gzip -import shutil -from pathlib import Path - -import numpy as np -import requests -from imageio import imwrite -from mnist import MNIST - - -def download_data(): - ''' - Check if raw MNIST data is present. If not, download MNIST data from the official site. - ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://yann.lecun.com/exdb/mnist/' - file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] - for f in file_list: - if not Path('raw_data/' + f.replace('.gz', '')).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - with gzip.open('raw_data/' + f, 'rb') as f_z: - with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: - shutil.copyfileobj(f_z, f_u) - - -def convert_images(raw): - ''' - Convert images from the MNIST format and return a 4-dim array with - shape: [number_of_images_per_batch, height, width, channel] - The pixel values are integers between 0 and 255. - There are 10000, 28x28 1 channel images per batch, in row major order. - ''' - - return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') - - -def write_csv_file(): - ''' - Save images as PNG files (lossless). - Write absolute path to image files and class label to training_data.csv - training_data.csv should be of length 70001, with the first line containing the header. - The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
- ''' - - mndata = MNIST('raw_data') - train_img, train_labels = mndata.load_training() - train_images = convert_images(train_img) - test_img, test_labels = mndata.load_testing() - test_images = convert_images(test_img) - - Path('images').mkdir(parents=True, exist_ok=True) - - # writing training csv - with open('training_data.csv', 'w') as of: - of.write('Image,Label\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(train_labels[index]) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') - - - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('Image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write the data to PNG files, and create a csv file for NeoPulse AI Studio - write_csv_file() From e7425f171f20ee55c8e77ca76901814456eb3e0b Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 12:59:47 -0700 Subject: [PATCH 23/31] Clean up code. 
--- Classification/Image/MNIST/mnist_spectral.nml | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/Classification/Image/MNIST/mnist_spectral.nml b/Classification/Image/MNIST/mnist_spectral.nml index 1487e41..9e64743 100644 --- a/Classification/Image/MNIST/mnist_spectral.nml +++ b/Classification/Image/MNIST/mnist_spectral.nml @@ -1,24 +1,24 @@ oracle("mode") = "spectral_opt" source: - bind = "training_data.csv"; + bind = "training_data.csv" ; input: x ~ from "Image" -> image: [shape = [28,28], channels = 1] - -> ImageDataGenerator: [rescale=0.00392156862745098]; + -> ImageDataGenerator: [rescale=0.00392156862745098] ; output: y ~ from "Label" -> flat: [10] -> FlatDataGenerator: [] ; params: shuffle = True, - shuffle_init = True; + shuffle_init = True ; architecture: input: - x1 ~ image: [shape = [28,28], channels = 1]; + x1 ~ image: [shape = [28,28], channels = 1] ; output: - y1 ~ flat: [10]; + y1 ~ flat: [10] ; x1 -> Conv2D: [32,[3,3]] -> Activation: ['relu'] @@ -33,7 +33,7 @@ architecture: -> Flatten:[] -> Dense: [10] -> Activation: ['softmax'] - -> y1; + -> y1 ; train: @@ -45,8 +45,6 @@ train: batch_options = [32,64,128], loss = 'categorical_crossentropy', metrics = ['accuracy'] ; - run: - epochs = 2; - -dashboard: ; + epochs = 2 ; + dashboard: ; From 2092b6f65f33c21228cafe02c8f007c69a79fa77 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 13:16:55 -0700 Subject: [PATCH 24/31] Consolidated unsupervised examples into MNIST data set. 
--- Classification/Image/MNIST/README.md | 24 ++++- .../Image/MNIST/mnist_KmeansPCA.nml | 24 +++++ .../Image/MNIST/mnist_KmeansPCA_batch.nml | 25 +++++ .../Image/MNIST/mnist_kmeans.nml | 4 +- .../Image/MNIST/mnist_kmeans_batch.nml | 6 +- Classification/Image/MNIST/mnist_pca.nml | 23 +++++ .../Image/MNIST/mnist_pca_batch.nml | 25 +++++ UnsupervisedLearning/.DS_Store | Bin 8196 -> 0 bytes UnsupervisedLearning/KMeans+PCA/.DS_Store | Bin 8196 -> 0 bytes .../KMeans+PCA/image/.DS_Store | Bin 8196 -> 0 bytes .../KMeans+PCA/image/MNIST/KmeansPCA.nml | 24 ----- .../image/MNIST/KmeansPCA_batch.nml | 26 ----- .../KMeans+PCA/image/MNIST/README.md | 39 -------- .../KMeans+PCA/image/MNIST/build_csv.py | 93 ------------------ UnsupervisedLearning/KMeans/.DS_Store | Bin 8196 -> 0 bytes UnsupervisedLearning/KMeans/image/.DS_Store | Bin 8196 -> 0 bytes .../KMeans/image/MNIST/README.md | 39 -------- .../KMeans/image/MNIST/build_csv.py | 93 ------------------ .../KMeans/image/MNIST/kmeans.nml | 23 ----- .../KMeans/image/MNIST/kmeans_batch.nml | 25 ----- UnsupervisedLearning/PCA/.DS_Store | Bin 8196 -> 0 bytes UnsupervisedLearning/PCA/image/.DS_Store | Bin 8196 -> 0 bytes .../PCA/image/MNIST/README.md | 39 -------- .../PCA/image/MNIST/build_csv.py | 93 ------------------ 24 files changed, 124 insertions(+), 501 deletions(-) create mode 100644 Classification/Image/MNIST/mnist_KmeansPCA.nml create mode 100644 Classification/Image/MNIST/mnist_KmeansPCA_batch.nml rename UnsupervisedLearning/PCA/image/MNIST/pca.nml => Classification/Image/MNIST/mnist_kmeans.nml (74%) rename UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml => Classification/Image/MNIST/mnist_kmeans_batch.nml (72%) create mode 100644 Classification/Image/MNIST/mnist_pca.nml create mode 100644 Classification/Image/MNIST/mnist_pca_batch.nml delete mode 100644 UnsupervisedLearning/.DS_Store delete mode 100644 UnsupervisedLearning/KMeans+PCA/.DS_Store delete mode 100644 UnsupervisedLearning/KMeans+PCA/image/.DS_Store delete 
mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA.nml delete mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml delete mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md delete mode 100644 UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py delete mode 100644 UnsupervisedLearning/KMeans/.DS_Store delete mode 100644 UnsupervisedLearning/KMeans/image/.DS_Store delete mode 100644 UnsupervisedLearning/KMeans/image/MNIST/README.md delete mode 100644 UnsupervisedLearning/KMeans/image/MNIST/build_csv.py delete mode 100644 UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml delete mode 100644 UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml delete mode 100644 UnsupervisedLearning/PCA/.DS_Store delete mode 100644 UnsupervisedLearning/PCA/image/.DS_Store delete mode 100644 UnsupervisedLearning/PCA/image/MNIST/README.md delete mode 100644 UnsupervisedLearning/PCA/image/MNIST/build_csv.py diff --git a/Classification/Image/MNIST/README.md b/Classification/Image/MNIST/README.md index 549df7b..61a8e81 100644 --- a/Classification/Image/MNIST/README.md +++ b/Classification/Image/MNIST/README.md @@ -28,14 +28,34 @@ bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/MNIST/training_data.csv" # Tutorial Files *build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. -**mnist_full_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture. - **mnist_call_auto.nml:** Features the use of auto to automatically select an architecture later. **mnist_choice_auto.nml:** Features use of auto keyword to automatically select from range of values for a given parameter. **mnist_dist_auto.nml:** Features use of the auto keyword to automatically select a value from a specified distribution of values (e.g. gaussian). 
+**mnist_full_auto.nml:** Features full use of the auto keyword to automatically generate the entire architecture. + +**mnist_kmeans.nml:** Demonstrates the k-means unsupervised clustering algorithm. + +**mnist_kmeans_batch.nml:** Demonstrates using batch processing for k-means clustering. + +**mnist_KmeansPCA.nml:** Demonstrates using k-means + PCA for clustering. + +**mnist_KmeansPCA_batch.nml:** Demonstrates batch processing for k-meanst + PCA. + +**mnist_matrix_capsule_auto.nml:** Demonstrates matrix capsule networks using the oracle. + +**mnist_matrix_capsule.nml:** Demonstrates matrix capsule networks without using the oracle. + +**mnist_pca.nml:** Demonstrates using Principal Component analysis (PCA). + +**mnist_pca_batch.nml:** Demonstrates batch processing for PCA. + +**mnist_vector_capsule_auto.nml:** Demonstrates vector capsule networks using the oracle. + +**mnist_vector_capsule.nml:** Demonstrates vector capsule networks without using the oracle. + # Tutorial Videos and Guides Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) diff --git a/Classification/Image/MNIST/mnist_KmeansPCA.nml b/Classification/Image/MNIST/mnist_KmeansPCA.nml new file mode 100644 index 0000000..4a34b6e --- /dev/null +++ b/Classification/Image/MNIST/mnist_KmeansPCA.nml @@ -0,0 +1,24 @@ +oracle("mode") = "unsupervised" + +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[] ; + params: ; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8] + -> Kmeans:[n_clusters=2] + -> y ; + +train: + compile: ; + run: ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_KmeansPCA_batch.nml b/Classification/Image/MNIST/mnist_KmeansPCA_batch.nml new file mode 100644 index 0000000..08b3001 --- 
/dev/null +++ b/Classification/Image/MNIST/mnist_KmeansPCA_batch.nml @@ -0,0 +1,25 @@ +oracle("mode") = "unsupervised" + +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[] ; + params: + batch_size = 16 ; + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8, batch=True] + -> Kmeans:[n_clusters=2, batch=True] + -> y ; + +train: + compile: + batch = True ; + run: ; + dashboard: ; diff --git a/UnsupervisedLearning/PCA/image/MNIST/pca.nml b/Classification/Image/MNIST/mnist_kmeans.nml similarity index 74% rename from UnsupervisedLearning/PCA/image/MNIST/pca.nml rename to Classification/Image/MNIST/mnist_kmeans.nml index 879ddc0..e4e873e 100644 --- a/UnsupervisedLearning/PCA/image/MNIST/pca.nml +++ b/Classification/Image/MNIST/mnist_kmeans.nml @@ -1,7 +1,7 @@ oracle("mode") = "unsupervised" source: - bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/PCA/image/MNIST/training_data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Image" -> image: [shape = [28, 28], channels = 1] @@ -14,7 +14,7 @@ architecture: output: y; x -> UnsupervisedFlatten:[] - -> Pca:[n_components=8] + -> Kmeans:[n_clusters=10] -> y ; train: diff --git a/UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml b/Classification/Image/MNIST/mnist_kmeans_batch.nml similarity index 72% rename from UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml rename to Classification/Image/MNIST/mnist_kmeans_batch.nml index f1e4c99..5d1dcb6 100644 --- a/UnsupervisedLearning/PCA/image/MNIST/pca_batch.nml +++ b/Classification/Image/MNIST/mnist_kmeans_batch.nml @@ -1,7 +1,7 @@ oracle("mode") = "unsupervised" source: - bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/PCA/image/MNIST/training_data.csv" ; + bind = "training_data.csv" ; input: x ~ from "Image" -> image: [shape = [28, 28], channels = 1] @@ -15,11 +15,11 @@ architecture: 
output: y; x -> UnsupervisedFlatten:[] - -> Pca:[n_components=8, batch=True] + -> Kmeans:[n_clusters=10, batch=True] -> y ; train: - compile: + compile: batch=True; run:; dashboard:; diff --git a/Classification/Image/MNIST/mnist_pca.nml b/Classification/Image/MNIST/mnist_pca.nml new file mode 100644 index 0000000..44ffbb1 --- /dev/null +++ b/Classification/Image/MNIST/mnist_pca.nml @@ -0,0 +1,23 @@ +oracle("mode") = "unsupervised" + +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[] ; + params: ; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8] + -> y ; + +train: + compile: ; + run: ; + dashboard: ; diff --git a/Classification/Image/MNIST/mnist_pca_batch.nml b/Classification/Image/MNIST/mnist_pca_batch.nml new file mode 100644 index 0000000..49a6f58 --- /dev/null +++ b/Classification/Image/MNIST/mnist_pca_batch.nml @@ -0,0 +1,25 @@ +oracle("mode") = "unsupervised" + +source: + bind = "training_data.csv" ; + input: + x ~ from "Image" + -> image: [shape = [28, 28], channels = 1] + -> ImageDataGenerator:[] ; + params: + batch_size = 16 ; + + +architecture: + input: x ~ image: [shape = [28,28], channels = 1] ; + output: y ; + + x -> UnsupervisedFlatten:[] + -> Pca:[n_components=8, batch=True] + -> y ; + +train: + compile: + batch = True ; + run: ; + dashboard: ; diff --git a/UnsupervisedLearning/.DS_Store b/UnsupervisedLearning/.DS_Store deleted file mode 100644 index 416063bf61615bf7b6b360391ba833fc48b40f22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHMU2GIp6u#fKl$mx)r+~C9gFhxjJDYzzKoEoI#i~1T!OuP~gu_ zc5z>wAt5jnYN7X5mWb0^l5I!uyxZuevsU1d*(6I$jX(G({h)s4H}aR)yj(MrO{|_ zE^VI5ncf+-R+7Y6E242()uqEa&9>Lkxux#wud0nk5AoiXtU;x&zMnT)8+}MtZK=zp zjrpo|8x*DMBt2pEIa&6Hjjm8OOLDiRr437TY*n^NYKPo2EIcbZcPNcgCFvqMO649ir-jI&+**i7Y+c#v`v?{*$Xl-@P%6hH@-G1*)+W 
z%TR-L*n}n|(2OKH(1|Co4_!EdG;|m^1_whpg%M2P8Jx$nco7%z5-#B~UdKCl7w_RJ zKE(~(#4SwXD}0Zi@H74p3ykEsyf?oSf%ubf+JPl52Or!b7w^vH*zUI7?R)NFE`PIt zKg30gA6QZqU%l?(%?*z=Pd)pB$32aa|0bf(J3%dVw3zG`C=$x4XvVv=NwqNt3YNclff_7=O%ejvL34Tch;>LQ})N<4@S zXu#t{(N^rlE+XiYND)c9aTEj4h^i-H!6vGn!YIz-9G)h+K93iOtQUx^FZ*s28 zyoI-M1=nyLAL1i?j8E`6zQk>OTR_;(0>XY*NZ8?2*0vn`IQ8dAKHVg2&5Z|C1B z{@*$K_y4(BOgKyEfzSj0iw98Lnrcnb)U&;gd8{3$JV2Qzt~VjjH=)80#|iy#oN(%g fA=Trg%54$>P6(tPN^|}pAiV#>``>5Y{ayS64H6N8 diff --git a/UnsupervisedLearning/KMeans+PCA/.DS_Store b/UnsupervisedLearning/KMeans+PCA/.DS_Store deleted file mode 100644 index ab17254a4587713c22cf9cf77cd488a4293b2b18..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLU2GIZ9G~A&V7DxETd^!q;NU_bTBQvir5N;R3$_*l;R#L?84&Gkb(9lt)8kCz<*0 z%>QF&e*4>*{V!t-tp&4*F@rItP~}peMa@+T*YkQsiG?Fv3G!#m$_)EM8OO_9VH*uZ z2t)`(2t)`(2t){69|+Ky&5JU{xi7U*8zB%Oa6=-%_lG!DE|Z}gmr`sU)QA*-WCab2 zLf4cB2%mT;lc5}!Qn*q~nLQwMMVMkhx|2T6%}FLhIWDDiXOQj;;hhm?C=hoizql}G zNJ<&C5dskcS0cb)o9WDEIhJMvW%>J8nIXsagT}_Qh)t`QK4Ye$R4SFTEALGWdc&z8 z9Tfc5bbc@I?MMfMnee*X^PEf9>CA518t&KUws~%lwp}YPoNd!0lYKq5YkI@2Ucohe z5ho=TilQoNziy08mscNdCzfnc)lIjp4-XdwLL}Kw=<5lYuGk}!M?PgwOrG72lf#1ZeSmHzLdJfeUw@(=8uA<5yw`r?X{g~Q)kcc2`nigxuE+?I_99254U8fm6MY%M+D;(T# zhi3HBBh1m1{c1GOuvlNxXjSzumHYVJx2FAI`;g_*thmld7+Vyjk83uWre!LL(&JAg zSB8M;{0v&e>e+fabGq39=CKFa!WuoosQ3}6sB6mbM&IErI<5>Mf2JcAeUDqh1$ypH#9 z3LoG@e1UKA9nRnae#FoC4Zq6?#!3X=TAI6*_=99Qg~NYvu_vxh;W|O#`s<#;Kg{9J z?xwjn*UpPCzh%{$^&49+J@-jZxr~UH^G{Gta!T;5#3spbWB!7Lb&IqmQ@t0vh?&-i z!bzsjoLD%2fw6R1ou(^9GgWl!S5O!9Ca4o4RzuzG3Q=B>(iumh1RU7 z`gGyhvO&}I3Zj4Ld6%|HA&ToWgto1nw2Z%s>eaYZhw%^|#v{bi#{@^8#dA206L=Y~5L@5E z+js}>5?fE>V|;>7@fkkHH#j?qr(KhHdaj(OIj_ClECgA`&i(V|OOKwItTnHp9U8pW zCSFYCUB`0=C}QcIDW6Kz;|NTXFc>`l-#qpIe`yu95dskc|4Rf=(Vl8=rKL@^S@K*v zPIW(3Ubx-3l){A?k&crT={U(S9T&E7GUYzWP>xGUBUJwL9|7UlWOV*V=f5D}4chz* Dq{u_n diff --git a/UnsupervisedLearning/KMeans+PCA/image/.DS_Store b/UnsupervisedLearning/KMeans+PCA/image/.DS_Store deleted file 
mode 100644 index a0b0e52a5d8dd223a55795b656b000e75adaac46..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLU2GIp6u#e5V5S}D6cEbt(}e~EDO!G{5Cpf|<)^d-bX!_}+ufa^oiIDI?98@6 zDb`?OjPi&I@x>TJ6pTC|#-GFoV*%ki6;Ie#s@w3-dRFh;Kjs{bZ#>D z+`%S#jArVkphq` zqhV2~O?iOui3Ktq$YCjkE6pjh2L!GNQVd9U(#N4u-)^Tfdn|LHTc6eBI)2)6%z|*X43kU_bXtz#4kX;7 zV|ZeolvF5+s;J$1_2A&+8yf0jYuBtFs*4RaG}QC(;BXX**lA)4thg+bQdWNn@2> zw&2?KZr8K;G<&;z^O(I@F8P=geQU zbVdEz4V#-rD)p)v+AM8$XTRlH8Qbg_unfO%Z`#Y6j$t{y`v`K!w+>roL9L1$Q`?<% z&K#McS5;QejYhkQ8S7}#@{jAaiX!J)7>%jAp&T%1wKmt_o`$erTpNw<=c7#plWIeM zujH~U`iQDKO1npa3Ddfjn$~`Vc3*#9RmHT?h1wcL?Xc4s)0PA4wDpSKqIUL6WaZ#S zZIe=kI<<>7Fh}?8$$N@N6RLhqImB0QO?&>Xyy?(g@z|ra(XEQImungg!!(rI(!STm zm&v9Gxi(%ednz473)pIwWUZ`+71$sd!K#EK4&BB7WpNHJYvv+f>o5SV~=kI`ig6t7}%&tW=2idPL|Rt0&zg zV!kw8yHu;uC(|_CRNJ6!REhq2nK0e7nRHW#?{bkulcwp6Kac8vBJz{R-yq<5 image: [shape = [28, 28], channels = 1] - -> ImageDataGenerator:[]; - params:; - - -architecture: - input: x ~ image: [shape = [28,28], channels = 1]; - output: y; - - x -> UnsupervisedFlatten:[] - -> Pca:[n_components=8] - -> Kmeans:[n_clusters=2] - -> y ; - -train: - compile:; - run:; - dashboard:; diff --git a/UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml b/UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml deleted file mode 100644 index 72c1c92..0000000 --- a/UnsupervisedLearning/KMeans+PCA/image/MNIST/KmeansPCA_batch.nml +++ /dev/null @@ -1,26 +0,0 @@ -oracle("mode") = "unsupervised" - -source: - bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/KMeans+PCA/image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape = [28, 28], channels = 1] - -> ImageDataGenerator:[]; - params: - batch_size = 16; - - -architecture: - input: x ~ image: [shape = [28,28], channels = 1]; - output: y; - - x -> UnsupervisedFlatten:[] - -> Pca:[n_components=8, batch=True] - -> Kmeans:[n_clusters=2, batch=True] - -> y ; - -train: - compile: - batch=True; - run:; 
- dashboard:; diff --git a/UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md b/UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md deleted file mode 100644 index 7380904..0000000 --- a/UnsupervisedLearning/KMeans+PCA/image/MNIST/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Introduction -These sample .nml files are for training a PCA + KMeans model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). - -# Data -The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ -To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: - -```bash -$ python build_csv.py -``` - -If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. - -Missing packages can be installed using pip: -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans+PCA/image/MNIST/KMeansPCA.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans+PCA/image/MNIST/training_data.csv" ; -``` - -# Tutorial Files -**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. -**KMeansPCA.nml:** NML file defines a pca + kmeans process where the model is trained with all the data in one time. 
-**KMeansPCA_batch.nml:** NML file defines a pca + kmeans process where the model is trained with batched data. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) - -For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py b/UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py deleted file mode 100644 index fcfe45e..0000000 --- a/UnsupervisedLearning/KMeans+PCA/image/MNIST/build_csv.py +++ /dev/null @@ -1,93 +0,0 @@ -import gzip -import shutil -from pathlib import Path - -import numpy as np -import requests -from imageio import imwrite -from mnist import MNIST - - -def download_data(): - ''' - Check if raw MNIST data is present. If not, download MNIST data from the official site. - ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://yann.lecun.com/exdb/mnist/' - file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] - for f in file_list: - if not Path('raw_data/' + f.replace('.gz', '')).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - with gzip.open('raw_data/' + f, 'rb') as f_z: - with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: - shutil.copyfileobj(f_z, f_u) - - -def convert_images(raw): - ''' - Convert images from the MNIST format and return a 4-dim array with - shape: [number_of_images_per_batch, height, width, channel] - The pixel values are integers between 0 and 255. 
- There are 10000, 28x28 1 channel images per batch, in row major order. - ''' - - return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') - - -def write_csv_file(): - ''' - Save images as PNG files (lossless). - Write absolute path to image files and class label to training_data.csv - training_data.csv should be of length 70001, with the first line containing the header. - The test images are written at the end, i.e. the last 10000 lines correspond to the test set. - ''' - - mndata = MNIST('raw_data') - train_img, train_labels = mndata.load_training() - train_images = convert_images(train_img) - test_img, test_labels = mndata.load_testing() - test_images = convert_images(test_img) - - Path('images').mkdir(parents=True, exist_ok=True) - - # writing training csv - with open('training_data.csv', 'w') as of: - of.write('Image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - ''' - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') - ''' - ''' - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - ''' - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write the data to PNG files, and create a csv file for NeoPulse AI Studio - write_csv_file() diff --git a/UnsupervisedLearning/KMeans/.DS_Store b/UnsupervisedLearning/KMeans/.DS_Store deleted 
file mode 100644 index 50407ec90c7445a32d3ae0a93a90725a678cb356..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLTWl0n7(U-pV5S}D6e^VE(uIZ!(rUR#AqZ}_%cZnPbz54l?d;CbPMDopc4oIg zDb{FWjPi&I(FbFUQ8e;^7%zzr#srArg&LKp55~k76HUA%#s~e+oLNF!;Kjs{bWSqo zpL71p%>SM5%*;QGF(eB5YQ}09V-g))suk4Trg1$#CbdW~!<8U^#*Flkn@d|xdXjB8 z5h4&G5F!vF5F!vFaBm<$?`+BB>>4X zn)Vx=Qyt*@!~z))0{iSWIT|gLJD^V;m#0T89{{t|LWuy z^UWFJLWX^aK#0I(1o*a@&PT!$N;m~u zcl|gip^zk5lKa$};o*ljHaEo9t=%xv5F2i8ZsOwgcx&;8sEw+ju7?BGl0B)23kjpESSSwWu7o-uF}oq z9n0!;T$6{{+v^#}Jg%Fh>fPr$d9T$E)@xFHVWk#a;|YU;nnfc?p!s?jck62KQ=i#;|qM9@s~a+n#yYH1cv)+qOBPqWg7hfSx z{gmtCO=2X|i)b-h!;-9n^|L%1W~bT9>@{|gU1A@w&)64ijNM>AvR~M*>^Js1`wJ0N zU>0Vh5;a(Wg;<1@Sc5HyqXh}HqXUm&H@a{TY3ML;7!GnciXn{RDLjp5@B+@@MV!Ss zypDJ9F5bfhe2UAsf~y$Am-rq(;b;6IZZJ~f`L0qXLgG)7DAaeTa$hCb^Av3$yv2qlP4Bwx5_u~OSj zWHgRV+#ID%qt4`{lg7C9(q*-ZDiPace^^mZL!AZ-7es4nSJgI3#CtX3>mF(%-85pp zFkM%#)T+}d3^!FaDw|}Yzgp&-Zr(z=8N_#SkwmMasEqGN`9Bi&D!a~pAhP}qh8aZE zxkS>XxF3yZ#-l{gB(`E3(erVnh@#y%gnnp5)FZH96H$-i1Ww`b3R7 ziCXxvnR1_aAV-Cy8EXIgkAUEVGJOAs L?|+|wQ?&aRj-^XJ diff --git a/UnsupervisedLearning/KMeans/image/.DS_Store b/UnsupervisedLearning/KMeans/image/.DS_Store deleted file mode 100644 index 472cef4727ca390d308085ac2e376d6a17e936b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLTWl0n7(U-pV5S}Dv>=q_(uIZ!(rUR#AqZ}_%cW3gbz54l?e5M%CroFSo!J&B z#TreFQ65nt`e2MPibfs~<0bLIm;f=pP;W$iFeJX1XyPR?KInhWnI*IZUQ7&0=OlCf zIp@F3{NMS`%>2U`L!w};WUPuYrqIEqUPj$*8rSnUqk}q80+1}B zY0>DM>Hy&r3uHWyBT@=iiYcoH1g;253`lp<$GADkcpyillf{#_ z<_vKu!#+eHL|`HUeA`TAJJ~MQ!ct@E_bZv4ZFzn}!`FyRDVaKLx}uaSr87zor25>! 
zl%MtsULu{}$4BjHzb_M)>0S-oz!E;iKI*uc-%4iBr!+@&iww)FNNKQVN2_|#dM z6@n)ND<7BTxv?qQhgYVuZh!6;t6KoDy@6)=10!;w2*C9s0BUKOHY2*NW!v^0T8~~i zK)JQ$e9Lvb-Dch+*`t?xghxx(b=vbL*LP)IyU_1=J$gkpZD$L1+BchQJ7t|PX|CMM z=3U!vcRh=T+0*Tt$9%4vpz7Z1xp}`?1nagazO>Q{p82>*L9GEZNufO*>iL;-7c8z_ z(XeLyrsmOdy`oZ^tEvPSJO$T>}zXIk`dG zs8pa%?WP6nr~CHgJ;kF5Rllqpl?$Mg)7Dd^^HBE+L8cI!Z-D~2@ zU3vKAY0c2pn#38uI;V1?%f+z75p2qVygBNfX z=kOZd#yfZy7w`!#;|i`~6kp;8{DhzJhrGc^k>@*$nMjF0NseW5@GCBI=Jrgk-nwmT z%l3Pi$=}Z8OE_!xeRJlkyVH&=e#VvphDtuz#@lf)8~D-uc)ok%`^!9uON zlgMZu8^1Y%~i|HC-XLsbW}GOGASO^XEmYs#jFkE5v&}B6JTmkZuYw zUz)C|)vEQW6o#8>>$MFk(O)kSrW-erZW{4jUL?`1X*%QkQT>mEy~?h!ABn7ggJBvG zbrz9yG44k_8u2I*G>OgFLiBtLDWYg64x$%nBI*&?aEPc!aRMiC3QrJOpT%=T)zd`P z7X?{g#_Mdbq-U+;?@~^E0c9JMJ8Uw>;KJn{{KH&;R*8&5ttMRpd^_} zCMeZAJsx?j9iyX{4&J!kh?K&GI#G_36y-R{pd1&rF*4;o@j#A9Ni)>`_a6bl2W9yF M58wZSfRnWQ7vJ1T^#A|> diff --git a/UnsupervisedLearning/KMeans/image/MNIST/README.md b/UnsupervisedLearning/KMeans/image/MNIST/README.md deleted file mode 100644 index 591bf83..0000000 --- a/UnsupervisedLearning/KMeans/image/MNIST/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Introduction -These sample .nml files are for training a KMeans model using image data in [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf). - -# Data -The data for this task can be found at: http://yann.lecun.com/exdb/mnist/ -To run this example, first you will need to download and pre-process the raw data for the MNIST task using the included ```build_csv.py``` script: - -```bash -$ python build_csv.py -``` - -If the script fails, make sure that you have installed all the package dependencies of this script which are: `gzip, os, shutil, pathlib, numpy, requests, imageio, and python-mnist`. - -Missing packages can be installed using pip: -```bash -$ pip install -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. 
To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/training_data.csv" ; -``` - -# Tutorial Files -**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. -**kmeans.nml:** NML file defines a kmeans process where the model is trained with all the data in one time. -**kmeans_batch.nml:** NML file defines a kmeans process where the model is trained with batched data. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) - -For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/UnsupervisedLearning/KMeans/image/MNIST/build_csv.py b/UnsupervisedLearning/KMeans/image/MNIST/build_csv.py deleted file mode 100644 index fcfe45e..0000000 --- a/UnsupervisedLearning/KMeans/image/MNIST/build_csv.py +++ /dev/null @@ -1,93 +0,0 @@ -import gzip -import shutil -from pathlib import Path - -import numpy as np -import requests -from imageio import imwrite -from mnist import MNIST - - -def download_data(): - ''' - Check if raw MNIST data is present. If not, download MNIST data from the official site. 
- ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://yann.lecun.com/exdb/mnist/' - file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] - for f in file_list: - if not Path('raw_data/' + f.replace('.gz', '')).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - with gzip.open('raw_data/' + f, 'rb') as f_z: - with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: - shutil.copyfileobj(f_z, f_u) - - -def convert_images(raw): - ''' - Convert images from the MNIST format and return a 4-dim array with - shape: [number_of_images_per_batch, height, width, channel] - The pixel values are integers between 0 and 255. - There are 10000, 28x28 1 channel images per batch, in row major order. - ''' - - return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') - - -def write_csv_file(): - ''' - Save images as PNG files (lossless). - Write absolute path to image files and class label to training_data.csv - training_data.csv should be of length 70001, with the first line containing the header. - The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
- ''' - - mndata = MNIST('raw_data') - train_img, train_labels = mndata.load_training() - train_images = convert_images(train_img) - test_img, test_labels = mndata.load_testing() - test_images = convert_images(test_img) - - Path('images').mkdir(parents=True, exist_ok=True) - - # writing training csv - with open('training_data.csv', 'w') as of: - of.write('Image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - ''' - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') - ''' - ''' - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - ''' - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write the data to PNG files, and create a csv file for NeoPulse AI Studio - write_csv_file() diff --git a/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml b/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml deleted file mode 100644 index c48467e..0000000 --- a/UnsupervisedLearning/KMeans/image/MNIST/kmeans.nml +++ /dev/null @@ -1,23 +0,0 @@ -oracle("mode") = "unsupervised" - -source: - bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape = [28, 28], channels = 1] - -> ImageDataGenerator:[]; - params:; - - -architecture: - input: x ~ image: [shape = [28,28], channels = 1]; - 
output: y; - - x -> UnsupervisedFlatten:[] - -> Kmeans:[n_clusters=10] - -> y ; - -train: - compile:; - run:; - dashboard:; diff --git a/UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml b/UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml deleted file mode 100644 index f3984aa..0000000 --- a/UnsupervisedLearning/KMeans/image/MNIST/kmeans_batch.nml +++ /dev/null @@ -1,25 +0,0 @@ -oracle("mode") = "unsupervised" - -source: - bind = "/DM-Dash/Neopulse_Examples/UnsupervisedLearning/KMeans/image/MNIST/training_data.csv" ; - input: - x ~ from "Image" - -> image: [shape = [28, 28], channels = 1] - -> ImageDataGenerator:[]; - params: - batch_size = 16; - - -architecture: - input: x ~ image: [shape = [28,28], channels = 1]; - output: y; - - x -> UnsupervisedFlatten:[] - -> Kmeans:[n_clusters=10, batch=True] - -> y ; - -train: - compile: - batch=True; - run:; - dashboard:; diff --git a/UnsupervisedLearning/PCA/.DS_Store b/UnsupervisedLearning/PCA/.DS_Store deleted file mode 100644 index 59a13628f493b21bae6315b3a5ab353c5d6c9a22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLU2GIZ9G~A&V7DxETd^!q;NU_bTBQvir5N<+6{Hpd;R7+piN=H&H33aDD(VxO_!ad*B{l)U7ax2uCK_Kb(Fgs{%pTzi<v-9#Y@>k) zfe3*Jfe3*Jfe3*c0|7d-c~Pb~_oX&!BLpG@Zb}6B{t&0iWipiGQi`pE8j%8!tfFC2 z=$i5X;S&#KGL+*|3Rj9Lvj>E(2vZD5chbkXImu)w$EB3+4APw;yfeZK1>)}H7Z>IX zNhzZ?LLfrmY6SReGo9Hi&oXSFB7grXTX1YYXl_1-*tDwYGiEAEwNgF1`u_BwH=GVK zLCJ5=6!-Do&P*_v4X?XB&$)b^$?mbO;eLH?%5#H^?OH|QY?~ID?C-H%(;II0O0MaP zIBBU+6jf3Cbz^jNZCh(oe9h|hV@>hV*4Aczy>4twRpu>k+1Pm?fB4Ag(XnGM%2grU z8CcDvRlZcYL}#ESo%8aAE3B>n!10Gt@&`ujR2hH^WdPLbS=wx4YvN!R=sC_#&$k2HbNhM& zYa}QKs!;Xr^}S+{62ba)z2sZFEQ)0hSsfISLe=rR?I`dt{vNC7lT0L=<-*iA=gezZ z+IV~Onzp-B6E%8moi2KGVQDyp@y5w+XR z(@zGS#qSn;k zR}`UGvC`1GeZoL@6;%egLtCZl$JOpbL0!JwiV8fK#& zOAyC0B+!gDY{tFVicai85Bjhd`(VPt00xoA5RPID$8a1^;b}aBXYn##$4Q*R8+Z?= z@c}->7x)(6;VdrVNBoT6@VlH~tW5B2<+)3VKS)+mIQ$0}d-B>8ZV(i1yx}SQ!yNwX 
zZkc;){k-_{+g7bvzp?%DbD#8-D~Na{{{-bErv%T+Y?2H&<}X;-ut-}n)qAm zPZyqT8#GO?BKn7(_h_3GqPRXoXj2`eW&BlCuf^5tGW9IGzwf>-exvGpyy zjd$=avGoi-#wYj`pW$MJlBp> zJwTNgZZ|HaaG^$|<0M5oPBKi#g>9Tnxlb~b<5JQHmH+%lK=?Hoo&VAKF9>*(Hva;k CJ3+|+ diff --git a/UnsupervisedLearning/PCA/image/.DS_Store b/UnsupervisedLearning/PCA/image/.DS_Store deleted file mode 100644 index 58f0a55a68b88cd2e0dee073c01ede0f960535ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeHLU2GIZ9G~A&V7DxETd^!q;NU_bTBQvir5N;R3sMV0xI$Y%%iZnLuI%mB+r4X{ zl+Bu3*$Of)9Ms0nDIQBj}J#IL9iDzOPjeeuBuVxsW{6MfMC%} zGyjjB`R#9K_P>lVv=+=J#tg=oLX}H>7B$x?T+iz@B^HiwCCHz#uFQ}>m~p(!HMY?} zgg}Hqgg}Hqgg}JA&4B=&*}N!IocmH6wGjdl0=Fare1C{j| zhNP5H8zB%Oa4iD-wVBSgvj^Hz$Ig0g z%06QC=~X^q&_Ugud8R;FKb!X)XQ$`ef$h1y zJ%KeGlmeBhdiMHWK4=rc`gFbETe~cZWf!e>ib$bq_}z94@-Y4$EANv`Bpc7Akr|=_zFFM+_F$f$l1*404yYO4X06U5AJW!lr4lX6$y-8Ou?n)7o{K(Or~F(>ue# z4fkk94?V&hP1&zT0}YGyC5={9|4Mn7-+f!!4|WV%F3pPTjD)d8QF^&%lWAI}k|;g? zL~>;an9k3jMXa8!r!%LE9b_JRj6KCpvNP-~`HD5vA@Bfq7oWr zqZUgL$1)_)h!$+d{n&~Q>_Rtsu^0Pb!a_d=kV6qiF^Xe2j%V;Jp2PEa1#jRKPUB6y zk2ClXAK^=UhwpI?6Zi?g;5Ym(Cm1Ucd|PSmQsNJi@bYUd=y2Ims!(vl5#m!;Se17S=7&mQ3|t>?USf z!wM&vK68BG`~}9+Wp$da5Y1H4tzSW1(3_x+i&zbHcPm7BJtn$ZQ{8Hn_^wZr+7()} zqUzIyXUhgn(<_Mnq33J6ECj$LGz*k!~plek%f1;oyk z#LRU_VheHe0d!(J_Mk^_bHCuGjYn_@0SXwxFpl7HJb@>Pr6&YOU%-nviC6I&UMIG` zjd$=a-Xpf2#V7a_pW$GoP@$^DDPjg;-yIBabj9vKWROh9M$y)OU+QESv zZQ{jL-gP{;pCXptnewSbJ&wRM34_7&|IJhX|Cd%#8zB%O@V`U=7450^R$AIrn -``` - -Once you've downloaded and pre-processed the data, you can start training using any of the NML scripts provided. To begin training: -```bash -$ neopulse train -p -f /DM-Dash/NeoPulse_Examples/UnsupervisedLearning/PCA/image/MNIST/pca.nml -``` -The paths in the NML scripts in this directory assume that you have cloned this repository into the /DM-Dash directory of your machine. 
If you have put it somewhere else, you'll need to move the NML files into a location under the /DM-Dash directory, and change the path in the line: -```bash -bind = "/DM-Dash/NeoPulse_Examples/UnsupervisedLearning/PCA/image/MNIST/training_data.csv" ; -``` - -# Tutorial Files -**build_csv.py:** Script creates list of training files and writes training full image paths and corresponding labels to a training CSV file. -**pca.nml:** NML file defines a pca process where the model is trained with all the data in one time. -**pca_batch.nml:** NML file defines a pca process where the model is trained with batched data. - -# Tutorial Videos and Guides -Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) - -For more information on using the ImageDataGenerator visit the [Data section] of the NeoPulse™ AI Studio Documentation(https://docs.neopulse.ai/NML-source/#data) - -# License -Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. diff --git a/UnsupervisedLearning/PCA/image/MNIST/build_csv.py b/UnsupervisedLearning/PCA/image/MNIST/build_csv.py deleted file mode 100644 index fcfe45e..0000000 --- a/UnsupervisedLearning/PCA/image/MNIST/build_csv.py +++ /dev/null @@ -1,93 +0,0 @@ -import gzip -import shutil -from pathlib import Path - -import numpy as np -import requests -from imageio import imwrite -from mnist import MNIST - - -def download_data(): - ''' - Check if raw MNIST data is present. If not, download MNIST data from the official site. 
- ''' - - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://yann.lecun.com/exdb/mnist/' - file_list = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz'] - for f in file_list: - if not Path('raw_data/' + f.replace('.gz', '')).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - with gzip.open('raw_data/' + f, 'rb') as f_z: - with open('raw_data/' + f.replace('.gz', ''), 'wb') as f_u: - shutil.copyfileobj(f_z, f_u) - - -def convert_images(raw): - ''' - Convert images from the MNIST format and return a 4-dim array with - shape: [number_of_images_per_batch, height, width, channel] - The pixel values are integers between 0 and 255. - There are 10000, 28x28 1 channel images per batch, in row major order. - ''' - - return np.reshape(np.array(raw), (-1, 28, 28, 1)).astype('uint8') - - -def write_csv_file(): - ''' - Save images as PNG files (lossless). - Write absolute path to image files and class label to training_data.csv - training_data.csv should be of length 70001, with the first line containing the header. - The test images are written at the end, i.e. the last 10000 lines correspond to the test set. 
- ''' - - mndata = MNIST('raw_data') - train_img, train_labels = mndata.load_training() - train_images = convert_images(train_img) - test_img, test_labels = mndata.load_testing() - test_images = convert_images(test_img) - - Path('images').mkdir(parents=True, exist_ok=True) - - # writing training csv - with open('training_data.csv', 'w') as of: - of.write('Image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - ''' - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + ',' + str(test_labels[index]) + '\n') - ''' - ''' - # writing querying csv - with open('querying_data.csv', 'w') as of: - of.write('image\n') - - for index, image in enumerate(train_images): - img_file = 'images/mnist_train_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - - for index, image in enumerate(test_images): - img_file = 'images/mnist_test_' + str(index) + '.png' - imwrite(img_file, image) - of.write(str(Path(img_file).resolve()) + '\n') - ''' - -if __name__ == '__main__': - - # Download data if necessary - download_data() - - # Write the data to PNG files, and create a csv file for NeoPulse AI Studio - write_csv_file() From f9a4f8336ccaaafd99f90790e4e871882b256315 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 14:09:07 -0700 Subject: [PATCH 25/31] Rename Directories, move VOC2012 to Image Datasets --- {Classification => DataSets}/Audio/MusicGenre/.gitignore | 0 {Classification => DataSets}/Audio/MusicGenre/README.md | 0 {Classification => DataSets}/Audio/MusicGenre/build_csv.py | 0 .../Audio/MusicGenre/music_classification_auto.nml | 0 .../Audio/MusicGenre/music_spectrogram.nml | 0 .../Audio/MusicGenre/music_vector_capsule.nml | 0 
.../Audio/MusicGenre/music_vector_capsule_auto.nml | 0 {Classification => DataSets}/Dicom/IXIT1_BrainSex/.gitignore | 0 {Classification => DataSets}/Dicom/IXIT1_BrainSex/README.md | 0 {Classification => DataSets}/Dicom/IXIT1_BrainSex/build_csv.py | 0 {Classification => DataSets}/Dicom/IXIT1_BrainSex/dicom_sex.nml | 0 .../Dicom/IXIT1_BrainSex/dicom_sex_auto.nml | 0 {Classification => DataSets}/Image/CIFAR10/.gitignore | 0 {Classification => DataSets}/Image/CIFAR10/README.md | 0 {Classification => DataSets}/Image/CIFAR10/build_csv.py | 0 .../Image/CIFAR10/cifar10_call_auto.nml | 0 .../Image/CIFAR10/cifar10_choice_auto.nml | 0 .../Image/CIFAR10/cifar10_dist_auto.nml | 0 .../Image/CIFAR10/cifar10_full_auto.nml | 0 {Classification => DataSets}/Image/CIFAR100/.gitignore | 0 {Classification => DataSets}/Image/CIFAR100/README.md | 0 {Classification => DataSets}/Image/CIFAR100/build_csv.py | 0 .../Image/CIFAR100/cifar100_call_auto.nml | 0 .../Image/CIFAR100/cifar100_choice_auto.nml | 0 .../Image/CIFAR100/cifar100_dist_auto.nml | 0 .../Image/CIFAR100/cifar100_full_auto.nml | 0 {Classification => DataSets}/Image/MNIST/.gitignore | 0 {Classification => DataSets}/Image/MNIST/README.md | 2 +- {Classification => DataSets}/Image/MNIST/build_csv.py | 0 {Classification => DataSets}/Image/MNIST/mnist_KmeansPCA.nml | 0 .../Image/MNIST/mnist_KmeansPCA_batch.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_call_auto.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_choice_auto.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_dist_auto.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_full_auto.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_kmeans.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_kmeans_batch.nml | 0 .../Image/MNIST/mnist_matrix_capsule.nml | 0 .../Image/MNIST/mnist_matrix_capsule_auto.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_pca.nml | 0 {Classification => DataSets}/Image/MNIST/mnist_pca_batch.nml | 0 {Classification => 
DataSets}/Image/MNIST/mnist_spectral.nml | 0 .../Image/MNIST/mnist_vector_capsule.nml | 0 .../Image/MNIST/mnist_vector_capsule_auto.nml | 0 {Classification => DataSets}/Image/README.md | 2 ++ {ImageDetection/ssd => DataSets/Image}/VOC2012/.gitignore | 0 {ImageDetection/ssd => DataSets/Image}/VOC2012/README.md | 0 {ImageDetection/ssd => DataSets/Image}/VOC2012/build_csv.py | 0 {ImageDetection/ssd => DataSets/Image}/VOC2012/ssd300.nml | 0 {Classification => DataSets}/Text/Chinese_news/README.md | 0 {Classification => DataSets}/Text/Chinese_news/build_csv.py | 0 {Classification => DataSets}/Text/Chinese_news/cnews.nml | 0 {Classification => DataSets}/Text/Sentiment/.gitignore | 0 {Classification => DataSets}/Text/Sentiment/README.md | 0 {Classification => DataSets}/Text/Sentiment/build_csv.py | 0 .../Text/Sentiment/sentiment_call_auto.nml | 0 .../Text/Sentiment/sentiment_choice_auto.nml | 0 .../Text/Sentiment/sentiment_dist_auto.nml | 0 .../Text/Sentiment/sentiment_full_auto.nml | 0 .../Text/Sentiment/sentiment_multi-GPU.nml | 0 {Classification => DataSets}/Video/HumanAction/.gitignore | 0 {Classification => DataSets}/Video/HumanAction/README.md | 0 {Classification => DataSets}/Video/HumanAction/build_csv.py | 0 {Classification => DataSets}/Video/HumanAction/video_class.nml | 0 .../Video/HumanAction/video_class_auto.nml | 0 65 files changed, 3 insertions(+), 1 deletion(-) rename {Classification => DataSets}/Audio/MusicGenre/.gitignore (100%) rename {Classification => DataSets}/Audio/MusicGenre/README.md (100%) rename {Classification => DataSets}/Audio/MusicGenre/build_csv.py (100%) rename {Classification => DataSets}/Audio/MusicGenre/music_classification_auto.nml (100%) rename {Classification => DataSets}/Audio/MusicGenre/music_spectrogram.nml (100%) rename {Classification => DataSets}/Audio/MusicGenre/music_vector_capsule.nml (100%) rename {Classification => DataSets}/Audio/MusicGenre/music_vector_capsule_auto.nml (100%) rename {Classification => 
DataSets}/Dicom/IXIT1_BrainSex/.gitignore (100%) rename {Classification => DataSets}/Dicom/IXIT1_BrainSex/README.md (100%) rename {Classification => DataSets}/Dicom/IXIT1_BrainSex/build_csv.py (100%) rename {Classification => DataSets}/Dicom/IXIT1_BrainSex/dicom_sex.nml (100%) rename {Classification => DataSets}/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR10/.gitignore (100%) rename {Classification => DataSets}/Image/CIFAR10/README.md (100%) rename {Classification => DataSets}/Image/CIFAR10/build_csv.py (100%) rename {Classification => DataSets}/Image/CIFAR10/cifar10_call_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR10/cifar10_choice_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR10/cifar10_dist_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR10/cifar10_full_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR100/.gitignore (100%) rename {Classification => DataSets}/Image/CIFAR100/README.md (100%) rename {Classification => DataSets}/Image/CIFAR100/build_csv.py (100%) rename {Classification => DataSets}/Image/CIFAR100/cifar100_call_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR100/cifar100_choice_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR100/cifar100_dist_auto.nml (100%) rename {Classification => DataSets}/Image/CIFAR100/cifar100_full_auto.nml (100%) rename {Classification => DataSets}/Image/MNIST/.gitignore (100%) rename {Classification => DataSets}/Image/MNIST/README.md (99%) rename {Classification => DataSets}/Image/MNIST/build_csv.py (100%) rename {Classification => DataSets}/Image/MNIST/mnist_KmeansPCA.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_KmeansPCA_batch.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_call_auto.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_choice_auto.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_dist_auto.nml (100%) 
rename {Classification => DataSets}/Image/MNIST/mnist_full_auto.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_kmeans.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_kmeans_batch.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_matrix_capsule.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_matrix_capsule_auto.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_pca.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_pca_batch.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_spectral.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_vector_capsule.nml (100%) rename {Classification => DataSets}/Image/MNIST/mnist_vector_capsule_auto.nml (100%) rename {Classification => DataSets}/Image/README.md (89%) rename {ImageDetection/ssd => DataSets/Image}/VOC2012/.gitignore (100%) rename {ImageDetection/ssd => DataSets/Image}/VOC2012/README.md (100%) rename {ImageDetection/ssd => DataSets/Image}/VOC2012/build_csv.py (100%) rename {ImageDetection/ssd => DataSets/Image}/VOC2012/ssd300.nml (100%) rename {Classification => DataSets}/Text/Chinese_news/README.md (100%) rename {Classification => DataSets}/Text/Chinese_news/build_csv.py (100%) rename {Classification => DataSets}/Text/Chinese_news/cnews.nml (100%) rename {Classification => DataSets}/Text/Sentiment/.gitignore (100%) rename {Classification => DataSets}/Text/Sentiment/README.md (100%) rename {Classification => DataSets}/Text/Sentiment/build_csv.py (100%) rename {Classification => DataSets}/Text/Sentiment/sentiment_call_auto.nml (100%) rename {Classification => DataSets}/Text/Sentiment/sentiment_choice_auto.nml (100%) rename {Classification => DataSets}/Text/Sentiment/sentiment_dist_auto.nml (100%) rename {Classification => DataSets}/Text/Sentiment/sentiment_full_auto.nml (100%) rename {Classification => DataSets}/Text/Sentiment/sentiment_multi-GPU.nml (100%) rename {Classification => 
DataSets}/Video/HumanAction/.gitignore (100%) rename {Classification => DataSets}/Video/HumanAction/README.md (100%) rename {Classification => DataSets}/Video/HumanAction/build_csv.py (100%) rename {Classification => DataSets}/Video/HumanAction/video_class.nml (100%) rename {Classification => DataSets}/Video/HumanAction/video_class_auto.nml (100%) diff --git a/Classification/Audio/MusicGenre/.gitignore b/DataSets/Audio/MusicGenre/.gitignore similarity index 100% rename from Classification/Audio/MusicGenre/.gitignore rename to DataSets/Audio/MusicGenre/.gitignore diff --git a/Classification/Audio/MusicGenre/README.md b/DataSets/Audio/MusicGenre/README.md similarity index 100% rename from Classification/Audio/MusicGenre/README.md rename to DataSets/Audio/MusicGenre/README.md diff --git a/Classification/Audio/MusicGenre/build_csv.py b/DataSets/Audio/MusicGenre/build_csv.py similarity index 100% rename from Classification/Audio/MusicGenre/build_csv.py rename to DataSets/Audio/MusicGenre/build_csv.py diff --git a/Classification/Audio/MusicGenre/music_classification_auto.nml b/DataSets/Audio/MusicGenre/music_classification_auto.nml similarity index 100% rename from Classification/Audio/MusicGenre/music_classification_auto.nml rename to DataSets/Audio/MusicGenre/music_classification_auto.nml diff --git a/Classification/Audio/MusicGenre/music_spectrogram.nml b/DataSets/Audio/MusicGenre/music_spectrogram.nml similarity index 100% rename from Classification/Audio/MusicGenre/music_spectrogram.nml rename to DataSets/Audio/MusicGenre/music_spectrogram.nml diff --git a/Classification/Audio/MusicGenre/music_vector_capsule.nml b/DataSets/Audio/MusicGenre/music_vector_capsule.nml similarity index 100% rename from Classification/Audio/MusicGenre/music_vector_capsule.nml rename to DataSets/Audio/MusicGenre/music_vector_capsule.nml diff --git a/Classification/Audio/MusicGenre/music_vector_capsule_auto.nml b/DataSets/Audio/MusicGenre/music_vector_capsule_auto.nml similarity index 100% 
rename from Classification/Audio/MusicGenre/music_vector_capsule_auto.nml rename to DataSets/Audio/MusicGenre/music_vector_capsule_auto.nml diff --git a/Classification/Dicom/IXIT1_BrainSex/.gitignore b/DataSets/Dicom/IXIT1_BrainSex/.gitignore similarity index 100% rename from Classification/Dicom/IXIT1_BrainSex/.gitignore rename to DataSets/Dicom/IXIT1_BrainSex/.gitignore diff --git a/Classification/Dicom/IXIT1_BrainSex/README.md b/DataSets/Dicom/IXIT1_BrainSex/README.md similarity index 100% rename from Classification/Dicom/IXIT1_BrainSex/README.md rename to DataSets/Dicom/IXIT1_BrainSex/README.md diff --git a/Classification/Dicom/IXIT1_BrainSex/build_csv.py b/DataSets/Dicom/IXIT1_BrainSex/build_csv.py similarity index 100% rename from Classification/Dicom/IXIT1_BrainSex/build_csv.py rename to DataSets/Dicom/IXIT1_BrainSex/build_csv.py diff --git a/Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml b/DataSets/Dicom/IXIT1_BrainSex/dicom_sex.nml similarity index 100% rename from Classification/Dicom/IXIT1_BrainSex/dicom_sex.nml rename to DataSets/Dicom/IXIT1_BrainSex/dicom_sex.nml diff --git a/Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml b/DataSets/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml similarity index 100% rename from Classification/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml rename to DataSets/Dicom/IXIT1_BrainSex/dicom_sex_auto.nml diff --git a/Classification/Image/CIFAR10/.gitignore b/DataSets/Image/CIFAR10/.gitignore similarity index 100% rename from Classification/Image/CIFAR10/.gitignore rename to DataSets/Image/CIFAR10/.gitignore diff --git a/Classification/Image/CIFAR10/README.md b/DataSets/Image/CIFAR10/README.md similarity index 100% rename from Classification/Image/CIFAR10/README.md rename to DataSets/Image/CIFAR10/README.md diff --git a/Classification/Image/CIFAR10/build_csv.py b/DataSets/Image/CIFAR10/build_csv.py similarity index 100% rename from Classification/Image/CIFAR10/build_csv.py rename to DataSets/Image/CIFAR10/build_csv.py diff --git 
a/Classification/Image/CIFAR10/cifar10_call_auto.nml b/DataSets/Image/CIFAR10/cifar10_call_auto.nml similarity index 100% rename from Classification/Image/CIFAR10/cifar10_call_auto.nml rename to DataSets/Image/CIFAR10/cifar10_call_auto.nml diff --git a/Classification/Image/CIFAR10/cifar10_choice_auto.nml b/DataSets/Image/CIFAR10/cifar10_choice_auto.nml similarity index 100% rename from Classification/Image/CIFAR10/cifar10_choice_auto.nml rename to DataSets/Image/CIFAR10/cifar10_choice_auto.nml diff --git a/Classification/Image/CIFAR10/cifar10_dist_auto.nml b/DataSets/Image/CIFAR10/cifar10_dist_auto.nml similarity index 100% rename from Classification/Image/CIFAR10/cifar10_dist_auto.nml rename to DataSets/Image/CIFAR10/cifar10_dist_auto.nml diff --git a/Classification/Image/CIFAR10/cifar10_full_auto.nml b/DataSets/Image/CIFAR10/cifar10_full_auto.nml similarity index 100% rename from Classification/Image/CIFAR10/cifar10_full_auto.nml rename to DataSets/Image/CIFAR10/cifar10_full_auto.nml diff --git a/Classification/Image/CIFAR100/.gitignore b/DataSets/Image/CIFAR100/.gitignore similarity index 100% rename from Classification/Image/CIFAR100/.gitignore rename to DataSets/Image/CIFAR100/.gitignore diff --git a/Classification/Image/CIFAR100/README.md b/DataSets/Image/CIFAR100/README.md similarity index 100% rename from Classification/Image/CIFAR100/README.md rename to DataSets/Image/CIFAR100/README.md diff --git a/Classification/Image/CIFAR100/build_csv.py b/DataSets/Image/CIFAR100/build_csv.py similarity index 100% rename from Classification/Image/CIFAR100/build_csv.py rename to DataSets/Image/CIFAR100/build_csv.py diff --git a/Classification/Image/CIFAR100/cifar100_call_auto.nml b/DataSets/Image/CIFAR100/cifar100_call_auto.nml similarity index 100% rename from Classification/Image/CIFAR100/cifar100_call_auto.nml rename to DataSets/Image/CIFAR100/cifar100_call_auto.nml diff --git a/Classification/Image/CIFAR100/cifar100_choice_auto.nml 
b/DataSets/Image/CIFAR100/cifar100_choice_auto.nml similarity index 100% rename from Classification/Image/CIFAR100/cifar100_choice_auto.nml rename to DataSets/Image/CIFAR100/cifar100_choice_auto.nml diff --git a/Classification/Image/CIFAR100/cifar100_dist_auto.nml b/DataSets/Image/CIFAR100/cifar100_dist_auto.nml similarity index 100% rename from Classification/Image/CIFAR100/cifar100_dist_auto.nml rename to DataSets/Image/CIFAR100/cifar100_dist_auto.nml diff --git a/Classification/Image/CIFAR100/cifar100_full_auto.nml b/DataSets/Image/CIFAR100/cifar100_full_auto.nml similarity index 100% rename from Classification/Image/CIFAR100/cifar100_full_auto.nml rename to DataSets/Image/CIFAR100/cifar100_full_auto.nml diff --git a/Classification/Image/MNIST/.gitignore b/DataSets/Image/MNIST/.gitignore similarity index 100% rename from Classification/Image/MNIST/.gitignore rename to DataSets/Image/MNIST/.gitignore diff --git a/Classification/Image/MNIST/README.md b/DataSets/Image/MNIST/README.md similarity index 99% rename from Classification/Image/MNIST/README.md rename to DataSets/Image/MNIST/README.md index 61a8e81..033db69 100644 --- a/Classification/Image/MNIST/README.md +++ b/DataSets/Image/MNIST/README.md @@ -42,7 +42,7 @@ bind = "/DM-Dash/NeoPulse_Examples/Classification/Image/MNIST/training_data.csv" **mnist_KmeansPCA.nml:** Demonstrates using k-means + PCA for clustering. -**mnist_KmeansPCA_batch.nml:** Demonstrates batch processing for k-meanst + PCA. +**mnist_KmeansPCA_batch.nml:** Demonstrates batch processing for k-means + PCA. **mnist_matrix_capsule_auto.nml:** Demonstrates matrix capsule networks using the oracle. 
diff --git a/Classification/Image/MNIST/build_csv.py b/DataSets/Image/MNIST/build_csv.py similarity index 100% rename from Classification/Image/MNIST/build_csv.py rename to DataSets/Image/MNIST/build_csv.py diff --git a/Classification/Image/MNIST/mnist_KmeansPCA.nml b/DataSets/Image/MNIST/mnist_KmeansPCA.nml similarity index 100% rename from Classification/Image/MNIST/mnist_KmeansPCA.nml rename to DataSets/Image/MNIST/mnist_KmeansPCA.nml diff --git a/Classification/Image/MNIST/mnist_KmeansPCA_batch.nml b/DataSets/Image/MNIST/mnist_KmeansPCA_batch.nml similarity index 100% rename from Classification/Image/MNIST/mnist_KmeansPCA_batch.nml rename to DataSets/Image/MNIST/mnist_KmeansPCA_batch.nml diff --git a/Classification/Image/MNIST/mnist_call_auto.nml b/DataSets/Image/MNIST/mnist_call_auto.nml similarity index 100% rename from Classification/Image/MNIST/mnist_call_auto.nml rename to DataSets/Image/MNIST/mnist_call_auto.nml diff --git a/Classification/Image/MNIST/mnist_choice_auto.nml b/DataSets/Image/MNIST/mnist_choice_auto.nml similarity index 100% rename from Classification/Image/MNIST/mnist_choice_auto.nml rename to DataSets/Image/MNIST/mnist_choice_auto.nml diff --git a/Classification/Image/MNIST/mnist_dist_auto.nml b/DataSets/Image/MNIST/mnist_dist_auto.nml similarity index 100% rename from Classification/Image/MNIST/mnist_dist_auto.nml rename to DataSets/Image/MNIST/mnist_dist_auto.nml diff --git a/Classification/Image/MNIST/mnist_full_auto.nml b/DataSets/Image/MNIST/mnist_full_auto.nml similarity index 100% rename from Classification/Image/MNIST/mnist_full_auto.nml rename to DataSets/Image/MNIST/mnist_full_auto.nml diff --git a/Classification/Image/MNIST/mnist_kmeans.nml b/DataSets/Image/MNIST/mnist_kmeans.nml similarity index 100% rename from Classification/Image/MNIST/mnist_kmeans.nml rename to DataSets/Image/MNIST/mnist_kmeans.nml diff --git a/Classification/Image/MNIST/mnist_kmeans_batch.nml b/DataSets/Image/MNIST/mnist_kmeans_batch.nml similarity index 
100% rename from Classification/Image/MNIST/mnist_kmeans_batch.nml rename to DataSets/Image/MNIST/mnist_kmeans_batch.nml diff --git a/Classification/Image/MNIST/mnist_matrix_capsule.nml b/DataSets/Image/MNIST/mnist_matrix_capsule.nml similarity index 100% rename from Classification/Image/MNIST/mnist_matrix_capsule.nml rename to DataSets/Image/MNIST/mnist_matrix_capsule.nml diff --git a/Classification/Image/MNIST/mnist_matrix_capsule_auto.nml b/DataSets/Image/MNIST/mnist_matrix_capsule_auto.nml similarity index 100% rename from Classification/Image/MNIST/mnist_matrix_capsule_auto.nml rename to DataSets/Image/MNIST/mnist_matrix_capsule_auto.nml diff --git a/Classification/Image/MNIST/mnist_pca.nml b/DataSets/Image/MNIST/mnist_pca.nml similarity index 100% rename from Classification/Image/MNIST/mnist_pca.nml rename to DataSets/Image/MNIST/mnist_pca.nml diff --git a/Classification/Image/MNIST/mnist_pca_batch.nml b/DataSets/Image/MNIST/mnist_pca_batch.nml similarity index 100% rename from Classification/Image/MNIST/mnist_pca_batch.nml rename to DataSets/Image/MNIST/mnist_pca_batch.nml diff --git a/Classification/Image/MNIST/mnist_spectral.nml b/DataSets/Image/MNIST/mnist_spectral.nml similarity index 100% rename from Classification/Image/MNIST/mnist_spectral.nml rename to DataSets/Image/MNIST/mnist_spectral.nml diff --git a/Classification/Image/MNIST/mnist_vector_capsule.nml b/DataSets/Image/MNIST/mnist_vector_capsule.nml similarity index 100% rename from Classification/Image/MNIST/mnist_vector_capsule.nml rename to DataSets/Image/MNIST/mnist_vector_capsule.nml diff --git a/Classification/Image/MNIST/mnist_vector_capsule_auto.nml b/DataSets/Image/MNIST/mnist_vector_capsule_auto.nml similarity index 100% rename from Classification/Image/MNIST/mnist_vector_capsule_auto.nml rename to DataSets/Image/MNIST/mnist_vector_capsule_auto.nml diff --git a/Classification/Image/README.md b/DataSets/Image/README.md similarity index 89% rename from Classification/Image/README.md rename 
to DataSets/Image/README.md index b94ee32..b446540 100644 --- a/Classification/Image/README.md +++ b/DataSets/Image/README.md @@ -8,6 +8,8 @@ The CIFAR-10 dataset features 60,000 32x32 color images among 10 classes (6,000 The MNIST dataset features 60,000 handwritten digits with 10,000 reserved for test. More information on the datasets and data formats can be found at the links above. +The VOC2012 dataset is an example dataset for training a Single Shot MultiBox Detector model for drawing bounding boxes around objects and classifying them. + # Tutorial Videos and Guides Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-developer-portal) diff --git a/ImageDetection/ssd/VOC2012/.gitignore b/DataSets/Image/VOC2012/.gitignore similarity index 100% rename from ImageDetection/ssd/VOC2012/.gitignore rename to DataSets/Image/VOC2012/.gitignore diff --git a/ImageDetection/ssd/VOC2012/README.md b/DataSets/Image/VOC2012/README.md similarity index 100% rename from ImageDetection/ssd/VOC2012/README.md rename to DataSets/Image/VOC2012/README.md diff --git a/ImageDetection/ssd/VOC2012/build_csv.py b/DataSets/Image/VOC2012/build_csv.py similarity index 100% rename from ImageDetection/ssd/VOC2012/build_csv.py rename to DataSets/Image/VOC2012/build_csv.py diff --git a/ImageDetection/ssd/VOC2012/ssd300.nml b/DataSets/Image/VOC2012/ssd300.nml similarity index 100% rename from ImageDetection/ssd/VOC2012/ssd300.nml rename to DataSets/Image/VOC2012/ssd300.nml diff --git a/Classification/Text/Chinese_news/README.md b/DataSets/Text/Chinese_news/README.md similarity index 100% rename from Classification/Text/Chinese_news/README.md rename to DataSets/Text/Chinese_news/README.md diff --git a/Classification/Text/Chinese_news/build_csv.py b/DataSets/Text/Chinese_news/build_csv.py similarity index 100% rename from Classification/Text/Chinese_news/build_csv.py rename to 
DataSets/Text/Chinese_news/build_csv.py diff --git a/Classification/Text/Chinese_news/cnews.nml b/DataSets/Text/Chinese_news/cnews.nml similarity index 100% rename from Classification/Text/Chinese_news/cnews.nml rename to DataSets/Text/Chinese_news/cnews.nml diff --git a/Classification/Text/Sentiment/.gitignore b/DataSets/Text/Sentiment/.gitignore similarity index 100% rename from Classification/Text/Sentiment/.gitignore rename to DataSets/Text/Sentiment/.gitignore diff --git a/Classification/Text/Sentiment/README.md b/DataSets/Text/Sentiment/README.md similarity index 100% rename from Classification/Text/Sentiment/README.md rename to DataSets/Text/Sentiment/README.md diff --git a/Classification/Text/Sentiment/build_csv.py b/DataSets/Text/Sentiment/build_csv.py similarity index 100% rename from Classification/Text/Sentiment/build_csv.py rename to DataSets/Text/Sentiment/build_csv.py diff --git a/Classification/Text/Sentiment/sentiment_call_auto.nml b/DataSets/Text/Sentiment/sentiment_call_auto.nml similarity index 100% rename from Classification/Text/Sentiment/sentiment_call_auto.nml rename to DataSets/Text/Sentiment/sentiment_call_auto.nml diff --git a/Classification/Text/Sentiment/sentiment_choice_auto.nml b/DataSets/Text/Sentiment/sentiment_choice_auto.nml similarity index 100% rename from Classification/Text/Sentiment/sentiment_choice_auto.nml rename to DataSets/Text/Sentiment/sentiment_choice_auto.nml diff --git a/Classification/Text/Sentiment/sentiment_dist_auto.nml b/DataSets/Text/Sentiment/sentiment_dist_auto.nml similarity index 100% rename from Classification/Text/Sentiment/sentiment_dist_auto.nml rename to DataSets/Text/Sentiment/sentiment_dist_auto.nml diff --git a/Classification/Text/Sentiment/sentiment_full_auto.nml b/DataSets/Text/Sentiment/sentiment_full_auto.nml similarity index 100% rename from Classification/Text/Sentiment/sentiment_full_auto.nml rename to DataSets/Text/Sentiment/sentiment_full_auto.nml diff --git 
a/Classification/Text/Sentiment/sentiment_multi-GPU.nml b/DataSets/Text/Sentiment/sentiment_multi-GPU.nml similarity index 100% rename from Classification/Text/Sentiment/sentiment_multi-GPU.nml rename to DataSets/Text/Sentiment/sentiment_multi-GPU.nml diff --git a/Classification/Video/HumanAction/.gitignore b/DataSets/Video/HumanAction/.gitignore similarity index 100% rename from Classification/Video/HumanAction/.gitignore rename to DataSets/Video/HumanAction/.gitignore diff --git a/Classification/Video/HumanAction/README.md b/DataSets/Video/HumanAction/README.md similarity index 100% rename from Classification/Video/HumanAction/README.md rename to DataSets/Video/HumanAction/README.md diff --git a/Classification/Video/HumanAction/build_csv.py b/DataSets/Video/HumanAction/build_csv.py similarity index 100% rename from Classification/Video/HumanAction/build_csv.py rename to DataSets/Video/HumanAction/build_csv.py diff --git a/Classification/Video/HumanAction/video_class.nml b/DataSets/Video/HumanAction/video_class.nml similarity index 100% rename from Classification/Video/HumanAction/video_class.nml rename to DataSets/Video/HumanAction/video_class.nml diff --git a/Classification/Video/HumanAction/video_class_auto.nml b/DataSets/Video/HumanAction/video_class_auto.nml similarity index 100% rename from Classification/Video/HumanAction/video_class_auto.nml rename to DataSets/Video/HumanAction/video_class_auto.nml From a9049c0230052bfce206c1320cee397b8e12bd78 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 14:09:45 -0700 Subject: [PATCH 26/31] Remove .DS_Store --- Regression/.DS_Store | Bin 6148 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 Regression/.DS_Store diff --git a/Regression/.DS_Store b/Regression/.DS_Store deleted file mode 100644 index 6e6f4ea80854cbf9e8c8663bac262d00f6e8d2f8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 
zcmeHKyH3ME5S)d8BGIIzykFoCtSBj{k*I+rq6xADq<6)4@oCIHgk)JRC}?0-+MT=g z&Ye7k*9*Ybr~M7E0igz5sv)8cYBH From ca7335137da5fa2b4caceb250e7457c0d82ba3dd Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 14:11:26 -0700 Subject: [PATCH 27/31] update .gitignore, README.md --- .gitignore | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index e43b0f9..5509140 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1 @@ -.DS_Store +*.DS_Store diff --git a/README.md b/README.md index dbc58be..82a73a8 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Introduction This is a repository for [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf) sample training files. -There are training examples for the following data and task types: Classification (Image, Audio, Video) and Regression (Text, Vector, Image, Audio, Video). Examples are added periodically. You will find a classification example using text data in the /DM-Dash/examples/sentiment/ folder on your NeoPulse AI Studio instance. A brief tutorial is also available in the Tutorials & Guides section of the Developer Portal (see link below). +There are training examples for the following data and task types: Classification (Image, Audio, Video) and Regression (Text, Vector, Image, Audio, Video). Examples are added periodically. A brief tutorial is also available in the Tutorials & Guides section of the Developer Portal (see link below). Under each data type folder you will find sample NeoPulse™ Modeling Language (NML) code as well as information on the sample dataset used. From 32db770ad8d0ce961fb78de2d62020c809a7fe30 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 14:17:23 -0700 Subject: [PATCH 28/31] Remove Duplicate sentiment dataset, and convert SAGEMAKER_README.md. 
--- .../README.md => SAGEMAKER_README.md} | 0 SageMaker/Sentiment/.gitignore | 3 -- SageMaker/Sentiment/build_csv.py | 52 ------------------- SageMaker/Sentiment/sentiment_call_auto.nml | 39 -------------- SageMaker/Sentiment/sentiment_choice_auto.nml | 39 -------------- SageMaker/Sentiment/sentiment_dist_auto.nml | 38 -------------- SageMaker/Sentiment/sentiment_full_auto.nml | 32 ------------ SageMaker/Sentiment/sentiment_multi-GPU.nml | 35 ------------- 8 files changed, 238 deletions(-) rename SageMaker/{Sentiment/README.md => SAGEMAKER_README.md} (100%) delete mode 100644 SageMaker/Sentiment/.gitignore delete mode 100644 SageMaker/Sentiment/build_csv.py delete mode 100644 SageMaker/Sentiment/sentiment_call_auto.nml delete mode 100644 SageMaker/Sentiment/sentiment_choice_auto.nml delete mode 100644 SageMaker/Sentiment/sentiment_dist_auto.nml delete mode 100644 SageMaker/Sentiment/sentiment_full_auto.nml delete mode 100644 SageMaker/Sentiment/sentiment_multi-GPU.nml diff --git a/SageMaker/Sentiment/README.md b/SageMaker/SAGEMAKER_README.md similarity index 100% rename from SageMaker/Sentiment/README.md rename to SageMaker/SAGEMAKER_README.md diff --git a/SageMaker/Sentiment/.gitignore b/SageMaker/Sentiment/.gitignore deleted file mode 100644 index 2879cbd..0000000 --- a/SageMaker/Sentiment/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -raw_data/ -aclImdb/ -training_data.csv diff --git a/SageMaker/Sentiment/build_csv.py b/SageMaker/Sentiment/build_csv.py deleted file mode 100644 index f9574c5..0000000 --- a/SageMaker/Sentiment/build_csv.py +++ /dev/null @@ -1,52 +0,0 @@ -import shutil -import tarfile -from pathlib import Path - -import pandas as pd -import requests -from natsort import humansorted -from sklearn.datasets import load_files - - -def download_data(): - ''' - Check if raw IMDB data is present. If not, download data from the official site. 
- ''' - Path('raw_data').mkdir(parents=True, exist_ok=True) - - URL = 'http://ai.stanford.edu/~amaas/data/sentiment/' - file_list = ['aclImdb_v1.tar.gz'] - for f in file_list: - if not Path('raw_data/' + f).is_file(): - r = requests.get(URL + f, stream=True) - with open('raw_data/' + f, 'wb') as f_z: - shutil.copyfileobj(r.raw, f_z) - - tarfile.open('raw_data/' + f).extractall() - - -def write_data(dir, save_as): - ''' - Write a csv file containing the text and labels. - ''' - df = pd.DataFrame() - shutil.move('aclImdb/train/unsup', '.') - for d in humansorted([str(p) for p in Path(dir).iterdir() if p.is_dir()], reverse=True): - print(d) - data = load_files(d) - pd_form = {"Review": data.data, "Label": data.target} - df = df.append(pd.DataFrame(pd_form)) - shutil.move('unsup', 'aclImdb/train') - df.to_csv(save_as, index=False) - - -def load_query(direc, save_as): - data = load_files(direc) - pd_form = {"Review": data.data} - pd.DataFrame(pd_form).loc[1:5, :].to_csv(save_as, index=False) - -if __name__ == "__main__": - - download_data() - - write_data('aclImdb', 'training_data.csv') diff --git a/SageMaker/Sentiment/sentiment_call_auto.nml b/SageMaker/Sentiment/sentiment_call_auto.nml deleted file mode 100644 index cf9e50f..0000000 --- a/SageMaker/Sentiment/sentiment_call_auto.nml +++ /dev/null @@ -1,39 +0,0 @@ -oracle("generated") = 1 -oracle("complexity") = 0.1 -oracle("regularization") = 0.99 - -source: - bind = "training_data.csv" ; - input: - x ~ from "Review" - -> text: [200] - -> TextDataGenerator: [nb_words=20000] ; - output: - y ~ from "Label" - -> flat: [2] - -> FlatDataGenerator: [] ; - params: - validation_split = 0.5, - batch_size = 1250 ; - -architecture: - input: x ~ text: [200] ; - output: y ~ flat: [2] ; - - x -> Embedding: [20000, 128] - -> Dropout: auto - -> Conv1D: auto - -> MaxPooling1D: [pool_size=4] - -> LSTM: [128] - -> Dense: [2, activation='softmax'] - -> y ; - -train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', 
- metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: - save_on = 'val_acc' ; diff --git a/SageMaker/Sentiment/sentiment_choice_auto.nml b/SageMaker/Sentiment/sentiment_choice_auto.nml deleted file mode 100644 index 00aee12..0000000 --- a/SageMaker/Sentiment/sentiment_choice_auto.nml +++ /dev/null @@ -1,39 +0,0 @@ -oracle("generated") = 1 -oracle("complexity") = 0.1 -oracle("regularization") = 0.99 - -source: - bind = "training_data.csv" ; - input: - x ~ from "Review" - -> text: [200] - -> TextDataGenerator: [nb_words=20000] ; - output: - y ~ from "Label" - -> flat: [2] - -> FlatDataGenerator: [] ; - params: - validation_split = 0.5, - batch_size = 1250 ; - -architecture: - input: x ~ text: [200] ; - output: y ~ flat: [2] ; - - x -> Embedding: [20000, 128] - -> Dropout: [auto(0.25 ? 0.50 | name="Drop")] - -> Convolution1D: [64, 4] - -> MaxPooling1D: [pool_size=4] - -> LSTM: [128] - -> Dense: [2, activation='softmax'] - -> y ; - -train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: - save_on = 'val_acc' ; diff --git a/SageMaker/Sentiment/sentiment_dist_auto.nml b/SageMaker/Sentiment/sentiment_dist_auto.nml deleted file mode 100644 index 409da53..0000000 --- a/SageMaker/Sentiment/sentiment_dist_auto.nml +++ /dev/null @@ -1,38 +0,0 @@ -oracle("generated") = 4 -oracle("complexity") = 0.1 -oracle("regularization") = 0.99 - -source: - bind = "training_data.csv" ; - input: - x ~ from "Review" - -> text: [200] - -> TextDataGenerator: [nb_words=20000] ; - output: - y ~ from "Label" - -> flat: [2] - -> FlatDataGenerator: [] ; - params: - validation_split = 0.5, - batch_size = 1250 ; - -architecture: - input: x ~ text: [200] ; - output: y ~ flat: [2] ; - - x -> Embedding: [20000, 128] - -> Dropout: [auto(dist="uniform", low=0.25, high=0.75, cast="float" | count=4, name="Drop")] - -> Convolution1D: [64, 4] - -> MaxPooling1D: [pool_size=4] - -> LSTM: [128] - -> Dense: [2, activation 
= 'softmax'] -> y ; - -train: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: - save_on = 'val_acc' ; diff --git a/SageMaker/Sentiment/sentiment_full_auto.nml b/SageMaker/Sentiment/sentiment_full_auto.nml deleted file mode 100644 index cc6ba29..0000000 --- a/SageMaker/Sentiment/sentiment_full_auto.nml +++ /dev/null @@ -1,32 +0,0 @@ -oracle("mode") = "classification" -oracle("complexity") = 0.1 - -source: - bind = "training_data.csv" ; - input: - x ~ from "Review" - -> text: [200] - -> TextDataGenerator: [nb_words=20000] ; - output: - y ~ from "Label" - -> flat: [2] - -> FlatDataGenerator: [] ; - params: - validation_split = 0.5 - batch_size = 1250 ; - -architecture: - input: x ~ text: [200] ; - output: y ~ flat: [2] ; - - x -> auto -> y ; - -train: - compile: - optimizer = auto, - loss = auto, - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: - save_on = 'val_acc' ; diff --git a/SageMaker/Sentiment/sentiment_multi-GPU.nml b/SageMaker/Sentiment/sentiment_multi-GPU.nml deleted file mode 100644 index 21c0ea9..0000000 --- a/SageMaker/Sentiment/sentiment_multi-GPU.nml +++ /dev/null @@ -1,35 +0,0 @@ -source: - bind = "training_data.csv" ; - input: - x ~ from "Review" - -> text: [200] - -> TextDataGenerator: [nb_words=20000] ; - output: - y ~ from "Label" - -> flat: [2] - -> FlatDataGenerator: [] ; - params: - validation_split = 0.5, - batch_size = 1250; - -architecture: - input: x ~ text: [100] ; - output: y ~ flat: [2] ; - - x -> Embedding: [20000, 128] - -> Dropout: [0.5] - -> Convolution1D: [64, 4] - -> MaxPooling1D: [pool_size=4] - -> LSTM: [128] - -> Dense: [2, activation='softmax'] - -> y ; - -train Ngpu 2: - compile: - optimizer = 'rmsprop', - loss = 'categorical_crossentropy', - metrics = ['accuracy'] ; - run: - epochs = 4 ; - dashboard: - save_on = 'val_acc' ; From fb40ed856aecada2a10c11af3cc2fedf6c4e71fe Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 
2019 16:05:08 -0700 Subject: [PATCH 29/31] Fix column headers --- DataSets/Audio/MusicGenre/build_csv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DataSets/Audio/MusicGenre/build_csv.py b/DataSets/Audio/MusicGenre/build_csv.py index f9c1c66..f1d1773 100644 --- a/DataSets/Audio/MusicGenre/build_csv.py +++ b/DataSets/Audio/MusicGenre/build_csv.py @@ -68,7 +68,7 @@ def write_file(validation_split): # Write the CSV file. with open('training_data.csv', 'w') as of: - of.write('Audio,Genre\n') + of.write('Audio,Label\n') for l in train: of.write(l) for l in valid: From 11befd532277ad4aa43d68a9d906d31597af8327 Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Wed, 15 May 2019 16:08:56 -0700 Subject: [PATCH 30/31] typo --- DataSets/Audio/MusicGenre/music_classification_auto.nml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DataSets/Audio/MusicGenre/music_classification_auto.nml b/DataSets/Audio/MusicGenre/music_classification_auto.nml index 10aa51c..aac58d5 100644 --- a/DataSets/Audio/MusicGenre/music_classification_auto.nml +++ b/DataSets/Audio/MusicGenre/music_classification_auto.nml @@ -1,4 +1,4 @@ -oracle("mode")="classification" +oracle("mode") = "classification" source: bind = "training_data.csv" ; From 1c4568e481bcc1c738a3dd4ee2c4ba77d404a60e Mon Sep 17 00:00:00 2001 From: Jason Ellis Date: Thu, 16 May 2019 08:08:49 -0700 Subject: [PATCH 31/31] Update README.md --- README.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 82a73a8..7c309e4 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,19 @@ -# Introduction +# NeoPulse 3.0 Examples + +## Introduction This is a repository for [NeoPulse™ AI Studio](https://aws.amazon.com/marketplace/pp/B074NDG36S/ref=vdr_rf) sample training files. There are training examples for the following data and task types: Classification (Image, Audio, Video) and Regression (Text, Vector, Image, Audio, Video). Examples are added periodically. 
A brief tutorial is also available in the Tutorials & Guides section of the Developer Portal (see link below). Under each data type folder you will find sample NeoPulse™ Modeling Language (NML) code as well as information on the sample dataset used. -# Tutorial Videos and Guides +## Tutorial Videos and Guides Tutorial videos are available in the *Tutorials & Guides* section of the [DimensionalMechanics™ Developer Portal](https://dimensionalmechanics.com/ai-neopulse-tutorials). -# Tutorial Files +## Tutorial Files The NeoPulse™ Modeling Language (NML) scripts can be used to train text, vector, image, audio, and video classification and regression data in NeoPulse™ AI Studio. Each file demonstrates a different level of direct hints in building a model architecture (more details on [direct hints](https://docs.neopulse.ai/NML-Oracle-direct/)): -# License +## License Tutorial materials are published under the MIT license. See license for commercial, academic, and personal use. You are welcome to modify these tutorial files. If citing please link to this repository.