From 2c3cb4ff71b6ed29e3b453c98a509633df158b1d Mon Sep 17 00:00:00 2001 From: Gabriel Dunne Date: Mon, 25 Mar 2013 20:41:27 -0700 Subject: [PATCH] Moved things around and added some gif tools --- docs/PrelingerMostUsedFacetTerms.xls | Bin 0 -> 73216 bytes public/data/prelinger_extended-search.json | 2 +- public/js/script.js | 2 +- routes/index.js | 4 +- scripts/createGifFromImageSeaq.py | 28 + scripts/images2gif.py | 1068 ++++++++++++++++++++ views/index.ejs | 48 +- 7 files changed, 1122 insertions(+), 30 deletions(-) create mode 100644 docs/PrelingerMostUsedFacetTerms.xls create mode 100644 scripts/createGifFromImageSeaq.py create mode 100644 scripts/images2gif.py diff --git a/docs/PrelingerMostUsedFacetTerms.xls b/docs/PrelingerMostUsedFacetTerms.xls new file mode 100644 index 0000000000000000000000000000000000000000..e8d1a7a9b2ee0e36f80722ea224dc715cd80d014 GIT binary patch literal 73216 zcmeHwd3;?*b^noL$96U)v9)hcyKGC2_atUXWLaJkFG5yq0u*?Xo~0K*y(gaD^Aez# zgs|^~t)&H8N=s>JDJ`Wfh0ql!OQB_LX~WWmE|k6O{(ZkQ_sqTT>Zl3*^^aeFALP%q z;>i@45c^-bR!}X&} zmo6!M5x?NSKl%Az(gQz?+Ibni+iF~EaPgb3#kCICdR$lFx)Rq_xUR-^4X%1zU;()f z*Y&tI;M$0*0aqigCS1+9T5z@EYQxozs{>ajt}a{|T-~^OaP{Kq!_|*#0M{U{AzYhq zZN{|)*D$UTTw8J7fNLAB?YMT}+KFoyuHCry;JOjlO}K8xwHMbTa6J;&qi{VM*Dbhi z#Wjj+AFeT6`*DrqI)LjSu0yyEm;sI zxE_n^PF#<}^>|!Q!1YtOo`~y7xSotFi)$L!46a#RIb5f4&EcBImB)1#t~0m_xE62~ zag}f_;(7|MGOi_D6!1v z@K(v(1?eVxxzej4@-?{nD)5lKaK~#gUyu@XJL) z(O(zw??wIxzdZ231NaC3y9k=hlTpV5@`eW=gndI3fXl@PK*L(`fBxaGu0Gs@Us(qL zk&j-IJn#oEJ&5Ah%9}s zWebIoYB4vPujFR$!=UEAZxFs*fqJ>@`vlf4mUER#K3BdEgOZmyvFtjXoyp;@k)>R= zJTpHslda_DN>GkVTPnHo!V(8dUa4N|lG2*H!*3_xAuAwA1~3`If=N z??d?g-jPq=GxF)L-m)q2`y0`iI=m(uTqpO)aNTg7-mm}GVT7~_qsgbc(WKPfe6i+1ESdMr-Jk<@KJJDd%!U^c1V) z7a4S(aXtLpWsN;?=EAvhzJf0EkKg5n$MtY;myu583c0iBU3BSKX<;E(p1}~9kJ#<{ zgJK%`Bgf%!W-MRK;Ukph%Gt$vbR0Rb*Lmjk@WI^!PM7mD*}a+Z^NWR2Ir?5~a>hCD z^>Eu`GQ1L=3VM7~@@$WD?(5;wTQqFhHc&M<_g~p@bk4mhZ|`$4fzZ=)sJU9;m)M)x zmo4HCYTy^_b#a5x;w0TzzEZC8K_J`WLJRc>9=O&d$@XXS;G4t6+3He-48#|_!9^kJ 
zVGNg%AIQRVgMnu={LOzFHe`znT9{8bP%6zP9JhD2{)8m?-pp9GSagr! zm)z+>A`k1fTsh}BfKR*0MN#Sz2YINc9Vl1xmA#q$7^cj!>u6syV4k#Y>^86-1M4-g zJ_GAFumJ-bG_WB9V>Bq;=FJAS#lVIQY{bAAT}nUW1_Rq>V2nnk?`3oB&|J=(x-Nnxv3foFl$BgXeoyn8g4 zt$5|LO~{`Q=bniiH3?P34i{alM-1_yo-?9Pe&T=YJDE;?#z(1{Xs zDRtc*ymzyUpw`1;xyth8nR50t`jLH-ago)a==0?Qj7CzGfu_YpUh84SqWc#MS=T+R zpvi?~!|NQdYV|=E zIu6FUIG2M_9m_5(4&(1a{&cBa%t!Cj9s1oeU&1cW%IjCS?cDk^2cC`MdRyB^$8^PGL2Vt)yaANUB}IlD=} zFLgxtzM;zM>C@Rl$G(ck7dcn-c(b81V+qi2dSQRb@jd&g!-emcJ?B7vY2H7l)5Z3eRdJx~>p&G5 z7x`b-FJUms)^@m=fn|^J0=UB&&RfwhX{9%6VEYVg%)s^=7$O+Uo^imy4jR}Y1LHJC zy3My4*zE>(#K4Xk*f9e;ZeSAz#%Yvv`#7zV#!eU*r(M!?oQ6qbCk>3#G-o@iiCN?~NsTY)WO&`Bq_6qHssr&SN9BB+bVC%X})gKoAf zR(+Aj2DLRCJUJ);s@qOCDOMv;tY~lMXcdD<>d}5TWfnXT>IhFUQ7+9^XQIb~u-nb6 z)#!kT5au%%nWZ5&@fO^xxVFjabS`tSRG6L4l_Q_2-%ZdNPd+h|hqBL~&d+!~H|S>Z zjQ2fJDJ_mHcz4kIQ$237FPNA3`BSAu=vR2F?A{hP9T*H_v6x*f74nhoX?OF4!NwIU z^W_qXOLLJY*6HRD<1Ve_M`qyHRg3v0SV7wDcA;n`YRHc)RnV&}=i__b>|^kZ@T#Sm z`Q>J-)lEjmD9yp3M`RPZ z5(ZvTzc;x#*I=c~`MXO+yeseC;HF~h5sPr*P>z^w*v-z?W3Z+$hh(FSWmJ~%rn$oD zkxI5YHy_b&b~Cy47)_e=Q@NS>V*V-B=*`;PoNoxP;Gts}2?cAn(M<1dgj<6o1p{NAK>FUN8rahe?CA#f(+2j86t;TRR>yz! 
z@N1l8RZ0u_nUP|3rhxes#7XjJ^O$zdoz4{*-gAMk0hcuqLXfFD#t~YizGYx#(B)PH z?Vq9y13xO(5to7yM%gJ+IFvKF)73)e4kyH{d&uQ$1aphL)Z}7^k}wN#cUbk zdp@AX@yCeEVhP=xymNepN)5_s{Pby<67(G(waKNtnC%)bBC?o6c+y2uIb`W`xiQAe z?#+ZyGC%famp>DU{a>1a>hzi;vRF`4_lJ>a7 zspS|TSW`6Bout&*QyF|g+v*z*kR`3Cj^1AC!?y~w~`Y+x@ju$LOx%M9%02KEXAyWhZG zX<)B1uvZ({YYgnQ2KF-s_Ok}|Is^MT1N(UcdwmL{I=u*NIiIFyurkO`AYC+b6a?u+ zLW%VvAt0ga0?i#;Pi)OVK%&8Hmym74U_X$ZLYONW3^xk7SVN5aveU@D@cMKkQao7V zXFE|6M#$u6wXXZJGiNS9ec~pn&jul5>!2IcP#Sk@5^}dX6)2cHQ#zxgYBITws`q8f zW#q=)R$W;1rUiDpygwt9()ex4mHFB11;(9`K@@Rli%@FA zr$^v@7E$%6ut6xkp#h~3(rFXQaQI54`P>3r!w?f_5{h$pd4wY%Hu`-j)$yIx!Yn@h z{w%)lP8ZC41B%lI=8XpS3kLR!2KFWc`y~T=vw{7xf&Gesy~V(O)xh3rV83Qyziwb} zGqB$=%XzAFh3N@U$c-?gQ}_`tHsGCRaLi;V}q`X$`Wn`H9(7yh4ryxhsIA%9zQZZ zqEz~v-tu-b-t`c$kw4gKQ*PI-74g7)y>IyE4qEdLQdEC zR$RI_(=(ZyJ;!8(ahR)euE#%WP$&gD7EaQ~ZUl5K73yZ8aH#W?BppNOm1#XEVd`>` z?cF968=bUGQtM!xuHu+a13_DE#uEmGVr4bvNQrZ&n6KkEqi4NMC~a2t3DU%8;$VJ2 zD2?hcC7W~5%Gr##R%LWQ)L0|*LUp~stz^7iJCw<58*Cz zct`f?6Y3Kkbk-0CXOJA;FVrz+g@;NNq``6=N6{U0%Nfi@B42h1iCb<)caO7&51`Jp zJ$#>m{gHwFv4Q=Gf&Hn0{h5LNxq-dkz&>DLA2hJPFt869*oO`5BL?IqU^>{X;r%;H z)e2_1DRmjF(jk?1w?LJjo@O{7Io~e%lBTFl$e{_rtxXB+6f&ziFk=YVxVtg(X9$H- zvq8wZ!CXzl^P%o+7jmy!-Z3cR=@7D@>TVTsa!@Boy+}GYtwNrTON&dT^3r@h(otP* z#gF=aYD}U8M6VoLh0GpT5bGFkq{;Sbq{lc+o`E@v9TGoGryPXdQ=%GoqD3ed!SIsC zrxCLIt3m;(M_e+4A3Wu3xr8Z~VSNtMFqzh}G@LoX7;veKq`L?>Ttw)%Qe}(>REarP z_!4pWZy5Q*$nC}b)#)m7hN8iIlTZqEv;?g=lbu642laG|P#EI|t3usDO_>o&W&Fmz zkSUk3>)a!h(!hCG@-5jVUmM+Z9NAsMXLn+J4pQw%Clj4U32B19H*!*JtZxwrc|8nH-?WIo9W8L7_7@jJ8&l|*>DE7_ZI zF%H+dL7~D3wkT0PoH>RN>^yQ}q8Z0_p&m(AbvNcfys-G`dFqrP!hFFA%cMR;i2KGGz`@Vtwz`%ZJU_Uaj zODT*3{UER)&~B=)W>UusG#N6M1I`Cd(%XZ;Q=d1uRjNTuX_t^ggE?^plet11 zU}tIp88#+C2n!-KJDfQ_pN|sdx`q53+E|*&j*OKGS?1#-4-QI&lGZ3>=%5fvn81QE z^NN{l9n=kySTe9QD5D5(67sjsZK2vij{YwlqC1LtgwvK1s-;=T_`y+xwp|F8p8hm*1A9{T3#BRy>}x3}N>Ia59Cbt}YC4z5=_w@A__poM zctXSyEAW9Lju;fmVbH#*8!(R;M-cji;<>_WO~Ok@?=ieNBox`;!qrxJ0R01YLieO#Qav%VXSv? 
zzJyHRp&j1Mr=kdMe(jr0T|kyAmqv2xl@hR7s6ze=YA$&R^K+Yo%4LOyoI+Fr4XMD@ zcdZ@}YMm80=ajCbVIx|Ex+oU5kXgm|85Am|)W?U>JYJr|@W4w;^u!)6Mg^%~?#Ah+Q6T+G^WxKJ;^PV3eHXOWNkuz4en$K zyFf5ouNuaxWZlG5mXPs~FcR%THcpmTIcjIv%o)N~AxphQ> z#IRGy;$aModmyLLk}}`$J|V*gJzmB#X=h)@@LL`}7$u&jc)L&t>aps0XY%g;;o_P|URV_M`o}@UUGdY}$bRSZ6a|6T0mX3ZB~j{S4;;&Osq@S;a3k=i#tTWD|6b$0LGER<0lu9YP6@U)n!;lA|%tQQbmu z51sCtHk>({M~W*fi68d03*|p_Z5~?$V9?45QeR1e% z?A8IHYEV;%!U|$PuuWJ@R4yTbm+Nr-16qU%Bd*2|CNW&y!1fr}jRtm; zf!%CidkyRn2KGn;dz67;*NtU-bxR6cy$VaffT`Z}i+sr5EkYg)Oj|lvtl)|Svr)*5 zL1eHFmP26P)+A&}jf-KC>scoTJ_JI}4ANY#&T$l%gEoPna!A}MWYeHzt7S~QAlh?V zenHc#+k{N3rf(dx`SVYyX3ub5gW71DkbiXos-_r|IGF>ea3+r!5<+_0_CyM4vyi`a zu$;JusSn9}-0N)OIM#qf)`EpYLP@HZj5D$0SpNkp ze=b)Z&g?%&e;#QGXY&}A_y$tKLIVpJZD6gq590YWQc9cJfbm3ml>U1Nf#uvHtM}iz zO(=??L);CVU5mbAMktjUupD1l$YPE8zS0H1EKqeer8IO(lxEB~YZuCDXl+eeDqW8@ zq1=Xt7tiLAzwZ_$Iu>dbig4)D@|@3uMbr2#+J(X#DlcQn$_%=Sw%&DBqfoFzvzBJE zi;=(CD->@X+l^P|i_!Q4DZOqJ03TBy5=wmdE=e*5F2QAf>cM|tYJ%{AD_rt`^d;{D zHVajP8tw7(+^)bSZM(OiY6x#}9+{39ieZv7+61Cis3t-maBl)`d*J$juGXz6O&hFH z1KVd{V+OY0z{U;ifPoz}utNrR*uZWxu-gsnh=Cn7uww>x+`uLb>@fy5X<#P|Y|6my zFtC#bcFMpWYhZU8*y9ZB@dow;1N$ihd!m6oDTUEWkpI+5jGwPy-YLh~oiJ={7ILHV z{(;HEV^fEZAM+C2BIHUHnghs)cl$+YA=xu7TtG&#n=_|z=@Rm(#wQPCec@gxdbcMG zHD#BOZ7mYFI}AxS3b|O#EjPIEk8Tojvf9l9m;`iDJx=3w3mIAmxlvIVrehe%dYgnC z9`4VT<1EHDA)~9tJAkbNXERgfyawW$gghVagd^tE8Fl4GA?t_6A{OKuJ0N6#9r_*s z9_`nG-{RySwtb^eDB>EpK-f2zf)PIyudAcwMxlJD4n2@RkFPtlorb1CC?y(QK2XT& zLi|>tsHnm^P{?5!aJP zx7FSRvj)w`mueMCQ)ou1a3(Y5SLyjLPq_+R;aWkwLnvE1EK2QULOtIr6f`v*H3V4y z>2_qGZQ3f7JzcwFk)>CHW`vTdhL(3A|D%MV35`Ky@%n_4sy@4?NxQ#fm1d#z2ARXs z!Z^paM<~VWW*#V&u^a^@EX4V_*dX zTQIPqft3tw(ZHT!U}XbaGO&t)RSoQ{ft@q3^9FXo!0tA%ryAJP4D9I!_R|LTj1+*4bj-+`!AMp4VC>qYcalltLaz6632}_8^$59D-|Zk01FP6&gZmU7 z&7H@h9bZMGkbhP69pu_3ytUU<Oq3A@CdIY|bP9ck{ z`BoaI55n7%;q@I7fjYKfn~?L>0SkqGULWd^VaF^!6Q-I%YTpi_Waz##%kG)L(mSqE zcXR&d$P(^tcL`-jw*p&smm7#PLjYgC=4nkr`BJNb`{E_%O+qnC6qVya=B{6hup#&8irXJm%SUp0y3><(N 
zP<-2i=&8&;m+$NA*(#J&wIp2ZUrPmPKrVgqj&`9Ot2`Wp)FAGMEpRyVO2NwrWjZhh z;sx+9k@U|fnZ}QYgfd<)8OQVom($#g-TL6Pb0)V@DEom&Aw1p#bIY_)lC~nxG_ZRN z>|O(VmVw=8V9z$N=NQ;?4eWUa_Iv|-fq}iyz+PlvFE+537}!e<>}3Y_aszvXf!%Ll zuQae%8Q7~0>@^1VS_AtT1N&J6d!2#(oPqtkfxSM3aj$PzWLX{W0jb7?7oBu+_Bu*EJwy9>J>7zzThG3fQOk1 zHt*1dIc)as-{>cp;H zuC!e!5#fyxxyqbC@>@x_g~9?ZR}QU$pF{b@#bBbFghHYU_fTm%&cACG%8HuiLs-Lu zwL_6{8W75ix;|RAH#0Slk;2kk6z=59b-LZgRU03|nz>9OL`RB2p{$$(yp2|FCc?JlW!V?G8hRjm}w%r z)pviFP%Oh2!gBVd-1#cZN<7wS7D{Q{TiE6ii;n&6EkbEkHa}E?k9Rx&awe})D7LD^ zU63%`M&XTbTXKo;QKOrMQXD@|)62>FjY4UTtBlu^bPHuV@FO=U^-{&Y5~jdqWq7HJm>Ot30d48=p1Bb(eH6rP|@s|&h+V3SaSf=z)d z;-XrNk6bSlqf7HY6eaan4=*g_kxk|;JRj5~lq#){lSbH<*WcM96fRYf zC_=KB+q;PO^$4X*hxt*;JyYSQkkgE>=BpSG%AK0B!$oYh80}@^c2G+AEuBJXR67Am zD0uAr8*MQ~^Mh4lj-t!IUJV6O4|Opidv{nUvZ`cjD|7KDU$yfKn7`g96laYa0zcpLD(r& z9J-b8$~PUtxFvG_`NI7|-JDdy7}y65>@N)LLk9L?1N(@9ebm7I(!f4uU>`TIzcR4DHn2|^*e4C_QwH{F z1N)4Deb&JK#=!p8z&>YSe`jESpTanuLH-MT38C>K(=Fsd9bF#gs!bFnR>M%P8iY)# zW`ji0+)aa!GqvEhEOOa3k(cbLD($x1Ic~6OjEVQ`hvza}B0>l1Fz z>jD`Rbqo1j`{(vz>6}mPKZWJJd2WEf{%I8ofVThk(k!;yDAR3n8n{g;3aZg=N9-28 zW<3bfMgxNa5b7T_|tM zDsU%L@D199vbU@}95>M=ltWeH(E~=2DT+f3(BpiWUZGg38jnbhAwLeQv^WZhMp>Og zNmcV7k)1#iNJzZt70PX@qC2o+3#kJad~yXp(k7uOYw}-I5mr|q1enbHR(7C_D?1!P zmJepr=1Y-jX%I?#{DE9k_>5+u$g9mcl7&&egE4o$Z;Md$)#k7)%Fy7rq)(^=bes_i zk)z}@7&&sFY3&s1g}66hwN%p4a6gmvASlvS>hlKn1q1t{fqluq{=vZh(ZK%6!2a35 zzHDIsVqpJjVE<-d|88JkF|hwIu&)}}*9`3I2KEgD`=)_?%fP;EVE<`g-!ZW78rb&? 
z?E41x0|Wb^f&Iw9E~PLo7bgGda^WM{b6%jDge<7~`3M3%ZswXo(Pse+G%bDJjSbKZi*(7e=D4>J)Nopva#_EbM#* zK46Ic4+wczoslE?qT65?yXlVdki(;;a=8?}#ugz*s~el39GlN_ZxJlsox_A4&n2N) zHw&3P=mRc%$vTsTvPL1(Yq;nLroMGDqD{#9!8{~)x}YNI5=uePtyl+za9yNLB6p-) zC=_b)j$Ckc@7tQWtu&8i$kDi=MJOezkdBUy9XgD&2=DaYt4$~_+J>W}hmTE-9~(V3 zHtvhNgp#9eIEsz5vKJ7m#YAOD$!`z}k}AogS!~wi?`;)Ilj>INv4_2)yqf0MY7xp+ zXjQg2pRH7MsFM*2m`>&3R(wpnK9a|w2%~6ar%>SZbAzU4>q(9kT7}{lH=sP5&tTdp z`eyAyK~&R*$T?Ijrjv@g4^@{?DC1X$-F3?(uuG}NvO9z_s^%3{kOVf1sUdDW(IOPs z_$84r<2OSl(`1B_8^0tL#&gFozgQz`WUo+?)#e?gTgC%{FyO%IM&t#8j2STfLgCf{ zLxSwY1#C$j?G8*CY!FI(_*njE85U%OVjmg<51z?CON(6fK}O;y%Lvs#Elo1rW4DVx z;`FO!W~t7=RvFl32DaM3))?632Da9~)*0A(1G~b&t~9W#4D4zHyT-uk4eVM2yUxI_ zH?R!`w$Z>E46M<>nhdPjz*-Ef)xg>etlhvm46M_@x>6W8U-D56=Iu+PqA@8;$K zUNLnG`BAmzQJeuu8t(72z~7aK44NY1_A!e{w`FLMQIu{W$EJuTu%^=I-TJBZ9w7^B zpe%YEh~f=8s10ruGPRySvz*F_BNQagHA&t|*=-l{e$Zt8eWE3+KGs7y2-?AyPcdtF zl2KJ|v*q8p2j?p;YiZ(0vLUz2Kt?Do$u7p)BOK_3J%(l&_M@uk6^c*LTX>|WNzKjm z3nePqG9_b_FcBOa6pEP!R@pnAa2ytI2AVrT+9i~}ptl1lX8v?9_z|HnYBG6TTOx?5 zpnshap`=MUHv5P`Z;mj>$GRZ1B3=VMGdF(c@9s}z&us#FpH?RQ% z8#J&X1KVU^n+fejnjh=FZ2up10)n}KaNupI`r)4+Ba*lq*cV_-KL*i8m@vw`h3 zutyl!BMt0P2KHzJyCsD&(;x#Z44gSZdvKdfTY*n|A z!DD5PjK|s-Et}OSNr1xg`;ktqj>zm9&ZzhNO*L9b{6qhE)>(K7NsS4GGP~Vw|!%mP;7$9jS6}E zWH^GZS13hsEBr3%(N0ODWv;#BP^DKWSSyhEO{3X^JwjOviatEhX-z0^{k)XE@Ocpi z#_BG==Ze>kEkaRDywtfY;?hy@uSY18@iTmWELd!58g8iWRnaJv((o;#b-iST7NM-h zl_SKkgtKsb({2z7t)3llG#?6$zk{P|{;=SXx4uKH`){ zp~Q#wmS){vi0soAq1=b|q6A65y3b{&Pyqya#C8Bo5A|tKzAu}E>OiAA*vJxN)o3W) zEYu32J($PG*N+6INvIv-+u*JGd3U!43`J=pHELk{3~bE6_8Zu^fgLcgg9diUzz!SO zZ3cF`fgLfhqXu@&z>XW(gn>QAz$Ojsgn>;N*c}FT(!fp`*kcXsP6K9o?u`< zWnfPCKKP=P`D|TZt zw&&KyZ4qjZ6<-wecWJzk#(cL>v1oGFvVNKH4jf|dh`P?W&~W76Y!+&t6`28hy5T4p zI^9n4wg~moimgf>5J)>WC{$U&pG-6I3Cue)or@-XK&Z)r$z4W@c1I)0Z9@H4ADwQz z>{YSX7Y}le4?1G+ZxQOhmD?Tb7k32Hpinig-0sBD8=Hh0b44bAe~XvY7}lUrm9ALt zV@Jv%FS$*qVZ#`a%j@)X2fh#vt#ifKMJmv*k_m$`YTYmxV_q>v&{gcK7Jc-sLKPg# z9SNdTG7jvc(<+rse6H)0aYuT9kTtMr1Di3hSp&-%*l7csGq8CB%Ny8T26o243I?`d 
zU_}Ef8Q7wMJ;lJv2DW5i6$7gp*jWQRXJF?I?1F*aZD3C|u%{W=(+%vW4eS{yjQmdi z3nS)A<|M3+&boC988M8Qs|!f>L(=;Z=GXWUbAymAgExm01~75P+0YIlZw66e0Sqjb zNyNsIj2iS~M8LT##JHFtcnTg0myEeUTbq!DgVcKJ16(-l)F;*fy5dM`FJL`JyqIjG zkgYWr>DcIrQLh;{3Ryeg7=rj`4G7s=HD*{oHp*G31&FcV7}xvnWP0`ANqM$rYhNmq zA)yec;Y-tC60Dk=*7L2qg~FkpH)xSdN{O#PX7RfiO_mDHJ&6lVdr=9B^=i~wt+pzz@BSh&oi*+8`ujB?1cvQA_IG|fxX1QUTR=3Gq9H%*eeX|egk`@ zfxXJWUTt8nF|gMf*v}Z)&l=e44D9C&?B@;a^(lw}po!I#vwfXH-d*-Qx6<0T=msGtYv_#P=ITOXWMOu2 zDRaOdwt4fOT_byTNB(x3kkJzox`eEuID@=Z$n&u}3`d0Qp+V>na(|G2ByiB$9YCr^ zq!YIYMIl&>S^`XpVpZb^?OxQFE}@hJ8o48M`K-?CE{Svu9|Je3iXI5mciX6)iu_yiF*T`oZGK z%c#ePgaWGLfdy6W9k@lP!BpI8D^BiNje31T(i=dYHm+|puwO8+Uo@~c z8Q3ow*qaUPmksP!4D2li_NxZ=Rs;Jr1N(IYdz*p%hJpR2fxX?pe#^k#VPL;)V83Hv z?=-OAHL!OX*zXzG?;F@37}&cF><JqGsP6vnY8`OlOzY=e|ohUP_>jA@GSSZN7I z>m*#W5h25x`e6*IsaVs137Y5}YB%cLBIIUMaIDRiuM9&qNcIB`%4t1r(~HejS@+4 zx>Dr84G1Mpt#D19ZlNmOBJTkV2<6bEL=L4cdt*C?gwkm~|1h?QWR*c9;)Fg&YkYv) zc)krH#Ogrx35D5w{jkEHoj)iPaMMm5$>Urn)Trw*2842Nz7x`bv6C7G8eVe`3blb* zX)@`T&+ic`37u1|=~AY?z^C;rLbYMC2p!)7dnp*FUZFk-BZ`pc1pgNkqk#-3?>RC* zJyY}K<2YX=m?D0p8-%JyN0>FV6Wsl|kVAq&YP&f}0R*7HU3?Al0_lhvkzeH*FE>L{oE2R11qPZ@+3F{A271@4}dIf>)&T!e`H{P zY+!$4V1H_0e`a8RZeZ^>un!p62Mz2m4D3S&_F)72h=G0F!2Zg>{@TDkVPKy$uumD- zrw!~g2KHG4`x^uMTLb%?f&HC<{e22!{Es|j^7>>I$L^JDb;Y2Ny-ekoo_OjLa-FH9 zgVEz4pkK&|rudw|X<+e@*gnVxHm&>E%mZ>iB)a!;pp$M4$brMG{(TSu{W6j%ylA)2!8Xc7&=FvBh7OJ+Ik!|qR)d}`OI~Ob%5QMPF(b`Hy^s{tCKTnk7-N#; z+(C;_sx?M1F+Rn!k?%O_+tnl#Z;cU59J=$w;jt6Gs6!~_`nI}p-tif(#dVC)EfjeT z0xQ7;Tb7;Gix$D7^k=9) zh@!NW`MiOB!N9&~U|%w@e=x9rG_Zd%uzxnNFB{mu7}&oW*uNRrzZ=+B4D3G)?5hU$ zH3R#)fqlckzG-0JGO%wO*nb+>cMR;i2KGGz`@Vtwz`%ZJU_UajODT+NxX6F1NBqo` zlXa}UbOlgV_(4Iue^#527h`Eaybjq6F1wEkrd`OEK^k(!;?jk}SvVOOky90S3t2Qs z9CF=ri&WK|qQQ>FN<^Nui2SMtCs*ARa0*Pw3_@Gc8)Lcaa^Bm4esy$|w8Ut%F9chd2eH?EJ zWmH(iQif=aDuc;wLb(aw0YuKAQ)%&W8KDSiEFHspY=dwf%TvqwyI~hqb5f*&W}3f% z>0#W?9_bTGmRht4Uj#o03sbPv)~~(vkD;{1k8xEnqYXZSZg<@!ls+{Np{``tHVK6> zv?R|RYS9u}j*UW@482~^kTOohb2t*f$)Y9hngCA?UNRGhX7WLO_X>qoNBn_sf-681 
zWKBZ(4Nbse6t}?)o3LFd#u~NcIu-xbctmk>l88K8pBv3e}CyzC=ea zhT)!HIU<)WH-2>nw#vXRGqBYLw#L9NH?Xw^w$8xT8`u>FcBO$`Wnfnu*fj=LZ(!FN z*mVYWy@72ou#E=RU|@{~)?{GK2G(L=tp?U+VC@FhVPKsG)|J9IE+hZN0*^#bU9jCE zWWzAtEa&b*cA3gen~)y^r3Ix@lvSRTAf+uS0QCS{E{2<7&`X%UK5x)ATe=JX5YOZ!gmL?Mm`$9YCz zp)kixq0`sG!dmVv4Rwz%%SPWMl)rk}<_ybZ4*9F}HNpYjqgbSY`CoSjkEiozoO`y& z2<24Gwcg7e@35*lN-FLZ%B|YVm zdi#ZP9^OA{_ukCOe69kYDcT$fHMa?cKYoAzg6`9>ADe}GKpo>KgBP8{uhTBn2Wr!p zGE}=8aGOv!#8u-=M#j|j^oVw$nh3YgW93TX#7m~vWKf>=*SZa?$H00Gtk1yu4Q#-` z1`TY;z&07!W&_(|V8aGBVqjYh>;?ndW?=6d`NCSJ6fj!#5Zb@NOo8&)Ln@r{V5vfOA%{U+a^In0p$gFey!{N={?W-PehIfs67*^FHfmt| z3~bE6_8Zu^fgLcgg9diUzz!SOZ3cF`fgLfhqXu@&z>XW(gn>QAz$Ojsgn>;N*c}FT z(!fp`*kcXsP6K9o?u`itx=qNx4|Pvs<^)2$N66m~C55%SAA23RODF|;FwaVFRl92#70ouGq&!qB zaUaLzcl$THK`2R@Z?Mv{LLQ}eCn#v@PONkbac%}y3JQiz+#?johw2opgM&|2tT`Wi zw@_>~KXS#V@-)+0owivh+nR1rd*hwRCaGj_P>9?9)b}B!-t5B>?S=H=K!ZZ9pn=+& zCqT{CBvuX!m4&Hbp;t>-s6tCPR;W%Z<57{A&Ec{fd`E2k84Wztliwj!G4a|TuEbInH~JVB>Zlc7L(g_RfdhA=0pWm9f0?!`k%R61d}_7W zwkK>CDC3>zM}(SAoyk;l(?Z3>;GIG>sC$*CZdktQua1BFgqm`N_PFJ;-sWx>YEO;$ zrmBxav3`46jT+q~RIjEaaa&);Z}KJX>RVHv=Zu{~HN3+6L^G+{w?2W@try-U)YPVK z!O)v|?9Qz|d>9SDu8-#!#qhBRYV&w36$!VMK1~&`4l^2#)k0$ho^b3mX{%7%>##T~ zE>|OM)FIUSI{k_f9p)Yu=P_E<^sFYK5m0v|9EIbjMxTtL^jIWoVABRRV_>remNT%^ z1~zA4^9GhTu)7TGjDZylY{9^a239h#MFV?^ft3wx$-pWGRyDA*26oQC&KuYT1H0S6 zo@!uEGq9%{*iReSGg25GQSzTUqG961EndgDjIPY@F{@<`R0Q@QiWJp?!b+aqLP zjX(RH$Y>L>TPf0h^>~psAxj6jP9BoPr;G@>JLuzw-p;H+$od*_U~*B=5aj=$Ay9Cx zoSV@_fc-*A2zqEfkL7L`GGk~a{raP5@>Z{CuqM+H`YBP^BAh>l{8ofMxFvh!yEX{r zC2mAX=MXbOp$T0+U-GF>{yAu<_u42#!E3{=!8}tQQ+tJ(5^_;|GdhG)6?z{#*I)rp z^0=`!p@@Zk%@+^_!Ct%A3_Q}8l(x_t`2`G%CbJ7s#J@o(ccJ|_j&RX`^ai2u#RXL! 
zB0^s36iQ%lMoZa6ZiSCUisk8OteX+aV|aDHEb(9!M})c4F>dzz;!!@sh^LkYdoLlD zO!lMU*di2L&C!ZhIcjx?-p8l9?;!amp+tvaNV;YWUJ)zM1fve2sE5w=#D|xbs`?rG zgmNFOcak8SYt3Hi7wUq*RcShRxOEhEp@InU5>FUq(4mLfH@!lA5dt_M$yF}kMQ5u% z#SGuUPYY0mXyA$c66M!>TXqqJX_rClVgq}LfxXngUS?n~H?UV2*!>3fN&|b9fxX(mUSnXdHL#yCu%9)s*BRK) z8Q9Mo*y~dmjS=}T7$a<2$1S&<0%;bqVK8&PC{ixXLUxSHkW@oWyOV<5A!N%~R;xT! zQD>UyR<;XyG{hN8B+#{jl+j@!zXqQpQ4ZmP(!78(3^P8$>O^(Bke9=VyoO#^-7^cL zW_5a}klBNJT*MS0{48wG>$Z!h+;$6DKWKeVaRQDmtcpuPp=oFk%0f`T$oVLGZP+ao zh{WAk$}pc9Lo5>O0CcRK5lTqVz|3MrJaS)%Vz3!leCcSS5rxv=+>v1T`-B1%lwU;R zjv<5z!9IM^&bwVgp$a-p>9CS9kaP%TEPT9(WD>Joks<0A3S3a2fuO*>Ud}}3z}p{6 zVcfy~-6l(Zyg~|`1Zh0WpFrX`uvaLcLBUqGW{#Aq`6XC?%r5xd=J*XZ3S~9~Y%AHw zAoU34HVk)w&mqeR=RU%4mx^~sFVrZM4 zLx)vL2svYk5gr!V^$wx5#{_QQcDik9x>|*z9}K_S*nK9eD;ArCN+1ZRD}@kJ&~QYg zEAms++!0=n$!r#Cgdm>S0LNzsu)&Q&%@DsYzCP;n^ZV2fp&`}j1#VZ*W=3Z|qBL!( z-e_RIU|_#!U~e+8Uox;a8`v)!*smDaTMX=14eYH3_Gjw5V1N#jF`%MFTyMg_d zfxW}Pe%ru)$H3lcV83f%?=rC8GqB$`us<-ccN^Fr8rXXb?7bfy=V;+c;9MM>uQ#gY{z6_(*H0}O!)g>TO9EODKx?;6cIC*@AStAsXW+58~v8qm2 z%hQZk(6?$4vU3>aRg1+7XOR`*O=*{quY=;nF~~T(0iiZbhs{K@ORYjSkA<&{sT-dJ zPZifCWcauOynDWwo#vLL9L$rXU&#MKF=KINOu`|sJC2EJqhBZ#D--VKxlSZ(6-r8I zQsR&YZ}})Q!SY?eXx+bTr%-TI@lQ>T9y>8{d~&MhNGghv-a5tOhIPn6S<()k${}bQ z7Am$1WlCFY4eIqsMS zwqi3ogwn3oYl?(p*~O(QPp5GeZ3}d)xZeLma&{2(>`?)R;?-zC(jh6@-G) znG2CUZWii;@NzWsE`yxH+91>m@lEG^B%ITe51=4zoZe?(e`H{PY+!$4V1H_0e`a8R zZeZ^>un!p62Mz2m4D3S&_F)72h=G07!2Z&}K4xGaH?Y4lu)j92PZ-!I4eV0}_GttA zjDdaD!2ZU-{?@=gXJCJ4V1J*&=thzMbd-jKg43t-*k_Hx+9YJdL=ldPf~-@xI)&UA zG$juN;CNay`asbuYZ5B_R z&m^)dHVc_t=hf1;jb*1{=CGhW66{Sv_Sd*hntFd}jx;pfm@+$MIRaQ2f<%_tmqMpPqc!B~%4EOqs$E z#+|15z3aT$o$@-BD`4@OIo748io2IF2Qtu zWV{-LS|gYXc%IQMgF@Az<3pE!76l4u>9=hg*?#km8-2eG3N?yNn(*e?`~jh&(Yeps zTDgJ=b6e5J*d|mzI@w!G$*ed`#$nOOd7Pf=7a~zv>=3Fa31v*+A+gJRVt)MjeGJpmR;AtZx21sc`|QJglcpEs~C7}ysL z>`Mmr4+i#+2KG+|_Rj|PWdr*c1N&D4`!@socLV#1f&GVpebvCeW?)}8ux}XHHx2At z2KH?O`%eS=j)8sGz`kc--#4%y7}yUD>_-N6DTPs^kpEPpB-j(GC9J^xF{$n=M7lcL#;c8e~72y`Gd#%6^xSheJD^ 
zAv%}~tD;EKA!O+wijG9XlTN#K3i&%U$Pt9peWxHhgxp?R1v1%l=critwoO9T*TKde z@N=t(mAb)3mrw#i3zrb)bw=|vBA-!@Y!nKF`XoL*pF@~dp-_Y#u9Oy$yW&UXD8#tC>=MeAzTru16~-Xkc$s;^An~3P#Y`>C$$WVZ8Z&B5i%{G`Q%i-J zY%xp|HVWlV+2|yy)S&7Hq4eqDM{d7P-%bjkw)0fB5HH`ya>AOTXl;xS!BHHwr_;IH zT|&Xs7fh7y-#QjehIR<$R5>I`GUjb;r%-5>$v{xS0oZwrJD12qY`HA;?|G-+edCoZ}PP8CkvD&eR#$ zDg(RBz*ZaB8UwrBz}6brIs;p8U{@H}l?HZ|fn9B2*BDs6fn950*BRLL2DZV#HX2xi zfi)UflYuoGSc`$R8d#fwwHsK6fpr>KR|=yCMgG&_ap2NJRm?&|52%J560)R53s2>E zFkTtQy&^-SW?I5)0J3T<6WFFhbw;z0V^s~E%DJLQW3~%9SI5}3Yb01L*8aKj>s`+G zDK2KlDlkp+?-lZMcn@SioXO)T-BYdt~%?MdsqfAJYs36BMyMTF(f}6qe zdVnk+^Z@pXj#}6z{GD8_@6X=!%|6@$7fDMD;$gVY$DQps#w=1lUqMw*x*q3W=B0~wCW8z5yt z!DDUnj+Ex$r-ju^Jwjy`Ok@pv_~XdI1De!hi2E02(_YJy(Wli#?21z8fI`q+BNJpupR^JHLyMd>o>3g0~<83Ap_fF zV4DqWi-8Rr*oc8`HLx2DY@2~?H?SQBw$s3N8Q5+E+mph$jRn7@rck6?$6{XzClq)E*DGWsyAe^w0qtV1khg5IJAG7yuhcGNw;B@{Zwij`4k7D>Q8)!U zDo1npoEwCESo5+<;IjQ^vU6CTj7htlBRhBbTeb_?wB{4Vx9A4t?w*@Rc5TBw)O905 zKCTgJ`=&k7Q+5cMyhd%uH`FAj^HJM(dtJFhC{F^HI3jDr)-8zvq*Gggi z2i9IKtAJmHzh^<{pBG<|~`N3Ph_VJ_B6Sq9c;TM7EPd`VnsO^Oq9JAhMg5rIv zH@iExxWDgl_iU6N=^hvx=;8d|5={F!OgLgJFpRbT)k3!1Gq8Qfz-WUE%D_WVP0(c@ zn7G$f{w(+R02;K@{e1bB!Nl)F`2F6IPv0~0>95|hDe?Oo(U>~CrtgeZGF&%Yr}yi> zb#e#lzEYl!F5%DpSJu0K`G3CX%NpYUuL9-E2IK$3o2^rN!FLcDM9;IMoEwXO(OLW! 
zaF`QdjRVc77T|&C4*>o2FZ1V3!-%TC_ZU?>Ylp5A3q2&Dy>z4D3n+BTJ?4 zC9|cmYYdExn5Mhdz{sj;y6aQe>Q(+rHsFm<9U4EjdeznFkveIRHT?hNxF=WzfCQsc zt5;nIg3Iv#^>X0wk)xTZJ10PNHSVFntr5%tY+G8r>Uzfz^>T&8yD+U@wE-yWx>EGa zFgSOsSJeZ)40o@UQGdK5E99s46(R?NG!(xE(Cp_FPrkz1$YidzZ~_1*39~jlq?kv2 zPFb-=GK#Np7i+v0KplW}qIQ7yUKUk}={8Ml^sujz!)cr|gagn}!`0{%trGbk)JI?( zRC$$P`^T`y7$3FTRl82$x47ly?8nuvnd|xZCHSHRS0lO9HxgOZ3@fd1uYI-Dm=QK< zjkXl=BpiFh34eCe8dvLDaeKDqb8IEK+}DY847T#}sFi-o!e^0sLiXnxaj8CR;^nBU z5uZ97sL8u2APsm&Of=#kTIejKHSitRx+kv_%y7AFKUc512AxaV%f;{D&1SE2R9BJ; zlWA@qj5kx7Aorp{OtKKAi7XWFGDnp1)IcV@Bm7}-8zX_EDUAW257-@pb^7?-#5UX2TfFuFh33-ed*>uI-p_t<9JWjNboC*Z-Q~E7GYvfEEPk3{eXqr<@^!DppZ{6w`!V@nXnlW( zVaYw{kb7}G3)g+Po{j4{xSosaM=0m@g6Q`bMgIc&$Jur*!$#K-f8-3l|j1AZD z<40c{UHSj}(N(LfD;{0d$xyE&k492n@IK}s9lg-x)=4c!qnXBN_0kyaRvM$-Nn>jb zj6>%%-FgG#s4-1E46M<> z_>I%gZ8or$6!uBEo}XNr<&qo7U6ILzgIgJ4cmI!vQSSeVF_8bi?u|oYo{$m^J6xep z4`xYnT;PhL<2zWU=H213UHAVD2HpKPw&c9gTX0v9Hr);kM%ewY#`<1yg&WGc0**%4 ztXaEu{nyrgd+m4D*RA4ENCf*6;y3btv-^!)?=kYa|BW2(eL!n=?+f3? z^?U65^0~DbxigJ_mp^V+!<^V+!<^V+!<^V+!<^V+!<^V+!<^V+!< z^V+!<^A@`nYs0&xy@7TE>oBlR1M5m*%tPT1cIT9d$?+qHj~yJJ%#2Qs9Xfn+oGOuz zhkNeSB!6Q(fQ;A%@0IZYuQq)NooA7DNqP4hKpMfZeObL~0x<78B*jvNOHty&4D<4n z#%BqqDq`o2>0D-}w1C6%upK7yR7#oc8Tc3rdo!;9@zVWl3;k@g&AG?^p^19vXY!TW z;b+E<_#t3y$gNRBw8XW7W@Rb!kk5Cq$@}?ON)WpsVIJvOQ;6DQ&w3-=MgnOk`WiTy z#AAo~5Rhc+B3sHH3byp2ny1e9wP+V>ul2GikC=T56G`7w3vMH5p zbL?ePCfniErA_tfRqN^M;f^}Yoj4zwgv)$rf+-)GUzg(AfvMkPM)(fs|J^@G9J4X=1M4-gJ_GAFumJ-bG_WB9+hkyy z4Qz{n4W}@E=L|64DK5ZvwXg#`;NoLfBW$ezf?NCse06?<>x3_iw2Ne*l9M_tPJ|ja z2w%8^YAYEp+<>^dhihVf*I_d?!Zl$#&M)~|L)7hh;cJs4lR=|vg)iO#_KSmL8-y>; z=O;p99G<Tj zlmDl^;=eQ?{P!n6|Nr&?q?w=pzy0&0OBDa*RZvSjc;D;4{@vsA*S_^->txH|J3g}w z*lTe45x>Z-_?gnR$LbwKDCGc=kNM+tNwz2-S79WIr^@_kEl{hls>#)>mVVQq0!`!YuE8< zA`iH~~rZX21(P9w!HhtZ43zWnLayFLAX>+hBNpDkJS zi4T9`bz9r6eZ{NM|HI#X+kRkcQW*Q(dkbE(d(X!A@k;oAtD!%+>HoS;`=X@QjqWxk! 
z9?s3Af6NtLZZm24e0~3i`+QbJXM#U#FRAI?4gxbM%l-WELq6Tf)= ojO&A(%UKW_SOJfHT5w&j27=f{12j;a2qp3QpxZ=U~u0NrY)s{jB1 literal 0 HcmV?d00001 diff --git a/public/data/prelinger_extended-search.json b/public/data/prelinger_extended-search.json index ef73d94..7be90a7 120000 --- a/public/data/prelinger_extended-search.json +++ b/public/data/prelinger_extended-search.json @@ -1 +1 @@ -../../prelinger_extended-search.json \ No newline at end of file +../../data/prelinger_extended-search.json \ No newline at end of file diff --git a/public/js/script.js b/public/js/script.js index ff5ca31..e2b4baa 100644 --- a/public/js/script.js +++ b/public/js/script.js @@ -36,7 +36,7 @@ $(document).ready(function() { $('#filter').keyup(on_filter_change); - $.getJSON('data/prelinger_extended-search.json', function(data) { + $.getJSON('./data/prelinger_extended-search.json', function(data) { extended_json = data; $('#filter').show(); }); diff --git a/routes/index.js b/routes/index.js index d6b1a44..d2fbfab 100644 --- a/routes/index.js +++ b/routes/index.js @@ -1,7 +1,7 @@ var fs = require("fs") -, prelinger = require('../prelinger_extended-search.json'); +, prelinger = require('../data/prelinger_extended-search.json'); -var LIMIT = true; +var LIMIT = false; var count = 50; exports.index = function(req, res) { diff --git a/scripts/createGifFromImageSeaq.py b/scripts/createGifFromImageSeaq.py new file mode 100644 index 0000000..e653b62 --- /dev/null +++ b/scripts/createGifFromImageSeaq.py @@ -0,0 +1,28 @@ +__author__ = 'Robert' +from images2gif import writeGif +from PIL import Image +import os + +file_names = sorted( + (fn for fn in os.listdir('.') if fn.endswith('.png')) + ) +#['animation_a.png', 'animation_b.png', ...] " + +images = [Image.open(fn) for fn in file_names] + +size = (150,150) +for im in images: + im.thumbnail(size, Image.ANTIALIAS) + +print writeGif.__doc__ +# writeGif(filename, images, duration=0.1, loops=0, dither=1) +# Write an animated gif from the specified images. 
+# images should be a list of numpy arrays of PIL images. +# ... +# ... + +filename = "my_gif.GIF" +writeGif(filename, images, duration=0.2) +#54 frames written +# +#Process finished with exit code 0 diff --git a/scripts/images2gif.py b/scripts/images2gif.py new file mode 100644 index 0000000..5f72982 --- /dev/null +++ b/scripts/images2gif.py @@ -0,0 +1,1068 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2012, Almar Klein, Ant1, Marius van Voorden +# +# This code is subject to the (new) BSD license: +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the nor the +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +""" Module images2gif + +Provides functionality for reading and writing animated GIF images. +Use writeGif to write a series of numpy arrays or PIL images as an +animated GIF. Use readGif to read an animated gif as a series of numpy +arrays. + +Note that since July 2004, all patents on the LZW compression patent have +expired. Therefore the GIF format may now be used freely. + +Acknowledgements +---------------- + +Many thanks to Ant1 for: +* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly + improves the results. +* the modifications to save each image with its own palette, or optionally + the global palette (if its the same). + +Many thanks to Marius van Voorden for porting the NeuQuant quantization +algorithm of Anthony Dekker to Python (See the NeuQuant class for its +license). + +Many thanks to Alex Robinson for implementing the concept of subrectangles, +which (depening on image content) can give a very significant reduction in +file size. + +This code is based on gifmaker (in the scripts folder of the source +distribution of PIL) + + +Usefull links +------------- + * http://tronche.com/computer-graphics/gif/ + * http://en.wikipedia.org/wiki/Graphics_Interchange_Format + * http://www.w3.org/Graphics/GIF/spec-gif89a.txt + +""" +# todo: This module should be part of imageio (or at least based on) + +import os, time + +try: + import PIL + from PIL import Image + from PIL.GifImagePlugin import getheader, getdata +except ImportError: + PIL = None + +try: + import numpy as np +except ImportError: + np = None + +def get_cKDTree(): + try: + from scipy.spatial import cKDTree + except ImportError: + cKDTree = None + return cKDTree + + +# getheader gives a 87a header and a color palette (two elements in a list). +# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size". 
+# getdatas()[1:] is the image data itself in chuncks of 256 bytes (well +# technically the first byte says how many bytes follow, after which that +# amount (max 255) follows). + +def checkImages(images): + """ checkImages(images) + Check numpy images and correct intensity range etc. + The same for all movie formats. + """ + # Init results + images2 = [] + + for im in images: + if PIL and isinstance(im, PIL.Image.Image): + # We assume PIL images are allright + images2.append(im) + + elif np and isinstance(im, np.ndarray): + # Check and convert dtype + if im.dtype == np.uint8: + images2.append(im) # Ok + elif im.dtype in [np.float32, np.float64]: + im = im.copy() + im[im<0] = 0 + im[im>1] = 1 + im *= 255 + images2.append( im.astype(np.uint8) ) + else: + im = im.astype(np.uint8) + images2.append(im) + # Check size + if im.ndim == 2: + pass # ok + elif im.ndim == 3: + if im.shape[2] not in [3,4]: + raise ValueError('This array can not represent an image.') + else: + raise ValueError('This array can not represent an image.') + else: + raise ValueError('Invalid image type: ' + str(type(im))) + + # Done + return images2 + + +def intToBin(i): + """ Integer to two bytes """ + # devide in two parts (bytes) + i1 = i % 256 + i2 = int( i/256) + # make string (little endian) + return chr(i1) + chr(i2) + + +class GifWriter: + """ GifWriter() + + Class that contains methods for helping write the animated GIF file. + + """ + + def getheaderAnim(self, im): + """ getheaderAnim(im) + + Get animation header. To replace PILs getheader()[0] + + """ + bb = "GIF89a" + bb += intToBin(im.size[0]) + bb += intToBin(im.size[1]) + bb += "\x87\x00\x00" + return bb + + + def getImageDescriptor(self, im, xy=None): + """ getImageDescriptor(im, xy=None) + + Used for the local color table properties per image. + Otherwise global color table applies to all frames irrespective of + whether additional colors comes in play that require a redefined + palette. 
Still a maximum of 256 color per frame, obviously. + + Written by Ant1 on 2010-08-22 + Modified by Alex Robinson in Janurari 2011 to implement subrectangles. + + """ + + # Defaule use full image and place at upper left + if xy is None: + xy = (0,0) + + # Image separator, + bb = '\x2C' + + # Image position and size + bb += intToBin( xy[0] ) # Left position + bb += intToBin( xy[1] ) # Top position + bb += intToBin( im.size[0] ) # image width + bb += intToBin( im.size[1] ) # image height + + # packed field: local color table flag1, interlace0, sorted table0, + # reserved00, lct size111=7=2^(7+1)=256. + bb += '\x87' + + # LZW minimum size code now comes later, begining of [image data] blocks + return bb + + + def getAppExt(self, loops=float('inf')): + """ getAppExt(loops=float('inf')) + + Application extention. This part specifies the amount of loops. + If loops is 0 or inf, it goes on infinitely. + + """ + + if loops==0 or loops==float('inf'): + loops = 2**16-1 + #bb = "" # application extension should not be used + # (the extension interprets zero loops + # to mean an infinite number of loops) + # Mmm, does not seem to work + if True: + bb = "\x21\xFF\x0B" # application extension + bb += "NETSCAPE2.0" + bb += "\x03\x01" + bb += intToBin(loops) + bb += '\x00' # end + return bb + + + def getGraphicsControlExt(self, duration=0.1, dispose=2): + """ getGraphicsControlExt(duration=0.1, dispose=2) + + Graphics Control Extension. A sort of header at the start of + each image. Specifies duration and transparancy. + + Dispose + ------- + * 0 - No disposal specified. + * 1 - Do not dispose. The graphic is to be left in place. + * 2 - Restore to background color. The area used by the graphic + must be restored to the background color. + * 3 - Restore to previous. The decoder is required to restore the + area overwritten by the graphic with what was there prior to + rendering the graphic. + * 4-7 -To be defined. 
+ + """ + + bb = '\x21\xF9\x04' + bb += chr((dispose & 3) << 2) # low bit 1 == transparency, + # 2nd bit 1 == user input , next 3 bits, the low two of which are used, + # are dispose. + bb += intToBin( int(duration*100) ) # in 100th of seconds + bb += '\x00' # no transparant color + bb += '\x00' # end + return bb + + + def handleSubRectangles(self, images, subRectangles): + """ handleSubRectangles(images) + + Handle the sub-rectangle stuff. If the rectangles are given by the + user, the values are checked. Otherwise the subrectangles are + calculated automatically. + + """ + + if isinstance(subRectangles, (tuple,list)): + # xy given directly + + # Check xy + xy = subRectangles + if xy is None: + xy = (0,0) + if hasattr(xy, '__len__'): + if len(xy) == len(images): + xy = [xxyy for xxyy in xy] + else: + raise ValueError("len(xy) doesn't match amount of images.") + else: + xy = [xy for im in images] + xy[0] = (0,0) + + else: + # Calculate xy using some basic image processing + + # Check Numpy + if np is None: + raise RuntimeError("Need Numpy to use auto-subRectangles.") + + # First make numpy arrays if required + for i in range(len(images)): + im = images[i] + if isinstance(im, Image.Image): + tmp = im.convert() # Make without palette + a = np.asarray(tmp) + if len(a.shape)==0: + raise MemoryError("Too little memory to convert PIL image to array") + images[i] = a + + # Determine the sub rectangles + images, xy = self.getSubRectangles(images) + + # Done + return images, xy + + + def getSubRectangles(self, ims): + """ getSubRectangles(ims) + + Calculate the minimal rectangles that need updating each frame. + Returns a two-element tuple containing the cropped images and a + list of x-y positions. + + Calculating the subrectangles takes extra time, obviously. However, + if the image sizes were reduced, the actual writing of the GIF + goes faster. In some cases applying this method produces a GIF faster. 
+ + """ + + # Check image count + if len(ims) < 2: + return ims, [(0,0) for i in ims] + + # We need numpy + if np is None: + raise RuntimeError("Need Numpy to calculate sub-rectangles. ") + + # Prepare + ims2 = [ims[0]] + xy = [(0,0)] + t0 = time.time() + + # Iterate over images + prev = ims[0] + for im in ims[1:]: + + # Get difference, sum over colors + diff = np.abs(im-prev) + if diff.ndim==3: + diff = diff.sum(2) + # Get begin and end for both dimensions + X = np.argwhere(diff.sum(0)) + Y = np.argwhere(diff.sum(1)) + # Get rect coordinates + if X.size and Y.size: + x0, x1 = X[0], X[-1]+1 + y0, y1 = Y[0], Y[-1]+1 + else: # No change ... make it minimal + x0, x1 = 0, 2 + y0, y1 = 0, 2 + + # Cut out and store + im2 = im[y0:y1,x0:x1] + prev = im + ims2.append(im2) + xy.append((x0,y0)) + + # Done + #print('%1.2f seconds to determine subrectangles of %i images' % + # (time.time()-t0, len(ims2)) ) + return ims2, xy + + + def convertImagesToPIL(self, images, dither, nq=0): + """ convertImagesToPIL(images, nq=0) + + Convert images to Paletted PIL images, which can then be + written to a single animaged GIF. 
+ + """ + + # Convert to PIL images + images2 = [] + for im in images: + if isinstance(im, Image.Image): + images2.append(im) + elif np and isinstance(im, np.ndarray): + if im.ndim==3 and im.shape[2]==3: + im = Image.fromarray(im,'RGB') + elif im.ndim==3 and im.shape[2]==4: + im = Image.fromarray(im[:,:,:3],'RGB') + elif im.ndim==2: + im = Image.fromarray(im,'L') + images2.append(im) + + # Convert to paletted PIL images + images, images2 = images2, [] + if nq >= 1: + # NeuQuant algorithm + for im in images: + im = im.convert("RGBA") # NQ assumes RGBA + nqInstance = NeuQuant(im, int(nq)) # Learn colors from image + if dither: + im = im.convert("RGB").quantize(palette=nqInstance.paletteImage()) + else: + im = nqInstance.quantize(im) # Use to quantize the image itself + images2.append(im) + else: + # Adaptive PIL algorithm + AD = Image.ADAPTIVE + for im in images: + im = im.convert('P', palette=AD, dither=dither) + images2.append(im) + + # Done + return images2 + + + def writeGifToFile(self, fp, images, durations, loops, xys, disposes): + """ writeGifToFile(fp, images, durations, loops, xys, disposes) + + Given a set of images writes the bytes to the specified stream. 
+ + """ + + # Obtain palette for all images and count each occurance + palettes, occur = [], [] + for im in images: + palettes.append( getheader(im)[1] ) + for palette in palettes: + occur.append( palettes.count( palette ) ) + + # Select most-used palette as the global one (or first in case no max) + globalPalette = palettes[ occur.index(max(occur)) ] + + # Init + frames = 0 + firstFrame = True + + + for im, palette in zip(images, palettes): + + if firstFrame: + # Write header + + # Gather info + header = self.getheaderAnim(im) + appext = self.getAppExt(loops) + + # Write + fp.write(header) + fp.write(globalPalette) + fp.write(appext) + + # Next frame is not the first + firstFrame = False + + if True: + # Write palette and image data + + # Gather info + data = getdata(im) + imdes, data = data[0], data[1:] + graphext = self.getGraphicsControlExt(durations[frames], + disposes[frames]) + # Make image descriptor suitable for using 256 local color palette + lid = self.getImageDescriptor(im, xys[frames]) + + # Write local header + if (palette != globalPalette) or (disposes[frames] != 2): + # Use local color palette + fp.write(graphext) + fp.write(lid) # write suitable image descriptor + fp.write(palette) # write local color table + fp.write('\x08') # LZW minimum size code + else: + # Use global color palette + fp.write(graphext) + fp.write(imdes) # write suitable image descriptor + + # Write image data + for d in data: + fp.write(d) + + # Prepare for next round + frames = frames + 1 + + fp.write(";") # end gif + return frames + + + + +## Exposed functions + +def writeGif(filename, images, duration=0.1, repeat=True, dither=False, + nq=0, subRectangles=True, dispose=None): + """ writeGif(filename, images, duration=0.1, repeat=True, dither=False, + nq=0, subRectangles=True, dispose=None) + + Write an animated gif from the specified images. + + Parameters + ---------- + filename : string + The name of the file to write the image to. 
def writeGif(filename, images, duration=0.1, repeat=True, dither=False,
                nq=0, subRectangles=True, dispose=None):
    """ writeGif(filename, images, duration=0.1, repeat=True, dither=False,
        nq=0, subRectangles=True, dispose=None)
    
    Write an animated gif from the specified images.
    
    Parameters
    ----------
    filename : string
        The name of the file to write the image to.
    images : list
        Should be a list consisting of PIL images or numpy arrays.
        The latter should be between 0 and 255 for integer types, and
        between 0 and 1 for float types.
    duration : scalar or list of scalars
        The duration for all frames, or (if a list) for each frame.
    repeat : bool or integer
        The amount of loops. If True, loops infinitely.
    dither : bool
        Whether to apply dithering
    nq : integer
        If nonzero, applies the NeuQuant quantization algorithm to create
        the color palette. This algorithm is superior, but slower than
        the standard PIL algorithm. The value of nq is the quality
        parameter. 1 represents the best quality. 10 is in general a
        good tradeoff between quality and speed. When using this option,
        better results are usually obtained when subRectangles is False.
    subRectangles : False, True, or a list of 2-element tuples
        Whether to use sub-rectangles. If True, the minimal rectangle that
        is required to update each frame is automatically detected. This
        can give significant reductions in file size, particularly if only
        a part of the image changes. One can also give a list of x-y
        coordinates if you want to do the cropping yourself. The default
        is True.
    dispose : int
        How to dispose each frame. 1 means that each frame is to be left
        in place. 2 means the background color should be restored after
        each frame. 3 means the decoder should restore the previous frame.
        If subRectangles==False, the default is 2, otherwise it is 1.
    
    Raises
    ------
    RuntimeError
        If PIL is not available.
    ValueError
        If a per-frame list (duration or dispose) does not match the
        number of images.
    """
    
    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to write animated gif files.")
    
    # Check images (validates/normalizes the list of frames)
    images = checkImages(images)
    
    # Instantiate writer object
    gifWriter = GifWriter()
    
    # Check loops
    if repeat is False:
        loops = 1
    elif repeat is True:
        loops = 0 # zero means infinite
    else:
        loops = int(repeat)
    
    # Check duration: normalize to one duration per frame
    if hasattr(duration, '__len__'):
        if len(duration) == len(images):
            duration = [d for d in duration]
        else:
            raise ValueError("len(duration) doesn't match amount of images.")
    else:
        duration = [duration for im in images]
    
    # Check subrectangles
    if subRectangles:
        images, xy = gifWriter.handleSubRectangles(images, subRectangles)
        defaultDispose = 1 # Leave image in place
    else:
        # Normal mode
        xy = [(0,0) for im in images]
        defaultDispose = 2 # Restore to background color.
    
    # Check dispose: normalize to one dispose value per frame
    if dispose is None:
        dispose = defaultDispose
    if hasattr(dispose, '__len__'):
        if len(dispose) != len(images):
            # BUGFIX: the message previously said "len(xy)", pointing at
            # the wrong argument; this check is on `dispose`.
            raise ValueError("len(dispose) doesn't match amount of images.")
    else:
        dispose = [dispose for im in images]
    
    # Make images in a format that we can write easy
    images = gifWriter.convertImagesToPIL(images, dither, nq)
    
    # Write (always close the file, even if the writer raises)
    fp = open(filename, 'wb')
    try:
        gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
    finally:
        fp.close()
+ + """ + + # Check PIL + if PIL is None: + raise RuntimeError("Need PIL to read animated gif files.") + + # Check Numpy + if np is None: + raise RuntimeError("Need Numpy to read animated gif files.") + + # Check whether it exists + if not os.path.isfile(filename): + raise IOError('File not found: '+str(filename)) + + # Load file using PIL + pilIm = PIL.Image.open(filename) + pilIm.seek(0) + + # Read all images inside + images = [] + try: + while True: + # Get image as numpy array + tmp = pilIm.convert() # Make without palette + a = np.asarray(tmp) + if len(a.shape)==0: + raise MemoryError("Too little memory to convert PIL image to array") + # Store, and next + images.append(a) + pilIm.seek(pilIm.tell()+1) + except EOFError: + pass + + # Convert to normal PIL images if needed + if not asNumpy: + images2 = images + images = [] + for im in images2: + images.append( PIL.Image.fromarray(im) ) + + # Done + return images + + +class NeuQuant: + """ NeuQuant(image, samplefac=10, colors=256) + + samplefac should be an integer number of 1 or higher, 1 + being the highest quality, but the slowest performance. + With avalue of 10, one tenth of all pixels are used during + training. This value seems a nice tradeof between speed + and quality. + + colors is the amount of colors to reduce the image to. This + should best be a power of two. + + See also: + http://members.ozemail.com.au/~dekker/NEUQUANT.HTML + + License of the NeuQuant Neural-Net Quantization Algorithm + --------------------------------------------------------- + + Copyright (c) 1994 Anthony Dekker + Ported to python by Marius van Voorden in 2010 + + NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994. + See "Kohonen neural networks for optimal colour quantization" + in "network: Computation in Neural Systems" Vol. 5 (1994) pp 351-367. + for a discussion of the algorithm. 
    See also http://members.ozemail.com.au/~dekker/NEUQUANT.HTML
    
    Any party obtaining a copy of these files from the author, directly or
    indirectly, is granted, free of charge, a full and unrestricted irrevocable,
    world-wide, paid up, royalty-free, nonexclusive right and license to deal
    in this software and documentation files (the "Software"), including without
    limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons who receive
    copies from any such party to do so, with the only requirement being
    that this copyright notice remain intact.
    
    """
    
    # Class-level placeholders; the effective values are assigned per
    # instance in setconstants().
    NCYCLES = None # Number of learning cycles
    NETSIZE = None # Number of colours used
    SPECIALS = None # Number of reserved colours used
    BGCOLOR = None # Reserved background colour
    CUTNETSIZE = None
    MAXNETPOS = None
    
    INITRAD = None # For 256 colours, radius starts at 32
    RADIUSBIASSHIFT = None
    RADIUSBIAS = None
    INITBIASRADIUS = None
    RADIUSDEC = None # Factor of 1/30 each cycle
    
    ALPHABIASSHIFT = None
    INITALPHA = None # biased by 10 bits
    
    GAMMA = None
    BETA = None
    BETAGAMMA = None
    
    network = None # The network itself
    colormap = None # Quantized colour map derived from the network
    
    netindex = None # For network lookup - really 256
    
    bias = None # Bias and freq arrays for learning
    freq = None
    
    pimage = None # Cached 1x1 palette image (see paletteImage)
    
    # Four primes near 500 - assume no image has a length so large
    # that it is divisible by all four primes
    PRIME1 = 499
    PRIME2 = 491
    PRIME3 = 487
    PRIME4 = 503
    MAXPRIME = PRIME4
    
    pixels = None
    samplefac = None
    
    a_s = None # Cache of neighbourhood alpha profiles (see geta)
    
    
    def setconstants(self, samplefac, colors):
        # Initialize all tuning constants and working arrays for a run.
        self.NCYCLES = 100 # Number of learning cycles
        self.NETSIZE = colors # Number of colours used
        self.SPECIALS = 3 # Number of reserved colours used
        self.BGCOLOR = self.SPECIALS-1 # Reserved background colour
        self.CUTNETSIZE = self.NETSIZE - self.SPECIALS
        self.MAXNETPOS = self.NETSIZE - 1
        
        # NOTE(review): integer division under Python 2; under Python 3
        # this becomes a float and the later `>>` shift on the radius
        # would fail -- confirm target interpreter.
        self.INITRAD = self.NETSIZE/8 # For 256 colours, radius starts at 32
        self.RADIUSBIASSHIFT = 6
        self.RADIUSBIAS = 1 << self.RADIUSBIASSHIFT
        self.INITBIASRADIUS = self.INITRAD * self.RADIUSBIAS
        self.RADIUSDEC = 30 # Factor of 1/30 each cycle
        
        self.ALPHABIASSHIFT = 10 # Alpha starts at 1
        self.INITALPHA = 1 << self.ALPHABIASSHIFT # biased by 10 bits
        
        self.GAMMA = 1024.0
        self.BETA = 1.0/1024.0
        self.BETAGAMMA = self.BETA * self.GAMMA
        
        self.network = np.empty((self.NETSIZE, 3), dtype='float64') # The network itself
        self.colormap = np.empty((self.NETSIZE, 4), dtype='int32') # Quantized colours
        
        self.netindex = np.empty(256, dtype='int32') # For network lookup - really 256
        
        self.bias = np.empty(self.NETSIZE, dtype='float64') # Bias and freq arrays for learning
        self.freq = np.empty(self.NETSIZE, dtype='float64')
        
        self.pixels = None
        self.samplefac = samplefac
        
        self.a_s = {}
    
    def __init__(self, image, samplefac=10, colors=256):
        
        # Check Numpy
        if np is None:
            raise RuntimeError("Need Numpy for the NeuQuant algorithm.")
        
        # Check image: must be large enough for the prime-stride sampling
        # and must already be RGBA.
        if image.size[0] * image.size[1] < NeuQuant.MAXPRIME:
            raise IOError("Image is too small")
        if image.mode != "RGBA":
            raise IOError("Image mode should be RGBA.")
        
        # Initialize
        self.setconstants(samplefac, colors)
        # NOTE(review): np.fromstring/Image.tostring are deprecated in
        # modern numpy/Pillow (frombuffer/tobytes) -- fine for the 2013
        # toolchain this was written against.
        self.pixels = np.fromstring(image.tostring(), np.uint32)
        self.setUpArrays()
        
        # Train the network, round its weights and build the green index.
        self.learn()
        self.fix()
        self.inxbuild()
    
    def writeColourMap(self, rgb, outstream):
        # Write the colour map to outstream; `rgb` selects channel order
        # (RGB when true, BGR otherwise).  Returns the palette size.
        for i in range(self.NETSIZE):
            bb = self.colormap[i,0];
            gg = self.colormap[i,1];
            rr = self.colormap[i,2];
            outstream.write(rr if rgb else bb)
            outstream.write(gg)
            outstream.write(bb if rgb else rr)
        return self.NETSIZE
    
    def setUpArrays(self):
        # Seed the special (reserved) neurons: black, white, background.
        self.network[0,0] = 0.0 # Black
        self.network[0,1] = 0.0
        self.network[0,2] = 0.0
        
        self.network[1,0] = 255.0 # White
        self.network[1,1] = 255.0
        self.network[1,2] = 255.0
        
        # RESERVED self.BGCOLOR # Background
        
        for i in range(self.SPECIALS):
            self.freq[i] = 1.0 / self.NETSIZE
            self.bias[i] = 0.0
        
        # Remaining neurons start on an even grey ramp.
        for i in range(self.SPECIALS, self.NETSIZE):
            p = self.network[i]
            p[:] = (255.0 * (i-self.SPECIALS)) / self.CUTNETSIZE
            
            self.freq[i] = 1.0 / self.NETSIZE
            self.bias[i] = 0.0
    
    # Omitted: setPixels
    
    def altersingle(self, alpha, i, b, g, r):
        """Move neuron i towards biased (b,g,r) by factor alpha"""
        n = self.network[i] # Alter hit neuron
        n[0] -= (alpha*(n[0] - b))
        n[1] -= (alpha*(n[1] - g))
        n[2] -= (alpha*(n[2] - r))
    
    def geta(self, alpha, rad):
        # Neighbourhood profile: a symmetric, parabolic falloff of length
        # 2*rad-1 centred on zero; cached per (alpha, rad) in self.a_s.
        try:
            return self.a_s[(alpha, rad)]
        except KeyError:
            length = rad*2-1
            mid = length/2
            q = np.array(list(range(mid-1,-1,-1))+list(range(-1,mid)))
            a = alpha*(rad*rad - q*q)/(rad*rad)
            a[mid] = 0 # the centre neuron is handled by altersingle
            self.a_s[(alpha, rad)] = a
            return a
    
    def alterneigh(self, alpha, rad, i, b, g, r):
        # Pull the neighbours of neuron i towards (b,g,r), weighted by the
        # profile from geta; the range is clamped to [SPECIALS-1, NETSIZE).
        if i-rad >= self.SPECIALS-1:
            lo = i-rad
            start = 0
        else:
            lo = self.SPECIALS-1
            start = (self.SPECIALS-1 - (i-rad))
        
        if i+rad <= self.NETSIZE:
            hi = i+rad
            end = rad*2-1
        else:
            hi = self.NETSIZE
            end = (self.NETSIZE - (i+rad))
        
        a = self.geta(alpha, rad)[start:end]
        
        p = self.network[lo+1:hi]
        p -= np.transpose(np.transpose(p - np.array([b, g, r])) * a)
    
    # Earlier (equivalent) formulation kept for reference; the live
    # version below only rewrites the freq update as a multiply.
    #def contest(self, b, g, r):
    #    """ Search for biased BGR values
    #    Finds closest neuron (min dist) and updates self.freq
    #    finds best neuron (min dist-self.bias) and returns position
    #    for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
    #    self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
    #
    #    i, j = self.SPECIALS, self.NETSIZE
    #    dists = abs(self.network[i:j] - np.array([b,g,r])).sum(1)
    #    bestpos = i + np.argmin(dists)
    #    biasdists = dists - self.bias[i:j]
    #    bestbiaspos = i + np.argmin(biasdists)
    #    self.freq[i:j] -= self.BETA * self.freq[i:j]
    #    self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
    #    self.freq[bestpos] += self.BETA
    #    self.bias[bestpos] -= self.BETAGAMMA
    #    return bestbiaspos
    def contest(self, b, g, r):
        """ Search for biased BGR values
        Finds closest neuron (min dist) and updates self.freq
        finds best neuron (min dist-self.bias) and returns position
        for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
        self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
        i, j = self.SPECIALS, self.NETSIZE
        dists = abs(self.network[i:j] - np.array([b,g,r])).sum(1)
        bestpos = i + np.argmin(dists)
        biasdists = dists - self.bias[i:j]
        bestbiaspos = i + np.argmin(biasdists)
        self.freq[i:j] *= (1-self.BETA)
        self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
        self.freq[bestpos] += self.BETA
        self.bias[bestpos] -= self.BETAGAMMA
        return bestbiaspos
    
    def specialFind(self, b, g, r):
        # Exact match against one of the reserved neurons, or -1.
        for i in range(self.SPECIALS):
            n = self.network[i]
            if n[0] == b and n[1] == g and n[2] == r:
                return i
        return -1
    
    def learn(self):
        # One-dimensional Kohonen training over a prime-strided sample of
        # the image's pixels.
        biasRadius = self.INITBIASRADIUS
        alphadec = 30 + ((self.samplefac-1)/3)
        lengthcount = self.pixels.size
        samplepixels = lengthcount / self.samplefac
        delta = samplepixels / self.NCYCLES
        alpha = self.INITALPHA
        
        i = 0;
        rad = biasRadius >> self.RADIUSBIASSHIFT
        if rad <= 1:
            rad = 0
        
        print("Beginning 1D learning: samplepixels = %1.2f rad = %i" %
            (samplepixels, rad) )
        # Pick a prime stride that is coprime with the pixel count so the
        # sample walks the whole image.
        step = 0
        pos = 0
        if lengthcount%NeuQuant.PRIME1 != 0:
            step = NeuQuant.PRIME1
        elif lengthcount%NeuQuant.PRIME2 != 0:
            step = NeuQuant.PRIME2
        elif lengthcount%NeuQuant.PRIME3 != 0:
            step = NeuQuant.PRIME3
        else:
            step = NeuQuant.PRIME4
        
        i = 0
        printed_string = ''
        while i < samplepixels:
            if i%100 == 99:
                # Crude console progress; '\b' backspaces over the last
                # message (print() still appends newlines, so the output
                # is only approximately in place).
                tmp = '\b'*len(printed_string)
                printed_string = str((i+1)*100/samplepixels)+"%\n"
                print(tmp + printed_string)
            # Unpack one packed RGBA pixel.  The channel *names* follow
            # the original C code; the labelling is used consistently with
            # the network, so any swap cancels out.
            p = self.pixels[pos]
            r = (p >> 16) & 0xff
            g = (p >> 8) & 0xff
            b = (p ) & 0xff
            
            if i == 0: # Remember background colour
                self.network[self.BGCOLOR] = [b, g, r]
            
            j = self.specialFind(b, g, r)
            if j < 0:
                j = self.contest(b, g, r)
            if j >= self.SPECIALS: # Don't learn for specials
                a = (1.0 * alpha) / self.INITALPHA
                self.altersingle(a, j, b, g, r)
                if rad > 0:
                    self.alterneigh(a, rad, j, b, g, r)
            
            pos = (pos+step)%lengthcount
            
            i += 1
            # Decay alpha and the neighbourhood radius once per cycle.
            # NOTE(review): if samplepixels < NCYCLES then delta is 0 and
            # `i%delta` raises ZeroDivisionError -- only an issue for very
            # small images/large samplefac; confirm acceptable.
            if i%delta == 0:
                alpha -= alpha / alphadec
                biasRadius -= biasRadius / self.RADIUSDEC
                rad = biasRadius >> self.RADIUSBIASSHIFT
                if rad <= 1:
                    rad = 0
        
        finalAlpha = (1.0*alpha)/self.INITALPHA
        print("Finished 1D learning: final alpha = %1.2f!" % finalAlpha)
    
    def fix(self):
        # Round the trained float weights into the int32 colormap,
        # clamped to 0..255; column 3 remembers the original index.
        for i in range(self.NETSIZE):
            for j in range(3):
                x = int(0.5 + self.network[i,j])
                x = max(0, x)
                x = min(255, x)
                self.colormap[i,j] = x
            self.colormap[i,3] = i
    
    def inxbuild(self):
        # Selection-sort the colormap on the green channel and build
        # netindex, mapping each green value 0..255 to a start position
        # for nearest-colour searches.
        previouscol = 0
        startpos = 0
        for i in range(self.NETSIZE):
            p = self.colormap[i]
            q = None
            smallpos = i
            smallval = p[1] # Index on g
            # Find smallest in i..self.NETSIZE-1
            for j in range(i+1, self.NETSIZE):
                q = self.colormap[j]
                if q[1] < smallval: # Index on g
                    smallpos = j
                    smallval = q[1] # Index on g
            
            q = self.colormap[smallpos]
            # Swap p (i) and q (smallpos) entries
            if i != smallpos:
                p[:],q[:] = q, p.copy()
            
            # smallval entry is now in position i
            if smallval != previouscol:
                self.netindex[previouscol] = (startpos+i) >> 1
                for j in range(previouscol+1, smallval):
                    self.netindex[j] = i
                previouscol = smallval
                startpos = i
        self.netindex[previouscol] = (startpos+self.MAXNETPOS) >> 1
        for j in range(previouscol+1, 256): # Really 256
            self.netindex[j] = self.MAXNETPOS
    
    
    def paletteImage(self):
        """ PIL weird interface for making a paletted image: create an image which
        already has the palette, and use that in Image.quantize. This function
        returns this palette image.
        """
        if self.pimage is None:
            palette = []
            for i in range(self.NETSIZE):
                palette.extend(self.colormap[i][:3])
            
            # Pad the palette out to the full 256 entries PIL expects.
            palette.extend([0]*(256-self.NETSIZE)*3)
            
            # a palette image to use for quant
            self.pimage = Image.new("P", (1, 1), 0)
            self.pimage.putpalette(palette)
        return self.pimage
    
    
    def quantize(self, image):
        """ Use a kdtree to quickly find the closest palette colors for the pixels """
        if get_cKDTree():
            return self.quantize_with_scipy(image)
        else:
            print('Scipy not available, falling back to slower version.')
            return self.quantize_without_scipy(image)
    
    
    def quantize_with_scipy(self, image):
        # Map every pixel to its nearest palette colour via a cKDTree,
        # then hand the recoloured image to PIL for the final palettizing.
        w,h = image.size
        px = np.asarray(image).copy()
        px2 = px[:,:,:3].reshape((w*h,3))
        
        cKDTree = get_cKDTree()
        kdtree = cKDTree(self.colormap[:,:3],leafsize=10)
        result = kdtree.query(px2)
        colorindex = result[1]
        print("Distance: %1.2f" % (result[0].sum()/(w*h)) )
        px2[:] = self.colormap[colorindex,:3]
        
        return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())
    
    
    def quantize_without_scipy(self, image):
        """ This function can be used if no scipy is available.
        It's 7 times slower though.
+ """ + w,h = image.size + px = np.asarray(image).copy() + memo = {} + for j in range(w): + for i in range(h): + key = (px[i,j,0],px[i,j,1],px[i,j,2]) + try: + val = memo[key] + except KeyError: + val = self.convert(*key) + memo[key] = val + px[i,j,0],px[i,j,1],px[i,j,2] = val + return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage()) + + def convert(self, *color): + i = self.inxsearch(*color) + return self.colormap[i,:3] + + def inxsearch(self, r, g, b): + """Search for BGR values 0..255 and return colour index""" + dists = (self.colormap[:,:3] - np.array([r,g,b])) + a= np.argmin((dists*dists).sum(1)) + return a + + + +if __name__ == '__main__': + im = np.zeros((200,200), dtype=np.uint8) + im[10:30,:] = 100 + im[:,80:120] = 255 + im[-50:-40,:] = 50 + + images = [im*1.0, im*0.8, im*0.6, im*0.4, im*0] + writeGif('lala3.gif',images, duration=0.5, dither=0) diff --git a/views/index.ejs b/views/index.ejs index 394661e..f1481d2 100644 --- a/views/index.ejs +++ b/views/index.ejs @@ -1,20 +1,19 @@ - - <%= title %> - - - - - + + <%= title %> + + + + + -
- × -
+
+ × +
-
    - <% for(var i = 0; i < clips.length; i++) { %> -
  • - - - -
  • - <% } %> -
+
    + <% for(var i = 0; i < clips.length; i++) { %> +
  • + + + +
  • + <% } %> +
-
-
- +
+
+ -- 2.34.1