Skip to content

Instantly share code, notes, and snippets.

@manjotsidhu
Created September 20, 2017 15:24
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save manjotsidhu/a3b518252776854f3293b085fa28ae94 to your computer and use it in GitHub Desktop.
Save manjotsidhu/a3b518252776854f3293b085fa28ae94 to your computer and use it in GitHub Desktop.
Camera Patch
From c513decb40fc97173a7ac0bba5b7f8bcbb61ea3e Mon Sep 17 00:00:00 2001
From: fire855 <fire855@gmail.com>
Date: Sun, 30 Oct 2016 07:39:16 +0000
Subject: [PATCH] Framework_av MTK Patch
Change-Id: Ia8ab2f73ee51e27f21e4a6df047c307c5dd9965c
---
AV.zip | Bin 0 -> 27670 bytes
camera/Android.mk | 7 +
camera/Android.mk.orig | 79 +
camera/mediatek/MtkCamera.cpp | 212 +
camera/mediatek/MtkCameraParameters.cpp | 308 +
include/camera/MtkCamera.h | 275 +
include/camera/MtkCameraParameters.h | 364 +
include/media/MediaPlayerInterface.h | 3 +
include/media/stagefright/ACodec.h | 3 +
include/media/stagefright/ACodec.h.orig | 501 ++
include/media/stagefright/ColorConverter.h | 4 +
include/media/stagefright/MediaSync.h | 5 +-
include/media/stagefright/OMXCodec.h | 4 +
include/media/stagefright/OMXCodec.h.orig | 433 ++
.../media/stagefright/dpframework/DpBlitStream.h | 243 +
include/media/stagefright/dpframework/DpDataType.h | 450 ++
media/libmediaplayerservice/Android.mk | 5 +
media/libmediaplayerservice/Android.mk.orig | 75 +
media/libmediaplayerservice/FMAudioPlayer.h | 144 +
media/libmediaplayerservice/MediaPlayerFactory.cpp | 25 +
.../libmediaplayerservice/StagefrightRecorder.cpp | 8 +
media/libstagefright/ACodec.cpp | 44 +
media/libstagefright/ACodec.cpp.orig | 7270 ++++++++++++++++++++
media/libstagefright/Android.mk | 11 +
media/libstagefright/Android.mk.orig | 242 +
media/libstagefright/CameraSource.cpp | 8 +
media/libstagefright/CameraSource.cpp.orig | 1045 +++
media/libstagefright/MediaCodecSource.cpp | 6 +
media/libstagefright/MediaSync.cpp | 8 +-
media/libstagefright/OMXCodec.cpp | 11 +
media/libstagefright/OMXCodec.cpp.orig | 4629 +++++++++++++
media/libstagefright/SurfaceMediaSource.cpp | 3 +-
media/libstagefright/colorconversion/Android.mk | 7 +
.../colorconversion/ColorConverter.cpp | 113 +
.../filters/GraphicBufferListener.cpp | 12 +-
.../libstagefright/filters/GraphicBufferListener.h | 6 +-
media/libstagefright/filters/MediaFilter.cpp | 4 +-
media/libstagefright/omx/GraphicBufferSource.cpp | 17 +-
media/libstagefright/omx/GraphicBufferSource.h | 6 +-
services/audioflinger/AudioFlinger.cpp | 12 +
.../camera/libcameraservice/api1/CameraClient.cpp | 40 +
.../libcameraservice/api1/CameraClient.cpp.orig | 1066 +++
.../libcameraservice/gui/RingBufferConsumer.cpp | 2 +-
.../libcameraservice/gui/RingBufferConsumer.h | 5 +-
44 files changed, 17681 insertions(+), 34 deletions(-)
create mode 100644 AV.zip
create mode 100644 camera/Android.mk.orig
create mode 100644 camera/mediatek/MtkCamera.cpp
create mode 100644 camera/mediatek/MtkCameraParameters.cpp
create mode 100644 include/camera/MtkCamera.h
create mode 100644 include/camera/MtkCameraParameters.h
create mode 100644 include/media/stagefright/ACodec.h.orig
create mode 100644 include/media/stagefright/OMXCodec.h.orig
create mode 100644 include/media/stagefright/dpframework/DpBlitStream.h
create mode 100644 include/media/stagefright/dpframework/DpDataType.h
create mode 100644 media/libmediaplayerservice/Android.mk.orig
create mode 100644 media/libmediaplayerservice/FMAudioPlayer.h
create mode 100644 media/libstagefright/ACodec.cpp.orig
create mode 100644 media/libstagefright/Android.mk.orig
create mode 100644 media/libstagefright/CameraSource.cpp.orig
create mode 100644 media/libstagefright/OMXCodec.cpp.orig
create mode 100644 services/camera/libcameraservice/api1/CameraClient.cpp.orig
diff --git a/AV.zip b/AV.zip
new file mode 100644
index 0000000000000000000000000000000000000000..70561a8d49fea75067fd37bf8e3d2db9e89e162f
GIT binary patch
literal 27670
zcmYJaV~j9N6D+#6ZQHhO+qP}nwr$(CZQJ{-eczMs<endsNqW%9B$cl2>QRse20;M;
z0Du5^ix!upao4m+#0LO4N&x@>|If-s=V0h!Y;mLW<-E<-_Pgs3y2YKGc#JNon1;i?
ztAu3gTnt{FUVC}#V1d}wkth;cbo9=)+5dAi2rdzja?-K8n`x@74bt~?c#s2pfKagW
zua`7K)eK(MTPt^}waFK9H516X*-Ex@3bnLqs-ddN=|##>U|c&8PzSgZisTaa0~|#%
zDV|}Lo59&vqgrJ^eh?=t-``Vb>+Z<siq@acY75W@9L&uDY6|K7%Pr94RGNS?qp~On
z8PX9KwhjI-VEF*|eRd@*c6&dtQHTBjsSc<ChA*hH{lSOf>AMs+VAieo_50wWg0Wpa
z{$a*p53snV0zpch)!U3aHJnN%=So$xbu~QP3Y1Txsbr1Hsa1)Wi<_Gp=+3HZgX_Tv
z5_i3C1LT!B=i}~}r(*#t54il?Iia_&=x_R<M4~OQ)yAWGegVF~pHF_WYP6|$uD9b<
z<MsPnhv2-%ystMzKJV|bsI!iK#{l7dK@TafUO+K%Zc?>GFblGC7LTu<@aO|-C2FLp
z!6(K1kPjk;L$^NgK!{gKHG#gz$!FemE*;-F#1!*nl1%~J$NRwchw>zQysYMQDhVSS
z>E2eyK6MdzT8)_9q!4r59Q!v)C2q$7!~_yH6Lcql5>o#hWyMPMBpnt>%p`kh@)0qE
z2p<fACJ{F=z@@Uv+dD2c@SaFY=-^_)(VW@A?ezj|pl`$Yndb(O%Sj#X8;Hc4DjmOW
zZXUI>M$SA{L6%41&FUIL)3yKA?*lZ4Qhjw<+9Aka7f_`E^81zr00nq2@a5I;P%hBz
zx=7$(xwE?!Js2R9;kXz(wFeeqguw2ez{5D=JHGqb044^ki*<o3Sn*x#>P4&fk6Qiy
zVwgAy6ko9c5H^O(?Z6Mry~24fIT8mgQ*G)3j6f(26>Clgmj^Iq4j`U>@#_JU=g9?$
zO*`|q!LHTMS?=0;yxhi^-R<ZeEul?UW^w~&5nq!<!>$Hk##S|@&PsbT=5sjSb~X|%
ziyAY0kL%+6@pa72On5^(?AmsK5W6n@1#fMEvcH|jbZfg1vL9uV(I(EjC3JfN+Mz4?
z>fPf@AK;YiB0PA&a6qjp-VL=V7k_)b$!aFo7Z{CrJGBERAkM%lHBSFUISu1uc32R9
z6JIf_urU)>b(G20a{L13K&ds|P3moW9Dq0BGZ}y@yT+FFvKaFaSkLDBva(@lGa;fT
zLbN7r0-SKYzjA*sb#Fl_1%-UY++|#N2&Q#UBoUSNUF{+PJkOtR;7)t5akS<T{%cU^
zW64aFDfrsl{PdKHE8VT{aU|gusw7N`To6?*$f^s9XoCsfjR93xUY?oJlpakFWfsP8
z;*NG^p8R>5q9!1RFtfv3T)p#&(o=wiL~y^AANQoigbZuAoCAFZPV&F@gVwUnpqht6
zQ6q8ccabGDw&Vyj2d~_G^(toZT1pZJdYtzUqgGuSw=pxpVh*)Z<rMF!a*o8GQ*<Om
zY8<4rRhC+>Z49koL;ZEGwOZ3cEtOET%N#8l*;N4krpwv*4yd}e>EdL53lGsHb}#jN
zYg=gA8{Y>!iSo;<?d~uViatj13YD|LrVb6kQ7C;KvC-3FWEQ~OgTFFp?`FW7(1<RV
zf<EaoEfy<$9{a%J4gbDg?1l2ur8QkpQ_#?hvHZPGIR<!anhuRb?TB300GO;=cw4Z@
zn{62qAg{h?CopXDxees;5e<w%tf~~0Kjw0rZv06GmzM8%kyJl>3rT<jv%U~Udd94_
ztuV{ovZ_;?7Y^0hoU+OK5|!4h$~0?^4GU&{N?ou*6-J^FztR-Uh(%m$+hWPBO39Wx
zZ6g+BSPWYZEDbOBl|kg2f&8V-3Xw7$pI+T~)|_{ynQr2VAAS!X1Llt0wr)r`%kx{W
z04**RtJo%jja@qYf_w^3N5C;G4nW-JWj7rBqQ!8K0{-T5hkm~v-)MmfbO|r{RHVu|
zLL+~rKl42J7`=44Z@3*9U-#Ih(%sznh)5G;NW7KsS!-9p3ZkidvpeG=BOl&dc3nU{
zr@r%=P%;cM;6c#CyfwWdFt|QYs6GXsv!T#nFs2yl8al6_t>|j_avN}koo8>bJe1$q
z*zst-Y&!;D&Fby>uPMLyd70^(H!U&kayF6St+xpSulcpr@bsh}cwIS&0CiZPfMEmr
z55OEd-;5NSA@8Nmz&(r?*Xr%-EANEF7*bbGJ?qBZ8AQzkd+7DJGqQK;Gzvv#MeaY#
zX_YQ|mqE@7@=Dh$fKLDTP^!z{H(HhYb@0Ntzp2Sre(%yxbnN?n>Q#SrDn*S~UY{g{
z@sj&R@>%0M#pT>UB}+?DfQR8P5STtcS!+!#CB5%VP(1Pm{v{+B=b-*M1ED3K0MS?5
z3qqR>Pq2TqOu&6U7&bqs#)*A=I4|961}hd9!GaX8w3&;KmH<}IfS?8kfm^D1qT0~z
zvA2+B7r`5G(lLW!h6Dlz$UwyuedQGn0<Z4~MJhw>{0Ed5_9g5012cLy2aNQu=9m%e
zAkB`6HZLH3y;}#I1r!wFb7pe3?-t#hjU6NeHX>laAYvsQ4@DRKb5xG~_=X04Oj)Hx
zcG>+&&Y&4NSS;&j<Vs+B3(Vm|sAI5a5=k>)4x;x?Hs+iqY%%%xd*+V<2IDO<L`oJj
z{M=(K<9f%p7q4!X;=Jowwtd^u(`E$+yra7PRZPXOO#U~H674M&;~_+MSIxZ+xIBMn
zpRPZXLeqOFc5I|{y)JFjtV^GWrrr!PG?UH%I-3@0Rz+KUJ~X!K*l&)YxB}-SUyjb-
z<4iR+huTcT1^eqI*4Fz!G5hh5t(wcWlOz51>1rBoxPlG!>JEuPA*O_3@`Pz?DMT@5
zGr>u2>^pUgjntCLzbO86BLyo^{&W`B$hN?!#uIlAj3IYm=*DCu+*D&RV>A)+$q*5H
z;uv)JLm8El$wd$&r>Gzz(84hJbO<n$7*Hv0MdT?)Gj!Nv+*hL+xojdCyKEu_|8E|N
zWn@Ev$9-DH3m*SY!Q-24ZG+_@yAno;^n#gSU;bQ{E0^`P_vMGM;oktvx&hug^^e)a
z1i)a~;LEr}@95Y^W~Uq=>|F%vX--CIYtGKRQAUBD!O{qca_#^^=)OT4*f^yPvE@8X
zUqh=^oJq0vD}pbDB%q=^7>I;Hb|Zq&No+ms;_w`!Mgj;z`QvKuOFbD1(n`7>oi4Ee
zxx?|@OdOBniaJOmflJg&NJ!=7zo>PbDB<`n01)KZU;w$a#>BV9kmIA+Q;QO;bP3R@
zG&$hn+AXCgGT4uduhVzAneQ3;cmOKwjr-2kDg~>r+IKyfXPLx#KX#Yv1pX^yTcP>p
z3P0%JS00!0TlmuLwbh(~2H>{^8h@EP=Qp21yAv*3*gIfn<}A6d!($G$xXfNAI+9Zh
zk8_hJCQ`ygP3g-CIye3X(EoIT@$-Y1njgQ2MB42c#wW(YK@rI@WbVRbE6z6iqD+rn
zz})2l7PZG55oV{72(wa3n}o-7@q#q9>^WW1<CKwvO4FbdKnzTc4qq73=3T;{ecA5?
zXQ~$?;OWV-_I-(n2^7A}GA$7Qs+7t1T_mPiyXyT_u-AHl^xX#Lmj!%D32xn9Q^1F*
zHLI@%7BDnroMDmS9V5&b8oVl2lfECvfJ)}n_}rujk;I|l*>0Dqt=|o|F?ZsFapkaj
zmZ3Dj-WzLhqJug2WR#f=WwmRigJOlMJ{hVo6l$B+!RU+aaZsBLz1+nn{x_sai!Qd0
z25qR`+2H*=i32PAbk=Q{G&Bk7)4P}ez$iy~VC;Ab#P@t%hTT?;_R!ejqrZbW%K*ec
zw~yK0<>-u^DL?lnR=m%AERp!MTj*-P)#!Zc<9}h?%N=vfv5(K&yl5?^Sn%*`VWC^S
zjje^->yO-C{qQTG*Wg79a{c_7jG;b<y;G~$4J#0%{N&&@jw6SJb042q`PN#vc&$YX
z3)@CE`uKYhi-@hoSmULC&1NUuXT?W{hlim~eXxv$FJ=~T{KIImb2W?Tw{`vIy|_Uu
z{o&(iV3?B=9}NtB?BU<?vbM04%`Q2NG5%E?uc;&u^WTA+OiGmh?yYzW8`cXCHTut`
zcgZ2ZVo~P2l{GMz-*gW@nb3bm#W%U@qtYw9P#f;0_&4!xc8FLEOsx4CB|thRma)ko
zzl0AWM(9DxAioF)Q$T!7$%?}^voQ|U0rDZp@4zACOa%F3cddTRBRbI6uZ7!P)0g4j
z_cTdl39LbKB$pHCND&R5HMwR{iy0@=v)d5r>zGyr$g)TkI^@2JSSAAH{zY}=Xg+?e
zY=W)w5!91g>w{Xf`9{ARV`G;!T92y{dO?UN^M1SAP;8!P5*~EkfH2g@Y{ZjFC^~TW
zcha`~seb)rKhqYpH=7)QBECY%KEjLTQEi5>YRU@_3UePY@`GE@T8Z-*J}}mw`F4u?
zn73S}Ci6&|W%iZeNsE}Cq-P@ZAbn5?0!?887;9+6kUseNqNUozcG|A((#le{Or_PJ
zE@{@9VlvyrUL^Z&D3J^@-@hE-QO_@JfQaBB?WLvmmgR_}QF!!s4)&jaOtxWt!LUo=
zkPKS=i}Q(a87XWvtYKkman__wpiDLF@`KCYnDzlS1YTBq0r;i(e`&t^tGcug$H=P@
zJ<!8BlRd!tQkI{`$_<-*8a|FhAOp=B7o7?hk?2|G(L&Qc$kh0wCEZO6$pp@CIbGgT
zyOH};1^^}9CmFojpE1(RYeTl>U!t?9h^DZit;dh^h`*fH#5Mk1HgVHu+%m|x_lMc+
z1Mo~*#o^wCYZW>btW@Y$xJR6|ik#)FdkP0FzvS5Ap1q#HNA|RU30GJT{M*pK3~=4>
z>pA|Na!`#$j-Z`}RQbhG=P@J9)Prh#qNwsRhdfj{`p)Nm_Q9Rlwhc@_C5W%5T)y$z
z(`mfW44X~mGe0=;YD2PMJdY;x)JLXa-hZI8^tSGoU>1Rok7dmOYN>T{okl&MkEQP)
z0Z74bnk<<*%T}i+B!^~&izhDQ-jOY7UI9Iy^yU5i0S2{wZo`8paAq(wC{Y7bI5OBW
zU9fGYFv^@7DN&*nK-Q!U_CD(&+T6IkmLb{xm3Pbk8d8<pY;0Iw^r}W!u!a@&<x??%
zZG3!fF5C;5GSfgPWp+Gr$U5d+%Sas39S}CAUE7Aju`m3?=Io_z->kjfd)jcvMJ-{p
z?2fM-7F}^K64_CsRtV0~<?R^yUD*ZOc)vwk_E9e1ba?Ch`u7S$8(Z)Vi$HC2Ju`z;
zgYnYzmy!0e446eB_Fs}t%tPHTGdE*yS>I#$^_o*7nFdDjiz&fuh8y#h7-pE7e}k$A
zcV0R72UME}(-WN+u1~DB(9vt>5dNcV7m^<+66QbIT%WZ0g)j&|YI9ew%+HHGk0Ezc
z=F0Y+WA!&e<UZ>3n=5u%hwc;H!qX!0Y@}FC12)#WKPW6-hvD04YZ$Me<pwyu6~%_`
zSd{@tQqLUQt5>!mxURAK;$4e8U<dw|@=_^mO$6_k<OM46llj~pAE-{jNQ#W3wS8Fb
z764(|o~(BZWiai}Ry)Tu?K<A9b~m2iglA$a^`o-Kk%xUgymn*~l<uc59mN^r>>=7i
z_G15zXSx={*D6pi6^Z;$jyVWvQj=Z~dH@Yy=J|n7uQ<YIFW?@_M1}3Oa`l)+BR5S5
zwQ1vP_wHEv97GtiRgfE1pEf2A1mBA97^KBFs)*QBN6cVq7B`^6jnk-2lCgl;NafqL
ztr@D|TjD0W1DjGoZItk~J<zjP4mL~*(H|a~WP0PBLcM!wUxAeBR;L;Qb@IdV9~Pz-
zV=L0lx@U}y!xZ_llMJg*8W#hD|JCv2A4%JlZieU)@byB^tHYTdH}gtmNj)DeF|0#p
zF6+;gLFCx)uLuMkplMyA94K@6jG+Ajv$kj~1EH7;iD`00D5fEaafT~$#wbc+q~eIZ
zFo>}l_MBjd*Xk{uqd#{=VKV2q+7`X(A7GQNvxhu__9;5~3S+{*#SVRjITiJIwK$p6
zCI6IWbzNQ~1HiG1YY;RsNQqOYrvmr7N%|#n@8Pijpd(Ka36&Vux*5PwSjkXCv-mI;
zk`RetS+#~Z(GN8tSyx2-&$Lp3s9$B6NnRr6QyFF|ND@bO3>>5#2TV?*z==Dayci8>
zib!~b?H?Y7nuk#Nj*F!RqQ;^kx)t1nqdZkfSwyy=sHP`H5e<<T7LHg(-bkGomQGKa
zA{r7_RrQ7Jv|K|2siLAcLm?i6Id|t5rzjLvQ*P>)b`gjAQ5xd}SM1}7U_6#&3V2-l
zt@M9hMe)u2q_-9dL-q8(<q-^XkP~@3&<EmQu(K_9LxZpRqh|nE-)DvQ!I8?0j$Np8
z{!%B*OvtbMqoACZMRgu|0pCMe{-o@jvVYwa{Z7hae#)o3Sw#BmySph(^7Tqjj-qNs
z@e}p|x2WtUv(H>TZ&i{0(aCpT_a-$?F=k$t{27g83y)tI^&-<dCN1*_$4(s!%25Wi
zc(I$7e+pwvnML$;9CD(JP#mJt-BiaBws!094Oxd-F&|)7OL41ws{pyiDxxFt`LrCr
zV)elqVQTu_$!`bn_$ez)%K)4EX{2#j@YhX|YGaCdS4Am8s-bOy(uABo5l~0P7%LXo
zd|S#)#zE6kw@@<2_QE9Gkd%YZ>6vw#y@DK=vBI-}-lUF^RG<!{GNkdC<xvMVY&gS?
zg&1>YNimva#EdzzvBn&9D8rA9=vnB24v-U;g`3l;j2wui69rKKbteOkLxcg?1g@yt
z_=Co)&7agmbk$aYzg1KL=hyWS2TpJ~a#}G0X2T)K1uvR_np9{Bq;f)=go9kP&Eo@-
z3tnOb&EW%(3!g5~kDp1)KvnYX3KRm@n1Rn+)6gO>H4vne=#bZ-*d=d-+jnY6i3zsu
z)C|q~gOq5i<+rL7NUO|5Z`?F)^V&Cl;iGp?v|$r>j;nWl4P4NoAik3g&QYQ-meITS
z4H{}AF+17BMo>ZjZJg;97>h9?YRBa>yK|q|fi_{<PF+lu<TT>Y<BmSqY;~QH7rI=f
zXN5Fn5`&AoepJf05=?{^U4l9Uck1Ce83ctkBQa{l!j0N<9+Wwi3%F-n4}*kLo8BNb
zG7$mrXN1<C!rsL#8-2N4pfH&?gS@l_hvSnphup<|utW3f6n>LdbI)l(9b}foog0ce
z&`D#%L?3L%gi9ueI=oClyjH{E))7kC<k?O|I1DL=$dim<(xwLLm;wQ^1~sk!wpW^n
zyVmshyrk~x5U*On8JTbo!++2g@YYC2B+@_21M~foq?6l-8{#*b{d=r^Pf__UZ$DVj
z);1X?p{no}JfOAId^S^-ko{v2p2_%q(j@TRld`VpyX^PDor1B8T|-!1DH1EvJGTER
zAaDM&zqiU>7E5|Kf1ozo%#mhd5<8k5;OYixW^~;kG2l*}Dx&}yQo|&HiyWz|-Ldb#
z%uHk{cDK>LExUUj6wAt6>v?{Djw$=fjQiSUK3ZeE>(3^TtcD<GAD|8Jz03RMyXgZy
zJ0TFMUQ9`zD*zww_n~*#lMD(AKlbUle}90yDY*4uiHHqud{#?%WL%KB-{apK!$OTh
z;(g=0$yJR=E+gVoA%>ryrvpn2S7$$9L-v!}wcr!Y+lLubU*9!6;dB-W98WY@EZG^M
z59U_5C;FmZ`Ii_<3qjYz<_@zSsvL5LG<$Ee%~+v6Ua4OfFs<%<eP>qv)A7*y05v&Y
zzgX!mf~VGUq{0CD_AQVi1c`|6GHA^YO(8j7WKj3UTD!R-mv;KEXHm<FR#qf>;z*G9
zskveRwqFr60^%EeKDGZxO#pPnyWe@!?i0Sw5D+d2c<x@Z0#@+OmjGbz;rOSAyM)|+
zLo%t-K2Q}e>v^RiO9=K1`y#wbP4&^KDOBo&fMpTo8ON8#0fZ-{ssV&4C6d-F$|#Y{
zL=B@>>C%MhEOXDmu&Lc9X~H1Rn(9DZqvHdIM;^mM%&-~&s0_d<vXj8LyR{uI%NDkU
zp<N@m7H`q3ZF6A&#SK7TFs}J{P=Mq>q?%Gzg{yK6rBzB%2m=!})s34T1OkQZZirX4
z&BE-+b|rT>K^+0F>_Dy*p$1`{7412W*nDy>WW+GRKwn<%NdE{OI5)58>uqm$VCzQb
znVKr=p*Uiz!U7q-#t7^=!rgOK2iw~FYeP4)9YMCS%2kBvp&}T0zl~d4uyWB-sCBV0
z15sZhNkh2+Dd`;@mrykgzQkV~Ur8r9)fi;~Ir@;*9|B;id(C{z#?*%Y>-H1$DM*75
zy@Y~A^`Cx80TC+tRviy{llXOXG~}2t_}>?4r3pAV!Q?`0vA>%_rAd%MU`bmG7e%$2
ztf~sDno?ulzO4rXDg_p<o36Fvt%2@{K!OrNmr`TLk_egGd-#KeMz{iUk&IkhI;^8U
zmIkTeo=MTROL(z6yV9eZG#KrcVCwb-K0m(z_{7CC?1${OG#%B19)E(FA_e7&%0n$6
zb7Y41j8M=%A=dIE5m?{&YXD#NzB190X)D-elbAB6r&)aH@Y9))@l%~eK9$LLhNUq7
z%ErUgMTGRN>}Wc+1x#xdq6&Sh);dZp<SihdfsZy1I%{1(pLvwkd+t1d`ac*=h=31d
zw$9oQFJe^3JtjX>%;@#4aqsRCWpz)!QIRB=b8(YB2)91L1x^yu$G198fX7gB6)-n3
zg1IflQ>>#*j?*zLq+AMq+-#b++z_WxO$2&PvV(T}%%qt3beeSYqEi}dw5d(I(_hfu
zmt0!=hSB-zxd^>2@Wbh5Z?ALTp0Hd0gaK*wu##eSYb+Bv!4u7d)?GZ`p64Tkz_r=t
z7>EL?7aAydm%7#!>nlh-u@&uTFV{qKOvDEXZ7~oP5UV;Tpe)$mN|8obao!!_-phj~
z_8z`#d4)@qA4P$BJ!$pgw||ejHr?-Gu&c)AZEkLBw~dbv7MpUj(PU}@fpAapQz%kr
zOYi7OdeR8#*1>wx_Tn1VhO2Cu|ET2BP?3td6^u#)bb*zEikTHr(WD+vX+)<w%}f!{
z7&{sO;~~wWxm`FsxbNb?0m*k9bn=8I(>b$ljX(xfw#p@J4uFs#%hVM>Wz3R$V>YS6
zdqfPJUVL0++*V50I4rFKHo7WUtYdMSkPlOc1QE3oinE}MK)~=Iq0hzny8A|!zc-&h
zlSbq^FXQ*)$b2sa=W=d%z#^x+4KUM<3r7RiFRbE4RRntPME!Si<;2#H@$Sk8Me*HC
z=*SynDm>O5Bw&eJmjYJPs?<2J*K)qbR%#FbEi?Eo<3t}4W<?_tE1S=(y_y+h1aJz0
zE}Fdt8*tyY6|ildNR}IK1=WwBq40JkZ!tFE99|@oT@QHp4t7}}Yh~UXCI*4j-e(VQ
zw`#JTvu?Cj(qX@N($3e(Ld~ZPCMQK_?2cTRU4KMGStG07=*ldAU`=thP1vAA-B{Se
zSo3;EOk!S0nD6$`7YN9bSDc$$pPv-X_nk$HV=4~oQ1T`DqVOAT^!k9yf`!hp%AN<n
zuIFbZPipqAs$k2~n0wW7RbSPTPiVs2^PbR>u`>d*7=c<uX%$~_o3b;&cz=`S=3oH&
zUS#Hc#WQlp@SnZzBWEVZ&u7vv{JWu#(p|L+U&y$ekp)=4c4ezg<N!GIDRTxM{2<Mo
zE$~Mxs!oOPT@<ZAuX~I^EYN5-W)d~eaQUnjlu2XIapM*odDVTsZAO?b3DvWWeloKd
z1LY;P8T^Q*wenY~{8@y!FAU|59`umLqqdKQ6HF3TgP7`MncuM?{qkc3I0B*B0S7yu
zjoGmI<rc;)wjE0grzpnb{ZY36ifkoNb+T#|ZPm#`lcBL8QsGvAcnA9PHFe&Hi>=hN
zP)2@Ky5WWJw<V~yzbrpGncJ&Hn`qK4!M>Q=z%q5Q-PL|&kIq`4$QYk$(1y*`T30Cz
zTUD<uY+<=jf%tI*I<QB?HOsVvKZx+TG(WyGfQ{$J-yQCUUjAGn%yxvtO(kWS7o2}{
zn|;Sbxcr^(C^!XJ0mRotdl&`SPg&<V6UcbhA$chT52-)ru(ZjJ0q&*b_t5mx_GV^D
ztIPPFAlKi-W_-cU=OW8q6t~>D^YHRo1H=ve7*|06VE=|X!d^qGK$iEWTUr`>h%h`A
zGh4kpzPmQJDfK->ZaA@#1$JjI0V10n^Nm4B+}YOVC1szTL`Ie3qf4x>n?z4V0j0fZ
zTOvIVBLNwNA%4-vT&wjaM{1BkeK#OM<YTgQp{alqA8SHt68MG#evE2T31l<Shw?Dk
zQU}szO}uraozghQsKzmVk!~3XNt1`C2x3%2b4bGcE|b|WAOPgQP$-)j@NG*TiYCMb
zb5-gntau=YNS%Nra1j^}c>`*LhGMo1N*I~N=I;m~c<!eoWUqQfYD#4JBcBdWBgNl>
z|8GdYR=(!&ou1taAL)|w0L(N0F*F4gA;SH6HvzOA{GA@&)4(qX%cnqo21ATw9+N?b
zY%R#9cL72+5v)-KSdxLEJXZ~E>xKszx@N2i01`MP=pQyy=WQz$g_NsRR2@Qj)}hmD
zlx{Uwi|A+*>ALvDbVY*_Il|FGVa7=qh`PA8YhSO;=%7k1x+~QvtQw2IIz2Z6>Kk(o
zX;g?dxx=^62+{sg4t#OJ#NtY#4z;=q<(u~>SBbclL;V3N{Qs)`*MF)#A;UXa4=?~g
z7v%p`d-ngS_PMFH_D38D<NN2zGcTi1P=Orq3nkE0MwMu~l+{hU+?OswW0?W=lfgL4
zdwaKjZ>_=<fUPd86-mKg`Tk~O{M+k9v4Np0kF^tT!wgQ{DIam8TySnV2^@GYyzEW%
zHb+VsC6F@=z)PUhP*MC1w9yz_WC0P-Jr6t{^y(UY`n1MV>$GWOzQ@3+7z3>bvJJKv
zCPJf0a2s)M|9mVorlvD&(f$aM0l!}?A3Wb5?K|cX)#3vnrG-%v$gb@XN`Ww96%~dt
z>MWW>oN>Sy+~{zQHbPt>OL5~}gyIQHBNQ(_@=nE8oV%kFM8hnzz(z2fw)CDvACqb?
z*-<#7z~>rxlU*Vc52jOlgwvp2F43l$-r*boWB*lZoOt_s7m<fkX|qYQ2N@!E_@{`1
z%VEio`=cF-2YLewNd+&BD)#Cd!_{1fR$$!#%B#>Ol}qYbufP(YD1wxAo*-F#(WcVx
zA(=jqjQ9__-Z}_MR*No>>52tvsExb6F!j)43#a~!#njaekl-G%tP-IRI||NL{!YvC
zr9a5%Ae>OJZ=&NFacwziFvF#p5GMHb?#%r#3|L&GyJ-hcLHXdj+#rvj`azS%7oefP
zY8w@-2h$(J7hn3Y_4Z@Ooed9~xUg3Y|7rL7fiY8hG=*f0dvxaG`QgC&*H^-isAT;}
zy!cmF=UN_WBJW)`>eYu_M54_y$qa36>YTc&{oKz<wH{1J!gHU(7aI=LrPx}UQue<b
zy#Ahvo&T=X@p$tKO0Gr<d|5$DK=D?9W&V&(T^|jzzJPiJ?k#8EC2X#@kXH5ywMHve
zi|QU3q4Ww|RojXC^i{2^bcI+@ya?O?lf?@M1(569Dezd>HWq|FlgTop$V#v?ZBI;k
zFj+Nv(xvu6D%l`xoy!a4wO`LKP&3HkH|St0kP4LCL-*c!+flzl6@0_fsdGocdDLEk
z%r;_WJpC!88U}5HyaCKw5_b6|4)#tqlZ=_h7reoQ3fWpE4&1u%_2K&aRsFfH!OfjJ
zrOI1MwPCnMv>Mc=^*5lW9sdDogVT{`a4lG-t@zezE(LehuFeQg32#gW3E;yZQ*BDI
zHfKy(kH>O?8Mz<iNXIlsTJ>+WK=R$4S+5=oy!nwcf01{-qs~1mH&Q=r{`qC**(Q0L
zPX6Vg@E&lD7l$Odq{mRy0jTOBGX*9RQOXm4NQ6qYTWFsa=q#JU?R!7GC>Q3W4%vFg
zU#>{bliEyP1gg4e@58iCPY*9Qt&Dl~j%@=Rlm9<}Z~g~xW9BJMssF$|O!Pm1GyV_Y
zvzph=*%C;9b^S%s>e&<3;etnmha0G)*5<m_;+)f)y;arJ>j1$cVc~87<eeseZ`YZC
zkPF+B;l(;b47<F$y#0PN4zouHJ%wdR;`oZLEDpBG7Q(YP+jy)zrYrbZ#7r2^uI4M@
z-5f!bx&c-}MH)x&pHW3@9T?C&`HO{h!__WwPk;LJLPy}d<Z$#@{Rt^r58$rc1#kvE
z1{2j-6f;V$(3@LOQ&uv}U-DNaXy0Ey?}z`^{|0z{i)cd44z?ww%vOwlZQ_oXS@Z<b
z-WOr)_WC+oYS=T+72T4T8a6F>W`0023}gYy;eSxVR&cpiNXw2Fe<O-k%c@=~%Q2&l
zv!aX&fU?Tsy<*2sb3~Z8dL3yy6k7_dVm`B&SX`4cs2F)iNm8yxi1Y5K+9buR4B@bM
z?b-neU45OjR9{`P7~?sf<KktHV>nyRY0wn9`T$`HdQ}6?!6{8AFSW(coc9yU_C6)0
zh^Ino><eBCY3u4b@<=V|R2c=LHi?BdoN4i_UJGDg-6SH0=T%|XdV*drbfS|)$Dy*N
zC0&zYK&%81QH^2|V}JPG6@w%mk0~r$<sV|f=wK^+7I`gMboQSLwsQ%iCN3GJZSaM6
zABfZaz*LzlYcG5C_MO4$8U;mdtgyz{9h=z~PKa4Oez3ZHi;ab_{b4<Rx&Z$kh{-2(
zpz2EsI1TT}hbZX$rF4>zE}z5cp<O;ifv0dX^>s1j7Ib0f9ehyRg1WgplB+#2!=pJ0
zZIC5RgB%e?<YKLy-s;5(35pb_#zPW|3mcqFj$ri*?3ZXfWej&2(?%&AwP68Emz6y-
zzL^>cdS)<$1D6Tp1_=>2P=tb>5F0|NbcjG@6yQK-B4X%3QC1j_iQfaV1eX*4WhE4h
zl!V9KIDMx24~*wGi+R7i=3~xQnARquAn$tk{HONH&FJJ&o|7D=HKlY35UI!fj)nr1
zNn_D1(c}Ko1@SkN=tg~rAiV7WN&FBX$lwtZqGqCM5<ht09^U5;h+{8Uv9rG|;{?NM
z9jeTE0qMg-l>J=J`H>mp8I35!1V$0EEqw{ga*~X^M(-ioer1A^t<E73R@y=KL=A96
z>kJ{UT)xMVLB=_l+@ByO^cm5ZMHcMhM(!pxoTXVUo-E=ylF%@ia1?3WbU&zSFt2p7
z4g>xk9V6p!P>eMlSwwj>Ci-R>RfINucG1G60R=1O%>7t^S;E-u1oU@)Qirxn<}JF2
zlAPE^R@y$gH0%f$DPbjVkJNY5l;+b~=ae#*)y4}v<ZFMIqedkrGapK*l9{yDORDpU
z^D^QVTaFb8VIK~eJXR4M8`V#!&C(fuGml8b-#%?Ay}e*EOrK@Wy%mf`dK1PCQY##6
zvupHraMa5^-VN^_XTz_rtG~rBZ%+<Cm-LVvZ%=Mlt2vjRL(1o`_z%Tgip!G-VAQ*e
zFa^M$pYwOl&RLBa?Sc_E+65QtwBsLN|C5qJp9rP7T1U%_0ZSoqS;xX7J)AiG;@2Uf
ztGbz~{`#8yIX)R4K0f(;^!vWMUEOp~uljg`8sec>{#<C_1)fgfEr2QwzNb@uI!CYU
z9B3(X6I(#*FS0bsYzAd87$j;2-mk2?A#GdH)FArmYC|~mM(fTL*6b7Wx3>tj!n~6~
zsuACKRiM&<TDgZy;I&=P;<_DP*+gOm{iEA$YdSLx6VudiyBp^}Bk4V}-F}CBI~f^k
zyTQ>jB7S>rat2wr&gT1~_=mF*b&T0&^z3zo09b1+4$w{a03fig$&mnFw2p<dWWD-T
z*4y%-BH=9)?;6PqF#K>}j#kvlRq~^?xFbS2Y)hjAib>640i`Xg%Ysl==Q?9bueOxp
zZjip=QTj}#Wn$R}oa74(-xmuL)Dl%*3yTIsyJI2zX5@Tw6ZFnF1klzB7cOC`N(|Ml
zfD<HoBYpwA7dclF;4|zlfN;z8)(~$-IR_na$wiDFJc;X>XNQ9z(;adLQK4&7#Dzfx
zp4DS(^BIlnnnVhX)k(`82RwmKCCZmGm9DWo4>%2@O9!0dmU`c$Ai$t`+5)YR#z0et
z6*Q+d3w?9HHNDP6tUSgM6lJy6OE9OOIUj}c6ZGV!kQ}v^z%CTjx%Vg&6=weZ^?v5|
zefD}qvM+IImG7;(_YnH9DOPVZ1Wjk0epZhUjF{L7)?@qGyQ)*%C~!6Bu8uT;OR$rB
z&i})1?N)VV|KOo0$<F-&aLzYgIqyJfPI<(UIj(siMz5Ta3_2w9=}GeG>bZSsE{2{#
zYtLv%X1TiBsy0R}DCFM4IL>Lc+3Q4~YToZijbaq&P-ruCl`^Uj`a3xOA|u&xu=3Vv
z8!2-)&DdGa2u3t?dYj326t|IzWsz%*Is+#}Ov}wM9-=U8%2-z_L3}F<bKSQ)qzjkM
zYMX`QM-8yH7I<%xLgWxzNSDL9Vr?Fiefw&T2w6wueiXSyH7~U0cAvZ-MT?(CD7;N(
zE4F;P)XLT^%TYyjqClvv)49@`kVW(s4l=hM;5HSVqVz(7?<>Das##E&1FP(bfHKFz
zdMM+E+<l2}=(}03w7R=}sru3+>>oOBTOoov*ywwBUJQa4xxd7NdZ8BL1l0+7Rzf|$
zu3mN2)dPq$?panLB<$Q!-!`sTOI!1G4w~~jL;B~VHJM4edQDVQBz~}O%^+u_SUW=K
z(o6xKiUY0vbQ9Q#)>q6R*vcX6r_G$4t~kk)mbBycb;JER{?Ul5?KrgXCiauaef6Rv
zj0ux+=>8n{bnX)OPo;1lKO&y-2S-~@Ajt%|BaTBw<MqZp#8+lyJhEZsjYOb0#TPKi
z0K%m?#7JiZI$vsPYDHQy<_P(8-<iS6=%Y7STBlVu5t`|$1mo8gW-6t3t8h3mk|g|r
z@Wwk?)Ki;z)|D*r8{)qdHaUMT^i@5X(gQFPaBUcmX*aqg6C#L1+n3787%zk5lSuyB
z7oM*W+i260_c7ksG#-!=sE6E<Vd4ETspp3PCxzbb3wr-nK&^zA{yyBww(j|4EVlSz
z+UYxM_z+0X<D1Th3QdD#{6*}n;FVPW1Xu)TZQ(>xvS)iI<rLQEFj+XtyzWFVjh0Gn
zVZ7hNA)W8Bf<zF25ViIi*^qA>><^79$t*1jH}9Pd=pXW==CS|(qgx06(JdJNkB|S;
z{>+5_Kf1;Af9aNnrt`KKlHYj!1qmv;rQQJ`UP37ntuzyL%LOUvy)f12&?13(aPVN^
zg!N^={XC2S08wRWF%L*7QGwtayRRQ}Mm58O0B+Ih^|P4UJ`P-q;Pz5ju3q+CFrFKZ
zvq_b0xou1L1LcEZAhrd(fU4^wIIgNKHI5i~cqqpTiwAl=qc5MnCzx}#+CeTQehh|d
zhc3CR#hXD7q-6VUIestuinne>Wk@Lv(i6YMDA)I=r;qpJBYOu#53F-$KhZaM49cmI
z?K`!Nj14XsSR78Pv0LnKgboHFTuWZ?ZChCpvr2eyU6~uMhzeG07|=}n&d7f?4>o2k
zMVp!h$W1HUcD~2NQT2VRI>Wyg2&@$=Ryg8SU9&s!1~zOC58fdJE1+`%r32oUI$`5x
zv$d}gM+O}bNsB}09q|i+SOy}P`^-?qX(k>R&@3M*{T4My>sd-aq#L4#;t)OPj&-A&
z>A*z_Q93b$NSMu(bGXqQ1|u=7RIOAom4-!8&V>r9s979vtXS0OK?~e;f+r5n&<{2~
z0MsB;x31YTPMkLJu>cu`uU@?R2xD`ViT+}9UjA?~Z<TxDSU+$Fzq$Y*{?e|8IB};W
zP=~1^3}3v7X57qqLj!U(%`&yL#Laj0AV{bXqA(VxoDG@`V-pW-^x^r)OD5@vI|#qS
zGkS)&%>Jkjh_~Wms}rHx;@41egHAD&!8Pp#Lt+BM@5Kk(EWkd>B92)Chrurkk)Z%&
zJFNZ;{5S7uj2>*Ch|-s23|>GF^jr#9x)EHz^nM;-`oC}!t9g@<uP#3g!5JLeJzqmM
zFm=M!mm#g<+=NaNExZk_EvrF#2^IA4uorF+xC|hsAOu>IH;drm%BRKb_iPeB8>IS1
zh>Lmgmdo16v_uh#;tlL5idV6H%+araO@#NMkw;#{O2e*+PBj^=O0-(t0{h|OPS<b5
zBY=nqLJ1=zFmi}Vx2uQ*q#YG3Z68jKCr0?KrCJS@LdFx}2eR<GGD!|4Uvk0u1}A(I
z7|%+ti}6I3{q!hjBiJHhY?@(L**uiQfL*rDB3UC1LmbFZ8vQVMX2f`%9uWa?g>KIf
z1u0(L;&MZAYc|^1wwpS|vOHn~l6xG&iRqf{V_mJ}_9y4eyxNR9Zabt}>eZT#_m`Sa
zY;(dToI)B}Ki<jheCs3Eif;v*8Zxi!ck9wAKx|9{!b$rll3~goPwgdE$I)*oHHmL7
z73%Rp8(t_zxuF&lJK#lfl{ikZ;~DDOT33qRZlV1lB0Wp4Mgp~0&4lr2bD?D}9^`**
z<j8@4ge)2K5SEuyDLCjBbjn6b{O?b6MFTPAf(?TT>-CI%hsHcqe>p8CXH7Y`hb=gt
z+GmePPizZEzD0NodBulNrdPJVGpkgf8(lDv`8p>>`-svRJ-4%?f?l&nKPm+kjW6Ot
z+S)`&*=$UWhP9y5lJX?u4}X~A5}IuBGJ}UKBjH2ZC@9LxCPyS%W0$}m^tCoQm`g;J
zI+P05xGEOCp|CG3jhFrF7sK0XX((p`q@kaOD0XJk(;q%;^r;&jT-|*&-xQA@zugxl
z4hT5Gj#jnbHtPO0<z=e!COcomh;P3atd8e}_B`r8(9~#xYkl}<r>WG;&PqfnHoQx&
zg}GNz7C(dFCGk)w^*Ak=?SelIGa+rNjtU9if7Tn)_^4;@)g{;3+jyLDYxZnkS&Ud4
z<2Cm5fqe~Iy(MHA?gS9-3KgxqM_s>}Z4O4K@s#rL`pvoncARKTBif@<>^BZlvrg=}
zX;`3b*K&c)d9O|NlW_-X$*2x(vGIhnaG8YNzVj9P)#Rqlop7yhH|e<E%V&Hj@Dzpo
z*Umz<c`4R+toVgz)-B6YI9hK06;-okwOqN0v8Pf#uzfUB>jV+<0e#;f2<=Z#Jk{M?
zIT<5r?q^#o`~OZ^8vMk&ksAMwn8@VJ`{LUwHY~EWG=oxZl8Nl^vJJ{N@}1AI4M^9%
z;SjVigq0Rwd@Z<9dv|~Qzr3$3f1-3u!NG><;JAtnn>m$my%%nOcw1ZiOi4?@p@YsZ
z+UeoY{EXl-ZwB7ahJDyR8hZM&Y0i_guixC&)zhD%H)oITmQQ^?5=kUuK0HE7mNwic
z2A(?3Q;3-Fdl7H(1Q))LAkE96vc}}aTl9Q0{9*6Ca{STI$q}oIaMn#vNA%ezj1^Lr
zUYu;rdceNIJyF-9G{Q4D{YkLWmy7c~9$?AX^~aQ>s$<Tb7;|@rEHTaVCzd54PtDSL
zas4sn%Z}fcr_U+AMDm(u?Yj4oUl!M|g8Q+y6Lz11w}$fE-Q;tn_Nhn@b%7-2Q8X~f
z!>HE}j3dc(QQ{YQXXv%*5U)fMNw0V~oa>U_wM&PO{(%2qki7UWNVX;cq^H3E0NniF
zngsLzf@CYEwepe1tMe1J9H;Vf!G!Rjdb=(zp+y_8VX{GeYqx0ViCEJatLKFV^PT;^
z^BpXL!eU;QEM-Xc6W@8>_$)yr5J!vcdCM*zUv8(}`2eq-Yg`w(+d40AEBscu+*^gO
zRjb)8F@_i+22I#2V~dvgH$0qHty}0sPT)1^P04oX{ALd7o4!5p^b@ulicjTpZA@;U
zp{Xl}&EUQhzGI?AweQ+(4sMHKeP|1n7~dCsb2Ib%xdxNB`XAV&Bf-R-0mBOIcr?N6
zF^5!>R4IG=kak-=hj{@Ghg!R@#}!WdZro{8-D)HJZkjjOHO^ME-1uc%{7&XFU**mH
zz4`#OEx)@4vq79WYxpzgYpr-1h2a>|dXuIx-1R#CyFY|CDgs9z`!PGCxU{Cz{bmhi
zs2q}~9C3$t*tRdUJ41?-iu^>Q(I0pVH|~b_<(6E1$?8B;Gli$khNDaqj*DP{`+_l_
z^x#>T%hkoYaSN={L=S~bwRbDj2BdJiAOa`MR_&T%`4h303RT>BS3|%5wW$H?zUkX$
z_W+gNMZ<o9^OD_coM!PL+>U1ZF$z;s_WopjP?-n)e7(M5zqbN^5brk+==bCh4}f8i
z<tqmBd+*ThnoUsZ&KV#y^90cVEFeB}nwdF1p}Y?ZlfIN@SoRQ85l<uLM%q0)NEapc
znM;indlTIGLT&VFpNUSbTKrXNZ$8huAsx)D{W7)Wy??q(XOnnA@S#f9Gpa49pUeS`
z3r{<cd_I*zpzD4QbDjA;2+g<i3hx~rt_ybTTI4b>4~N#<Jhvnyd@pjw*G0(JMaiCB
za<WP-dFAq+3g*j-q(0lh4UXUQXS&1Sys9$xde#j)Ow_`^Y|WvdiTJFt_g_IKJ0s8=
zLaDo@GS_~kg)Z_>L#gZsRi+Es1H&xFmW1;r{lZUY)>$Nav47}TVZdV{jI`rowQBQ8
zIjdCOLnL5)317;poeQM8g2Oktz?dA}$Y-@TkpY|4G&URxrP6S1f4QWRDF^Ntw@=&7
z;iHhvig@PxhHdtwX!67mL4mORw{NPqPFz>h=*iPhYe_{^v2>G!BpdM~CuV&)KM*dY
zCh$BTsmR)hO|*`N6J6jMiXt^^xNiF}&inr240ivDB5YQ*8`kTT>Ha=jo7Q(i^-%c9
z_HKEAiG{Nj%Q%L6yQTjO=6X?<CT8(!LRcHqz+8aUE(C^rm>4+{XP2SbnN-|8Xfkf#
zVt{PR!zd#hog9%&XUJ-BRAnN-y4x98H>C-Et_f`k^uszQW0M3DT8T{#LL)fLA)kcg
zW~D86UvN(DH2Kn@I7cGKnW#NWeA-dOAaB6{Sr}D%XhtuZ7#oU7Y#o&Ldu{MeG3CP|
z;sxL%(+zKgXOae_EujTY>MlH~y{@0Ha`I65ikv87lDA6)a=))T^K2vaJ*NN|ap_xs
zC}xq{#C!}pL21paY5&Uu@y`<HC69b<NH?}hoTFYbfw!XE&iIpT&-P}7pPWmxcYdzX
zs$Q*DU|O*V2$|j_qM{T;P&IT)qOljo{z(kt8tYMl!Jw$t1=GEmoMI(^r`AkEFkw#m
z=Toe=^sZnhc<qaAxvSpkAN{6@`6kf0YX0{{Kv$B?Z^FO=I2lRAx!^*$>o6h8Qb7M&
zHtgR-ii{{WCW^U}exl6BHj;Vym?f&xvtfa?u!ay}ZdsYLQWwvfsqjJ<nPI}L#(G^U
zpHb$P6uj*Sr4;c-VvO(TMj-U0fNN`_*Rn=p+MnPv%t~WDMuvC_A*Ba#ii)bhp^K%7
zksdM}?LpuOW}K?%-MU~32-C&rNJyetP!$ZSGK5GPC4uya0JejZu;#t)VUkN>MU<zq
zxP1n35x)Q!QVi*?iWjo#k|EFHcore@abnmW_Qiw(S#^{yLlUY<X>qNoajBjmR-Muy
zuKj{>7+G>^A2HI_48}BXb3&T3_B-h5Ne2d{SAMkch15M%*b=PH1OuZ&eKfWm2rTIe
z>5?|)s-l9RB@<l=xtd9=`CPrTH9ar6QFMtS>n*lB#t#EoFST4F4K%y5Ci|UGNvTv>
zgaeGZvUN=bI@}J#l`0AKyE)AuKNdwPl{2#81=Wx&Nt~OZ)RY9sRaq#7!@@sflX5Cv
zPZJavudRrLj=;R^LX^}o)Wif8;p0OWMI?dLbSe6Qb>6V9=_Elxf|t{OX#~0p^HTzs
zkFgSkCRQXUNH|NjoChF^$h^U>`2v)d2-lMhk25?c{2%xo&@eiq<rJA4LePka2*j$0
zoPrxhOgR-k$iY=8hkr^Obdo&F4fD>cCM)NTF|S<-qj;$^O!~GmQ){85b*PP@46zkT
z@_wO!p@D`nP);~L445ySi|GQP=7s`;Q4C=<`o~BIerxoMl#)=kF{%fdSmkXeVtOSR
zdqe)IEtO?miTC30dLCa+ise?Xz^=K;x!P|q(=gD{NZ}RD<FQRc(U9G5BIJ@YN{qV7
zv1qMJW_!HSd}T!^ql{=O)@RK{4Mi^PkjJx<Q1BWx>zF8~G4q6kutXSyU1iVZ(T~D1
z_{uxq{D;OgdD;XZxQt<g=R8l}Et#{9-!4mI-_xJVw#r-S=K($V%*fMy-an0VHx!Og
za)qc3ENwP-xLGt*-u=s5ZmpJ3*06Xvf;saGIuA)4IPyIF+A?NJeIj*he37eveEMkc
zcfBOjjgwvCglN0|C~!R_vo2>&156rGj>+ZAF>5^w(_YSI`?Q5wv*`zGWeZeooR(tZ
zK#(_MYHa`YHGYiYWjfDZkQ_D#*2;Q#D}^7OLEp?R%5MPCbM&UmWBIFf?AFOR+eCnU
zLVq*OH+8(LhBt|D@ci|I{A<8>!@as{Dux3Rsfz}v-sgk1Z~nl3u4|-@i^=)&D>Z6-
zI7PR(D?9RQ*F+BSL)Uj9rI}7f_+qEBE3T}$&eCh}_F`8@HXr9PTt(eXEu}}M&hvg1
zz3ZaEe4@)F)v4OtTGA)Skqcow%QXeD3rt)ZU)Y0+c$0hu6G5>DH@KOL>8KA*+0-6w
za?tV+vq2O69U{?_+D1LLrJmMKd-+E)h{6@86yL;U-v5Q>KXQE@`QitXbyefiU29q9
zF&sjpTueb$8&Mtq)+%2O!=KPMxjg7V-8W}3RpX@Gm@oxdu8>2Tf;dfXezZ5TeO0~K
z?pX8U_U(D->(agvX$<|=3E$7+j{Ispw`9|<sZVpaWOT!C2kC3-LR+cPRl#|pyV3nh
zwlr=ObQ+M|-03}z7kOKa^pkpHuW$O7ZV{VG`*?S$qEBOMac5vBt4~eEpJtmM`oI#i
zwj_!vfTL;ciFS@j|7op|H|r@p6ufc*E0+3Sa~&v#e_snw{OM@%zgKi0soCLZ1-Pe8
z=ZbYUmCC8&G<A61={NI|A?obP1?pcNqX*Fg@X}JDI>WrieS;sUcO$@s=mZ+m9Hs%M
z#DI!mMF$&=ez)Mj^X8+Ep-^ibmb-mh1gbX=z(HZtWN>u}lzLl(A07z2^?@lkT^+zv
zyJhGNwhU1qIs+#VE*=eZ9t?<180vvD&Yo39vW{T(VW-G}`L8y|QL->iK+_@gBTGot
z98cvYy^<#gaR{yznnCb?_IhoxGZ|AjpcCQn)D`gfK*ovV?TGrp5xnMIm9l__UUHgY
z@c26WU8seW%bSl&1}5-$i8?eoen;_ne2dtr=w1K*$$9*3K2qP0nfU`%a<I5*r&+t#
ziUs51<#&X?e(gK=2)*7KpQ$5UwtO@Q-}65-?-m`KX~6Mt8`4Fvh<6IylC%7=o^g{_
z%+vQ8cAkAlmFej9<rf4(>Ukoz8g)C8^eH=SJJ&C9w@4*c=TUpQrBx;RzC}`dEm$U(
zf#=%&1jwxk!dQ^0EwPrMhqsUlmUd|O^mtS4CG^&;8i9Q~!!S&(j}3}$y8TQHk>R?7
zPzSJ+lL#ieU$@;Ga!0!AENnQ{LfzB#diY=qP6X2Y8zC@kV5VRP&6BRr5aN!g_Ce9c
z1J7s8{%*@SA5HgUF57BT4j(SY%E=e5ZX;^7V%&;dD>wHQx9)sz;e;-hltSwq$a|Ak
zjER2KG$}fxD<#=j5KwBJzP^%^?^(x#5j?nA$%_`CkJQ6k&!-?w4bDKnOV`P7(54@K
zy(W75o`D|i8eg)K4dK+?76SYK`g-T!PTD2zJK5N_ZDV8Gw)2Z^+qSc@jg4({qm6Cb
zdH250srT8cQ_nMV&3vn8rapi5%v8^G_vLbYz_A1xg~$~)C!!j4eT9E>&CiEqSNK(g
zic>NNW588I-Q`FQlfpw`01dC)(@t;N72PJ_?P((3C8MC>C!1hd-Oc$eIWxaMvs~y1
zUlCDx3v73Y82Vb}h2Z~$F6Vzj7c})1CyQ@DK-QrD`M$^cpU@>nN!xCf5!LspmP3mT
z3jLcTVL(waHI1k!iWQi%S9-~4L|oMJ`ab5W<(Xe@lK*fsMl9{`qg$V&!;DGgV<#(2
zary_{WihVX58pwoJ7e^@{38+JY1}P>R#<coa2ncP{6G`12jGzgYo$5R-jKnBv;L6P
z?N6VFbfpC6J)SO7<qAgs3{*UO+;mhWV;lJ%kYy-Z99U_mghlq(fO_cdwGV)g&l{fU
zE|@9I%E1`BX!xDX7!T3Dr*NY<>k+L#XZz-8{Vbja$9Y`qkg+L3cK(P_$=Xiy)mqL7
zPg;73WS&SH3R5a?y9x18#4Jz4_wr=TLg&2#e|-aP16(Lz?UwXo?9+uRWw3CWUsMTZ
zCKKi;{h&t%nQ=nE7eCS|xtQX^qjz99d`4Wc&Rp?+k}yQ}Hq+WMmU(h>Qp{as=RBQ4
z%lPfP@i10Q;0H7(M^@NiQFFi}1R^g=Nfw=?dFoeZa?)BUm+8SQgo~X4$vajUh$qpx
zLgI?p{7lP8`(nmZaPfZY1nxv49M0H=<)mH8?Oz*ne06E)BoaHgc%U|Z@WPHArhd+W
z88%MmkvVePvFz&1pzR+Tu=L#s&6U+5GjXV|IeU?i?p$AX{T$iCG^qLl9hQHB@`*8j
zUl8aoO0iR|SG^$d-=5|2A6Ae^P<nYKsQFMwN?~<c=Lv(YObLPJJ5H0!-u#Z+qS9bS
z?GVX*=Xl3cJ)kvGPGi_wNr8i|GLMQjj;yu10?cdG)vt(nty7}Xv{|h%_1H+zb94nC
z#Bb_NGW^LkAmis_#HyW8*329<<F+&9=+Uy!QSo?>^MAbY{*O133~UZRll}3=(f@cO
z+kd?AT<6R!Yc%nT=at6O$IW!Ves&^`<B`LyE|}JqG#{*JFllUua4vC$a25<8?qv38
zw^dOhcx)2ndbG|kCy5KH+f!3fWA<Hc;^`1zt+!lzcBt*CSE=ZvQmYqxx_9&9a<N-s
zs%dvt`2FzV;N>CRnK#YcS<uo>qRDgMGv2dFi-vF#E@OqStGknwKa6F()-PjPc8w~o
z>!m~J7wid~#fxji!d9XmEnZ%=Zo=xr)XU<OM@qITaAh8y_?PU+4&2KE#)H@!3ld$_
z<N!nOLG~*-RBBYnQCNtOv1z+9_2?2@LiO4Ahlz=^8*>>3`iiu@)wQ#hrkj@#P3~H?
zk+cWuV)dj5HY&(Kg>4D6xhC(K=J7j*{lyuA>nAu)Y4f9n2ElBG6x-+GKHHR$s3D?N
z&uY18rCbby*Aj%U&;^8M;4i|X#WOg}Sd$K8GmAOs5gjL1l3-lG$rRZbe9Eq6Gl+|?
zOFhWY&G><1^ubEuF?HT&5^o8bli@NolJ`J;IgZ%3FTqjK6vk7UI?6C`z@*wLb%Iux
zCm7E+A#*QV1s-lN*;iCniNhRG$!e{x_EvX~m|MjnP<|0FjgPj_QdJ#FjYohGsG()p
zVAc8Rnfa4mpDW!f8oIHgNk)@no3Dd|kB{EbuY#RFzYqKio(ev0VUXx+rPcRgeV3-8
zyi4T-V2TP^2V-yYtDQ-Oe~IeaV&c8%p{Z)lkN(D!T>7m+ii`&7B3ST^D*GeGxh)aK
z>@oN=3H7Vdy*AwPmw8m?+~U?ikL6uZnN(@<tp5D3eofZG687lS^$7BpYWv5H&ja3T
zb1S?VV&7>ahjuBa*#z79^|%b#Zxc6>NCwV?ZkeIrxKgt84|^w@O<cX}#|KAXX?!eM
z^4hTaE0*_EX$%x;>U%A7k!rNBDh_F}jcu_IvktPAWcD>$P$csVJ*d;v{0X5g8ZVE|
z=*V=S1)LH-${Zy8JNgAP=l-CZq~7d4S^;@{BQQqo=r`pc19V0ndt}HN$4N!*{Y&bl
z`r*T4a&l1b0DG7%m=nzMRKnyq%=<BCVP?5>`^lp`*gK`!x52w3r1O;^C~FKzvxU-T
z>2rVJ{*MTozVqf2#u|gr5W8Qq_v)ng68+KVu@fA5yDx+AAR5O^E+7c!0p4{387;6_
z_yBtUI>XsUyD%b`vA|a+Zpe5jyN2OLgq@V_%Sgi))b#pPW7NE08cX@}dMqDp@MNH3
z6A0rGm{h(Epg6WL4fFv|NenV~sT{MM0m2<@Tn5?%15++|EJfttZeI8ws-53hyZ{5p
z$-mtmjg_T`hN(;sEi=M_dez8)3>j_h6gK#giy0pFH$mps8pLL~rA1P(6M2$V=2KOm
zLSa4U{owYA+BJ_7!egFz4}`@n`TR+uy2z0TR3zt#wXLGK-s8hdemX}3rKDE5-UP-Z
zyTq`j4Cm22FrHKL!o|p1ju+5($(zf`VWA%tNNQEglZ{dKmQzv(DHV59TOchVW&~Kg
zRxl@L2^7;r;KItC08?=yu?a58jdqm<v`W?9Y3K<{6ClZhU}ZHd9fvz&fO$s;pF3m1
zGL}X1;t`ur;mgPWu<ap{o<dIWikZ%ffbJqVE-F1PB5^a}HG*Ut5Vs9LDhBf|i(&Uh
zyE=%i;!ibTyT{*cG5D5nFN#L|kVjHc#INucN!~XITT<E!-K>(XA~cmMDW7Oajpj^e
zVRh4LnJ(dN+Uk$?3l&QJ4Fu6)%F3rae*|~rERctoP9t&^a8B5^Zk|;0EOAN3JS#By
zHi8TSEK1p1OkHs9SR1T05AH+GlND)rMe7@3!L8Q@x28lmPjmIVU<boy*4*y8e<3+F
zhK|U<MTED(p3zK~s<c`<2E$>kE`x01T3kBxvkW*5FQObmMm;taN+aU@Ob_Eg4^XwH
z^=(h^;>oZ*c0Jw`TV__lQ=&IHsVsnKs6s-sWJwINzL09N=u%n6xk%(1&%{xZJ29Na
zb*LoS6($*CNz--=?l8Jls68+m*Z_m!ml#HU$-t?A<48UxhyAHwOJ^amj_?VIdmy;m
z4wXH6o8shd++1d0(>M>ifC{E_I!RD$D~yP1gGnrd)NcTj)vOh>7x<5jLeZ0Qj{rN=
zW>gC!N|x{a3iC)f$a1~guH=G<n!mu6sIWJ&?=xji#(=sTipV5sNzD_vl5vo5pPOER
z`XrO*1d&~c5XNNa4hdr(VVu&GC4VGF9C2(mUdTO}xye}7lDUVbNZqpBMF@50)6cjF
zaB0~bpr<MxC&j$5_BQx~D)vB(W$p;x1_K*g@+*FcW7ca3n6in(A7G)kX1yk>QUQB}
zPIhqu8>v8>X{?}5`x^NvgTddYEt8S+)jCHs0J*Z+oy1X{yx}`0&bkyp?P-BYgE$z*
znz8yS5erjNDpEZ-rl30XLiss6dQLE^Z6;1mp*eKLAG{-;QK{|B)f%@$GU*CC!3t$w
zri*<GIl~%<-A@Q|Cz*U31S}$>!<Huu>afJ-YGTscQl}gp9W{)l(|djX?03ObY3+jQ
zbr)i{&;Q*o^EFd^As&^c*7Fj50QRdpq&VfG0UJuDEO`ZSpx(NQLu|rc*o7(AwT=lW
z*{nTXQ!-Ig8WyZe9qSm!LT>1JKDlsz24&o<4I(2caMytN)n%s;Rs$kmNwez~MT4ps
zM@E2Ys2c)?Yq$co$f;D?Nos5XWWZ^TVJSr4AHUBY*UMJi7x6qYRGDe*ElZR_Ay;tN
zgVFgWAe+?W8Hy)*Eh0B9|DspQ{nzce?hfq-AegL2|75=HsP;17*pvjG$4=R)-fYs<
zZ;ZlWM;ZvcuQR)?x98e*hZcJUu4I0>&LY$_iz_c6D@#~DQp3FOD0K3`z?`dDH9LKl
z=ypxv8XN{!x2oidu<?YOLnuS$8@BD%;`tU;#}fnJ%vAQY!#cuUI>H1)0yQQ<^ga2|
z!iDmEH@|+;nIAmN@R3d+(TX8c2>UJ15t$4-KJ+~y=-R7htlIGg!eF4{$E?0E31X|3
z#3iKinvHXZQpBe&O5yOPV6h#&fq4o7jDjhN47(vuN&mh7)X}-usRK_8FHw<%XOLXj
z`o@q<YH8kqvZEA7<ug)WYVqz^=|`eptmIq=)~em79eq>)Q}04h{7!UHt?TI3XWJNb
zT098Um4Ac;@DKs<iPSzs^6tGW!I{L=%jaHPOh<nQ5{I~+2j=&jo(M!3ig@Bp28V7;
zb+@4_jN&W2%KI)CY)O47CRO5kSZjOyF8}PIl|d4C+3OGG+t`gsibZ;v8!wtr>JpAE
zJa^EeN#l{I#nch>DCsr0+L-IoUq4;t$9{(<vX{nD915D)P@cVAjQS(8bhpLQ@5cs}
z5*UUDoN=@3zxzWpatC(EznkutzKfCG%H9G_I?lJw1&QHlBpEg2Gh@N5H0MrTL1nkr
z5r4L;51(EnIIz-@s|$*87H-TC$B8Pc6zK~$V}yC9Kx*}S@Ord%0!7<0E~>+1@~YVO
zQ7CO)IAt}$!P_>9uDOuLO~vxZa0V!-aFfMg;rRs=xQ>q><YmLFs78yQltan8X<xqW
z<h^aKi@d&f0z0{>Z@@PD+1jh}4<)YQH0qj@_lx};`WT*~E=ZpzY=mZQ#Pp(j&x}VR
z!o~Sfz_+n5^FoDs%zpW+kEJHlTrq>i{&?#6lfZh=S5y)UHDuI)r8$X-pm?PVXh<;i
zM|)L&7Sq{|PAP7Y42l<-8_uCu2O5a7VqAwk*L5*&s|Y)ulOOH7^lqYPKK+wgc`xd~
zIl53iE5nYg1*L;>msx?BTze3HSkJ@hJ6;z&%tbvDd(icfd$Ze6T8jAe<FOT-m#_pb
zdmE<W^(ZB%mWv!w<QXCDxuH!|i)CcJPPXVg!O$8^B=_V-Zp$$lcbCX;VSF{qwBk}i
zMyxow+J?8XSJIU`z$#}!5Gtq5mz(m%Rl#wAbF9TU(QbrvD^{`_FUgym{KZw~_9A_M
zjp?5gX>pSGPvwq0qwX4kQjVqCr38ZJoR8*bhtjp{UEUl@<=G!SzK99_S+o)uTp73e
zOoBfN*mE-Yev@2J$tKf0i0^gXo3rK_9iN1*T)Cx`+@G+lG4Vv`1%Qlb81%R}Mp<{m
z!xBF8{Q%q#)&N+GQ#p>0VVPfn?33gsi&1jc4@C!_K#Dg9$gyW7FYh@R6$vi=BX)i{
z7_53YLCA2o#Dd_IxD5A&idR%^sW~6~I)bbEoP_Dx+pg>12*p`N=S99$w^6t00hh7y
zOF6PYjG(;h@(Vk&h*u%40TsQ7{2`yKXpMoRepHLv5t4tAlR07ka21b3B73$u+a6ia
z5);ds%w<87869kJ>9HeYe~jWHXB#KP+U|Ju>!>Tdf88}s42>UOgssOkCWeU_U|g8*
zjLMe4FZgNH(ZAxv^R@p7*(vg>+u1o^$~Y7U!E`qPZ{o>M=8BDBjzQ7#lBa}hCw!2?
z5s1Q%9~>1N^Ui`p>)aj5u24KWkbi09rSa#QvY478=~MF1UT{;<M7AM+czH`PLf365
zg#|?#nl8+t%J9`9Ass}3QuREhxdjsWEEU+x^9URWeGl6dkCRt#gz3a2Yi1dO%2K6x
zQ2-XbkJ^N(Xsh}`82g%PB%)YVrDOi~bfo_qdPUg@60fcCt0|(XUS;YWQiozHnfDNR
zET{gy78_iuv{Lk4$=nFK-m8K6ZLQ7^VA#UVHZs~e$>N!BIe56hwBMb7Ui$Pta)^-d
z!)y4RO9Nn-bm(FQq)r$Gx#Xf8)xG0k*B_7`Y(%`^c~cN=bVgos1vHaw;4#x3r?-N#
zap7NmL>G$>@o_&pTo=aC_TIFhO&cJCc{T>P3|Lifgsr;*53*)F2(gWFGnO(F>f1P!
z7%XUQP-NVhe~?XGF5{LnoL+#M2kM#~t}rXG;W4Ka;B*P5jieP51P69xA1NYg>@eP(
zmn<L=kknmE)|DC8G%zn<<Oid56cByq7lVZP_3-BjpNW%CRY?Up@ra_1$xPRH5$>$z
zjpxS_AOx=BE(3>-fA=#RNTHQN6ILrc8p;|*1Xrp8vy|ly3S8boOQl^WDs%s0*-SDN
zdsjf6<CR%EuBD_}wGxR+XTPv&B>AB;8H4FSRB!w&{g#Wt<boRMzCw!^V9|LliNX5@
zQDT|B+$I6X*H8LuVM9<njpX)a9_m5_{${|Zi@{Qh9V1%Ysn7!->LCOR^u!2bZ6rLs
zW$O2?E`|&*TJX^hhN1Ozl0awWD5@k+*hT1DsYJ{{jPrYeHoQ<Xe*t_zU23lVii&@o
zFKB*jRSp4T2S-RZG!8=a&<v@M&_W`=V|`1;^^_U%Hs?@w^~2-d6lvERZUeTO9Ux~+
zY5v9K2OQ%_^)KLP_8?7mh~K^16q70qJkS<jYO;K$MY4Lw2~`rp=FEe`UQ{t+xnkX%
zpF(j#3+;~LQu$y`?~8B7@$I{eAuQi>9Z3v6SIr>Pr)Cjv8;cD1&5p|&S?%zfU5G5{
zz5_bSOX5vabM>XFGg9g597*2W1FqkYAj<%Ioo$aSdp!K%<WKlqO5adcE*J7gfSLxu
zUm<Dv8N?_K)3HgK^)ql_z0BLbD}ZZBiLXelUCwKRk7TfyXxqw`S6PXKCgWTR&KG=s
z-?5Vdfz4)eT618F4F-)g9@k4F28O$_W^(sAb^c*fM;B=>*UatUn1zx@(!|jvOz|@2
zQatkpXP1!GTJF7l+R|zEU2UP#L;f`4nOwX?WGk7Cuhp{*%7RaV%ykj}I_hCbF~Zsw
zG)2Nw`^d#%O<<G+rb-SS%;mte5k+0I*@QDgYdp4J+j_)H4z`BC<`fa@4QJU|BFaW4
zxpC3kL7wW#xqGdzFzSoczddBk+h*10jm8CXO_F@>azv|#NiG<PigA;1_X!icdm?;h
zB|mviX~_1SCrEXSpctOOh6g?r<AJ)iM16wd*$9TFRiiriF2uM!gS8!svuZIW?9<<v
zZ}GI>xt^zuH?*<c8Wuk+j_UEWxlV_-IyMxC559RjM{Qzn$G@$Q9G9_0q&3_BX}7RQ
z?+$rPNAc@*{}D4>i!DU4(sU2-%PuB=1xsRWtrdLjL9$9?>Ge5K=BMPOrJm`azq!~}
zs^G9MRJ_tuA|CC0OybHoIB_#<H;S=b@MH(0z(cyipW6R<K>y~w2b_RqQ&Fg`xspD5
z%$tZYu<pv?XoN}%49J$md9lEs1T~es5z_@HnoWSv<tCE3ar-=6KvA1Bt#{mV>2{5=
zrRWvZ3H9wA7l?z@OzLQgS`<RAj2!c2hmZLL7b=I5__<(yA%Z7DCVmta{e}gi(s!CA
zPYM!G0%D&s5(T$RmyLYCpiVZKt7M_Our7TgFHOUxj!ggfb4$}gJ3*~={RY`dLewRf
zth$<*hHds0weoeV-M2KUPNy);_-I=1iFIu|mPk5c7bt3Cl3!I|;2%y0K2LZ~1hc;M
z8`XxrxOKErJG7mI{4(nA=>pZ9%stR^eK_gAcOezpS|Ao&zvS~2g>mzd6MKz}y+P@*
z4V`8$aH?ZlA7T*)*m~*VtT^>CQAQ@ui5!Uf<5eY<xq@6i2nhU%Du6;dj%fGP3|as@
z{`Zzlp8oV(Im<-xp%wQ~43Uil_}+ti5QS7Hh>b+xR8L?2Q4(b4=9iB2BDR@m%+y<X
zm9YXj*Dk~XEsHW~jNw$bN&9j(X-c^l!eU>%q@Q|-UspuG{kX#FtQq;X87v#r4I6I)
zQ>moH78pd{^HeiYztME>dpKs{QE>9uAULnr%i{Hum^%7%X@?464qX*q7w43e4F^#^
zJ?1rTwIY?2T;K=An2h-v&??4?qLD>U%#Y}*D6qa2*YK1%7<Z{7c=O+C@D8qYc!meW
z%?U?)_A(nC7t$F|H8sQ;G9aLXs#L?}3;W5a4?=P{Od5diFz7&it!*}a{fx2R$?fnP
zYSYzCRuGneOdd|N{KccCB7~U#bE6V%e1J;ru3fIbU+rSc_h@nee~l%FX*NYIQWdRk
zhSTYAhh@*hTc3m4*MD((EMxv~A(wZ-=p$*>h+n;<pR*Z2*js$#o#5XhwVu{Hd7YYW
zrX2Ww`E$nWWZAS+<FJ15q$|c+CQUzQ<Q4JE@es#bvxD%^b-e9j8;EcsmXje7PgJ^}
zX>dRbxl!Y*s5!lstX@(6ZQ#VBR*`>?1&yLL$6J*@1vzgZ=f;y+6aEe091?23I|yU@
zmP8@VkWgO5n#-CSaXKcaUc(gC3>dF9HzIu)2nzZLDsA*}B>%nXHTs78UE<tFp=<l*
zmdN<ZrRMu*XY5!!>-glO`jv0kDn`Qrgl46Al`1~KM&H<G>$4EyV$N=Lvx5sCrK>x7
z#O~Flqw^%r`*wu-DqPM?#Rg$5wTrX;9?eRa^Fh9Y;6>w8@YEXpx2>eM4X|0w%lKo^
zJ%g7`fu7Go*;WVH6UC?h)adVJ>$6DMOu4N(s`oS7jtwha@)7P$IxJyXSIqQTr)6x|
zzU82w2+a6eg7~;yTout>0m!$613zIQK5h+y9P0!*SMqSJ72y92c1j4G<RG~5KJbI>
z++atb+u2YTOD}QLco03QIB-t?QXo!$TtbNTn{`SEohwNN`0al-Dn<D12nG1wXvbch
zt=i9Y9$aJd_TwB@VPBkfg!C&IUyOVr%t2JfS==mUoB@CDe0Mhh5I6A<j|pOkhD8zG
z>)snU7B>9rK&|s=v_$ky%#EiQM3r>Wq^3L=WZ}MYsW@LYpRQig=6=x!r;fu~N9FAm
zig@LGmHtw(XnR*jp~OK))_vxm{?BgGRX9lTYIm@9OV8sajpbu00;t6lgAQLbtX*hp
zW@e-8TYM8s0|oEb=gB{Qe!P0J9gWOZqm(~oBK8!CP)CYzwQ?s6wlI*=sCPe=QU%C;
zNagnWas_;E&A|4w4*4c>6B<{4NzI0wztp%0Z@UQ6oeo?y3!%5&uNTwJlElZvlIyYi
zQZuGlUzt^tO^R1tf@c%_1~Iu=6VzLno0l)OZdK@1Zv(p^Jk}N@bEMU%xXw_iqGdK^
zri2_e&4846a#b{S{USW?Hp7#osdELH7TIA&*Jhwk?QM1@xxk7lL0h^J1khSgl<fX}
z?0T%wHWVj{E~^*OM0=`O&^_U0(UO<~3wG}OaEX>wou{)`s_PkD9eY0vsB1{SzZzJo
zBc1XDSi?Hv$a>U3!meO7aT%uU2*R#w1L+}7g5ZzHScJY2dZumM-KnvpmJrKznsmfo
zfF!K6((e8M`0Asz*Gt#KuK$!rG}K;3E4F6w*29%!tlC4u`ZhP(kjY?gFESSrl+s6T
zu*+s}TzdWO7&pH=E|2Iv+ind0%CTC@8HB;F`Sog(fDQS&gkva5JA+%j9}Z%C&c=bI
zTL1G#+-5&P@H9Ny!qmZymCS>#ikXI;SGfz0V64^XlS0Q`dftx~4(ey_PRa&Sgl}MH
zP{<P6M{yZFftp$6XK=S^)!B+F>JUM>Em@=d@2P1KXNl33J6@WUoFUJom&gro@`k(<
zfmT0od1USF#2gh1^BHy`GogeGkT$oxaK>|gu$jmTrDK{<>BWvB`=foB@e$pf``LCP
zZuf#9ykmt|$1?23g<9W8nv;4s%vW~z<<2@6lUf&(1F+1=>s^V8Y{o@+UfpkLg0$3_
z{jjq`Lgon!l;@Mr-El}x^T6{%aOAZ9x7=@Rc{EYDxEZEb7DkBN48jQd<+txrDQ3)+
zne(;ZcekOq$}alYjG=y8yi^y}66^9qkqoUMoC}LGb4tXC_J1P8oCo8fX$L@c2yWUD
zr9au4Ua^oSSVLK2+DG1ZX&{WcwJjji3lDm(^3{W|dh|QV@(nEypow5^GoSiOEcG%l
zK)KE^<2#98xC#>9d@=HrE+EXC6fDn`NnSrxjKuAg&tk&$-3$Np1vWy+lm<zwHN2P9
z!_M}EGgP>pHyVTC3;hjBLDBE1aCS$G)P5~XI9o}DqVtU3NYBPy(Sz24&pRzcXrXFJ
zNmW@^z&ISPejgeiAiyjJyMtFu<h{*o8)4tnSYpfctysU0>l+eYbZCPJX>pA$)QSGD
zY@wWQpFGsopE}eANW11M*mB}V%odXUFcE9Ye?o4V`s?`BW7d;SwZ=t0&O)Afj5&?c
zNp5h8;`!)Uuu=l{Yn363P$1a@uGemt{eFfN=m@IZ0(!6}FdUG{pvz*ANbwD)S5Q_h
z!251l6RW&}B<vSk=Ie-^HtCSs(d&NIMvBu|<0|=90qPA>&#uH>m=iH<(1Uxl{-2ky
zj>UDsBBAgFJ4*$PuBVakj@?-z!C~GT0FSFS@>d0}yYlvfIkZtvM0=(YB*EvnMPCYe
zkzw?3tl+m8R=Gd7va34LZH-l8F8~%feNx+oMBeAjD(gkN7%m2i@6gZ+g1tIO^447O
z7aK2M#B>;$wOFV~7mAUv2r9a6??6}6GAj|X@Ua3YvT>>l8c51*pDLc0&OxEm2gz;(
zDwFaI_HzI=gmgD?kTZ=pB>UxFDgBQ%&H3!`L@KHQ)3e<IF9)Bcq1zKZ@8KVM<s4%G
z`|ktA#uaHis>D5=6_#z+p>|8gZGV0p2F8IoGAm(p6D)`q3cN(8F-(MVk!WPUqrNKx
z3PxngnVB9cNuzk}T8HuUrEgiT)t|W9hTn&U+jelD4kpzU>78+g^}->8Yc(Ia;ukj?
z%;U5V=~*-<EqgSZdh~r<u9=s88#RcTd!`Lt8t<(CpX1za)dM}J<Mg+!P?Qzrvx{TW
znRR32S4_3@tn|%BF8~}M!+Xe_cICFO*zAoSPBy6`wqMVmA9|t=Dh-a~j}iam1-fUz
z7Xol}Mx2Q>w#ZPS>efK{t`5@7`0d2oN0~lzFc@02EMWJr<;oL{9c!cTUP8Q1^oK4o
z+B{%5iO*LV^|{Nw<SmTLA~TvjpDtbbzR<&7N5Uj)Ch;yf9nM3RYG=IWxsh^76@b6i
z&n`wEae{puy}+NC3ndh396w!Z(!fj>F{8YSeM+`0*G)2-;y9}ONvgR?C{LB>II4f(
zl#7CQFuScI#>q;ln}DM_#GjBa@k1SqVNQPLNWO$r$xKC3WdAUJm}JX??KGEm(*R5`
z-iprZB!x70f<kP4Z#7)g6B+69ec$!*iM=}fexwP>SmP3W2k~l(a0Ka0HiW=%G%LGk
zkUN9;iZ7j%*gz%7Ah^>U5mliAygq!MR9R*yAh8?~_|PnBD8v2u-Y*mbdl+w7DI%!w
zP-$0&Gj(2(&$<XD`n#(GIcc0jfNraj(A?6GdOtH0A)`2lkO<yV>q4B;7zg7qn4hi`
z;6E>e{cq(UGcHHNA2i1-I8AkMD6lq-O3V}Khb!P!78unc9q2KarIHi7uLDb1Na1%S
zxhr4#u_o0kUlzy>;cFMHb(yrytyiDSrX6|hUn{jcxWcRMi`>|ve8p^3CAs#rq+mj^
zv|r+7$am^OMZ|3WbWGdJ1f!CV4$&D>s<EVU15kxk(PuPKx&&}pS>&KEizOEf;uP}c
z%1V0wHkr>aiY97~B<&Nws0jgt)#t24>ale!6{HysH-^h5HzPZf*GN|R+)|M+ah9A~
z5=VIrHYr64RaJveWH)`N$=y*oB67321~SEkMwSf7Dff*SmTUBklM&QAYnYZV|EbCI
zl6#4_8<5j6TB=hHXEI+YCzZ?ASQd$f9+0D$c1yT994`d<VS@G}#ENXPjOH9bB_HyG
z(zqCsq>bE_iB?h<u=f)MOZ1L%70QJkv4Msp2^NJ`*gsKXl>Q8gbOC(R%ACZAoj@9`
z?*j0rolTc=lE^Hb4#ob{tay85z%u*0Yu5ap45APzD%q1i=7C2=aZVv|jp}{AR^`Iu
zwiAgt>^0$O6$5l2{RS9c4bt?8$o1X^_p^-X;A_Qq=nJ^Z{OfuZs(w7w&7B!Bfr((8
zhtv)#jN<|*`HCK$n$k^X@8+Nh)6MGdLHr+cr(zCY!vl(4??AR){>e-^H`-vIqaas)
zR<|RTa_aS{x8Lu2zhIuQL$H%w&6x7DQXXlFZ;4(YV62I&RM*o&1uZywH-uo;3q8+d
zFNKqUK5JTNK0N%UrtpP7?)NV=0}h9XQlk<V7!#q<q!f^l9lSpEnc6M@&1IbAtTNDJ
zYQTgIAw8Xlk!wu(hW1m0Cqj}hJA^NctcBM(#K#*CrA6*m^EQriMpO<Iary7J*EKrT
z!zP!aXuum&`o5}?6R;qX$C!F}n_X3;q~M-VH_C6KW!^XY=EMi732E0$7XleQ>|jM&
zR3$KBNJh`CeLzpEbe;Z9TxEhUjC7w^sXuXVSeZSr0=>vDdLdq?b#F!rk6-3Vu&|hz
z$~WXM9YR*m5bS{B3M67$<oKM!`n(0vo<lpyR0b!uXYz1u%Qt)skcN-VqFI2kbXaWZ
z@x&;NOG!A49VOC?x5CDbKlJ)Q3S*;LIs6_p26BD+;!zccfWU@a4J7;V)+Q7-8*4+K
zpec4Y3h<tyH@So|Dfk7?E@<Jz81}G5_;5s20BFt76lO1|D;UX|B!UD9RFz~QvoYs{
zS%X9^*z@e>p%M6SN?ccHe;UW8&>rnOs7fyJchCKvP1qE1)oNtmIsl!BF$szKt<8RQ
zrjqSLdA|6}V>>q;YT}hp=A@H}wXQ77rJbb5;U?V&BG9|e4w&@k#CLq?R4f^g+Qc1L
zNE&Q1j9)<Z3HD698?ZWc8m&w+;TW1IWPX;YK=3)O=I9HvN?eS0UpX5qLS53r%yOy_
z$;K8#BQp$bQiI=O<-y=gJk^pY#iG6~bFOks1f(%ikM1L7^fUQ}%Z#DI1S@9V@|eG2
zh;ACmz@|o8KtmMjOHPOCK&uVqMy!5me3V`b=H9p?tuf>9EGZ2d8%(ORI|XR!77*8A
z)a)Z|et)#3gOm4Xf`jp(buH%j!9G*!u=<fMA0Yet;8&k!uQzsDm#apvJlJBc?xiNs
z2lFE&W13X)J8gm`1oKfV6&k=@V$r>o#n;to?D`;ex0jR(<`KtF;gsQ>c}r;{{TTpg
z<kq1zxE2ugO>X3`!H<Q1a&sGj2-_>X>E<|&px`1nMd1f}6)@pJer?clfZqZ8q$B`t
zbBgL1G?~+)i{01UX2JP3zX0qo(4n33Ltj8={vuNN!gjCJK_S>q=J@AZvyyvG-RG6!
zO&f_d#)dv&C>J67NTcgcTt1<{YMjZt^16dAwME~3dUKt>Ez5)B?>tIyl?344#*uz7
zu<?AH2;XJ$$|rSj!{~*=kKdMeRR-LZwGj;)9dA!RK%V(inX2ivxaTizfk%2R&D);i
z)=~8a;y;6pKc(~#-X>IK0B<fkQ7uM2EkQ3onR5Ley1rFEZ7_s81i4U;`)3$G)ze-p
z+!>*v4y*%P6)|X%@K$iWsmhXwauC~~#9*74ryqP*KjUI<q8CXrY8Nx0r~`H@;OnPk
zCESoRnp=<mLJAQ?m)xETzMEspZ;eiW*@W@J+9vDU$#KVPa3UqtT?eLbzRMhC`hwP`
zHH2uMZ8f(_w4v<9!ou(Ci7iLl#BFXia&yx|z!Xqss)<vtW`8Id0F$Jqrh$M43;0w?
zhC|%}gN&Qae`!TjRjfZIXCY^v0VSm*+ncBaWr)IA4vj0y%vvIm^-p08H0d-=ceIyz
z`#AOmZoDi?8B*P@-VG_BEy&V{U#=v!7H&&7iEhrpODX>|3W(KdNzXgGiGDl}MiHJ&
ze-IJpp_;R+Mwa_SK0g9RMoMs5MRZiun)yT$^Cz##BT==lg)79E02GCLT<l?WR|8R?
zGniF+*53M2`-G~r-c4yj77>^fdc0xNB6|dbNB>k=0e>R^`oB%dMEYk^rhgW~fA|0Y
z1WnNYJpc@Z3G~eMD=Cq19vFOm{$EXw8v9rO2LEr@sK3F@e`bLCZx5-z!Sk-d8UKJY
zDF34|+FbwIe}n(GTg~6#G!h`7|Msr=8{9<6<@66YWR>$@g{L6<SN{h8Z(a7^U~sxW
z)!P5boBJDF#?a^f4>)qC_h02~ZTVOK2LEp@+uvXsRv@7NR<`{OuG<s|{0D3q{~sN>
z;riG98~neSrhkJE1%QD5n|Jy*7$Yho^dImw&wrG|&F^3PZ}9&n4gL*w76t<PZwlex
nV1Y55_<z7OUjNbKT;zXk1!*wwe|!nz&uaH4CpN+2AM5`EY7NVM
literal 0
HcmV?d00001
diff --git a/camera/Android.mk b/camera/Android.mk
index 36f6da1..2448832 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -58,6 +58,13 @@ else
LOCAL_WHOLE_STATIC_LIBRARIES += libcamera_parameters
endif
+# Mediatek
+ifeq ($(BOARD_HAS_MTK_HARDWARE),true)
+LOCAL_SRC_FILES += \
+ mediatek/MtkCamera.cpp \
+ mediatek/MtkCameraParameters.cpp
+endif
+
LOCAL_MODULE:= libcamera_client
include $(BUILD_SHARED_LIBRARY)
diff --git a/camera/Android.mk.orig b/camera/Android.mk.orig
new file mode 100644
index 0000000..2448832
--- /dev/null
+++ b/camera/Android.mk.orig
@@ -0,0 +1,79 @@
+# Copyright 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CAMERA_CLIENT_LOCAL_PATH:= $(call my-dir)
+include $(call all-subdir-makefiles)
+include $(CLEAR_VARS)
+
+LOCAL_PATH := $(CAMERA_CLIENT_LOCAL_PATH)
+
+LOCAL_SRC_FILES:= \
+ Camera.cpp \
+ CameraMetadata.cpp \
+ CaptureResult.cpp \
+ CameraParameters2.cpp \
+ ICamera.cpp \
+ ICameraClient.cpp \
+ ICameraService.cpp \
+ ICameraServiceListener.cpp \
+ ICameraServiceProxy.cpp \
+ ICameraRecordingProxy.cpp \
+ ICameraRecordingProxyListener.cpp \
+ camera2/ICameraDeviceUser.cpp \
+ camera2/ICameraDeviceCallbacks.cpp \
+ camera2/CaptureRequest.cpp \
+ camera2/OutputConfiguration.cpp \
+ CameraBase.cpp \
+ CameraUtils.cpp \
+ VendorTagDescriptor.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ libutils \
+ liblog \
+ libbinder \
+ libhardware \
+ libui \
+ libgui \
+ libcamera_metadata \
+
+LOCAL_C_INCLUDES += \
+ system/media/camera/include \
+ system/media/private/camera/include \
+
+ifneq ($(TARGET_SPECIFIC_CAMERA_PARAMETER_LIBRARY),)
+LOCAL_WHOLE_STATIC_LIBRARIES += $(TARGET_SPECIFIC_CAMERA_PARAMETER_LIBRARY)
+else
+LOCAL_WHOLE_STATIC_LIBRARIES += libcamera_parameters
+endif
+
+# Mediatek
+ifeq ($(BOARD_HAS_MTK_HARDWARE),true)
+LOCAL_SRC_FILES += \
+ mediatek/MtkCamera.cpp \
+ mediatek/MtkCameraParameters.cpp
+endif
+
+LOCAL_MODULE:= libcamera_client
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ CameraParameters.cpp
+
+LOCAL_MODULE := libcamera_parameters
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/camera/mediatek/MtkCamera.cpp b/camera/mediatek/MtkCamera.cpp
new file mode 100644
index 0000000..5f56c94
--- /dev/null
+++ b/camera/mediatek/MtkCamera.cpp
@@ -0,0 +1,212 @@
+/* Copyright Statement:
+ *
+ * This software/firmware and related documentation ("MediaTek Software") are
+ * protected under relevant copyright laws. The information contained herein is
+ * confidential and proprietary to MediaTek Inc. and/or its licensors. Without
+ * the prior written permission of MediaTek inc. and/or its licensors, any
+ * reproduction, modification, use or disclosure of MediaTek Software, and
+ * information contained herein, in whole or in part, shall be strictly
+ * prohibited.
+ *
+ * MediaTek Inc. (C) 2010. All rights reserved.
+ *
+ * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
+ * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
+ * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
+ * ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
+ * WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
+ * NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
+ * RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
+ * INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
+ * TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
+ * RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
+ * OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
+ * SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
+ * RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
+ * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
+ * ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
+ * RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
+ * MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
+ * CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
+ *
+ * The following software/firmware and/or related documentation ("MediaTek
+ * Software") have been modified by MediaTek Inc. All revisions are subject to
+ * any receiver's applicable license agreements with MediaTek Inc.
+ */
+
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "MtkCamera"
+#include <utils/Log.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <camera/MtkCamera.h>
+
+namespace android {
+
+
+MtkCamMsgExtDataHelper::
+MtkCamMsgExtDataHelper()
+ : mIsValid(false)
+ , mspData(0)
+ , mspHeap(0)
+ , mDataOffset(0)
+ , mDataSize(0)
+{
+ ::memset(&mExtDataHdr, 0, sizeof(mExtDataHdr));
+}
+
+
+MtkCamMsgExtDataHelper::
+~MtkCamMsgExtDataHelper()
+{
+ uninit();
+}
+
+
+bool
+MtkCamMsgExtDataHelper::
+init(const sp<IMemory>& dataPtr)
+{
+ bool ret = false;
+ //
+ sp<IMemoryHeap> heap = 0;
+ ssize_t offset = 0;
+ size_t size = 0;
+ //
+ if ( NULL == dataPtr.get() ) {
+ ALOGE("[MtkCamMsgExtDataHelper] dataPtr is NULL \r\n");
+ goto lbExit;
+ }
+ //
+ heap = dataPtr->getMemory(&offset, &size);
+ if ( NULL == heap.get() || NULL == heap->base() ) {
+ ALOGE("[MtkCamMsgExtDataHelper] heap or heap->base() is NULL - (heap,offset,size)=(%p,%ld,%d) \r\n", heap.get(), offset, size);
+ goto lbExit;
+ }
+ //
+ if ( sizeof(DataHeader) > size ) {
+ ALOGE("[MtkCamMsgExtDataHelper] sizeof(DataHeader)(%d) > size(%d) \r\n", sizeof(DataHeader), size);
+ goto lbExit;
+ }
+ //
+ ::memcpy(&mExtDataHdr, ((uint8_t*)heap->base()) + offset, sizeof(DataHeader));
+ mspData = dataPtr;
+ mspHeap = heap;
+ mDataOffset = offset;
+ mDataSize = size;
+ mIsValid= true;
+ ret = true;
+lbExit:
+ return ret;
+}
+
+
+bool
+MtkCamMsgExtDataHelper::
+uninit()
+{
+ mIsValid= false;
+ mspData = NULL;
+ mspHeap = NULL;
+ mDataOffset = 0;
+ mDataSize = 0;
+ ::memset(&mExtDataHdr, 0, sizeof(mExtDataHdr));
+ return true;
+}
+
+
+bool
+MtkCamMsgExtDataHelper::
+create(size_t const extParamSize, uint32_t const u4ExtMsgType)
+{
+ bool ret = false;
+ //
+ size_t const extDataSize = sizeof(DataHeader) + extParamSize;
+ sp<IMemoryHeap> heap = 0;
+ sp<IMemory> dataPtr = 0;
+
+ // (1) Check arguments.
+ if ( 0 == extParamSize )
+ {
+ ALOGW("[MtkCamMsgExtDataHelper::create] extParamSize==0 \r\n");
+ }
+
+ // (2) Allocate memory
+ heap = new MemoryHeapBase(extDataSize, 0, NULL);
+ dataPtr = new MemoryBase(heap, 0, extDataSize);
+
+ // (3) Initialize.
+ ret = init(dataPtr);
+ if ( ! ret )
+ {
+ ALOGE("[MtkCamMsgExtDataHelper::create] init fail \r\n");
+ goto lbExit;
+ }
+
+ // (4) Assign the header.
+ mExtDataHdr.extMsgType = u4ExtMsgType;
+ ::memcpy(((uint8_t*)mspHeap->base()) + mDataOffset, &mExtDataHdr, sizeof(DataHeader));
+
+ ret = true;
+lbExit:
+ return ret;
+}
+
+
+bool
+MtkCamMsgExtDataHelper::
+destroy()
+{
+ return uninit();
+}
+
+
+uint8_t*
+MtkCamMsgExtDataHelper::
+getExtParamBase() const
+{
+ return mIsValid
+ ? static_cast<uint8_t*>(mspHeap->base()) + mDataOffset + sizeof(DataHeader)
+ : NULL;
+}
+
+
+size_t
+MtkCamMsgExtDataHelper::
+getExtParamSize() const
+{
+ return mIsValid
+ ? (mDataSize - sizeof(DataHeader))
+ : 0;
+}
+
+
+ssize_t
+MtkCamMsgExtDataHelper::
+getExtParamOffset() const
+{
+ return mIsValid
+ ? (mDataOffset + sizeof(DataHeader))
+ : 0;
+}
+
+
+}; // namespace android
diff --git a/camera/mediatek/MtkCameraParameters.cpp b/camera/mediatek/MtkCameraParameters.cpp
new file mode 100644
index 0000000..fd2a717
--- /dev/null
+++ b/camera/mediatek/MtkCameraParameters.cpp
@@ -0,0 +1,308 @@
+/* Copyright Statement:
+ *
+ * This software/firmware and related documentation ("MediaTek Software") are
+ * protected under relevant copyright laws. The information contained herein is
+ * confidential and proprietary to MediaTek Inc. and/or its licensors. Without
+ * the prior written permission of MediaTek inc. and/or its licensors, any
+ * reproduction, modification, use or disclosure of MediaTek Software, and
+ * information contained herein, in whole or in part, shall be strictly
+ * prohibited.
+ *
+ * MediaTek Inc. (C) 2010. All rights reserved.
+ *
+ * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
+ * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
+ * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
+ * ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
+ * WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
+ * NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
+ * RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
+ * INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
+ * TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
+ * RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
+ * OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
+ * SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
+ * RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
+ * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
+ * ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
+ * RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
+ * MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
+ * CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
+ *
+ * The following software/firmware and/or related documentation ("MediaTek
+ * Software") have been modified by MediaTek Inc. All revisions are subject to
+ * any receiver's applicable license agreements with MediaTek Inc.
+ */
+
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "MTKCameraParams"
+#include <utils/Log.h>
+
+#include <string.h>
+#include <stdlib.h>
+#include <camera/MtkCameraParameters.h>
+
+namespace android {
+
+
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+// App Mode.
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+const char MtkCameraParameters::PROPERTY_KEY_CLIENT_APPMODE[] = "client.appmode";
+//
+const char MtkCameraParameters::APP_MODE_NAME_DEFAULT[] = "Default";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_ENG[] = "MtkEng";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_ATV[] = "MtkAtv";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_S3D[] = "MtkS3d";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_VT[] = "MtkVt";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_PHOTO[] = "MtkPhoto";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_VIDEO[] = "MtkVideo";
+const char MtkCameraParameters::APP_MODE_NAME_MTK_ZSD[] = "MtkZsd";
+
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+// Scene Mode
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+const char MtkCameraParameters::SCENE_MODE_NORMAL[] = "normal";
+
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+// Face Beauty
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+const char MtkCameraParameters::KEY_FB_SMOOTH_LEVEL[] = "fb-smooth-level";
+const char MtkCameraParameters::KEY_FB_SMOOTH_LEVEL_MIN[] = "fb-smooth-level-min";
+const char MtkCameraParameters::KEY_FB_SMOOTH_LEVEL_MAX[] = "fb-smooth-level-max";
+//
+const char MtkCameraParameters::KEY_FB_SKIN_COLOR[] = "fb-skin-color";
+const char MtkCameraParameters::KEY_FB_SKIN_COLOR_MIN[] = "fb-skin-color-min";
+const char MtkCameraParameters::KEY_FB_SKIN_COLOR_MAX[] = "fb-skin-color-max";
+//
+const char MtkCameraParameters::KEY_FB_SHARP[] = "fb-sharp";
+const char MtkCameraParameters::KEY_FB_SHARP_MIN[] = "fb-sharp-min";
+const char MtkCameraParameters::KEY_FB_SHARP_MAX[] = "fb-sharp-max";
+
+
+//
+const char MtkCameraParameters::KEY_EXPOSURE[] = "exposure";
+const char MtkCameraParameters::KEY_EXPOSURE_METER[] = "exposure-meter";
+const char MtkCameraParameters::KEY_ISO_SPEED[] = "iso-speed";
+const char MtkCameraParameters::KEY_AE_MODE[] = "ae-mode";
+const char MtkCameraParameters::KEY_FOCUS_METER[] = "focus-meter";
+const char MtkCameraParameters::KEY_EDGE[] = "edge";
+const char MtkCameraParameters::KEY_HUE[] = "hue";
+const char MtkCameraParameters::KEY_SATURATION[] = "saturation";
+const char MtkCameraParameters::KEY_BRIGHTNESS[] = "brightness";
+const char MtkCameraParameters::KEY_CONTRAST[] = "contrast";
+const char MtkCameraParameters::KEY_AF_LAMP_MODE [] = "aflamp-mode";
+const char MtkCameraParameters::KEY_STEREO_3D_PREVIEW_SIZE[] = "stereo3d-preview-size";
+const char MtkCameraParameters::KEY_STEREO_3D_PICTURE_SIZE[] = "stereo3d-picture-size";
+const char MtkCameraParameters::KEY_STEREO_3D_TYPE [] = "stereo3d-type";
+const char MtkCameraParameters::KEY_STEREO_3D_MODE [] = "stereo3d-mode";
+const char MtkCameraParameters::KEY_STEREO_3D_IMAGE_FORMAT [] = "stereo3d-image-format";
+
+// ZSD
+const char MtkCameraParameters::KEY_ZSD_MODE[] = "zsd-mode";
+const char MtkCameraParameters::KEY_SUPPORTED_ZSD_MODE[] = "zsd-supported";
+//
+const char MtkCameraParameters::KEY_FPS_MODE[] = "fps-mode";
+//
+const char MtkCameraParameters::KEY_FOCUS_DRAW[] = "af-draw";
+//
+const char MtkCameraParameters::KEY_CAPTURE_MODE[] = "cap-mode";
+const char MtkCameraParameters::KEY_SUPPORTED_CAPTURE_MODES[] = "cap-mode-values";
+const char MtkCameraParameters::KEY_CAPTURE_PATH[] = "capfname";
+const char MtkCameraParameters::KEY_BURST_SHOT_NUM[] = "burst-num";
+//
+const char MtkCameraParameters::KEY_MATV_PREVIEW_DELAY[] = "tv-delay";
+const char MtkCameraParameters::KEY_PANORAMA_IDX[] = "pano-idx";
+const char MtkCameraParameters::KEY_PANORAMA_DIR[] = "pano-dir";
+
+// Values for KEY_EXPOSURE
+const char MtkCameraParameters::EXPOSURE_METER_SPOT[] = "spot";
+const char MtkCameraParameters::EXPOSURE_METER_CENTER[] = "center";
+const char MtkCameraParameters::EXPOSURE_METER_AVERAGE[] = "average";
+
+// Valeus for KEY_ISO_SPEED
+const char MtkCameraParameters::ISO_SPEED_AUTO[] = "auto";
+const char MtkCameraParameters::ISO_SPEED_100[] = "100";
+const char MtkCameraParameters::ISO_SPEED_200[] = "200";
+const char MtkCameraParameters::ISO_SPEED_400[] = "400";
+const char MtkCameraParameters::ISO_SPEED_800[] = "800";
+const char MtkCameraParameters::ISO_SPEED_1600[] = "1600";
+
+// Values for KEY_AE_MODE = "ae-mode"
+
+// Values for KEY_FOCUS_METER
+const char MtkCameraParameters::FOCUS_METER_SPOT[] = "spot";
+const char MtkCameraParameters::FOCUS_METER_MULTI[] = "multi";
+
+// AWB2PASS
+const char MtkCameraParameters::KEY_AWB2PASS[] = "awb-2pass";
+
+
+//
+// Camera Mode
+const char MtkCameraParameters::KEY_CAMERA_MODE[] = "mtk-cam-mode";
+// Values for KEY_CAMERA_MODE
+const int MtkCameraParameters::CAMERA_MODE_NORMAL = 0;
+const int MtkCameraParameters::CAMERA_MODE_MTK_PRV = 1;
+const int MtkCameraParameters::CAMERA_MODE_MTK_VDO = 2;
+const int MtkCameraParameters::CAMERA_MODE_MTK_VT = 3;
+
+// Values for KEY_FPS_MODE
+const int MtkCameraParameters::FPS_MODE_NORMAL = 0;
+const int MtkCameraParameters::FPS_MODE_FIX = 1;
+
+// Values for raw save mode
+
+// Values for KEY_FOCUS_DRAW
+
+// Values for capture mode
+const char MtkCameraParameters::CAPTURE_MODE_PANORAMA_SHOT[] = "panoramashot";
+const char MtkCameraParameters::CAPTURE_MODE_BURST_SHOT[] = "burstshot";
+const char MtkCameraParameters::CAPTURE_MODE_NORMAL[] = "normal";
+const char MtkCameraParameters::CAPTURE_MODE_BEST_SHOT[] = "bestshot";
+const char MtkCameraParameters::CAPTURE_MODE_EV_BRACKET_SHOT[] = "evbracketshot";
+const char MtkCameraParameters::CAPTURE_MODE_SMILE_SHOT[] = "smileshot";
+const char MtkCameraParameters::CAPTURE_MODE_MAV_SHOT[] = "mav";
+const char MtkCameraParameters::CAPTURE_MODE_AUTO_PANORAMA_SHOT[] = "autorama";
+const char MtkCameraParameters::CAPTURE_MODE_MOTION_TRACK_SHOT[] = "motiontrack";
+const char MtkCameraParameters::CAPTURE_MODE_HDR_SHOT[] = "hdr";
+const char MtkCameraParameters::CAPTURE_MODE_ASD_SHOT[] = "asd";
+const char MtkCameraParameters::CAPTURE_MODE_ZSD_SHOT[] = "zsd";
+const char MtkCameraParameters::CAPTURE_MODE_PANO_3D[] = "pano_3d";
+const char MtkCameraParameters::CAPTURE_MODE_SINGLE_3D[] = "single_3d";
+const char MtkCameraParameters::CAPTURE_MODE_FACE_BEAUTY[] = "face_beauty";
+const char MtkCameraParameters::CAPTURE_MODE_CONTINUOUS_SHOT[] = "continuousshot";
+const char MtkCameraParameters::CAPTURE_MODE_MULTI_MOTION[] = "multi_motion";
+const char MtkCameraParameters::CAPTURE_MODE_GESTURE_SHOT[] = "gestureshot";
+
+// Values for panorama direction settings
+const char MtkCameraParameters::PANORAMA_DIR_RIGHT[] = "right";
+const char MtkCameraParameters::PANORAMA_DIR_LEFT[] = "left";
+const char MtkCameraParameters::PANORAMA_DIR_TOP[] = "top";
+const char MtkCameraParameters::PANORAMA_DIR_DOWN[] = "down";
+
+//
+const int MtkCameraParameters::ENABLE = 1;
+const int MtkCameraParameters::DISABLE = 0;
+
+// Values for KEY_EDGE, KEY_HUE, KEY_SATURATION, KEY_BRIGHTNESS, KEY_CONTRAST
+const char MtkCameraParameters::HIGH[] = "high";
+const char MtkCameraParameters::MIDDLE[] = "middle";
+const char MtkCameraParameters::LOW[] = "low";
+
+// Preview Internal Format.
+const char MtkCameraParameters::KEY_PREVIEW_INT_FORMAT[] = "prv-int-fmt";
+
+// Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT,
+// and KEY_VIDEO_FRAME_FORMAT
+const char MtkCameraParameters::PIXEL_FORMAT_YUV420I[] = "yuv420i-yyuvyy-3plane";
+const char MtkCameraParameters::PIXEL_FORMAT_YV12_GPU[] = "yv12-gpu";
+const char MtkCameraParameters::PIXEL_FORMAT_YUV422I_UYVY[] = "yuv422i-uyvy";
+const char MtkCameraParameters::PIXEL_FORMAT_YUV422I_VYUY[] = "yuv422i-vyuy";
+const char MtkCameraParameters::PIXEL_FORMAT_YUV422I_YVYU[] = "yuv422i-yvyu";
+
+const char MtkCameraParameters::PIXEL_FORMAT_BAYER8[] = "bayer8";
+const char MtkCameraParameters::PIXEL_FORMAT_BAYER10[] = "bayer10";
+
+const char MtkCameraParameters::KEY_BRIGHTNESS_VALUE[] = "brightness_value";
+
+// ISP Operation mode for meta mode use
+const char MtkCameraParameters::KEY_ISP_MODE[] = "isp-mode";
+// AF
+const char MtkCameraParameters::KEY_AF_X[] = "af-x";
+const char MtkCameraParameters::KEY_AF_Y[] = "af-y";
+// Effect
+const char MtkCameraParameters::EFFECT_SEPIA_BLUE[] = "sepiablue";
+const char MtkCameraParameters::EFFECT_SEPIA_GREEN[] = "sepiagreen";
+
+//
+// on/off => FIXME: should be replaced with TRUE[]
+const char MtkCameraParameters::ON[] = "on";
+const char MtkCameraParameters::OFF[] = "off";
+//
+const char MtkCameraParameters::WHITE_BALANCE_TUNGSTEN[] = "tungsten";
+//
+const char MtkCameraParameters::ISO_SPEED_ENG[] = "iso-speed-eng";
+const char MtkCameraParameters::KEY_RAW_SAVE_MODE[] = "rawsave-mode";
+const char MtkCameraParameters::KEY_RAW_PATH[] = "rawfname";
+
+const char MtkCameraParameters::KEY_FAST_CONTINUOUS_SHOT[] = "fast-continuous-shot";
+
+const char MtkCameraParameters::KEY_CSHOT_INDICATOR[] = "cshot-indicator";
+
+// AF EM MODE
+const char MtkCameraParameters::KEY_FOCUS_ENG_MODE[] = "afeng-mode";
+const char MtkCameraParameters::KEY_FOCUS_ENG_STEP[] = "afeng-pos";
+const char MtkCameraParameters::KEY_FOCUS_ENG_MAX_STEP[] = "afeng-max-focus-step";
+const char MtkCameraParameters::KEY_FOCUS_ENG_MIN_STEP[] = "afeng-min-focus-step";
+const char MtkCameraParameters::KEY_FOCUS_ENG_BEST_STEP[] = "afeng-best-focus-step";
+const char MtkCameraParameters::KEY_RAW_DUMP_FLAG[] = "afeng_raw_dump_flag";
+const char MtkCameraParameters::KEY_PREVIEW_DUMP_RESOLUTION[] = "preview-dump-resolution";
+// Values for KEY_PREVIEW_DUMP_RESOLUTION
+const int MtkCameraParameters::PREVIEW_DUMP_RESOLUTION_NORMAL = 0;
+const int MtkCameraParameters::PREVIEW_DUMP_RESOLUTION_CROP = 1;
+//
+const char MtkCameraParameters::KEY_MAX_NUM_DETECTED_OBJECT[] = "max-num-ot";
+//
+const char MtkCameraParameters::KEY_VIDEO_HDR[] = "video-hdr";
+
+// KEY for [Engineer Mode] Add new camera paramters for new requirements
+const char MtkCameraParameters::KEY_ENG_AE_ENABLE[] = "eng-ae-enable";
+const char MtkCameraParameters::KEY_ENG_PREVIEW_SHUTTER_SPEED[] = "eng-preview-shutter-speed";
+const char MtkCameraParameters::KEY_ENG_PREVIEW_SENSOR_GAIN[] = "eng-preview-sensor-gain";
+const char MtkCameraParameters::KEY_ENG_PREVIEW_ISP_GAIN[] = "eng-preview-isp-gain";
+const char MtkCameraParameters::KEY_ENG_PREVIEW_AE_INDEX[] = "eng-preview-ae-index";
+const char MtkCameraParameters::KEY_ENG_CAPTURE_SENSOR_GAIN[] = "eng-capture-sensor-gain";
+const char MtkCameraParameters::KEY_ENG_CAPTURE_ISP_GAIN[] = "eng-capture-isp-gain";
+const char MtkCameraParameters::KEY_ENG_CAPTURE_SHUTTER_SPEED[] = "eng-capture-shutter-speed";
+const char MtkCameraParameters::KEY_ENG_CAPTURE_ISO[] = "eng-capture-iso";
+const char MtkCameraParameters::KEY_ENG_FLASH_DUTY_VALUE[] = "eng-flash-duty-value";
+const char MtkCameraParameters::KEY_ENG_FLASH_DUTY_MIN[] = "eng-flash-duty-min";
+const char MtkCameraParameters::KEY_ENG_FLASH_DUTY_MAX[] = "eng-flash-duty-max";
+const char MtkCameraParameters::KEY_ENG_ZSD_ENABLE[] = "eng-zsd-enable";
+const char MtkCameraParameters::KEY_SENSOR_TYPE[] = "sensor-type";
+const char MtkCameraParameters::KEY_ENG_PREVIEW_FPS[] = "eng-preview-fps";
+const char MtkCameraParameters::KEY_ENG_MSG[] = "eng-msg";
+const int MtkCameraParameters::KEY_ENG_FLASH_DUTY_DEFAULT_VALUE = -1;
+const int MtkCameraParameters::KEY_ENG_FLASH_STEP_DEFAULT_VALUE = -1;
+const char MtkCameraParameters::KEY_ENG_FLASH_STEP_MIN[] = "eng-flash-step-min";
+const char MtkCameraParameters::KEY_ENG_FLASH_STEP_MAX[] = "eng-flash-step-max";
+const char MtkCameraParameters::KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL[] = "eng-focus-fullscan-frame-interval";
+const char MtkCameraParameters::KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MAX[] = "eng-focus-fullscan-frame-interval-max";
+const char MtkCameraParameters::KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MIN[] = "eng-focus-fullscan-frame-interval-min";
+const int MtkCameraParameters::KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MAX_DEFAULT = 65535;
+const int MtkCameraParameters::KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MIN_DEFAULT = 0;
+const char MtkCameraParameters::KEY_ENG_PREVIEW_FRAME_INTERVAL_IN_US[] = "eng-preview-frame-interval-in-us";
+const char MtkCameraParameters::KEY_ENG_PARAMETER1[] = "key-eng-parameter1";
+const char MtkCameraParameters::KEY_ENG_PARAMETER2[] = "key-eng-parameter2";
+const char MtkCameraParameters::KEY_ENG_PARAMETER3[] = "key-eng-parameter3";
+
+const char MtkCameraParameters::KEY_ENG_SAVE_SHADING_TABLE[] = "eng-save-shading-table";
+const char MtkCameraParameters::KEY_ENG_SHADING_TABLE[] = "eng-shading-table";
+const int MtkCameraParameters::KEY_ENG_SHADING_TABLE_AUTO = 0;
+const int MtkCameraParameters::KEY_ENG_SHADING_TABLE_LOW = 1;
+const int MtkCameraParameters::KEY_ENG_SHADING_TABLE_MIDDLE = 2;
+const int MtkCameraParameters::KEY_ENG_SHADING_TABLE_HIGH = 3;
+const int MtkCameraParameters::KEY_ENG_SHADING_TABLE_TSF = 4;
+
+// KEY for [Engineer Mode] Add new camera paramters for ev calibration
+const char MtkCameraParameters::KEY_ENG_EV_CALBRATION_OFFSET_VALUE[] = "eng-ev-cal-offset";
+
+}; // namespace android
diff --git a/include/camera/MtkCamera.h b/include/camera/MtkCamera.h
new file mode 100644
index 0000000..8982feb
--- /dev/null
+++ b/include/camera/MtkCamera.h
@@ -0,0 +1,275 @@
+/* Copyright Statement:
+ *
+ * This software/firmware and related documentation ("MediaTek Software") are
+ * protected under relevant copyright laws. The information contained herein is
+ * confidential and proprietary to MediaTek Inc. and/or its licensors. Without
+ * the prior written permission of MediaTek inc. and/or its licensors, any
+ * reproduction, modification, use or disclosure of MediaTek Software, and
+ * information contained herein, in whole or in part, shall be strictly
+ * prohibited.
+ *
+ * MediaTek Inc. (C) 2010. All rights reserved.
+ *
+ * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
+ * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
+ * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
+ * ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
+ * WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
+ * NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
+ * RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
+ * INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
+ * TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
+ * RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
+ * OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
+ * SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
+ * RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
+ * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
+ * ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
+ * RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
+ * MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
+ * CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
+ *
+ * The following software/firmware and/or related documentation ("MediaTek
+ * Software") have been modified by MediaTek Inc. All revisions are subject to
+ * any receiver's applicable license agreements with MediaTek Inc.
+ */
+
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_MTK_CAMERA_H
+#define ANDROID_HARDWARE_MTK_CAMERA_H
+
+#include <binder/IMemory.h>
+
+namespace android {
+
+// extended msgType in notifyCallback and dataCallback functions
+enum {
+ MTK_CAMERA_MSG_EXT_NOTIFY = 0x40000000,
+ MTK_CAMERA_MSG_EXT_DATA = 0x80000000,
+};
+
+// extended notify message related to MTK_CAMERA_MSG_EXT_NOTIFY used in notifyCallback functions
+enum {
+ //
+ // Smile Detection
+ MTK_CAMERA_MSG_EXT_NOTIFY_SMILE_DETECT = 0x00000001,
+ //
+ // Auto Scene Detection
+ MTK_CAMERA_MSG_EXT_NOTIFY_ASD = 0x00000002,
+ //
+ // Multi Angle View
+ MTK_CAMERA_MSG_EXT_NOTIFY_MAV = 0x00000003,
+ //
+ // Burst Shutter Callback
+ // ext2: count-down shutter number; 0: the last one shutter.
+ MTK_CAMERA_MSG_EXT_NOTIFY_BURST_SHUTTER = 0x00000004,
+ //
+ // Continuous Shutter Callback
+ // ext2: current continuous shutter number.
+ MTK_CAMERA_MSG_EXT_NOTIFY_CONTINUOUS_SHUTTER= 0x00000005,
+ //
+ // Continuous EndCallback
+ MTK_CAMERA_MSG_EXT_NOTIFY_CONTINUOUS_END = 0x00000006,
+
+ // ZSD preview done
+ MTK_CAMERA_MSG_EXT_NOTIFY_ZSD_PREVIEW_DONE = 0x00000007,
+ //
+ // Capture done (disable CAMERA_MSG_SHUTTER / CAMERA_MSG_COMPRESSED_IMAGE)
+ MTK_CAMERA_MSG_EXT_NOTIFY_CAPTURE_DONE = 0x00000010,
+ //
+ // Shutter Callback (not disable CAMERA_MSG_SHUTTER)
+ // ext2: 1: CameraService will play shutter sound.
+ MTK_CAMERA_MSG_EXT_NOTIFY_SHUTTER = 0x00000011,
+ //
+ // for EM preview raw dump error notify
+ MTK_CAMERA_MSG_EXT_NOTIFY_RAW_DUMP_STOPPED = 0x00000012,
+ //
+ // Gesture Detection
+ MTK_CAMERA_MSG_EXT_NOTIFY_GESTURE_DETECT = 0x00000013
+};
+
+// extended data message related to MTK_CAMERA_MSG_EXT_DATA used in dataCallback functions
+// extended data header is located at the top of dataPtr in dataCallback functions
+// DATA: Header + Params
+enum {
+ //
+ // Auto Panorama
+ // Params:
+ // int[0]: 0:mAUTORAMAMVCallback, 1:mAUTORAMACallback
+ // int[1~]:depends on
+ //
+ MTK_CAMERA_MSG_EXT_DATA_AUTORAMA = 0x00000001,
+ //
+ // AF Window Results
+ MTK_CAMERA_MSG_EXT_DATA_AF = 0x00000002,
+ //
+ // Burst Shot (EV Shot)
+ // int[0]: the total shut count.
+ // int[1]: count-down shut number; 0: the last one shut.
+ MTK_CAMERA_MSG_EXT_DATA_BURST_SHOT = 0x00000003,
+ //
+ // Continuous Shot
+ // int[0]: current continuous shut number.
+ MTK_CAMERA_MSG_EXT_DATA_CONTINUOUS_SHOT = 0x00000004,
+
+
+ MTK_CAMERA_MSG_EXT_DATA_OT = 0x00000005,
+
+ // Facebeauty Shot
+ // int[0]: data type. 0:original image.
+ MTK_CAMERA_MSG_EXT_DATA_FACEBEAUTY = 0x00000006,
+ //
+ // MAV Shot
+ // int[0]: data type. 0:original image.
+ MTK_CAMERA_MSG_EXT_DATA_MAV = 0x00000007,
+ //
+ // HDR Shot
+ // int[0]: data type. 0:0EV image.
+ MTK_CAMERA_MSG_EXT_DATA_HDR = 0x00000008,
+
+ //
+ // Motion Track
+ // Params:
+ // int[0]: 0: frame EIS, 1: captured image, 2: blended image, 3: intermediate data
+ // int[1~]:depends on
+ //
+ MTK_CAMERA_MSG_EXT_DATA_MOTIONTRACK = 0x00000009,
+
+ //
+ // Compressed Image (not disable CAMERA_MSG_COMPRESSED_IMAGE)
+ // int[0]: current shut index; 0: the first one shut.
+ MTK_CAMERA_MSG_EXT_DATA_COMPRESSED_IMAGE = 0x00000010,
+};
+
+// MTK-extended camera message data helper.
+// DATA: Header + Params
+class MtkCamMsgExtDataHelper
+{
+public:
+ // The header type of MTK-extended camera message data.
+ struct DataHeader {
+ uint32_t extMsgType;
+ };
+
+public:
+ MtkCamMsgExtDataHelper();
+ ~MtkCamMsgExtDataHelper();
+ bool init(const sp<IMemory>& dataPtr);
+ bool uninit();
+ bool create(size_t const extParamSize, uint32_t const u4ExtMsgType);
+ bool destroy();
+
+ uint8_t* getExtParamBase() const;
+ size_t getExtParamSize() const;
+ ssize_t getExtParamOffset() const;
+ inline uint32_t getExtMsgType() const { return mExtDataHdr.extMsgType; }
+ inline DataHeader const& getExtDataHeader() const { return mExtDataHdr; }
+ inline sp<IMemory>const& getData() const { return mspData; }
+ inline sp<IMemoryHeap>const& getHeap() const { return mspHeap; }
+
+protected:
+ bool mIsValid;
+ sp<IMemory> mspData;
+ sp<IMemoryHeap> mspHeap;
+ ssize_t mDataOffset;
+ size_t mDataSize;
+ DataHeader mExtDataHdr;
+};
+
+
+// cmdType in sendCommand functions
+enum {
+ CAMERA_CMD_MTK_DEFINE_START = 0x10000000,
+ CAMERA_CMD_DO_PANORAMA,
+ CAMERA_CMD_CANCEL_PANORAMA,
+ CAMERA_CMD_START_SD_PREVIEW, //(Smile Detection)
+ CAMERA_CMD_CANCEL_SD_PREVIEW, //(Smile Detection)
+ CAMERA_CMD_START_OT,
+ CAMERA_CMD_STOP_OT,
+ CAMERA_CMD_START_MAV,
+ CAMERA_CMD_STOP_MAV,
+ CAMERA_CMD_START_AUTORAMA,
+ CAMERA_CMD_STOP_AUTORAMA,
+ CAMERA_CMD_GET_MEM_INFO, //For Video to get PMEM buffer info
+ CAMERA_CMD_GET_REC_BUF_INFO,
+ CAMERA_CMD_CANCEL_CSHOT,
+ CAMERA_CMD_SET_CSHOT_SPEED,
+#ifdef MTK_S3D_SUPPORT
+ CAMERA_CMD_START_3DSHOT,
+ CAMERA_CMD_STOP_3DSHOT,
+#endif
+ CAMERA_CMD_START_MOTIONTRACK,
+ CAMERA_CMD_STOP_MOTIONTRACK,
+ CAMERA_CMD_START_CLONECAMERA,
+ CAMERA_CMD_SHOT_CLONECAMERA,
+ CAMERA_CMD_STOP_CLONECAMERA,
+ CAMERA_CMD_START_GD_PREVIEW, //(Gesture Detection)
+ CAMERA_CMD_CANCEL_GD_PREVIEW, //(Gesture Detection)
+};
+
+/*
+ * For Video to get PMEM buffer info
+ *
+ * Command: CAMERA_CMD_GET_MEM_INFO
+ */
+struct CameraMemInfo {
+ enum { eTYPE_PMEM = 0 };
+ uint32_t u4Type;
+ uint32_t u4VABase;
+ uint32_t u4PABase;
+ uint32_t u4MemSize;
+ uint32_t u4MemCount;
+};
+
+
+/*
+ * set camera fatal errors enum
+ *
+ */
+enum {
+ CAMERA_ERROR_NO_MEMORY = 1000,
+ CAMERA_ERROR_RESET,
+};
+
+/*
+ * For Video to get buffer info
+ *
+ * Command: CAMERA_CMD_GET_REC_BUF_INFO
+ */
+struct CameraRecBufInfo {
+ int32_t i4MemId;
+ uint32_t u4VirAddr;
+ uint32_t u4Size;
+};
+
+
+/*
+ * For Video to set setting
+ *
+ * Command: CAMERA_CMD_GET_REC_BUF_INFO
+ */
+struct CameraRecSetting {
+ int32_t mi4BufSecu; //security
+ int32_t mi4BufCohe; //coherent
+};
+
+
+}; // namespace android
+
+#endif //ANDROID_HARDWARE_MTK_CAMERA_H
diff --git a/include/camera/MtkCameraParameters.h b/include/camera/MtkCameraParameters.h
new file mode 100644
index 0000000..4a34957
--- /dev/null
+++ b/include/camera/MtkCameraParameters.h
@@ -0,0 +1,364 @@
+/* Copyright Statement:
+ *
+ * This software/firmware and related documentation ("MediaTek Software") are
+ * protected under relevant copyright laws. The information contained herein is
+ * confidential and proprietary to MediaTek Inc. and/or its licensors. Without
+ * the prior written permission of MediaTek inc. and/or its licensors, any
+ * reproduction, modification, use or disclosure of MediaTek Software, and
+ * information contained herein, in whole or in part, shall be strictly
+ * prohibited.
+ *
+ * MediaTek Inc. (C) 2010. All rights reserved.
+ *
+ * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
+ * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
+ * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
+ * ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
+ * WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
+ * NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
+ * RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
+ * INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
+ * TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
+ * RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
+ * OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
+ * SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
+ * RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
+ * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
+ * ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
+ * RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
+ * MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
+ * CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
+ *
+ * The following software/firmware and/or related documentation ("MediaTek
+ * Software") have been modified by MediaTek Inc. All revisions are subject to
+ * any receiver's applicable license agreements with MediaTek Inc.
+ */
+
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_MTK_CAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_MTK_CAMERA_PARAMETERS_H
+
+#include <camera/CameraParameters.h>
+
+namespace android {
+
+
+/**
+ * @class MtkCameraParameters
+ * @brief MTK-proprietary camera parameters.
+ * @details This class is derived from CameraParameters and defines MTK-proprietary camera parameters.
+ */
+class MtkCameraParameters : public CameraParameters
+{
+public:
+ MtkCameraParameters() : CameraParameters() {}
+ MtkCameraParameters(const String8 &params) { unflatten(params); }
+ ~MtkCameraParameters() {}
+
+ MtkCameraParameters& operator=(CameraParameters const& params)
+ {
+ unflatten(params.flatten());
+ return (*this);
+ }
+ //
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+// App Mode.
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ static const char PROPERTY_KEY_CLIENT_APPMODE[];
+ //
+ static const char APP_MODE_NAME_DEFAULT[];
+ static const char APP_MODE_NAME_MTK_ENG[];
+ static const char APP_MODE_NAME_MTK_ATV[];
+ static const char APP_MODE_NAME_MTK_S3D[];
+ static const char APP_MODE_NAME_MTK_VT[];
+ static const char APP_MODE_NAME_MTK_PHOTO[];
+ static const char APP_MODE_NAME_MTK_VIDEO[];
+ static const char APP_MODE_NAME_MTK_ZSD[];
+ //
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+// Scene Mode
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ static const char SCENE_MODE_NORMAL[];
+ //
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+// Face Beauty
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ static const char KEY_FB_SMOOTH_LEVEL[];
+ static const char KEY_FB_SMOOTH_LEVEL_MIN[];
+ static const char KEY_FB_SMOOTH_LEVEL_MAX[];
+ //
+ static const char KEY_FB_SKIN_COLOR[];
+ static const char KEY_FB_SKIN_COLOR_MIN[];
+ static const char KEY_FB_SKIN_COLOR_MAX[];
+ //
+ static const char KEY_FB_SHARP[];
+ static const char KEY_FB_SHARP_MIN[];
+ static const char KEY_FB_SHARP_MAX[];
+ //
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+//
+//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ static const char KEY_EXPOSURE[];
+ static const char KEY_EXPOSURE_METER[];
+ static const char KEY_ISO_SPEED[];
+ static const char KEY_AE_MODE[];
+ static const char KEY_FOCUS_METER[];
+ static const char KEY_EDGE[];
+ static const char KEY_HUE[];
+ static const char KEY_SATURATION[];
+ static const char KEY_BRIGHTNESS[];
+ static const char KEY_CONTRAST[];
+ static const char KEY_ZSD_MODE[];
+ static const char KEY_SUPPORTED_ZSD_MODE[];
+ static const char KEY_AWB2PASS[];
+ static const char KEY_AF_LAMP_MODE [];
+
+ static const char KEY_STEREO_3D_PREVIEW_SIZE[];
+ static const char KEY_STEREO_3D_PICTURE_SIZE[];
+ static const char KEY_STEREO_3D_TYPE [];
+ static const char KEY_STEREO_3D_MODE [];
+ static const char KEY_STEREO_3D_IMAGE_FORMAT [];
+ //
+ static const char KEY_FPS_MODE[]; // normal,fix
+ //
+ static const char KEY_FOCUS_DRAW[]; // 0,1
+ //
+ static const char KEY_CAPTURE_MODE[]; // normal,bestshot,evbracketshot,burstshot,smileshot,panoramashot
+ static const char KEY_SUPPORTED_CAPTURE_MODES[];
+ static const char KEY_CAPTURE_PATH[];
+ static const char KEY_BURST_SHOT_NUM[];
+ //
+ static const char KEY_MATV_PREVIEW_DELAY[];
+ //
+ static const char KEY_PANORAMA_IDX[];
+ static const char KEY_PANORAMA_DIR[]; // right,left,top,bottom
+ //
+ static const char KEY_SENSOR_DEV[]; // main,sub,atv
+ static const char KEY_SUPPORTED_SENSOR_DEVS[];
+
+ // Values for KEY_EXPOSURE
+ static const char EXPOSURE_METER_SPOT[];
+ static const char EXPOSURE_METER_CENTER[];
+ static const char EXPOSURE_METER_AVERAGE[];
+
+ // Values for KEY_ISO_SPEED
+ static const char ISO_SPEED_AUTO[];
+ static const char ISO_SPEED_100[];
+ static const char ISO_SPEED_200[];
+ static const char ISO_SPEED_400[];
+ static const char ISO_SPEED_800[];
+ static const char ISO_SPEED_1600[];
+
+ // Values for KEY_FOCUS_METER
+ static const char FOCUS_METER_SPOT[];
+ static const char FOCUS_METER_MULTI[];
+
+ static const char KEY_CAMERA_MODE[];
+ // Values for KEY_CAMERA_MODE
+ static const int CAMERA_MODE_NORMAL;
+ static const int CAMERA_MODE_MTK_PRV;
+ static const int CAMERA_MODE_MTK_VDO;
+ static const int CAMERA_MODE_MTK_VT;
+
+ // Values for KEY_FPS_MODE
+ static const int FPS_MODE_NORMAL;
+ static const int FPS_MODE_FIX;
+
+ // Values for KEY_CAPTURE_MODE
+ static const char CAPTURE_MODE_PANORAMA_SHOT[];
+ static const char CAPTURE_MODE_BURST_SHOT[];
+ static const char CAPTURE_MODE_NORMAL[];
+ static const char CAPTURE_MODE_BEST_SHOT[];
+ static const char CAPTURE_MODE_EV_BRACKET_SHOT[];
+ static const char CAPTURE_MODE_SMILE_SHOT[];
+ static const char CAPTURE_MODE_AUTO_PANORAMA_SHOT[];
+ static const char CAPTURE_MODE_MOTION_TRACK_SHOT[];
+ static const char CAPTURE_MODE_MAV_SHOT[];
+ static const char CAPTURE_MODE_HDR_SHOT[];
+ static const char CAPTURE_MODE_ASD_SHOT[];
+ static const char CAPTURE_MODE_ZSD_SHOT[];
+ static const char CAPTURE_MODE_PANO_3D[];
+ static const char CAPTURE_MODE_SINGLE_3D[];
+ static const char CAPTURE_MODE_FACE_BEAUTY[];
+ static const char CAPTURE_MODE_CONTINUOUS_SHOT[];
+ static const char CAPTURE_MODE_MULTI_MOTION[];
+ static const char CAPTURE_MODE_GESTURE_SHOT[];
+
+ // Values for KEY_PANORAMA_DIR
+ static const char PANORAMA_DIR_RIGHT[];
+ static const char PANORAMA_DIR_LEFT[];
+ static const char PANORAMA_DIR_TOP[];
+ static const char PANORAMA_DIR_DOWN[];
+ //
+ static const int ENABLE;
+ static const int DISABLE;
+
+ // Values for KEY_EDGE, KEY_HUE, KEY_SATURATION, KEY_BRIGHTNESS, KEY_CONTRAST
+ static const char HIGH[];
+ static const char MIDDLE[];
+ static const char LOW[];
+
+ // Preview Internal Format.
+ static const char KEY_PREVIEW_INT_FORMAT[];
+
+ // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT,
+ // and KEY_VIDEO_FRAME_FORMAT
+ static const char PIXEL_FORMAT_YUV420I[]; // I420
+
+ /**
+ * @var PIXEL_FORMAT_YV12_GPU
+ *
+ * GPU YUV format:
+ *
+ * YV12 is a 4:2:0 YCrCb planar format comprised of a WxH Y plane followed
+ * by (W/2) x (H/2) Cr and Cb planes.
+ *
+ * This format assumes
+ * - an even width
+ * - an even height
+ * - a vertical stride equal to the height
+ * - a horizontal stride multiple of 32/16/16 pixels for y/cr/cb respectively
+ * i.e.
+ * y_stride = ALIGN(width, 32)
+ * c_stride = y_stride / 2
+ *
+ * y_size = y_stride * height
+ * c_size = c_stride * height / 2
+ * size = y_size + c_size * 2
+ * cr_offset = y_size
+ * cb_offset = y_size + c_size
+ *
+ * for example:
+ * width/height = 176x144
+ * y stride = 192x144
+ * cr stride = 96x72
+ * cb stride = 96x72
+ *
+ */
+ static const char PIXEL_FORMAT_YV12_GPU[];
+
+ /*
+ * YUV422 format, 1 plane (UYVY)
+ *
+ * Effective bits per pixel : 16
+ *
+ * Y sample at every pixel, U and V sampled at every second pixel horizontally on each line.
+ * A macropixel contains 2 pixels in 1 uint32_t.
+ *
+ */
+ static const char PIXEL_FORMAT_YUV422I_UYVY[];
+ //
+ static const char PIXEL_FORMAT_YUV422I_VYUY[];
+ static const char PIXEL_FORMAT_YUV422I_YVYU[];
+ static const char PIXEL_FORMAT_BAYER8[];
+ static const char PIXEL_FORMAT_BAYER10[];
+
+ /**
+ * @var KEY_BRIGHTNESS_VALUE
+ *
+ * This is a key string of brightness value, scaled by 10.
+ *
+ */
+ static const char KEY_BRIGHTNESS_VALUE[];
+
+ // ISP Operation mode for meta mode use
+ static const char KEY_ISP_MODE[];
+ // AF
+ static const char KEY_AF_X[];
+ static const char KEY_AF_Y[];
+ static const char KEY_FOCUS_ENG_MAX_STEP[];
+ static const char KEY_FOCUS_ENG_MIN_STEP[];
+ static const char KEY_FOCUS_ENG_BEST_STEP[];
+ static const char KEY_RAW_DUMP_FLAG[];
+ static const char KEY_PREVIEW_DUMP_RESOLUTION[];
+ static const int PREVIEW_DUMP_RESOLUTION_NORMAL;
+ static const int PREVIEW_DUMP_RESOLUTION_CROP;
+
+ // Values for effect
+ static const char EFFECT_SEPIA_BLUE[];
+ static const char EFFECT_SEPIA_GREEN[];
+ // Values for AWB
+ static const char WHITE_BALANCE_TUNGSTEN[];
+ // Eng
+ static const char ISO_SPEED_ENG[];
+ static const char KEY_FOCUS_ENG_MODE[]; // 0,1,2,3 (0: normal)
+ static const char KEY_FOCUS_ENG_STEP[];
+ static const char KEY_RAW_SAVE_MODE[]; // on, off
+ static const char KEY_RAW_PATH[];
+
+ // KEY for Continuous shot speed
+ static const char KEY_FAST_CONTINUOUS_SHOT[];
+
+ static const char KEY_VIDEO_HDR[];
+
+ static const char KEY_MAX_NUM_DETECTED_OBJECT[];
+
+ // KEY for c_shot indicator
+ static const char KEY_CSHOT_INDICATOR[];
+
+ // KEY for [Engineer Mode] Add new camera parameters for new requirements
+ static const char KEY_ENG_AE_ENABLE[];
+ static const char KEY_ENG_PREVIEW_SHUTTER_SPEED[];
+ static const char KEY_ENG_PREVIEW_SENSOR_GAIN[];
+ static const char KEY_ENG_PREVIEW_ISP_GAIN[];
+ static const char KEY_ENG_PREVIEW_AE_INDEX[];
+ static const char KEY_ENG_CAPTURE_SENSOR_GAIN[];
+ static const char KEY_ENG_CAPTURE_ISP_GAIN[];
+ static const char KEY_ENG_CAPTURE_SHUTTER_SPEED[];
+ static const char KEY_ENG_CAPTURE_ISO[];
+ static const char KEY_ENG_FLASH_DUTY_VALUE[];
+ static const char KEY_ENG_FLASH_DUTY_MIN[];
+ static const char KEY_ENG_FLASH_DUTY_MAX[];
+ static const char KEY_ENG_ZSD_ENABLE[];
+ static const char KEY_SENSOR_TYPE[];
+ static const char KEY_ENG_PREVIEW_FPS[];
+ static const char KEY_ENG_MSG[];
+ static const int KEY_ENG_FLASH_DUTY_DEFAULT_VALUE;
+ static const int KEY_ENG_FLASH_STEP_DEFAULT_VALUE;
+ static const char KEY_ENG_FLASH_STEP_MIN[];
+ static const char KEY_ENG_FLASH_STEP_MAX[];
+ static const char KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL[];
+ static const char KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MAX[];
+ static const char KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MIN[];
+ static const int KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MAX_DEFAULT;
+ static const int KEY_ENG_FOCUS_FULLSCAN_FRAME_INTERVAL_MIN_DEFAULT;
+ static const char KEY_ENG_PREVIEW_FRAME_INTERVAL_IN_US[];
+ static const char KEY_ENG_PARAMETER1[];
+ static const char KEY_ENG_PARAMETER2[];
+ static const char KEY_ENG_PARAMETER3[];
+
+ static const char KEY_ENG_SAVE_SHADING_TABLE[];
+ static const char KEY_ENG_SHADING_TABLE[];
+ static const int KEY_ENG_SHADING_TABLE_AUTO;
+ static const int KEY_ENG_SHADING_TABLE_LOW;
+ static const int KEY_ENG_SHADING_TABLE_MIDDLE;
+ static const int KEY_ENG_SHADING_TABLE_HIGH;
+ static const int KEY_ENG_SHADING_TABLE_TSF;
+
+ // KEY for [Engineer Mode] Add new camera parameters for ev calibration
+ static const char KEY_ENG_EV_CALBRATION_OFFSET_VALUE[];
+
+public: //// on/off => FIXME: should be replaced with TRUE[]
+ static const char ON[];
+ static const char OFF[];
+};
+
+}; // namespace android
+
+#endif
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 4810b7e..e2fb54d 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -52,6 +52,9 @@ enum player_type {
// argument to the 'test:' url in the setDataSource call.
TEST_PLAYER = 5,
DASH_PLAYER = 6,
+#ifdef MTK_HARDWARE
+ FM_AUDIO_PLAYER = 7,
+#endif
};
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 042c8c5..4f3c9bc 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -316,6 +316,9 @@ struct ACodec : public AHierarchicalStateMachine, public CodecBase {
#ifdef USE_SAMSUNG_COLORFORMAT
void setNativeWindowColorFormat(OMX_COLOR_FORMATTYPE &eNativeColorFormat);
#endif
+#ifdef MTK_HARDWARE
+ void setHalWindowColorFormat(OMX_COLOR_FORMATTYPE &eHalColorFormat);
+#endif
status_t cancelBufferToNativeWindow(BufferInfo *info);
status_t freeOutputBuffersNotOwnedByComponent();
BufferInfo *dequeueBufferFromNativeWindow();
diff --git a/include/media/stagefright/ACodec.h.orig b/include/media/stagefright/ACodec.h.orig
new file mode 100644
index 0000000..042c8c5
--- /dev/null
+++ b/include/media/stagefright/ACodec.h.orig
@@ -0,0 +1,501 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * This file was modified by Dolby Laboratories, Inc. The portions of the
+ * code that are surrounded by "DOLBY..." are copyrighted and
+ * licensed separately, as follows:
+ *
+ * (C) 2015 Dolby Laboratories, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef A_CODEC_H_
+
+#define A_CODEC_H_
+
+#include <stdint.h>
+#include <android/native_window.h>
+#include <media/hardware/MetadataBufferType.h>
+#include <media/IOMX.h>
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <OMX_Audio.h>
+
+#include <system/audio.h>
+
+#define TRACK_BUFFER_TIMING 0
+
+namespace android {
+
+struct ABuffer;
+struct MemoryDealer;
+struct DescribeColorFormatParams;
+
+struct ACodec : public AHierarchicalStateMachine, public CodecBase {
+ ACodec();
+
+ virtual void setNotificationMessage(const sp<AMessage> &msg);
+
+ void initiateSetup(const sp<AMessage> &msg);
+
+ virtual void initiateAllocateComponent(const sp<AMessage> &msg);
+ virtual void initiateConfigureComponent(const sp<AMessage> &msg);
+ virtual void initiateCreateInputSurface();
+ virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
+ virtual void initiateStart();
+ virtual void initiateShutdown(bool keepComponentAllocated = false);
+
+ virtual status_t setSurface(const sp<Surface> &surface);
+
+ virtual void signalFlush();
+ virtual void signalResume();
+
+ virtual void signalSetParameters(const sp<AMessage> &msg);
+ virtual void signalEndOfInputStream();
+ virtual void signalRequestIDRFrame();
+
+ // AHierarchicalStateMachine implements the message handling
+ virtual void onMessageReceived(const sp<AMessage> &msg) {
+ handleMessage(msg);
+ }
+
+ struct PortDescription : public CodecBase::PortDescription {
+ size_t countBuffers();
+ IOMX::buffer_id bufferIDAt(size_t index) const;
+ sp<ABuffer> bufferAt(size_t index) const;
+
+ private:
+ friend struct ACodec;
+
+ Vector<IOMX::buffer_id> mBufferIDs;
+ Vector<sp<ABuffer> > mBuffers;
+
+ PortDescription();
+ void addBuffer(IOMX::buffer_id id, const sp<ABuffer> &buffer);
+
+ DISALLOW_EVIL_CONSTRUCTORS(PortDescription);
+ };
+
+ static bool isFlexibleColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent);
+
+ // Returns 0 if configuration is not supported. NOTE: this is treated by
+ // some OMX components as auto level, and by others as invalid level.
+ static int /* OMX_VIDEO_AVCLEVELTYPE */ getAVCLevelFor(
+ int width, int height, int rate, int bitrate,
+ OMX_VIDEO_AVCPROFILETYPE profile = OMX_VIDEO_AVCProfileBaseline);
+
+protected:
+ virtual ~ACodec();
+ virtual status_t setupCustomCodec(
+ status_t err, const char *mime, const sp<AMessage> &msg);
+ virtual status_t GetVideoCodingTypeFromMime(
+ const char *mime, OMX_VIDEO_CODINGTYPE *codingType);
+
+ struct BaseState;
+ struct UninitializedState;
+ struct LoadedState;
+ struct LoadedToIdleState;
+ struct IdleToExecutingState;
+ struct ExecutingState;
+ struct OutputPortSettingsChangedState;
+ struct ExecutingToIdleState;
+ struct IdleToLoadedState;
+ struct FlushingState;
+ struct DeathNotifier;
+
+ enum {
+ kWhatSetup = 'setu',
+ kWhatOMXMessage = 'omx ',
+ // same as kWhatOMXMessage - but only used with
+ // handleMessage during OMX message-list handling
+ kWhatOMXMessageItem = 'omxI',
+ kWhatOMXMessageList = 'omxL',
+ kWhatInputBufferFilled = 'inpF',
+ kWhatOutputBufferDrained = 'outD',
+ kWhatShutdown = 'shut',
+ kWhatFlush = 'flus',
+ kWhatResume = 'resm',
+ kWhatDrainDeferredMessages = 'drai',
+ kWhatAllocateComponent = 'allo',
+ kWhatConfigureComponent = 'conf',
+ kWhatSetSurface = 'setS',
+ kWhatCreateInputSurface = 'cisf',
+ kWhatSetInputSurface = 'sisf',
+ kWhatSignalEndOfInputStream = 'eois',
+ kWhatStart = 'star',
+ kWhatRequestIDRFrame = 'ridr',
+ kWhatSetParameters = 'setP',
+ kWhatSubmitOutputMetadataBufferIfEOS = 'subm',
+ kWhatOMXDied = 'OMXd',
+ kWhatReleaseCodecInstance = 'relC',
+ };
+
+ enum {
+ kPortIndexInput = 0,
+ kPortIndexOutput = 1
+ };
+
+ enum {
+ kFlagIsSecure = 1,
+ kFlagPushBlankBuffersToNativeWindowOnShutdown = 2,
+ kFlagIsGrallocUsageProtected = 4,
+ kFlagPushBlankBuffersToNativeWindowOnSwitch = 1 << 7,
+ };
+
+ enum {
+ kVideoGrallocUsage = (GRALLOC_USAGE_HW_TEXTURE
+ | GRALLOC_USAGE_HW_COMPOSER
+ | GRALLOC_USAGE_EXTERNAL_DISP),
+ };
+
+ struct BufferInfo {
+ BufferInfo() : mCustomData(-1) {}
+ enum Status {
+ OWNED_BY_US,
+ OWNED_BY_COMPONENT,
+ OWNED_BY_UPSTREAM,
+ OWNED_BY_DOWNSTREAM,
+ OWNED_BY_NATIVE_WINDOW,
+ UNRECOGNIZED, // not a tracked buffer
+ };
+
+ static inline Status getSafeStatus(BufferInfo *info) {
+ return info == NULL ? UNRECOGNIZED : info->mStatus;
+ }
+
+ IOMX::buffer_id mBufferID;
+ Status mStatus;
+ unsigned mDequeuedAt;
+
+ sp<ABuffer> mData;
+ sp<GraphicBuffer> mGraphicBuffer;
+ int mFenceFd;
+ FrameRenderTracker::Info *mRenderInfo;
+ int mCustomData;
+
+ // The following field and 4 methods are used for debugging only
+ bool mIsReadFence;
+ // Store |fenceFd| and set read/write flag. Log error, if there is already a fence stored.
+ void setReadFence(int fenceFd, const char *dbg);
+ void setWriteFence(int fenceFd, const char *dbg);
+ // Log error, if the current fence is not a read/write fence.
+ void checkReadFence(const char *dbg);
+ void checkWriteFence(const char *dbg);
+ };
+
+ static const char *_asString(BufferInfo::Status s);
+ void dumpBuffers(OMX_U32 portIndex);
+
+ // If |fd| is non-negative, waits for fence with |fd| and logs an error if it fails. Returns
+ // the error code or OK on success. If |fd| is negative, it returns OK
+ status_t waitForFence(int fd, const char *dbg);
+
+#if TRACK_BUFFER_TIMING
+ struct BufferStats {
+ int64_t mEmptyBufferTimeUs;
+ int64_t mFillBufferDoneTimeUs;
+ };
+
+ KeyedVector<int64_t, BufferStats> mBufferStats;
+#endif
+
+ sp<AMessage> mNotify;
+
+ sp<UninitializedState> mUninitializedState;
+ sp<LoadedState> mLoadedState;
+ sp<LoadedToIdleState> mLoadedToIdleState;
+ sp<IdleToExecutingState> mIdleToExecutingState;
+ sp<ExecutingState> mExecutingState;
+ sp<OutputPortSettingsChangedState> mOutputPortSettingsChangedState;
+ sp<ExecutingToIdleState> mExecutingToIdleState;
+ sp<IdleToLoadedState> mIdleToLoadedState;
+ sp<FlushingState> mFlushingState;
+ sp<SkipCutBuffer> mSkipCutBuffer;
+
+ AString mComponentName;
+ uint32_t mFlags;
+ uint32_t mQuirks;
+ sp<IOMX> mOMX;
+ IOMX::node_id mNode;
+ sp<MemoryDealer> mDealer[2];
+
+ sp<ANativeWindow> mNativeWindow;
+ int mNativeWindowUsageBits;
+ sp<AMessage> mInputFormat;
+ sp<AMessage> mOutputFormat;
+ sp<AMessage> mBaseOutputFormat;
+
+ FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
+ Vector<BufferInfo> mBuffers[2];
+ bool mPortEOS[2];
+ status_t mInputEOSResult;
+
+ List<sp<AMessage> > mDeferredQueue;
+
+ bool mSentFormat;
+ bool mIsVideo;
+ bool mIsEncoder;
+ bool mEncoderComponent;
+ bool mComponentAllocByName;
+ bool mFatalError;
+ bool mShutdownInProgress;
+ bool mExplicitShutdown;
+
+ // If "mKeepComponentAllocated" we only transition back to Loaded state
+ // and do not release the component instance.
+ bool mKeepComponentAllocated;
+
+ int32_t mEncoderDelay;
+ int32_t mEncoderPadding;
+ int32_t mRotationDegrees;
+
+ bool mChannelMaskPresent;
+ int32_t mChannelMask;
+ unsigned mDequeueCounter;
+ MetadataBufferType mInputMetadataType;
+ MetadataBufferType mOutputMetadataType;
+ bool mLegacyAdaptiveExperiment;
+ int32_t mMetadataBuffersToSubmit;
+ size_t mNumUndequeuedBuffers;
+
+ int64_t mRepeatFrameDelayUs;
+ int64_t mMaxPtsGapUs;
+ float mMaxFps;
+
+ int64_t mTimePerFrameUs;
+ int64_t mTimePerCaptureUs;
+
+ bool mCreateInputBuffersSuspended;
+
+ bool mTunneled;
+
+ status_t setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode);
+ status_t allocateBuffersOnPort(OMX_U32 portIndex);
+ status_t freeBuffersOnPort(OMX_U32 portIndex);
+ virtual status_t freeBuffer(OMX_U32 portIndex, size_t i);
+
+ status_t handleSetSurface(const sp<Surface> &surface);
+ status_t setupNativeWindowSizeFormatAndUsage(
+ ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */);
+
+ status_t configureOutputBuffersFromNativeWindow(
+ OMX_U32 *nBufferCount, OMX_U32 *nBufferSize,
+ OMX_U32 *nMinUndequeuedBuffers);
+ status_t allocateOutputMetadataBuffers();
+ status_t submitOutputMetadataBuffer();
+ void signalSubmitOutputMetadataBufferIfEOS_workaround();
+ status_t allocateOutputBuffersFromNativeWindow();
+#ifdef USE_SAMSUNG_COLORFORMAT
+ void setNativeWindowColorFormat(OMX_COLOR_FORMATTYPE &eNativeColorFormat);
+#endif
+ status_t cancelBufferToNativeWindow(BufferInfo *info);
+ status_t freeOutputBuffersNotOwnedByComponent();
+ BufferInfo *dequeueBufferFromNativeWindow();
+
+ inline bool storingMetadataInDecodedBuffers() {
+ return mOutputMetadataType >= 0 && !mIsEncoder;
+ }
+
+ inline bool usingMetadataOnEncoderOutput() {
+ return mOutputMetadataType >= 0 && mIsEncoder;
+ }
+
+ BufferInfo *findBufferByID(
+ uint32_t portIndex, IOMX::buffer_id bufferID,
+ ssize_t *index = NULL);
+
+ virtual status_t setComponentRole(bool isEncoder, const char *mime);
+ virtual status_t configureCodec(const char *mime, const sp<AMessage> &msg);
+
+ status_t configureTunneledVideoPlayback(int32_t audioHwSync,
+ const sp<ANativeWindow> &nativeWindow);
+
+ status_t setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat,
+ bool usingNativeBuffers = false);
+
+ status_t setSupportedOutputFormat(bool getLegacyFlexibleFormat);
+
+ virtual status_t setupVideoDecoder(
+ const char *mime, const sp<AMessage> &msg, bool usingNativeBuffers);
+
+ virtual status_t setupVideoEncoder(
+ const char *mime, const sp<AMessage> &msg);
+
+ status_t setVideoFormatOnPort(
+ OMX_U32 portIndex,
+ int32_t width, int32_t height,
+ OMX_VIDEO_CODINGTYPE compressionFormat, float frameRate = -1.0);
+
+ typedef struct drcParams {
+ int32_t drcCut;
+ int32_t drcBoost;
+ int32_t heavyCompression;
+ int32_t targetRefLevel;
+ int32_t encodedTargetLevel;
+ } drcParams_t;
+
+ status_t setupAACCodec(
+ bool encoder,
+ int32_t numChannels, int32_t sampleRate, int32_t bitRate,
+ int32_t aacProfile, bool isADTS, int32_t sbrMode,
+ int32_t maxOutputChannelCount, const drcParams_t& drc,
+ int32_t pcmLimiterEnable, int32_t bitsPerSample = 16);
+
+ status_t setupAC3Codec(bool encoder, int32_t numChannels, int32_t sampleRate,
+ int32_t bitsPerSample = 16);
+
+ status_t setupEAC3Codec(bool encoder, int32_t numChannels, int32_t sampleRate,
+ int32_t bitsPerSample = 16);
+
+ status_t selectAudioPortFormat(
+ OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat);
+
+ status_t setupAMRCodec(bool encoder, bool isWAMR, int32_t bitRate);
+ status_t setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels);
+
+ status_t setupFlacCodec(
+ bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel);
+
+ status_t setupRawAudioFormat(
+ OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels);
+
+ status_t setPriority(int32_t priority);
+ status_t setOperatingRate(float rateFloat, bool isVideo);
+
+ status_t setMinBufferSize(OMX_U32 portIndex, size_t size);
+
+ status_t setupMPEG4EncoderParameters(const sp<AMessage> &msg);
+ status_t setupH263EncoderParameters(const sp<AMessage> &msg);
+ status_t setupAVCEncoderParameters(const sp<AMessage> &msg);
+ status_t setupHEVCEncoderParameters(const sp<AMessage> &msg);
+ status_t setupVPXEncoderParameters(const sp<AMessage> &msg);
+
+ status_t verifySupportForProfileAndLevel(int32_t profile, int32_t level);
+
+ status_t configureBitrate(
+ int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode);
+
+ virtual status_t setupErrorCorrectionParameters();
+
+ status_t initNativeWindow();
+
+ // Returns true iff all buffers on the given port have status
+ // OWNED_BY_US or OWNED_BY_NATIVE_WINDOW.
+ bool allYourBuffersAreBelongToUs(OMX_U32 portIndex);
+
+ bool allYourBuffersAreBelongToUs();
+
+ void waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
+
+ size_t countBuffersOwnedByComponent(OMX_U32 portIndex) const;
+ size_t countBuffersOwnedByNativeWindow() const;
+
+ void deferMessage(const sp<AMessage> &msg);
+ void processDeferredMessages();
+
+ void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+ // called when we have dequeued a buffer |buf| from the native window to track render info.
+ // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
+ // stored.
+ void updateRenderInfoForDequeuedBuffer(
+ ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
+
+ // Checks to see if any frames have rendered up until |until|, and to notify client
+ // (MediaCodec) of rendered frames up-until the frame pointed to by |until| or the first
+ // unrendered frame. These frames are removed from the render queue.
+ // If |dropIncomplete| is true, unrendered frames up-until |until| will be dropped from the
+ // queue, allowing all rendered framed up till then to be notified of.
+ // (This will effectively clear the render queue up-until (and including) |until|.)
+ // If |until| is NULL, or is not in the rendered queue, this method will check all frames.
+ void notifyOfRenderedFrames(
+ bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
+
+ void sendFormatChange(const sp<AMessage> &reply);
+ virtual status_t getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify);
+
+ void signalError(
+ OMX_ERRORTYPE error = OMX_ErrorUndefined,
+ status_t internalError = UNKNOWN_ERROR);
+
+ static bool describeDefaultColorFormat(DescribeColorFormatParams &describeParams);
+ static bool describeColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ DescribeColorFormatParams &describeParams);
+
+ status_t requestIDRFrame();
+ virtual status_t setParameters(const sp<AMessage> &params);
+
+ // Send EOS on input stream.
+ void onSignalEndOfInputStream();
+
+ virtual void setBFrames(OMX_VIDEO_PARAM_MPEG4TYPE *mpeg4type) {}
+ virtual void setBFrames(OMX_VIDEO_PARAM_AVCTYPE *h264type,
+ const int32_t iFramesInterval, const int32_t frameRate) {}
+
+ virtual status_t getVQZIPInfo(const sp<AMessage> &msg) {
+ return OK;
+ }
+ virtual bool canAllocateBuffer(OMX_U32 /* portIndex */) {
+ return false;
+ }
+ virtual void enableCustomAllocationMode(const sp<AMessage> &/* msg */) {}
+ virtual status_t allocateBuffer(
+ OMX_U32 portIndex, size_t bufSize, BufferInfo &info);
+
+ virtual status_t setDSModeHint(sp<AMessage>& msg,
+ OMX_U32 flags, int64_t timeUs) {
+ return UNKNOWN_ERROR;
+ }
+
+ virtual bool getDSModeHint(const sp<AMessage>& msg) {
+ return false;
+ }
+
+ sp<IOMXObserver> createObserver();
+#ifdef DOLBY_ENABLE
+ status_t setDolbyParameterOnEndpChange();
+ void setDolbyParameter(const sp<AMessage> &msg);
+ status_t setDolbyParameterOnProcessedAudio(const sp<AMessage> &params);
+#endif // DOLBY_END
+
+ status_t setupRawAudioFormatInternal(
+ OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels,
+ int32_t bitsPerSample);
+
+ DISALLOW_EVIL_CONSTRUCTORS(ACodec);
+};
+
+} // namespace android
+
+#endif // A_CODEC_H_
diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h
index 85ba920..a914dcd 100644
--- a/include/media/stagefright/ColorConverter.h
+++ b/include/media/stagefright/ColorConverter.h
@@ -79,6 +79,10 @@ struct ColorConverter {
status_t convertTIYUV420PackedSemiPlanar(
const BitmapParams &src, const BitmapParams &dst);
+#ifdef MTK_HARDWARE
+ status_t convertYUVToRGBHW(const BitmapParams &src, const BitmapParams &dst);
+#endif
+
ColorConverter(const ColorConverter &);
ColorConverter &operator=(const ColorConverter &);
};
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
index ef8cb23..b8819a7 100644
--- a/include/media/stagefright/MediaSync.h
+++ b/include/media/stagefright/MediaSync.h
@@ -19,6 +19,7 @@
#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>
+#include <gui/BufferQueue.h>
#include <media/AudioResamplerPublic.h>
#include <media/AVSyncSettings.h>
@@ -205,7 +206,7 @@ class MediaSync : public AHandler {
bool mHasAudio;
int64_t mNextBufferItemMediaUs;
- List<BufferItem> mBufferItems;
+ List<BufferQueue::BufferItem> mBufferItems;
sp<VideoFrameScheduler> mFrameScheduler;
// Keep track of buffers received from |mInput|. This is needed because
@@ -248,7 +249,7 @@ class MediaSync : public AHandler {
void onFrameAvailableFromInput();
// Send |bufferItem| to the output for rendering.
- void renderOneBufferItem_l(const BufferItem &bufferItem);
+ void renderOneBufferItem_l(const BufferQueue::BufferItem &bufferItem);
// This implements the onBufferReleased callback from IProducerListener.
// It gets called from an OutputListener.
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index cdffd88..f371fd2 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -313,6 +313,10 @@ struct OMXCodec : public MediaSource,
status_t allocateBuffersOnPort(OMX_U32 portIndex);
status_t allocateOutputBuffersFromNativeWindow();
+#ifdef MTK_HARDWARE
+ void setHalWindowColorFormat(OMX_COLOR_FORMATTYPE &eHalColorFormat);
+#endif
+
status_t queueBufferToNativeWindow(BufferInfo *info);
status_t cancelBufferToNativeWindow(BufferInfo *info);
BufferInfo* dequeueBufferFromNativeWindow();
diff --git a/include/media/stagefright/OMXCodec.h.orig b/include/media/stagefright/OMXCodec.h.orig
new file mode 100644
index 0000000..cdffd88
--- /dev/null
+++ b/include/media/stagefright/OMXCodec.h.orig
@@ -0,0 +1,433 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * This file was modified by Dolby Laboratories, Inc. The portions of the
+ * code that are surrounded by "DOLBY..." are copyrighted and
+ * licensed separately, as follows:
+ *
+ * (C) 2015 Dolby Laboratories, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef OMX_CODEC_H_
+
+#define OMX_CODEC_H_
+
+#include <android/native_window.h>
+#include <media/IOMX.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaSource.h>
+#include <utils/threads.h>
+
+#include <OMX_Audio.h>
+
+namespace android {
+
+struct MediaCodecInfo;
+class MemoryDealer;
+struct OMXCodecObserver;
+struct CodecProfileLevel;
+class SkipCutBuffer;
+
+struct OMXCodec : public MediaSource,
+ public MediaBufferObserver {
+ enum CreationFlags {
+ kPreferSoftwareCodecs = 1,
+ kIgnoreCodecSpecificData = 2,
+
+ // Request for software or hardware codecs. If request
+ // can not be fullfilled, Create() returns NULL.
+ kSoftwareCodecsOnly = 8,
+ kHardwareCodecsOnly = 16,
+
+ // Store meta data in video buffers
+ kStoreMetaDataInVideoBuffers = 32,
+
+ // Only submit one input buffer at one time.
+ kOnlySubmitOneInputBufferAtOneTime = 64,
+
+ // Enable GRALLOC_USAGE_PROTECTED for output buffers from native window
+ kEnableGrallocUsageProtected = 128,
+
+ // Secure decoding mode
+ kUseSecureInputBuffers = 256,
+ };
+ static sp<MediaSource> Create(
+ const sp<IOMX> &omx,
+ const sp<MetaData> &meta, bool createEncoder,
+ const sp<MediaSource> &source,
+ const char *matchComponentName = NULL,
+ uint32_t flags = 0,
+ const sp<ANativeWindow> &nativeWindow = NULL);
+
+ static void setComponentRole(
+ const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
+ const char *mime);
+
+ virtual status_t start(MetaData *params = NULL);
+ virtual status_t stop();
+
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ virtual status_t pause();
+
+ // from MediaBufferObserver
+ virtual void signalBufferReturned(MediaBuffer *buffer);
+
+ enum Quirks {
+ kNeedsFlushBeforeDisable = 1,
+ kWantsNALFragments = 2,
+ kRequiresLoadedToIdleAfterAllocation = 4,
+ kRequiresAllocateBufferOnInputPorts = 8,
+ kRequiresFlushCompleteEmulation = 16,
+ kRequiresAllocateBufferOnOutputPorts = 32,
+ kRequiresFlushBeforeShutdown = 64,
+ kDefersOutputBufferAllocation = 128,
+ kDecoderLiesAboutNumberOfChannels = 256,
+ kInputBufferSizesAreBogus = 512,
+ kSupportsMultipleFramesPerInputBuffer = 1024,
+ kRequiresLargerEncoderOutputBuffer = 2048,
+ kOutputBuffersAreUnreadable = 4096,
+ };
+
+ struct CodecNameAndQuirks {
+ String8 mName;
+ uint32_t mQuirks;
+ };
+
+ // for use by ACodec
+ static void findMatchingCodecs(
+ const char *mime,
+ bool createEncoder, const char *matchComponentName,
+ uint32_t flags,
+ Vector<CodecNameAndQuirks> *matchingCodecNamesAndQuirks);
+
+ static uint32_t getComponentQuirks(
+ const sp<MediaCodecInfo> &list);
+
+ static bool findCodecQuirks(const char *componentName, uint32_t *quirks);
+
+protected:
+ virtual ~OMXCodec();
+
+private:
+
+ // Make sure mLock is accessible to OMXCodecObserver
+ friend class OMXCodecObserver;
+
+ // Call this with mLock hold
+ void on_message(const omx_message &msg);
+
+ enum State {
+ DEAD,
+ LOADED,
+ LOADED_TO_IDLE,
+ IDLE_TO_EXECUTING,
+ EXECUTING,
+ EXECUTING_TO_IDLE,
+ IDLE_TO_LOADED,
+ RECONFIGURING,
+ PAUSING,
+ FLUSHING,
+ PAUSED,
+ ERROR
+ };
+
+ enum {
+ kPortIndexInput = 0,
+ kPortIndexOutput = 1
+ };
+
+ enum PortStatus {
+ ENABLED,
+ DISABLING,
+ DISABLED,
+ ENABLING,
+ SHUTTING_DOWN,
+ };
+
+ enum BufferStatus {
+ OWNED_BY_US,
+ OWNED_BY_COMPONENT,
+ OWNED_BY_NATIVE_WINDOW,
+ OWNED_BY_CLIENT,
+ };
+
+ struct BufferInfo {
+ IOMX::buffer_id mBuffer;
+ BufferStatus mStatus;
+ sp<IMemory> mMem;
+ size_t mSize;
+ void *mData;
+ MediaBuffer *mMediaBuffer;
+ };
+
+ struct CodecSpecificData {
+ size_t mSize;
+ uint8_t mData[1];
+ };
+
+ sp<IOMX> mOMX;
+ bool mOMXLivesLocally;
+ IOMX::node_id mNode;
+ uint32_t mQuirks;
+
+ // Flags specified in the creation of the codec.
+ uint32_t mFlags;
+
+ bool mIsEncoder;
+ bool mIsVideo;
+ char *mMIME;
+ char *mComponentName;
+ sp<MetaData> mOutputFormat;
+ sp<MediaSource> mSource;
+ Vector<CodecSpecificData *> mCodecSpecificData;
+ size_t mCodecSpecificDataIndex;
+
+ sp<MemoryDealer> mDealer[2];
+
+ State mState;
+ Vector<BufferInfo> mPortBuffers[2];
+ PortStatus mPortStatus[2];
+ bool mInitialBufferSubmit;
+ bool mSignalledEOS;
+ status_t mFinalStatus;
+ bool mNoMoreOutputData;
+ bool mOutputPortSettingsHaveChanged;
+ int64_t mSeekTimeUs;
+ ReadOptions::SeekMode mSeekMode;
+ int64_t mTargetTimeUs;
+ bool mOutputPortSettingsChangedPending;
+ sp<SkipCutBuffer> mSkipCutBuffer;
+
+ MediaBuffer *mLeftOverBuffer;
+
+ Mutex mLock;
+ Condition mAsyncCompletion;
+
+ bool mPaused;
+
+ sp<ANativeWindow> mNativeWindow;
+
+ // The index in each of the mPortBuffers arrays of the buffer that will be
+ // submitted to OMX next. This only applies when using buffers from a
+ // native window.
+ size_t mNextNativeBufferIndex[2];
+
+ // A list of indices into mPortStatus[kPortIndexOutput] filled with data.
+ List<size_t> mFilledBuffers;
+ Condition mBufferFilled;
+
+ // Used to record the decoding time for an output picture from
+ // a video encoder.
+ List<int64_t> mDecodingTimeList;
+
+ OMXCodec(const sp<IOMX> &omx, IOMX::node_id node,
+ uint32_t quirks, uint32_t flags,
+ bool isEncoder, const char *mime, const char *componentName,
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow);
+
+ void addCodecSpecificData(const void *data, size_t size);
+ void clearCodecSpecificData();
+
+ void setComponentRole();
+
+ void setAMRFormat(bool isWAMR, int32_t bitRate);
+
+ status_t setAACFormat(
+ int32_t numChannels, int32_t sampleRate, int32_t bitRate,
+ int32_t aacProfile, bool isADTS);
+
+ status_t setAC3Format(int32_t numChannels, int32_t sampleRate);
+
+ void setG711Format(int32_t sampleRate, int32_t numChannels);
+
+ status_t setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat);
+
+ void setVideoInputFormat(
+ const char *mime, const sp<MetaData>& meta);
+
+ status_t setupBitRate(int32_t bitRate);
+ status_t setupErrorCorrectionParameters();
+ status_t setupH263EncoderParameters(const sp<MetaData>& meta);
+ status_t setupMPEG4EncoderParameters(const sp<MetaData>& meta);
+ status_t setupAVCEncoderParameters(const sp<MetaData>& meta);
+ status_t findTargetColorFormat(
+ const sp<MetaData>& meta, OMX_COLOR_FORMATTYPE *colorFormat);
+
+ status_t isColorFormatSupported(
+ OMX_COLOR_FORMATTYPE colorFormat, int portIndex);
+
+ // If profile/level is set in the meta data, its value in the meta
+ // data will be used; otherwise, the default value will be used.
+ status_t getVideoProfileLevel(const sp<MetaData>& meta,
+ const CodecProfileLevel& defaultProfileLevel,
+ CodecProfileLevel& profileLevel);
+
+ status_t setVideoOutputFormat(
+ const char *mime, const sp<MetaData>& meta);
+
+ void setImageOutputFormat(
+ OMX_COLOR_FORMATTYPE format, OMX_U32 width, OMX_U32 height);
+
+ void setJPEGInputFormat(
+ OMX_U32 width, OMX_U32 height, OMX_U32 compressedSize);
+
+ void setMinBufferSize(OMX_U32 portIndex, OMX_U32 size);
+
+ void setRawAudioFormat(
+ OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels);
+
+ status_t allocateBuffers();
+ status_t allocateBuffersOnPort(OMX_U32 portIndex);
+ status_t allocateOutputBuffersFromNativeWindow();
+
+ status_t queueBufferToNativeWindow(BufferInfo *info);
+ status_t cancelBufferToNativeWindow(BufferInfo *info);
+ BufferInfo* dequeueBufferFromNativeWindow();
+
+ status_t freeBuffersOnPort(
+ OMX_U32 portIndex, bool onlyThoseWeOwn = false);
+
+ status_t freeBuffer(OMX_U32 portIndex, size_t bufIndex);
+
+ bool drainInputBuffer(IOMX::buffer_id buffer);
+ void fillOutputBuffer(IOMX::buffer_id buffer);
+ bool drainInputBuffer(BufferInfo *info);
+ void fillOutputBuffer(BufferInfo *info);
+
+ void drainInputBuffers();
+ void fillOutputBuffers();
+
+ bool drainAnyInputBuffer();
+ BufferInfo *findInputBufferByDataPointer(void *ptr);
+ BufferInfo *findEmptyInputBuffer();
+
+ // Returns true iff a flush was initiated and a completion event is
+ // upcoming, false otherwise (A flush was not necessary as we own all
+ // the buffers on that port).
+ // This method will ONLY ever return false for a component with quirk
+ // "kRequiresFlushCompleteEmulation".
+ bool flushPortAsync(OMX_U32 portIndex);
+
+ void disablePortAsync(OMX_U32 portIndex);
+ status_t enablePortAsync(OMX_U32 portIndex);
+
+ static size_t countBuffersWeOwn(const Vector<BufferInfo> &buffers);
+ static bool isIntermediateState(State state);
+
+ void onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ void onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data);
+ void onStateChange(OMX_STATETYPE newState);
+ void onPortSettingsChanged(OMX_U32 portIndex);
+
+ void setState(State newState);
+
+ status_t init();
+ void initOutputFormat(const sp<MetaData> &inputFormat);
+ status_t initNativeWindow();
+
+ void initNativeWindowCrop();
+
+ void dumpPortStatus(OMX_U32 portIndex);
+
+ status_t configureCodec(const sp<MetaData> &meta);
+
+ status_t waitForBufferFilled_l();
+
+ status_t resumeLocked(bool drainInputBuf);
+ int64_t getDecodingTimeUs();
+
+ status_t parseHEVCCodecSpecificData(
+ const void *data, size_t size,
+ unsigned *profile, unsigned *level);
+ status_t parseAVCCodecSpecificData(
+ const void *data, size_t size,
+ unsigned *profile, unsigned *level);
+
+ status_t stopOmxComponent_l();
+
+ OMXCodec(const OMXCodec &);
+ OMXCodec &operator=(const OMXCodec &);
+#ifdef DOLBY_ENABLE
+ static uint32_t getDolbyComponentQuirks(const sp<MediaCodecInfo> &info);
+ void setDolbyProcessedAudio();
+#endif // DOLBY_END
+};
+
+struct CodecCapabilities {
+ enum {
+ kFlagSupportsAdaptivePlayback = 1 << 0,
+ };
+
+ String8 mComponentName;
+ Vector<CodecProfileLevel> mProfileLevels;
+ Vector<OMX_U32> mColorFormats;
+ uint32_t mFlags;
+};
+
+// Return a vector of componentNames with supported profile/level pairs
+// supporting the given mime type, if queryDecoders==true, returns components
+// that decode content of the given type, otherwise returns components
+// that encode content of the given type.
+// profile and level indications only make sense for h.263, mpeg4 and avc
+// video.
+// If hwCodecOnly==true, only returns hardware-based components, software and
+// hardware otherwise.
+// The profile/level values correspond to
+// OMX_VIDEO_H263PROFILETYPE, OMX_VIDEO_MPEG4PROFILETYPE,
+// OMX_VIDEO_AVCPROFILETYPE, OMX_VIDEO_H263LEVELTYPE, OMX_VIDEO_MPEG4LEVELTYPE
+// and OMX_VIDEO_AVCLEVELTYPE respectively.
+
+status_t QueryCodecs(
+ const sp<IOMX> &omx,
+ const char *mimeType, bool queryDecoders, bool hwCodecOnly,
+ Vector<CodecCapabilities> *results);
+
+status_t QueryCodecs(
+ const sp<IOMX> &omx,
+ const char *mimeType, bool queryDecoders,
+ Vector<CodecCapabilities> *results);
+
+status_t QueryCodec(
+ const sp<IOMX> &omx,
+ const char *componentName, const char *mime,
+ bool isEncoder,
+ CodecCapabilities *caps);
+
+status_t getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]);
+
+} // namespace android
+
+#endif // OMX_CODEC_H_
diff --git a/include/media/stagefright/dpframework/DpBlitStream.h b/include/media/stagefright/dpframework/DpBlitStream.h
new file mode 100644
index 0000000..57fd482
--- /dev/null
+++ b/include/media/stagefright/dpframework/DpBlitStream.h
@@ -0,0 +1,243 @@
+#ifndef __DP_BLIT_STREAM_H__
+#define __DP_BLIT_STREAM_H__
+
+#include "DpDataType.h"
+
+enum
+{
+ DP_BLIT_GENERAL_USER = 0,
+ DP_BLIT_HWC0 = 0,
+ DP_BLIT_GPU = 1,
+ DP_BLIT_HWC1 = 1,
+ DP_BLIT_HWC2 = 2,
+ DP_BLIT_HWC3 = 3
+};
+
+
+class DpBlitStream
+{
+public:
+ static bool queryHWSupport(uint32_t srcWidth,
+ uint32_t srcHeight,
+ uint32_t dstWidth,
+ uint32_t dstHeight,
+ int32_t Orientation = 0);
+
+ DpBlitStream();
+
+ ~DpBlitStream();
+
+ enum DpOrientation
+ {
+ ROT_0 = 0x00000000,
+ FLIP_H = 0x00000001,
+ FLIP_V = 0x00000002,
+ ROT_90 = 0x00000004,
+ ROT_180 = FLIP_H|FLIP_V,
+ ROT_270 = ROT_180|ROT_90,
+ ROT_INVALID = 0x80
+ };
+
+ DP_STATUS_ENUM setSrcBuffer(void *pVABase,
+ uint32_t size);
+
+ DP_STATUS_ENUM setSrcBuffer(void **pVAList,
+ uint32_t *pSizeList,
+ uint32_t planeNumber);
+
+ // VA + MVA address interface
+ DP_STATUS_ENUM setSrcBuffer(void** pVAddrList,
+ void** pMVAddrList,
+ uint32_t *pSizeList,
+ uint32_t planeNumber);
+
+ // for ION file descriptor
+ DP_STATUS_ENUM setSrcBuffer(int32_t fileDesc,
+ uint32_t *sizeList,
+ uint32_t planeNumber);
+
+ DP_STATUS_ENUM setSrcConfig(int32_t width,
+ int32_t height,
+ DpColorFormat format,
+ DpInterlaceFormat field = eInterlace_None,
+ DpRect *pROI = 0);
+
+ DP_STATUS_ENUM setSrcConfig(int32_t width,
+ int32_t height,
+ int32_t yPitch,
+ int32_t uvPitch,
+ DpColorFormat format,
+ DP_PROFILE_ENUM profile = DP_PROFILE_BT601,
+ DpInterlaceFormat field = eInterlace_None,
+ DpRect *pROI = 0,
+ DpSecure secure = DP_SECURE_NONE,
+ bool doFlush = true);
+
+ DP_STATUS_ENUM setDstBuffer(void *pVABase,
+ uint32_t size);
+
+ DP_STATUS_ENUM setDstBuffer(void **pVABaseList,
+ uint32_t *pSizeList,
+ uint32_t planeNumber);
+
+ // VA + MVA address interface
+ DP_STATUS_ENUM setDstBuffer(void** pVABaseList,
+ void** pMVABaseList,
+ uint32_t *pSizeList,
+ uint32_t planeNumber);
+
+ // for ION file descriptor
+ DP_STATUS_ENUM setDstBuffer(int32_t fileDesc,
+ uint32_t *pSizeList,
+ uint32_t planeNumber);
+
+ DP_STATUS_ENUM setDstConfig(int32_t width,
+ int32_t height,
+ DpColorFormat format,
+ DpInterlaceFormat field = eInterlace_None,
+ DpRect *pROI = 0);
+
+ DP_STATUS_ENUM setDstConfig(int32_t width,
+ int32_t height,
+ int32_t yPitch,
+ int32_t uvPitch,
+ DpColorFormat format,
+ DP_PROFILE_ENUM profile = DP_PROFILE_BT601,
+ DpInterlaceFormat field = eInterlace_None,
+ DpRect *pROI = 0,
+ DpSecure secure = DP_SECURE_NONE,
+ bool doFlush = true);
+
+ DP_STATUS_ENUM setRotate(int32_t rotation)
+ {
+ if ((m_rotation != rotation) ||
+ (mRotate != rotation))
+ {
+ m_rotation = rotation;
+ mRotate = rotation;
+ m_frameChange = true;
+ }
+
+ return DP_STATUS_RETURN_SUCCESS;
+ }
+
+ //Compatible to 89
+ DP_STATUS_ENUM setFlip(int flip)
+ {
+ if (mFlip != flip)
+ {
+ mFlip = flip;
+ m_flipStatus = ((flip!= 0)? true: false);
+ m_frameChange = true;
+ }
+
+ return DP_STATUS_RETURN_SUCCESS;
+ }
+
+ DP_STATUS_ENUM setOrientation(uint32_t transform);
+
+ DP_STATUS_ENUM setTdshp(int gain)
+ {
+ if (mTdshp != gain)
+ {
+ mTdshp = gain;
+ m_frameChange = true;
+ }
+
+ return DP_STATUS_RETURN_SUCCESS;
+ }
+
+ uint32_t getPqID();
+
+ DP_STATUS_ENUM setPQParameter(const DpPqParam &pParam);
+
+ DP_STATUS_ENUM setDither(bool enDither)
+ {
+ if (m_ditherStatus != enDither)
+ {
+ m_ditherStatus = enDither;
+ m_frameChange = true;
+ }
+
+ return DP_STATUS_RETURN_SUCCESS;
+ }
+
+ DP_STATUS_ENUM setAdaptiveLuma(bool enADL)
+ {
+ m_adaptiveLuma = enADL;
+
+ return DP_STATUS_RETURN_SUCCESS;
+ }
+
+ DP_STATUS_ENUM setUser(uint32_t eID = 0);
+
+
+ DP_STATUS_ENUM invalidate();
+
+ DP_STATUS_ENUM pq_process();
+
+ // for dump register
+ void enableDumpReg(unsigned int flags){mDumpRegFlags = flags;}
+
+private:
+ DpStream *m_pStream;
+ DpChannel *m_pChannel;
+ int32_t m_channelID;
+ DpBasicBufferPool *m_pSrcPool;
+ DpBasicBufferPool *m_pDstPool;
+ int32_t m_srcBuffer;
+ int32_t m_srcWidth;
+ int32_t m_srcHeight;
+ int32_t m_srcYPitch;
+ int32_t m_srcUVPitch;
+ DpColorFormat m_srcFormat;
+ DP_PROFILE_ENUM m_srcProfile;
+ DpSecure m_srcSecure;
+ bool m_srcFlush;
+ int32_t m_dstBuffer;
+ int32_t m_dstWidth;
+ int32_t m_dstHeight;
+ int32_t m_dstYPitch;
+ int32_t m_dstUVPitch;
+ DpColorFormat m_dstFormat;
+ DP_PROFILE_ENUM m_dstProfile;
+ DpSecure m_dstSecure;
+ bool m_dstFlush;
+ DpStream *m_pPqStream;
+ DpChannel *m_pPqChannel;
+ DpAutoBufferPool *m_pPqPool;
+ int32_t m_pqBuffer;
+ int32_t m_cropXStart;
+ int32_t m_cropYStart;
+ int32_t m_cropWidth;
+ int32_t m_cropHeight;
+ int32_t m_cropSubPixelX;
+ int32_t m_cropSubPixelY;
+ int32_t m_targetXStart;
+ int32_t m_targetYStart;
+ int32_t m_rotation;
+ bool m_frameChange;
+ bool m_flipStatus;
+ bool m_ditherStatus;
+ bool m_adaptiveLuma;
+ uint32_t m_userID;
+ DpPqConfig m_PqConfig;
+ uint32_t m_PqID;
+ uint32_t m_engFlag;
+ //Compatible to 89
+ int mRotate;
+ int mFlip;
+ int mTdshp;
+
+ DpStream *mStream;
+ DpChannel *mChannel;
+ DpBufferPool *mSrcPool;
+ DpBufferPool *mDstPool;
+ DpPortOption *mSrcPort;
+ DpPortOption *mDstPort;
+ int mSrcBufferId;
+ int mDstBufferId;
+ unsigned int mDumpRegFlags;
+};
+
+#endif // __DP_BLIT_STREAM_H__
diff --git a/include/media/stagefright/dpframework/DpDataType.h b/include/media/stagefright/dpframework/DpDataType.h
new file mode 100644
index 0000000..d858e97
--- /dev/null
+++ b/include/media/stagefright/dpframework/DpDataType.h
@@ -0,0 +1,450 @@
+#ifndef __DP_DATA_TYPE_H__
+#define __DP_DATA_TYPE_H__
+
+#ifndef __KERNEL__
+#include <stdio.h>
+#include <stdlib.h>
+#include <assert.h>
+#include <string.h>
+#include <math.h>
+#endif
+
+#ifndef MAX
+ #define MAX(x, y) ((x) >= (y))? (x): (y)
+#endif // MAX
+
+#ifndef MIN
+ #define MIN(x, y) ((x) <= (y))? (x): (y)
+#endif // MIN
+
+#ifndef __KERNEL__
+class DpStream;
+class DpChannel;
+
+class DpBasicBufferPool;
+class DpAutoBufferPool;
+class DpCommand;
+class DpBufferPool;
+#endif
+
+typedef unsigned long long DpJobID;
+typedef int DpEngineType;
+
+typedef enum DP_STATUS_ENUM
+{
+ DP_STATUS_ABORTED_BY_USER = 4,
+ DP_STATUS_ALL_TEST_DONE = 3,
+ DP_STATUS_ALL_TPIPE_DONE = 2,
+ DP_STATUS_BUFFER_DONE = 1,
+ DP_STATUS_RETURN_SUCCESS = 0,
+ DP_STATUS_INVALID_PARAX = -1,
+ DP_STATUS_INVALID_PORT = -2,
+ DP_STATUS_INVALID_PATH = -3,
+ DP_STATUS_INVALID_FILE = -4,
+ DP_STATUS_INVALID_CHANNEL = -5,
+ DP_STATUS_INVALID_BUFFER = -6,
+ DP_STATUS_INVALID_STATE = -7,
+ DP_STATUS_INVALID_ENGINE = -8,
+ DP_STATUS_INVALID_FORMAT = -9,
+ DP_STATUS_INVALID_X_INPUT = -10,
+ DP_STATUS_INVALID_Y_INPUT = -11,
+ DP_STATUS_INVALID_X_OUTPUT = -12,
+ DP_STATUS_INVALID_Y_OUTPUT = -13,
+ DP_STATUS_INVALID_X_ALIGN = -14,
+ DP_STATUS_INVALID_Y_ALIGN = -15,
+ DP_STATUS_INVALID_WIDTH = -16,
+ DP_STATUS_INVALID_HEIGHT = -17,
+ DP_STATUS_INVALID_CROP = -18,
+ DP_STATUS_INVALID_ANGLE = -19,
+ DP_STATUS_INVALID_EVENT = -20,
+ DP_STATUS_INVALID_OPCODE = -21,
+ DP_STATUS_CAN_NOT_MERGE = -22,
+ DP_STATUS_OUT_OF_MEMORY = -23,
+ DP_STATUS_BUFFER_FULL = -24,
+ DP_STATUS_BUFFER_EMPTY = -25,
+ DP_STATUS_OPERATION_FAILED = -26,
+ DP_STATUS_OVER_MAX_BRANCH = -27,
+ DP_STATUS_OVER_MAX_ENGINE = -28,
+ DP_STATUS_OVER_MAX_BACKUP = -29,
+ DP_STATUS_SCHEDULE_ERROR = -30,
+ DP_STATUS_OVER_MAX_WIDTH = -31,
+ DP_STATUS_OVER_MAX_HEIGHT = -32,
+ DP_STATUS_LEFT_EDGE_ERROR = -33,
+ DP_STATUS_RIGHT_EDGE_ERROR = -34,
+ DP_STATUS_TOP_EDGE_ERROR = -35,
+ DP_STATUS_BOTTOM_EDGE_ERROR = -36,
+ DP_STATUS_X_LESS_THAN_LAST = -37,
+ DP_STATUS_Y_LESS_THAN_LAST = -38,
+ DP_STATUS_UNWANTED_X_CAL = -39,
+ DP_STATUS_LOSS_OVER_WIDTH = -40,
+ DP_STATUS_LOSS_OVER_HEIGHT = -41,
+ DP_STATUS_X_ALIGN_ERROR = -42,
+ DP_STATUS_Y_ALIGN_ERROR = -43,
+ DP_STATUS_X_OUT_OVERLAP = -44,
+ DP_STATUS_Y_OUT_OVERLAP = -45,
+ DP_STATUS_BACK_LE_FORWARD = -46,
+ DP_STATUS_UNKNOWN_ERROR = -47,
+} DP_STATUS_ENUM;
+
+
+typedef enum DP_MEMORY_ENUM
+{
+ DP_MEMORY_VA,
+ DP_MEMORY_ION,
+ DP_MEMORY_PHY,
+ DP_MEMORY_MVA
+} DP_MEMORY_ENUM;
+
+typedef struct DpJPEGEnc_Config_st // for JPEG port only
+{
+ int32_t fileDesc;
+ uint32_t size;
+ uint32_t fQuality;
+ uint32_t soi_en;
+ void *memSWAddr[3];
+} DpJPEGEnc_Config;
+
+typedef struct DpVEnc_Config // for VENC port only
+{
+ /* Venc Modify + */
+ unsigned long rVencDrvHandle;
+ /* Venc Modify - */
+ uint32_t memYUVMVAAddr[3];
+ uint32_t memYUVMVASize[3];
+ void *memYUVSWAddr[3];
+ void *memOutputSWAddr[3];
+
+ uint32_t* pNumPABuffer;
+ uint32_t* pPABuffer;
+ uint64_t* pConfigFrameCount;
+ uint64_t* pDequeueFrameCount;
+ DpCommand* pVEncCommander;
+} DpVEnc_Config;
+
+
+#ifndef __KERNEL__
+class DpRect
+{
+public:
+
+ enum
+ {
+ eINVALID_VALUE = -1,
+ eINITIAL_VALUE = 0 //TBD, why to set as "0"?
+ };
+
+ inline DpRect(void)
+ : x(eINITIAL_VALUE), sub_x(eINITIAL_VALUE),
+ y(eINITIAL_VALUE), sub_y(eINITIAL_VALUE),
+ w(eINITIAL_VALUE), h(eINITIAL_VALUE)
+ {}
+
+ inline DpRect(int32_t in_x, int32_t in_y, int32_t in_w, int32_t in_h,
+ int32_t in_sub_x = 0, int32_t in_sub_y = 0)
+ : x(in_x),
+ sub_x(in_sub_x),
+ y(in_y),
+ sub_y(in_sub_y),
+ w(in_w),
+ h(in_h)
+ {}
+
+ inline DpRect(const DpRect& rt)
+ : x(rt.x),
+ sub_x(rt.sub_x),
+ y(rt.y),
+ sub_y(rt.sub_y),
+ w(rt.w),
+ h(rt.h)
+ {}
+
+ ~DpRect(void) {}
+
+ inline DpRect& operator= (const DpRect rval)
+ {
+ x = rval.x;
+ sub_x = rval.sub_x;
+ y = rval.y;
+ sub_y = rval.sub_y;
+ w = rval.w;
+ h = rval.h;
+ return *this;
+ }
+
+ int32_t x;
+ int32_t sub_x;
+ int32_t y;
+ int32_t sub_y;
+ int32_t w;
+ int32_t h;
+};
+#endif
+
+typedef enum DP_PROFILE_ENUM
+{
+ DP_PROFILE_BT601, //Limited range
+ DP_PROFILE_BT709,
+ DP_PROFILE_JPEG,
+ DP_PROFILE_FULL_BT601 = DP_PROFILE_JPEG
+} DP_PROFILE_ENUM;
+
+
+typedef enum DP_STREAM_ID_ENUM
+{
+ DP_BLITSTREAM = 0x10000000,
+ DP_FRAGSTREAM = 0x20000000,
+ DP_ISPSTREAM = 0x30000000,
+ DP_ASYNCBLITSTREAM = 0x40000000,
+ DP_UNKNOWN_STREAM = 0xF0000000,
+} DP_STREAM_ID_ENUM;
+
+typedef enum DP_MEDIA_TYPE_ENUM
+{
+ MEDIA_UNKNOWN,
+ MEDIA_VIDEO,
+ MEDIA_PICTURE,
+ MEDIA_ISP_PREVIEW
+} DP_MEDIA_TYPE_ENUM;
+
+typedef struct
+{
+ uint32_t id;
+ uint32_t timeStamp;
+ uint32_t reserved[28]; // padding and reserved
+} DpVideoParam;
+
+typedef struct
+{
+ bool withHist;
+ uint32_t info[20];
+ uint32_t reserved[9]; // padding and reserved
+} DpImageParam;
+
+struct DpPqParam {
+ bool enable;
+ DP_MEDIA_TYPE_ENUM scenario;
+
+ union {
+ DpVideoParam video;
+ DpImageParam image;
+ } u;
+};
+
+struct DpPqConfig {
+ uint32_t enSharp;
+ uint32_t enDC;
+ uint32_t enColor;
+};
+
+
+//FMT GROUP , 0-RGB , 1-YUV , 2-Bayer raw , 3-compressed format
+#define DP_COLORFMT_PACK(VIDEO, PLANE, COPLANE, HFACTOR, VFACTOR, BITS, GROUP ,SWAP_ENABLE, UNIQUEID) \
+ ((VIDEO << 27) | \
+ (PLANE << 24) | \
+ (COPLANE << 22) | \
+ (HFACTOR << 20) | \
+ (VFACTOR << 18) | \
+ (BITS << 8) | \
+ (GROUP << 6) | \
+ (SWAP_ENABLE << 5) | \
+ (UNIQUEID << 0))
+
+#define DP_COLOR_GET_UFP_ENABLE(color) ((0x20000000 & color) >> 29)
+#define DP_COLOR_GET_INTERLACED_MODE(color) ((0x10000000 & color) >> 28)
+#define DP_COLOR_GET_BLOCK_MODE(color) ((0x08000000 & color) >> 27)
+#define DP_COLOR_GET_PLANE_COUNT(color) ((0x07000000 & color) >> 24)
+#define DP_COLOR_IS_UV_COPLANE(color) ((0x00C00000 & color) >> 22)
+#define DP_COLOR_GET_H_SUBSAMPLE(color) ((0x00300000 & color) >> 20)
+#define DP_COLOR_GET_V_SUBSAMPLE(color) ((0x000C0000 & color) >> 18)
+#define DP_COLOR_BITS_PER_PIXEL(color) ((0x0003FF00 & color) >> 8)
+#define DP_COLOR_GET_COLOR_GROUP(color) ((0x000000C0 & color) >> 6)
+#define DP_COLOR_GET_SWAP_ENABLE(color) ((0x00000020 & color) >> 5)
+#define DP_COLOR_GET_UNIQUE_ID(color) ((0x0000001F & color) >> 0)
+#define DP_COLOR_GET_HW_FORMAT(color) ((0x0000001F & color) >> 0)
+
+typedef enum DP_COLOR_ENUM
+{
+ DP_COLOR_UNKNOWN = 0,
+ DP_COLOR_FULLG8 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 8, 3, 0, 20),
+ DP_COLOR_FULLG10 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 10, 3, 0, 21),
+ DP_COLOR_FULLG12 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 12, 3, 0, 22),
+ DP_COLOR_FULLG14 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 14, 3, 0, 26),
+ DP_COLOR_UFO10 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 10, 3, 0, 27),
+
+ DP_COLOR_BAYER8 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 8, 2, 0, 20),
+ DP_COLOR_BAYER10 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 10, 2, 0, 21),
+ DP_COLOR_BAYER12 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 12, 2, 0, 22),
+
+ // Unified format
+ DP_COLOR_GREY = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 8, 1, 0, 7),
+
+ DP_COLOR_RGB565 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 16, 0, 0, 0),
+ DP_COLOR_BGR565 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 16, 0, 1, 0),
+ DP_COLOR_RGB888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 0, 1, 1),
+ DP_COLOR_BGR888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 0, 0, 1),
+ DP_COLOR_RGBA8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 1, 2),
+ DP_COLOR_BGRA8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 2),
+ DP_COLOR_ARGB8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 1, 3),
+ DP_COLOR_ABGR8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 3),
+
+ DP_COLOR_UYVY = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 0, 4),
+ DP_COLOR_VYUY = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 1, 4),
+ DP_COLOR_YUYV = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 0, 5),
+ DP_COLOR_YVYU = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 1, 5),
+
+ DP_COLOR_I420 = DP_COLORFMT_PACK(0, 3, 0, 1, 1, 8, 1, 0, 8),
+ DP_COLOR_YV12 = DP_COLORFMT_PACK(0, 3, 0, 1, 1, 8, 1, 1, 8),
+ DP_COLOR_I422 = DP_COLORFMT_PACK(0, 3, 0, 1, 0, 8, 1, 0, 9),
+ DP_COLOR_YV16 = DP_COLORFMT_PACK(0, 3, 0, 1, 0, 8, 1, 1, 9),
+ DP_COLOR_I444 = DP_COLORFMT_PACK(0, 3, 0, 0, 0, 8, 1, 0, 10),
+ DP_COLOR_YV24 = DP_COLORFMT_PACK(0, 3, 0, 0, 0, 8, 1, 1, 10),
+
+ DP_COLOR_NV12 = DP_COLORFMT_PACK(0, 2, 1, 1, 1, 8, 1, 0, 12),
+ DP_COLOR_NV21 = DP_COLORFMT_PACK(0, 2, 1, 1, 1, 8, 1, 1, 12),
+ DP_COLOR_NV16 = DP_COLORFMT_PACK(0, 2, 1, 1, 0, 8, 1, 0, 13),
+ DP_COLOR_NV61 = DP_COLORFMT_PACK(0, 2, 1, 1, 0, 8, 1, 1, 13),
+ DP_COLOR_NV24 = DP_COLORFMT_PACK(0, 2, 1, 0, 0, 8, 1, 0, 14),
+ DP_COLOR_NV42 = DP_COLORFMT_PACK(0, 2, 1, 0, 0, 8, 1, 1, 14),
+
+ // Mediatek proprietary format
+ DP_COLOR_420_BLKP_UFO = DP_COLORFMT_PACK(5, 2, 1, 1, 1, 256, 1, 0, 12),//Frame mode + Block mode
+ DP_COLOR_420_BLKP = DP_COLORFMT_PACK(1, 2, 1, 1, 1, 256, 1, 0, 12),//Frame mode + Block mode
+ DP_COLOR_420_BLKI = DP_COLORFMT_PACK(3, 2, 1, 1, 1, 256, 1, 0, 12),//Field mode + Block mode
+ DP_COLOR_422_BLKP = DP_COLORFMT_PACK(1, 1, 0, 1, 0, 512, 1, 0, 4), //Frame mode
+
+ DP_COLOR_PARGB8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 26),
+ DP_COLOR_XARGB8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 27),
+ DP_COLOR_PABGR8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 28),
+ DP_COLOR_XABGR8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 29),
+
+ DP_COLOR_IYU2 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 1, 0, 25),
+ DP_COLOR_YUV444 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 1, 0, 30),
+// DP_COLOR_YUV422I = DP_COLORFMT_PACK(1, 0, 1, 0, 16, 1, 41),//Dup to DP_COLOR_YUYV
+// DP_COLOR_Y800 = DP_COLORFMT_PACK(1, 0, 1, 0, 8, 1, 42),//Dup to DP_COLOR_GREY
+// DP_COLOR_COMPACT_RAW1 = DP_COLORFMT_PACK(1, 0, 1, 0, 10, 2, 43),//Dup to Bayer10
+// DP_COLOR_420_3P_YVU = DP_COLORFMT_PACK(3, 0, 1, 1, 8, 1, 44),//Dup to DP_COLOR_YV12
+} DP_COLOR_ENUM;
+
+// Legacy for 6589 compatible
+typedef DP_COLOR_ENUM DpColorFormat;
+
+#define eYUV_420_3P DP_COLOR_I420
+#define eYUV_420_2P_YUYV DP_COLOR_YUYV
+#define eYUV_420_2P_UYVY DP_COLOR_UYVY
+#define eYUV_420_2P_YVYU DP_COLOR_YVYU
+#define eYUV_420_2P_VYUY DP_COLOR_VYUY
+#define eYUV_420_2P_ISP_BLK DP_COLOR_420_BLKP
+#define eYUV_420_2P_VDO_BLK DP_COLOR_420_BLKI
+#define eYUV_422_3P DP_COLOR_I422
+#define eYUV_422_2P DP_COLOR_NV16
+#define eYUV_422_I DP_COLOR_YUYV
+#define eYUV_422_I_BLK DP_COLOR_422_BLKP
+#define eYUV_444_3P DP_COLOR_I444
+#define eYUV_444_2P DP_COLOR_NV24
+#define eYUV_444_1P DP_COLOR_YUV444
+#define eBAYER8 DP_COLOR_BAYER8
+#define eBAYER10 DP_COLOR_BAYER10
+#define eBAYER12 DP_COLOR_BAYER12
+#define eRGB565 DP_COLOR_RGB565
+#define eBGR565 DP_COLOR_BGR565
+#define eRGB888 DP_COLOR_RGB888
+#define eBGR888 DP_COLOR_BGR888
+#define eARGB8888 DP_COLOR_ARGB8888
+#define eABGR8888 DP_COLOR_ABGR8888
+#define DP_COLOR_XRGB8888 DP_COLOR_ARGB8888
+#define DP_COLOR_XBGR8888 DP_COLOR_ABGR8888
+#define eRGBA8888 DP_COLOR_RGBA8888
+#define eBGRA8888 DP_COLOR_BGRA8888
+#define eXRGB8888 DP_COLOR_XRGB8888
+#define eXBGR8888 DP_COLOR_XBGR8888
+#define DP_COLOR_RGBX8888 DP_COLOR_RGBA8888
+#define DP_COLOR_BGRX8888 DP_COLOR_BGRA8888
+#define eRGBX8888 DP_COLOR_RGBX8888
+#define eBGRX8888 DP_COLOR_BGRX8888
+#define ePARGB8888 DP_COLOR_PARGB8888
+#define eXARGB8888 DP_COLOR_XARGB8888
+#define ePABGR8888 DP_COLOR_PABGR8888
+#define eXABGR8888 DP_COLOR_XABGR8888
+#define eGREY DP_COLOR_GREY
+#define eI420 DP_COLOR_I420
+#define eYV12 DP_COLOR_YV12
+#define eIYU2 DP_COLOR_IYU2
+
+
+#define eYV21 DP_COLOR_I420
+#define eNV12_BLK DP_COLOR_420_BLKP
+#define eNV12_BLK_FCM DP_COLOR_420_BLKI
+#define eYUV_420_3P_YVU DP_COLOR_YV12
+
+#define eNV12_BP DP_COLOR_420_BLKP
+#define eNV12_BI DP_COLOR_420_BLKI
+#define eNV12 DP_COLOR_NV12
+#define eNV21 DP_COLOR_NV21
+#define eI422 DP_COLOR_I422
+#define eYV16 DP_COLOR_YV16
+#define eNV16 DP_COLOR_NV16
+#define eNV61 DP_COLOR_NV61
+#define eUYVY DP_COLOR_UYVY
+#define eVYUY DP_COLOR_VYUY
+#define eYUYV DP_COLOR_YUYV
+#define eYVYU DP_COLOR_YVYU
+#define eUYVY_BP DP_COLOR_422_BLKP
+#define eI444 DP_COLOR_I444
+#define eNV24 DP_COLOR_NV24
+#define eNV42 DP_COLOR_NV42
+#define DP_COLOR_YUY2 DP_COLOR_YUYV
+#define eYUY2 DP_COLOR_YUY2
+#define eY800 DP_COLOR_GREY
+//#define eIYU2
+#define eMTKYUV DP_COLOR_422_BLKP
+
+#define eCompactRaw1 DP_COLOR_BAYER10
+
+
+enum DpInterlaceFormat
+{
+ eInterlace_None,
+ eTop_Field,
+ eBottom_Field
+};
+
+enum DpSecure
+{
+ DP_SECURE_NONE = 0,
+ DP_SECURE = 1,
+ DP_SECURE_SHIFT = 8
+};
+
+#define MAX_NUM_READBACK_REGS (20)
+
+#define VENC_ENABLE_FLAG (0x08967)
+
+#ifndef __KERNEL__
+struct DpPortOption
+{
+ int width;
+ int height;
+ DpRect ROI;
+ DpColorFormat format;
+ DpInterlaceFormat interlace;
+
+ enum DpPort
+ {
+ eLCD0_PORT,
+ eLCD1_PORT,
+ eHDMI_PORT,
+ eTVOUT_PORT,
+ eOVERLAY_PORT,
+ eVIRTUAL_PORT,
+ eMEMORY_PORT
+ };
+
+ DpPort port;
+
+ int overlayID; // setting if choose port = eOVERLAY
+ int virtualID; // setting if choose port = eVIRTUAL_PORT
+ DpBufferPool *buffer; // setting if choose port = eMEMORY
+};
+#endif // __KERNEL__
+
+#endif // __DP_DATA_TYPE_H__
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 8cf623a..d2196fe 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -59,6 +59,11 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libavextensions \
$(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
+# Mediatek
+ifeq ($(strip $(BOARD_HAS_MTK_HARDWARE)),true)
+LOCAL_SHARED_LIBRARIES += libmtkplayer
+endif
+
LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall #-DLOG_NDEBUG=0
LOCAL_CLANG := true
diff --git a/media/libmediaplayerservice/Android.mk.orig b/media/libmediaplayerservice/Android.mk.orig
new file mode 100644
index 0000000..8cf623a
--- /dev/null
+++ b/media/libmediaplayerservice/Android.mk.orig
@@ -0,0 +1,75 @@
+LOCAL_PATH:= $(call my-dir)
+
+#
+# libmediaplayerservice
+#
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ ActivityManager.cpp \
+ Crypto.cpp \
+ Drm.cpp \
+ DrmSessionManager.cpp \
+ HDCP.cpp \
+ MediaPlayerFactory.cpp \
+ MediaPlayerService.cpp \
+ MediaRecorderClient.cpp \
+ MetadataRetrieverClient.cpp \
+ RemoteDisplay.cpp \
+ SharedLibrary.cpp \
+ StagefrightRecorder.cpp \
+ TestPlayerStub.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libcamera_client \
+ libcrypto \
+ libcutils \
+ libdrmframework \
+ liblog \
+ libdl \
+ libgui \
+ libmedia \
+ libmediautils \
+ libsonivox \
+ libstagefright \
+ libstagefright_foundation \
+ libstagefright_httplive \
+ libstagefright_omx \
+ libstagefright_wfd \
+ libutils \
+ libvorbisidec \
+ libaudioutils \
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_nuplayer \
+ libstagefright_rtsp \
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+ libavmediaserviceextensions \
+
+LOCAL_C_INCLUDES := \
+ $(TOP)/frameworks/av/media/libstagefright/include \
+ $(TOP)/frameworks/av/media/libstagefright/rtsp \
+ $(TOP)/frameworks/av/media/libstagefright/wifi-display \
+ $(TOP)/frameworks/av/media/libstagefright/webm \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/external/tremolo/Tremolo \
+ $(TOP)/frameworks/av/media/libavextensions \
+ $(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
+
+LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall #-DLOG_NDEBUG=0
+LOCAL_CLANG := true
+
+LOCAL_MODULE:= libmediaplayerservice
+
+#LOCAL_32_BIT_ONLY := true
+
+ifeq ($(TARGET_BOARD_PLATFORM),msm8974)
+ LOCAL_CFLAGS += -DTARGET_8974
+endif
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libmediaplayerservice/FMAudioPlayer.h b/media/libmediaplayerservice/FMAudioPlayer.h
new file mode 100644
index 0000000..0f239d4
--- /dev/null
+++ b/media/libmediaplayerservice/FMAudioPlayer.h
@@ -0,0 +1,144 @@
+/* Copyright Statement:
+ *
+ * This software/firmware and related documentation ("MediaTek Software") are
+ * protected under relevant copyright laws. The information contained herein
+ * is confidential and proprietary to MediaTek Inc. and/or its licensors.
+ * Without the prior written permission of MediaTek inc. and/or its licensors,
+ * any reproduction, modification, use or disclosure of MediaTek Software,
+ * and information contained herein, in whole or in part, shall be strictly prohibited.
+ */
+/* MediaTek Inc. (C) 2010. All rights reserved.
+ *
+ * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
+ * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
+ * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER ON
+ * AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL WARRANTIES,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NONINFRINGEMENT.
+ * NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH RESPECT TO THE
+ * SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY, INCORPORATED IN, OR
+ * SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES TO LOOK ONLY TO SUCH
+ * THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO. RECEIVER EXPRESSLY ACKNOWLEDGES
+ * THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES
+ * CONTAINED IN MEDIATEK SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK
+ * SOFTWARE RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
+ * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S ENTIRE AND
+ * CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE RELEASED HEREUNDER WILL BE,
+ * AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE MEDIATEK SOFTWARE AT ISSUE,
+ * OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE CHARGE PAID BY RECEIVER TO
+ * MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
+ *
+ * The following software/firmware and/or related documentation ("MediaTek Software")
+ * have been modified by MediaTek Inc. All revisions are subject to any receiver's
+ * applicable license agreements with MediaTek Inc.
+ */
+
+#ifndef ANDROID_FM_AUDIOPLAYER_H
+#define ANDROID_FM_AUDIOPLAYER_H
+
+
+#include <utils/threads.h>
+
+#include <media/MediaPlayerInterface.h>
+#include <media/AudioTrack.h>
+#include <media/AudioRecord.h>
+
+namespace android
+{
+
+class FMAudioPlayer : public MediaPlayerInterface
+{
+public:
+ FMAudioPlayer();
+ ~FMAudioPlayer();
+
+ virtual void onFirstRef();
+ virtual status_t initCheck();
+ //virtual status_t setDataSource(const char *path, const KeyedVector<String8, String8> *headers);
+ virtual status_t setDataSource(const sp<IMediaHTTPService> &httpService, const char *url, const KeyedVector<String8, String8> *headers);
+ virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+ virtual status_t setVideoSurface(const sp<Surface>& /*surface*/)
+ {
+ return UNKNOWN_ERROR;
+ }
+ virtual status_t setVideoSurfaceTexture(
+ const sp<IGraphicBufferProducer>& /*bufferProducer*/)
+ {
+ return UNKNOWN_ERROR;
+ }
+ virtual status_t prepare();
+ virtual status_t prepareAsync();
+ virtual status_t start();
+ virtual status_t stop();
+ virtual status_t seekTo(int msec);
+ virtual status_t pause();
+ virtual bool isPlaying();
+ virtual status_t getCurrentPosition(int *msec);
+ virtual status_t getDuration(int *msec);
+ virtual status_t release();
+ virtual status_t reset();
+ virtual status_t setLooping(int loop);
+#ifndef FAKE_FM
+ virtual status_t setRender(bool enable);
+#endif
+ virtual player_type playerType()
+ {
+ return FM_AUDIO_PLAYER;
+ }
+ virtual status_t invoke(const Parcel &/*request*/, Parcel */*reply*/)
+ {
+ return INVALID_OPERATION;
+ }
+ virtual status_t setParameter(int /*key*/, const Parcel &/*request*/)
+ {
+ return INVALID_OPERATION;
+ }
+ virtual status_t getParameter(int /*key*/, Parcel */*reply*/)
+ {
+ return INVALID_OPERATION;
+ }
+
+private:
+ status_t setdatasource(const char *path, int fd, int64_t offset, int64_t length);
+ status_t reset_nosync();
+ status_t createOutputTrack();
+ static int renderThread(void *);
+ int render();
+ bool createAudioRecord();
+ bool deleteAudioRecord();
+
+#ifndef FAKE_FM
+ void setHwCallback(bool enable);
+#endif
+
+ sp<AudioRecord> mAudioRecord;
+ Mutex mMutex;
+ Condition mCondition;
+ FILE *mFile;
+ int64_t mOffset;
+ int64_t mLength;
+ char *mAudioBuffer;
+ char *mDummyBuffer;
+ int mPlayTime;
+ int mDuration;
+ uint32_t mFmAudioSamplingRate;
+
+ status_t mState;
+ int mStreamType;
+ bool mAndroidLoop;
+ volatile bool mExit;
+ bool mPaused;
+
+ bool mSetRender;
+ volatile bool mRender;
+ pid_t mRenderTid;
+ bool flagRecordError;
+
+ int mMutePause;
+};
+
+}; // namespace android
+
+#endif
+
+
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index f0afc5a..e378f3c 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -34,6 +34,10 @@
#include "nuplayer/NuPlayerDriver.h"
#include <mediaplayerservice/AVMediaServiceExtensions.h>
+#ifdef MTK_HARDWARE
+#include "FMAudioPlayer.h"
+#endif
+
namespace android {
Mutex MediaPlayerFactory::sLock;
@@ -241,6 +245,24 @@ class TestPlayerFactory : public MediaPlayerFactory::IFactory {
}
};
+#ifdef MTK_HARDWARE
+class FMPlayerFactory : public MediaPlayerFactory::IFactory {
+ public:
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ const char* url,
+ float /*curScore*/) {
+ if(strncmp(url, "MEDIATEK://MEDIAPLAYER_PLAYERTYPE_FM", 36) == 0)
+ return 1.0;
+ return 0.0;
+ }
+
+ virtual sp<MediaPlayerBase> createPlayer(pid_t /* pid */) {
+ ALOGV("Create FM Player");
+ return new FMAudioPlayer();
+ }
+};
+#endif
+
void MediaPlayerFactory::registerBuiltinFactories() {
MediaPlayerFactory::IFactory* pCustomFactory = NULL;
@@ -256,6 +278,9 @@ void MediaPlayerFactory::registerBuiltinFactories() {
ALOGV("Registering DASH_PLAYER");
registerFactory_l(pCustomFactory, DASH_PLAYER);
}
+#ifdef MTK_HARDWARE
+ registerFactory_l(new FMPlayerFactory(), FM_AUDIO_PLAYER);
+#endif
sInitComplete = true;
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 442dba1..1e217ca 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1613,6 +1613,14 @@ status_t StagefrightRecorder::setupVideoEncoder(
break;
}
+ // force hw video encoder for camera
+ if (mVideoSource == VIDEO_SOURCE_CAMERA) {
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ mime.append("_cam");
+ format->setString("mime", mime);
+ }
+
if (cameraSource != NULL) {
sp<MetaData> meta = cameraSource->getFormat();
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 5ef2411..7656755 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -108,6 +108,16 @@
#include "include/OMX_Audio_DTS.h"
#endif
+#ifdef MTK_HARDWARE
+#include <media/stagefright/dpframework/DpBlitStream.h>
+
+#define HAL_PIXEL_FORMAT_NV12_BLK 0x7F000001
+#define HAL_PIXEL_FORMAT_I420 (0x32315659 + 0x10)
+
+const OMX_COLOR_FORMATTYPE OMX_MTK_COLOR_FormatYV12 = (OMX_COLOR_FORMATTYPE)0x7F000200;
+const OMX_COLOR_FORMATTYPE OMX_COLOR_FormatVendorMTKYUV = (OMX_COLOR_FORMATTYPE)0x7F000001;
+#endif
+
namespace android {
// OMX errors are directly mapped into status_t range if
@@ -869,7 +879,13 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
portIndex == kPortIndexInput ? "input" : "output");
+#ifdef MTK_HARDWARE
+ OMX_U32 memoryAlign = 32;
+ size_t totalSize = def.nBufferCountActual *
+ ((bufSize + (memoryAlign - 1))&(~(memoryAlign - 1)));
+#else
size_t totalSize = def.nBufferCountActual * bufSize;
+#endif
mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
@@ -970,6 +986,11 @@ status_t ACodec::setupNativeWindowSizeFormatAndUsage(
setNativeWindowColorFormat(eNativeColorFormat);
#endif
+#ifdef MTK_HARDWARE
+ OMX_COLOR_FORMATTYPE eHalColorFormat = def.format.video.eColorFormat;
+ setHalWindowColorFormat(eHalColorFormat);
+#endif
+
ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
int32_t width = 0, height = 0;
int32_t isAdaptivePlayback = 0;
@@ -991,6 +1012,8 @@ status_t ACodec::setupNativeWindowSizeFormatAndUsage(
height,
#ifdef USE_SAMSUNG_COLORFORMAT
eNativeColorFormat,
+#elif MTK_HARDWARE
+ eHalColorFormat,
#else
def.format.video.eColorFormat,
#endif
@@ -1374,6 +1397,27 @@ void ACodec::setNativeWindowColorFormat(OMX_COLOR_FORMATTYPE &eNativeColorFormat
}
#endif
+#ifdef MTK_HARDWARE
+void ACodec::setHalWindowColorFormat(OMX_COLOR_FORMATTYPE &eHalColorFormat) {
+ if (!strncmp("OMX.MTK.", mComponentName.c_str(), 8)) {
+ switch (eHalColorFormat) {
+ case OMX_COLOR_FormatYUV420Planar:
+ eHalColorFormat = (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_I420;
+ break;
+ case OMX_MTK_COLOR_FormatYV12:
+ eHalColorFormat = (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12;
+ break;
+ case OMX_COLOR_FormatVendorMTKYUV:
+ eHalColorFormat = (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_NV12_BLK;
+ break;
+ default:
+ eHalColorFormat = (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_I420;
+ break;
+ }
+ }
+}
+#endif
+
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
diff --git a/media/libstagefright/ACodec.cpp.orig b/media/libstagefright/ACodec.cpp.orig
new file mode 100644
index 0000000..5ef2411
--- /dev/null
+++ b/media/libstagefright/ACodec.cpp.orig
@@ -0,0 +1,7270 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * This file was modified by Dolby Laboratories, Inc. The portions of the
+ * code that are surrounded by "DOLBY..." are copyrighted and
+ * licensed separately, as follows:
+ *
+ * (C) 2011-2015 Dolby Laboratories, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ **
+ ** This file was modified by DTS, Inc. The portions of the
+ ** code that are surrounded by "DTS..." are copyrighted and
+ ** licensed separately, as follows:
+ **
+ ** (C) 2015 DTS, Inc.
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ ** http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ACodec"
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
+#include <inttypes.h>
+#include <utils/Trace.h>
+
+#include <gui/Surface.h>
+
+#include <media/stagefright/ACodec.h>
+
+#include <binder/MemoryDealer.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/SurfaceUtils.h>
+#include <media/stagefright/FFMPEGSoftCodec.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/hardware/HardwareAPI.h>
+
+#include <OMX_AudioExt.h>
+#include <OMX_VideoExt.h>
+#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include <OMX_AsString.h>
+
+#ifdef USE_SAMSUNG_COLORFORMAT
+#include <sec_format.h>
+#endif
+
+#include "include/avc_utils.h"
+
+#include <stagefright/AVExtensions.h>
+#ifdef DOLBY_ENABLE
+#include "DolbyACodecExtImpl.h"
+#endif // DOLBY_END
+
+#ifdef DTS_CODEC_M_
+#include "include/DTSUtils.h"
+#include "include/OMX_Audio_DTS.h"
+#endif
+
+namespace android {
+
+// OMX errors are directly mapped into status_t range if
+// there is no corresponding MediaError status code.
+// Use the statusFromOMXError(int32_t omxError) function.
+//
+// Currently this is a direct map.
+// See frameworks/native/include/media/openmax/OMX_Core.h
+//
+// Vendor OMX errors from 0x90000000 - 0x9000FFFF
+// Extension OMX errors from 0x8F000000 - 0x90000000
+// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
+//
+
+// returns true if err is a recognized OMX error code.
+// as OMX error is OMX_S32, this is an int32_t type
+static inline bool isOMXError(int32_t err) {
+ return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
+}
+
+// converts an OMX error to a status_t
+static inline status_t statusFromOMXError(int32_t omxError) {
+ switch (omxError) {
+ case OMX_ErrorInvalidComponentName:
+ case OMX_ErrorComponentNotFound:
+ return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
+ default:
+ return isOMXError(omxError) ? omxError : 0; // no translation required
+ }
+}
+
+// checks and converts status_t to a non-side-effect status_t
+static inline status_t makeNoSideEffectStatus(status_t err) {
+ switch (err) {
+ // the following errors have side effects and may come
+ // from other code modules. Remap for safety reasons.
+ case INVALID_OPERATION:
+ case DEAD_OBJECT:
+ return UNKNOWN_ERROR;
+ default:
+ return err;
+ }
+}
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+struct MessageList : public RefBase {
+ MessageList() {
+ }
+ virtual ~MessageList() {
+ }
+ std::list<sp<AMessage> > &getList() { return mList; }
+private:
+ std::list<sp<AMessage> > mList;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MessageList);
+};
+
+// Binder-side observer registered with the OMX component. Translates each
+// incoming omx_message into an AMessage, batches them into a MessageList,
+// and posts the batch to ACodec through a copy of the notification message
+// installed via setNotificationMessage().
+struct CodecObserver : public BnOMXObserver {
+ CodecObserver() {}
+
+ void setNotificationMessage(const sp<AMessage> &msg) {
+ mNotify = msg;
+ }
+
+ // from IOMXObserver
+ virtual void onMessages(const std::list<omx_message> &messages) {
+ if (messages.empty()) {
+ return;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ bool first = true;
+ sp<MessageList> msgList = new MessageList();
+ for (std::list<omx_message>::const_iterator it = messages.cbegin();
+ it != messages.cend(); ++it) {
+ const omx_message &omx_msg = *it;
+ if (first) {
+ // the node id is common to the whole batch; record it once
+ notify->setInt32("node", omx_msg.node);
+ first = false;
+ }
+
+ // flatten the C-style omx_message payload into AMessage fields
+ sp<AMessage> msg = new AMessage;
+ msg->setInt32("type", omx_msg.type);
+ switch (omx_msg.type) {
+ case omx_message::EVENT:
+ {
+ msg->setInt32("event", omx_msg.u.event_data.event);
+ msg->setInt32("data1", omx_msg.u.event_data.data1);
+ msg->setInt32("data2", omx_msg.u.event_data.data2);
+ break;
+ }
+
+ case omx_message::EMPTY_BUFFER_DONE:
+ {
+ msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
+ msg->setInt32("fence_fd", omx_msg.fenceFd);
+ break;
+ }
+
+ case omx_message::FILL_BUFFER_DONE:
+ {
+ msg->setInt32(
+ "buffer", omx_msg.u.extended_buffer_data.buffer);
+ msg->setInt32(
+ "range_offset",
+ omx_msg.u.extended_buffer_data.range_offset);
+ msg->setInt32(
+ "range_length",
+ omx_msg.u.extended_buffer_data.range_length);
+ msg->setInt32(
+ "flags",
+ omx_msg.u.extended_buffer_data.flags);
+ msg->setInt64(
+ "timestamp",
+ omx_msg.u.extended_buffer_data.timestamp);
+ msg->setInt32(
+ "fence_fd", omx_msg.fenceFd);
+ break;
+ }
+
+ case omx_message::FRAME_RENDERED:
+ {
+ msg->setInt64(
+ "media_time_us", omx_msg.u.render_data.timestamp);
+ msg->setInt64(
+ "system_nano", omx_msg.u.render_data.nanoTime);
+ break;
+ }
+
+ default:
+ // unknown types are logged and dropped from the batch payload,
+ // though the bare "type" field is still forwarded below
+ ALOGE("Unrecognized message type: %d", omx_msg.type);
+ break;
+ }
+ msgList->getList().push_back(msg);
+ }
+ notify->setObject("messages", msgList);
+ notify->post();
+ }
+
+protected:
+ virtual ~CodecObserver() {}
+
+private:
+ sp<AMessage> mNotify;
+
+ DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Common base for all ACodec state-machine states. Provides default handling
+// of OMX callbacks (empty/fill buffer done, events, frame-rendered) and of
+// client messages; concrete states override getPortMode() to decide what
+// happens to buffers the component returns while that state is active.
+struct ACodec::BaseState : public AState {
+ BaseState(ACodec *codec, const sp<AState> &parentState = NULL);
+
+protected:
+ // How returned buffers are treated in the current state:
+ // held on to, resubmitted to the component, or released.
+ enum PortMode {
+ KEEP_BUFFERS,
+ RESUBMIT_BUFFERS,
+ FREE_BUFFERS,
+ };
+
+ ACodec *mCodec;
+
+ virtual PortMode getPortMode(OMX_U32 portIndex);
+
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+ virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+ virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+ void postFillThisBuffer(BufferInfo *info);
+
+private:
+ // Handles an OMX message. Returns true iff message was handled.
+ bool onOMXMessage(const sp<AMessage> &msg);
+
+ // Handles a list of messages. Returns true iff messages were handled.
+ bool onOMXMessageList(const sp<AMessage> &msg);
+
+ // returns true iff this message is for this component and the component is alive
+ bool checkOMXMessage(const sp<AMessage> &msg);
+
+ bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);
+
+ bool onOMXFillBufferDone(
+ IOMX::buffer_id bufferID,
+ size_t rangeOffset, size_t rangeLength,
+ OMX_U32 flags,
+ int64_t timeUs,
+ int fenceFd);
+
+ virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
+ void getMoreInputDataIfPossible();
+
+ DISALLOW_EVIL_CONSTRUCTORS(BaseState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Posts the supplied notification message when the watched binder dies,
+// letting ACodec react to the remote (OMX service) process going away.
+struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
+ DeathNotifier(const sp<AMessage> &notify)
+ : mNotify(notify) {
+ }
+
+ virtual void binderDied(const wp<IBinder> &) {
+ mNotify->post();
+ }
+
+protected:
+ virtual ~DeathNotifier() {}
+
+private:
+ sp<AMessage> mNotify;
+
+ DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
+};
+
+// Initial state (also entered after release): no OMX component is
+// allocated yet. Handles setup and component-allocation requests.
+struct ACodec::UninitializedState : public ACodec::BaseState {
+ UninitializedState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+private:
+ void onSetup(const sp<AMessage> &msg);
+ bool onAllocateComponent(const sp<AMessage> &msg);
+
+ sp<DeathNotifier> mDeathNotifier;
+
+ DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Component allocated but not started (OMX "Loaded"). Handles configuration,
+// input-surface creation, start, and shutdown requests.
+struct ACodec::LoadedState : public ACodec::BaseState {
+ LoadedState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+private:
+ friend struct ACodec::UninitializedState;
+
+ bool onConfigureComponent(const sp<AMessage> &msg);
+ void onCreateInputSurface(const sp<AMessage> &msg);
+ void onSetInputSurface(const sp<AMessage> &msg);
+ void onStart();
+ void onShutdown(bool keepComponentAllocated);
+
+ status_t setupInputSurface();
+
+ DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Transitional state: Loaded -> Idle. Allocates buffers on both ports while
+// waiting for the component to reach OMX_StateIdle.
+struct ACodec::LoadedToIdleState : public ACodec::BaseState {
+ LoadedToIdleState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ virtual void stateEntered();
+
+private:
+ status_t allocateBuffers();
+
+ DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Transitional state: Idle -> Executing. Waits for the component's state
+// change event before entering ExecutingState.
+struct ACodec::IdleToExecutingState : public ACodec::BaseState {
+ IdleToExecutingState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ virtual void stateEntered();
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Steady state: the component is running and buffers circulate between
+// client, component, and (for video) the native window.
+struct ACodec::ExecutingState : public ACodec::BaseState {
+ ExecutingState(ACodec *codec);
+
+ void submitRegularOutputBuffers();
+ void submitOutputMetaBuffers();
+ void submitOutputBuffers();
+
+ // Submit output buffers to the decoder, submit input buffers to client
+ // to fill with data.
+ void resume();
+
+ // Returns true iff input and output buffers are in play.
+ bool active() const { return mActive; }
+
+protected:
+ virtual PortMode getPortMode(OMX_U32 portIndex);
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
+private:
+ bool mActive;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Entered while the output port is being reconfigured (e.g. after a
+// port-settings-changed event); output buffers are reallocated here.
+struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
+ OutputPortSettingsChangedState(ACodec *codec);
+
+protected:
+ virtual PortMode getPortMode(OMX_U32 portIndex);
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Transitional state: Executing -> Idle (start of shutdown). Collects all
+// buffers back from the component before moving on to IdleToLoadedState.
+struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
+ ExecutingToIdleState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+ virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+ virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+private:
+ void changeStateIfWeOwnAllBuffers();
+
+ // set once the component has reported reaching OMX_StateIdle
+ bool mComponentNowIdle;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Transitional state: Idle -> Loaded (end of shutdown). Buffers are freed
+// while waiting for the component to reach OMX_StateLoaded.
+struct ACodec::IdleToLoadedState : public ACodec::BaseState {
+ IdleToLoadedState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Entered while a flush is in progress on both ports; tracks per-port flush
+// completion and returns to ExecutingState once all buffers are owned by us.
+struct ACodec::FlushingState : public ACodec::BaseState {
+ FlushingState(ACodec *codec);
+
+protected:
+ virtual bool onMessageReceived(const sp<AMessage> &msg);
+ virtual void stateEntered();
+
+ virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+ virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+ virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+private:
+ // indexed by port (input/output); true once that port's flush completed
+ bool mFlushComplete[2];
+
+ void changeStateIfWeOwnAllBuffers();
+
+ DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Records fenceFd as a write fence on this buffer; warns (with the caller's
+// debug tag) if an unretired fence is being overwritten, as that leaks the fd.
+void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
+ if (mFenceFd >= 0) {
+ ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
+ mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
+ }
+ mFenceFd = fenceFd;
+ mIsReadFence = false;
+}
+
+// Records fenceFd as a read fence on this buffer; warns (with the caller's
+// debug tag) if an unretired fence is being overwritten, as that leaks the fd.
+void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
+ if (mFenceFd >= 0) {
+ ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
+ mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
+ }
+ mFenceFd = fenceFd;
+ mIsReadFence = true;
+}
+
+// Debug check: logs when the pending fence is a read fence but is about to
+// be consumed as a write fence.
+void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
+ if (mFenceFd >= 0 && mIsReadFence) {
+ ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
+ }
+}
+
+// Debug check: logs when the pending fence is a write fence but is about to
+// be consumed as a read fence.
+void ACodec::BufferInfo::checkReadFence(const char *dbg) {
+ if (mFenceFd >= 0 && !mIsReadFence) {
+ ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Constructor: zero/default-initializes all bookkeeping members, builds the
+// full set of state objects for the state machine, and enters
+// UninitializedState.
+ACodec::ACodec()
+ : mQuirks(0),
+ mNode(0),
+ mNativeWindowUsageBits(0),
+ mSentFormat(false),
+ mIsVideo(false),
+ mIsEncoder(false),
+ mEncoderComponent(false),
+ mComponentAllocByName(false),
+ mFatalError(false),
+ mShutdownInProgress(false),
+ mExplicitShutdown(false),
+ mEncoderDelay(0),
+ mEncoderPadding(0),
+ mRotationDegrees(0),
+ mChannelMaskPresent(false),
+ mChannelMask(0),
+ mDequeueCounter(0),
+ mInputMetadataType(kMetadataBufferTypeInvalid),
+ mOutputMetadataType(kMetadataBufferTypeInvalid),
+ mLegacyAdaptiveExperiment(false),
+ mMetadataBuffersToSubmit(0),
+ mRepeatFrameDelayUs(-1ll),
+ mMaxPtsGapUs(-1ll),
+ mMaxFps(-1),
+ mTimePerFrameUs(-1ll),
+ mTimePerCaptureUs(-1ll),
+ mCreateInputBuffersSuspended(false),
+ mTunneled(false) {
+ mUninitializedState = new UninitializedState(this);
+ mLoadedState = new LoadedState(this);
+ mLoadedToIdleState = new LoadedToIdleState(this);
+ mIdleToExecutingState = new IdleToExecutingState(this);
+ mExecutingState = new ExecutingState(this);
+
+ mOutputPortSettingsChangedState =
+ new OutputPortSettingsChangedState(this);
+
+ mExecutingToIdleState = new ExecutingToIdleState(this);
+ mIdleToLoadedState = new IdleToLoadedState(this);
+ mFlushingState = new FlushingState(this);
+
+ // no EOS seen on either port yet
+ mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
+ mInputEOSResult = OK;
+
+ changeState(mUninitializedState);
+}
+
+// Destructor: nothing to do explicitly; state objects are sp<>-managed.
+ACodec::~ACodec() {
+}
+
+// Hook for device-specific codec setup; this default implementation simply
+// passes the incoming status through unchanged.
+status_t ACodec::setupCustomCodec(status_t err, const char * /*mime*/, const sp<AMessage> &/*msg*/) {
+ return err;
+}
+
+// Installs the message used to deliver notifications back to the client.
+void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
+ mNotify = msg;
+}
+
+// Retargets the caller's message as kWhatSetup and posts it asynchronously
+// to this ACodec's looper.
+void ACodec::initiateSetup(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatSetup);
+ msg->setTarget(this);
+ msg->post();
+}
+
+// Asynchronously requests applying runtime parameters (kWhatSetParameters).
+void ACodec::signalSetParameters(const sp<AMessage> &params) {
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
+ msg->setMessage("params", params);
+ msg->post();
+}
+
+// Retargets the caller's message as kWhatAllocateComponent and posts it.
+void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatAllocateComponent);
+ msg->setTarget(this);
+ msg->post();
+}
+
+// Retargets the caller's message as kWhatConfigureComponent and posts it.
+void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatConfigureComponent);
+ msg->setTarget(this);
+ msg->post();
+}
+
+// Synchronously asks the state machine to switch to a new output surface
+// (kWhatSetSurface); blocks until the handler replies and returns either the
+// transport error or the "err" field of the response.
+status_t ACodec::setSurface(const sp<Surface> &surface) {
+ sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
+ msg->setObject("surface", surface);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err == OK) {
+ (void)response->findInt32("err", &err);
+ }
+ return err;
+}
+
+// Asynchronously requests creation of an encoder input surface.
+void ACodec::initiateCreateInputSurface() {
+ (new AMessage(kWhatCreateInputSurface, this))->post();
+}
+
+// Asynchronously hands a persistent input surface to the state machine.
+void ACodec::initiateSetInputSurface(
+ const sp<PersistentSurface> &surface) {
+ sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
+ msg->setObject("input-surface", surface);
+ msg->post();
+}
+
+// Asynchronously signals end-of-stream on the input side.
+void ACodec::signalEndOfInputStream() {
+ (new AMessage(kWhatSignalEndOfInputStream, this))->post();
+}
+
+// Asynchronously requests starting the configured component.
+void ACodec::initiateStart() {
+ (new AMessage(kWhatStart, this))->post();
+}
+
+// Asynchronously requests a flush of both ports.
+void ACodec::signalFlush() {
+ ALOGV("[%s] signalFlush", mComponentName.c_str());
+ (new AMessage(kWhatFlush, this))->post();
+}
+
+// Asynchronously requests resuming buffer exchange after a flush.
+void ACodec::signalResume() {
+ (new AMessage(kWhatResume, this))->post();
+}
+
+// Asynchronously requests shutdown. When the component is being fully
+// released (keepComponentAllocated == false), also schedules a delayed
+// kWhatReleaseCodecInstance as a watchdog so release cannot hang forever.
+void ACodec::initiateShutdown(bool keepComponentAllocated) {
+ sp<AMessage> msg = new AMessage(kWhatShutdown, this);
+ msg->setInt32("keepComponentAllocated", keepComponentAllocated);
+ msg->post();
+ if (!keepComponentAllocated) {
+ // ensure shutdown completes in 30 seconds
+ (new AMessage(kWhatReleaseCodecInstance, this))->post(30000000);
+ }
+}
+
+// Asynchronously requests the encoder to emit an IDR (sync) frame.
+void ACodec::signalRequestIDRFrame() {
+ (new AMessage(kWhatRequestIDRFrame, this))->post();
+}
+
+// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+// Some codecs may return input buffers before having them processed.
+// This causes a halt if we already signaled an EOS on the input
+// port. For now keep submitting an output buffer if there was an
+// EOS on the input port, but not yet on the output port.
+void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
+ // only fire while output EOS is still pending and meta buffers remain
+ if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
+ mMetadataBuffersToSubmit > 0) {
+ (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
+ }
+}
+
+// Switches the output to a new Surface at runtime. Validates the request
+// (non-null, codec was configured with a surface, not tunneled, compatible
+// usage bits and undequeued-buffer requirements), migrates the already
+// allocated output buffers to the new surface via attach/cancel, optionally
+// pushes blank frames to the old window, and finally adopts the new window.
+// Returns OK on success or the first error encountered.
+status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
+ // allow keeping unset surface
+ if (surface == NULL) {
+ if (mNativeWindow != NULL) {
+ ALOGW("cannot unset a surface");
+ return INVALID_OPERATION;
+ }
+ return OK;
+ }
+
+ // cannot switch from bytebuffers to surface
+ if (mNativeWindow == NULL) {
+ ALOGW("component was not configured with a surface");
+ return INVALID_OPERATION;
+ }
+
+ ANativeWindow *nativeWindow = surface.get();
+ // if we have not yet started the codec, we can simply set the native window
+ if (mBuffers[kPortIndexInput].size() == 0) {
+ mNativeWindow = surface;
+ return OK;
+ }
+
+ // we do not support changing a tunneled surface after start
+ if (mTunneled) {
+ ALOGW("cannot change tunneled surface");
+ return INVALID_OPERATION;
+ }
+
+ int usageBits = 0;
+ status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits);
+ if (err != OK) {
+ return err;
+ }
+
+ int ignoredFlags = kVideoGrallocUsage;
+ // New output surface is not allowed to add new usage flag except ignored ones.
+ if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
+ ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
+ return BAD_VALUE;
+ }
+
+ // get min undequeued count. We cannot switch to a surface that has a higher
+ // undequeued count than we allocated.
+ int minUndequeuedBuffers = 0;
+ err = nativeWindow->query(
+ nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &minUndequeuedBuffers);
+ if (err != 0) {
+ ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+ if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
+ ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
+ minUndequeuedBuffers, mNumUndequeuedBuffers);
+ return BAD_VALUE;
+ }
+
+ // we cannot change the number of output buffers while OMX is running
+ // set up surface to the same count
+ Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
+ ALOGV("setting up surface for %zu buffers", buffers.size());
+
+ err = native_window_set_buffer_count(nativeWindow, buffers.size());
+ if (err != 0) {
+ ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+ -err);
+ return err;
+ }
+
+ // need to enable allocation when attaching
+ surface->getIGraphicBufferProducer()->allowAllocation(true);
+
+ // for meta data mode, we move dequeued buffers to the new surface.
+ // for non-meta mode, we must move all registered buffers
+ for (size_t i = 0; i < buffers.size(); ++i) {
+ const BufferInfo &info = buffers[i];
+ // skip undequeued buffers for meta data mode
+ if (storingMetadataInDecodedBuffers()
+ && !mLegacyAdaptiveExperiment
+ && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGV("skipping buffer %p", info.mGraphicBuffer.get() ? info.mGraphicBuffer->getNativeBuffer() : 0x0);
+ continue;
+ }
+ ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());
+
+ err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
+ if (err != OK) {
+ ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
+ info.mGraphicBuffer->getNativeBuffer(),
+ strerror(-err), -err);
+ return err;
+ }
+ }
+
+ // cancel undequeued buffers to new surface
+ if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
+ for (size_t i = 0; i < buffers.size(); ++i) {
+ BufferInfo &info = buffers.editItemAt(i);
+ if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
+ err = nativeWindow->cancelBuffer(
+ nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
+ // fence fd ownership passed to cancelBuffer
+ info.mFenceFd = -1;
+ if (err != OK) {
+ ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
+ info.mGraphicBuffer->getNativeBuffer(),
+ strerror(-err), -err);
+ return err;
+ }
+ }
+ }
+ // disallow further allocation
+ (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
+ }
+
+ // push blank buffers to previous window if requested
+ if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown ||
+ mFlags & kFlagPushBlankBuffersToNativeWindowOnSwitch) {
+ pushBlankBuffersToNativeWindow(mNativeWindow.get());
+ }
+
+ mNativeWindow = nativeWindow;
+ mNativeWindowUsageBits = usageBits;
+ return OK;
+}
+
+// Allocates all buffers for one port. Output-with-surface goes through the
+// native-window paths (metadata or regular); everything else queries the
+// port definition and allocates/uses MemoryDealer-backed buffers, choosing
+// between allocateBuffer, allocateBufferWithBackup and useBuffer based on
+// flags and quirks. On success, notifies the client with the new buffer set
+// (kWhatBuffersAllocated).
+status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+ CHECK(mDealer[portIndex] == NULL);
+ CHECK(mBuffers[portIndex].isEmpty());
+
+ status_t err;
+ if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+ if (storingMetadataInDecodedBuffers()) {
+ err = allocateOutputMetadataBuffers();
+ } else {
+ err = allocateOutputBuffersFromNativeWindow();
+ }
+ } else {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err == OK) {
+ MetadataBufferType type =
+ portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
+ int32_t bufSize = def.nBufferSize;
+ // metadata buffers have a fixed size independent of the port's
+ // reported nBufferSize
+ if (type == kMetadataBufferTypeGrallocSource) {
+ bufSize = sizeof(VideoGrallocMetadata);
+ } else if (type == kMetadataBufferTypeANWBuffer) {
+ bufSize = sizeof(VideoNativeMetadata);
+ }
+
+ // If using gralloc or native source input metadata buffers, allocate largest
+ // metadata size as we prefer to generate native source metadata, but component
+ // may require gralloc source. For camera source, allocate at least enough
+ // size for native metadata buffers.
+ int32_t allottedSize = bufSize;
+ if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
+ bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
+ } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
+ bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata));
+ }
+
+ ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port",
+ mComponentName.c_str(),
+ def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
+ portIndex == kPortIndexInput ? "input" : "output");
+
+ // NOTE(review): nBufferCountActual * bufSize is unchecked here;
+ // later upstream revisions guard this multiplication against
+ // overflow -- confirm the values are component-trusted.
+ size_t totalSize = def.nBufferCountActual * bufSize;
+ mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
+
+ for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
+ sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
+ if (mem == NULL || mem->pointer() == NULL) {
+ return NO_MEMORY;
+ }
+
+ BufferInfo info;
+ info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mFenceFd = -1;
+ info.mRenderInfo = NULL;
+
+ uint32_t requiresAllocateBufferBit =
+ (portIndex == kPortIndexInput)
+ ? OMXCodec::kRequiresAllocateBufferOnInputPorts
+ : OMXCodec::kRequiresAllocateBufferOnOutputPorts;
+
+ if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure))
+ || (portIndex == kPortIndexOutput && usingMetadataOnEncoderOutput())
+ || canAllocateBuffer(portIndex)) {
+ // component allocates its own backing store; drop ours
+ mem.clear();
+
+ err = allocateBuffer(portIndex, bufSize, info);
+ } else if (mQuirks & requiresAllocateBufferBit) {
+ err = mOMX->allocateBufferWithBackup(
+ mNode, portIndex, mem, &info.mBufferID, allottedSize);
+ } else {
+ err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
+ }
+
+ if (mem != NULL) {
+ info.mData = new ABuffer(mem->pointer(), bufSize);
+ if (type == kMetadataBufferTypeANWBuffer) {
+ // mark the embedded fence as unset
+ ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
+ }
+ }
+
+ mBuffers[portIndex].push(info);
+ }
+ }
+ }
+
+ if (err != OK) {
+ return err;
+ }
+
+ // announce the allocated buffer set to the client
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatBuffersAllocated);
+
+ notify->setInt32("portIndex", portIndex);
+
+ sp<PortDescription> desc = new PortDescription;
+
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ const BufferInfo &info = mBuffers[portIndex][i];
+
+ desc->addBuffer(info.mBufferID, info.mData);
+ }
+
+ notify->setObject("portDesc", desc);
+ notify->post();
+
+ return OK;
+}
+
+// Configures the given native window from the output port definition:
+// queries the component's gralloc usage (adding protection and the common
+// video usage bits), picks the buffer geometry (max-width/height for
+// adaptive playback, otherwise the port's frame size), and applies size,
+// format, rotation and usage to the window. Returns the final usage bits in
+// *finalUsage. Vendor variations: Samsung color-format translation and a
+// QCOM crop-rect update are compiled in conditionally.
+status_t ACodec::setupNativeWindowSizeFormatAndUsage(
+ ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_U32 usage = 0;
+ err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
+ if (err != 0) {
+ ALOGW("querying usage flags from OMX IL component failed: %d", err);
+ // XXX: Currently this error is logged, but not fatal.
+ usage = 0;
+ }
+ int omxUsage = usage;
+
+ if (mFlags & kFlagIsGrallocUsageProtected) {
+ usage |= GRALLOC_USAGE_PROTECTED;
+ }
+
+ usage |= kVideoGrallocUsage;
+ *finalUsage = usage;
+
+#ifdef USE_SAMSUNG_COLORFORMAT
+ OMX_COLOR_FORMATTYPE eNativeColorFormat = def.format.video.eColorFormat;
+ setNativeWindowColorFormat(eNativeColorFormat);
+#endif
+
+ ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
+ int32_t width = 0, height = 0;
+ int32_t isAdaptivePlayback = 0;
+
+ // for adaptive playback, size the window for the maximum resolution so
+ // buffers need not be reallocated on stream resolution changes
+ if (mInputFormat->findInt32("adaptive-playback", &isAdaptivePlayback)
+ && isAdaptivePlayback
+ && mInputFormat->findInt32("max-width", &width)
+ && mInputFormat->findInt32("max-height", &height)) {
+ width = max(width, (int32_t)def.format.video.nFrameWidth);
+ height = max(height, (int32_t)def.format.video.nFrameHeight);
+ ALOGV("Adaptive playback width = %d, height = %d", width, height);
+ } else {
+ width = def.format.video.nFrameWidth;
+ height = def.format.video.nFrameHeight;
+ }
+ err = setNativeWindowSizeFormatAndUsage(
+ nativeWindow,
+ width,
+ height,
+#ifdef USE_SAMSUNG_COLORFORMAT
+ eNativeColorFormat,
+#else
+ def.format.video.eColorFormat,
+#endif
+ mRotationDegrees,
+ usage);
+#ifdef QCOM_HARDWARE
+ if (err == OK) {
+ // propagate the component's current output crop to the window
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = kPortIndexOutput;
+ err = mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop, &rect, sizeof(rect));
+ if (err == OK) {
+ ALOGV("rect size = %d, %d, %d, %d", rect.nLeft, rect.nTop, rect.nWidth, rect.nHeight);
+ android_native_rect_t crop;
+ crop.left = rect.nLeft;
+ crop.top = rect.nTop;
+ crop.right = rect.nLeft + rect.nWidth - 1;
+ crop.bottom = rect.nTop + rect.nHeight - 1;
+ ALOGV("crop update (%d, %d), (%d, %d)", crop.left, crop.top, crop.right, crop.bottom);
+ err = native_window_set_crop(nativeWindow, &crop);
+ }
+ }
+#endif
+ return err;
+}
+
+// Negotiates the output buffer count between the OMX component and the
+// native window: configures the window, queries the window's
+// min-undequeued-buffer requirement, raises the component's
+// nBufferCountActual accordingly (trying up to 3 extra buffers, backing off
+// on failure), and applies the final count to the window. Outputs the agreed
+// count/size/min-undequeued values. Tunneled playback short-circuits with a
+// zero buffer count, since the component manages buffers itself.
+status_t ACodec::configureOutputBuffersFromNativeWindow(
+ OMX_U32 *bufferCount, OMX_U32 *bufferSize,
+ OMX_U32 *minUndequeuedBuffers) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err == OK) {
+ err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
+ }
+ if (err != OK) {
+ mNativeWindowUsageBits = 0;
+ return err;
+ }
+
+ // Exits here for tunneled video playback codecs -- i.e. skips native window
+ // buffer allocation step as this is managed by the tunneled OMX component
+ // itself and explicitly sets def.nBufferCountActual to 0.
+ if (mTunneled) {
+ ALOGV("Tunneled Playback: skipping native window buffer allocation.");
+ def.nBufferCountActual = 0;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ *minUndequeuedBuffers = 0;
+ *bufferCount = 0;
+ *bufferSize = 0;
+ return err;
+ }
+
+ *minUndequeuedBuffers = 0;
+ err = mNativeWindow->query(
+ mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ (int *)minUndequeuedBuffers);
+
+ if (err != 0) {
+ ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+
+ // FIXME: assume that surface is controlled by app (native window
+ // returns the number for the case when surface is not controlled by app)
+ // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
+ // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
+
+ // Use conservative allocation while also trying to reduce starvation
+ //
+ // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
+ // minimum needed for the consumer to be able to work
+ // 2. try to allocate two (2) additional buffers to reduce starvation from
+ // the consumer
+ // plus an extra buffer to account for incorrect minUndequeuedBufs
+#ifdef BOARD_CANT_REALLOCATE_OMX_BUFFERS
+ // Some devices don't like to set OMX_IndexParamPortDefinition at this
+ // point (even with an unmodified def), so skip it if possible.
+ // This check was present in KitKat.
+ if (def.nBufferCountActual < def.nBufferCountMin + *minUndequeuedBuffers) {
+#endif
+ for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
+ OMX_U32 newBufferCount =
+ def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
+ def.nBufferCountActual = newBufferCount;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err == OK) {
+ *minUndequeuedBuffers += extraBuffers;
+ break;
+ }
+
+ ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
+ mComponentName.c_str(), newBufferCount, err);
+ /* exit condition */
+ if (extraBuffers == 0) {
+ return err;
+ }
+ }
+#ifdef BOARD_CANT_REALLOCATE_OMX_BUFFERS
+ }
+#endif
+
+ err = native_window_set_buffer_count(
+ mNativeWindow.get(), def.nBufferCountActual);
+
+ if (err != 0) {
+ ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+ -err);
+ return err;
+ }
+
+ *bufferCount = def.nBufferCountActual;
+ *bufferSize = def.nBufferSize;
+ return err;
+}
+
+// Non-metadata output path: dequeues the negotiated number of graphic
+// buffers from the native window, registers each with the OMX component via
+// useGraphicBuffer, then cancels back the window's required minimum of
+// undequeued buffers (or, on error, everything dequeued so far). Allocation
+// on the producer is enabled only for the duration of the dequeue phase.
+status_t ACodec::allocateOutputBuffersFromNativeWindow() {
+ OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
+ status_t err = configureOutputBuffersFromNativeWindow(
+ &bufferCount, &bufferSize, &minUndequeuedBuffers);
+ if (err != 0)
+ return err;
+ mNumUndequeuedBuffers = minUndequeuedBuffers;
+
+ if (!storingMetadataInDecodedBuffers()) {
+ static_cast<Surface*>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(true);
+ }
+
+ ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
+ "output port",
+ mComponentName.c_str(), bufferCount, bufferSize);
+
+ // Dequeue buffers and send them to OMX
+ for (OMX_U32 i = 0; i < bufferCount; i++) {
+ ANativeWindowBuffer *buf;
+ int fenceFd;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+ if (err != 0) {
+ ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ BufferInfo info;
+ info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mFenceFd = fenceFd;
+ info.mIsReadFence = false;
+ info.mRenderInfo = NULL;
+ info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
+ info.mGraphicBuffer = graphicBuffer;
+ mBuffers[kPortIndexOutput].push(info);
+
+ IOMX::buffer_id bufferId;
+ err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+ &bufferId);
+ if (err != 0) {
+ ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
+ "%d", i, err);
+ break;
+ }
+
+ mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;
+
+ ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
+ mComponentName.c_str(),
+ bufferId, graphicBuffer.get());
+ }
+
+ OMX_U32 cancelStart;
+ OMX_U32 cancelEnd;
+
+ if (err != 0) {
+ // If an error occurred while dequeuing we need to cancel any buffers
+ // that were dequeued.
+ cancelStart = 0;
+ cancelEnd = mBuffers[kPortIndexOutput].size();
+ } else {
+ // Return the required minimum undequeued buffers to the native window.
+ cancelStart = bufferCount - minUndequeuedBuffers;
+ cancelEnd = bufferCount;
+ }
+
+ for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ status_t error = cancelBufferToNativeWindow(info);
+ // preserve the first error; don't let a later cancel overwrite it
+ if (err == 0) {
+ err = error;
+ }
+ }
+ }
+
+ if (!storingMetadataInDecodedBuffers()) {
+ static_cast<Surface*>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(false);
+ }
+
+ return err;
+}
+
+// Metadata output path: allocates small shared-memory metadata records (one
+// per negotiated buffer) and registers them with the component via
+// useBuffer; graphic buffers are attached later at render time. Under the
+// legacy-adaptive experiment, additionally pre-dequeues, pre-registers and
+// cancels the actual graphic buffers up front.
+status_t ACodec::allocateOutputMetadataBuffers() {
+ OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
+ status_t err = configureOutputBuffersFromNativeWindow(
+ &bufferCount, &bufferSize, &minUndequeuedBuffers);
+ if (err != 0)
+ return err;
+ mNumUndequeuedBuffers = minUndequeuedBuffers;
+
+ ALOGV("[%s] Allocating %u meta buffers on output port",
+ mComponentName.c_str(), bufferCount);
+
+ size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
+ sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
+ size_t totalSize = bufferCount * bufSize;
+ mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");
+
+ // Dequeue buffers and send them to OMX
+ for (OMX_U32 i = 0; i < bufferCount; i++) {
+ BufferInfo info;
+ info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+ info.mFenceFd = -1;
+ info.mRenderInfo = NULL;
+ info.mGraphicBuffer = NULL;
+ info.mDequeuedAt = mDequeueCounter;
+
+ sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
+ if (mem == NULL || mem->pointer() == NULL) {
+ return NO_MEMORY;
+ }
+ if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
+ // mark the embedded fence as unset
+ ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
+ }
+ info.mData = new ABuffer(mem->pointer(), mem->size());
+
+ // we use useBuffer for metadata regardless of quirks
+ // NOTE(review): err from useBuffer is not checked inside this loop;
+ // a mid-loop failure only surfaces via the final return (and may be
+ // overwritten by the legacy path below) -- confirm this is intended.
+ err = mOMX->useBuffer(
+ mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
+
+ mBuffers[kPortIndexOutput].push(info);
+
+ ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
+ mComponentName.c_str(), info.mBufferID, mem->pointer());
+ }
+
+ if (mLegacyAdaptiveExperiment) {
+ // preallocate and preregister buffers
+ static_cast<Surface *>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(true);
+
+ ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
+ "output port",
+ mComponentName.c_str(), bufferCount, bufferSize);
+
+ // Dequeue buffers then cancel them all
+ for (OMX_U32 i = 0; i < bufferCount; i++) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+
+ ANativeWindowBuffer *buf;
+ int fenceFd;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+ if (err != 0) {
+ ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ mOMX->updateGraphicBufferInMeta(
+ mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
+ info->mGraphicBuffer = graphicBuffer;
+ }
+
+ for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ status_t error = cancelBufferToNativeWindow(info);
+ // preserve the first error; don't let a later cancel overwrite it
+ if (err == OK) {
+ err = error;
+ }
+ }
+ }
+
+ static_cast<Surface*>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(false);
+ }
+
+ mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
+ return err;
+}
+
+status_t ACodec::submitOutputMetadataBuffer() {
+ CHECK(storingMetadataInDecodedBuffers());
+ if (mMetadataBuffersToSubmit == 0)
+ return OK;
+
+ BufferInfo *info = dequeueBufferFromNativeWindow();
+ if (info == NULL) {
+ return ERROR_IO;
+ }
+
+ ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
+ mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());
+
+ --mMetadataBuffersToSubmit;
+ info->checkWriteFence("submitOutputMetadataBuffer");
+ status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err == OK) {
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ }
+
+ return err;
+}
+
+status_t ACodec::waitForFence(int fd, const char *dbg ) {
+ status_t res = OK;
+ if (fd >= 0) {
+ sp<Fence> fence = new Fence(fd);
+ res = fence->wait(IOMX::kFenceTimeoutMs);
+ ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
+ }
+ return res;
+}
+
+// static
+const char *ACodec::_asString(BufferInfo::Status s) {
+ switch (s) {
+ case BufferInfo::OWNED_BY_US: return "OUR";
+ case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
+ case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
+ case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
+ case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
+ case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
+ default: return "?";
+ }
+}
+
+void ACodec::dumpBuffers(OMX_U32 portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+ ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
+ portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ const BufferInfo &info = mBuffers[portIndex][i];
+ ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
+ i, info.mBufferID, info.mGraphicBuffer.get(),
+ info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
+ _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
+ }
+}
+
+#ifdef USE_SAMSUNG_COLORFORMAT
+void ACodec::setNativeWindowColorFormat(OMX_COLOR_FORMATTYPE &eNativeColorFormat)
+{
+ // In case of Samsung decoders, we set proper native color format for the Native Window
+ if (!strcasecmp(mComponentName.c_str(), "OMX.SEC.AVC.Decoder")
+ || !strcasecmp(mComponentName.c_str(), "OMX.SEC.FP.AVC.Decoder")
+ || !strcasecmp(mComponentName.c_str(), "OMX.SEC.MPEG4.Decoder")
+ || !strcasecmp(mComponentName.c_str(), "OMX.Exynos.AVC.Decoder")) {
+ switch (eNativeColorFormat) {
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ eNativeColorFormat = (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YCbCr_420_SP;
+ break;
+ case OMX_COLOR_FormatYUV420Planar:
+ default:
+ eNativeColorFormat = (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YCbCr_420_P;
+ break;
+ }
+ }
+}
+#endif
+
+status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
+ CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+ ALOGV("[%s] Calling cancelBuffer on buffer %u",
+ mComponentName.c_str(), info->mBufferID);
+
+ info->checkWriteFence("cancelBufferToNativeWindow");
+ int err = mNativeWindow->cancelBuffer(
+ mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+ info->mFenceFd = -1;
+
+ ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
+ mComponentName.c_str(), info->mBufferID);
+ // change ownership even if cancelBuffer fails
+ info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+
+ return err;
+}
+
+void ACodec::updateRenderInfoForDequeuedBuffer(
+ ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
+
+ info->mRenderInfo =
+ mRenderTracker.updateInfoForDequeuedBuffer(
+ buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
+
+ // check for any fences already signaled
+ notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
+}
+
+void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+ if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
+ mRenderTracker.dumpRenderQueue();
+ }
+}
+
+void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
+ std::list<FrameRenderTracker::Info> done =
+ mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
+
+ // unlink untracked frames
+ for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
+ it != done.cend(); ++it) {
+ ssize_t index = it->getIndex();
+ if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
+ mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
+ } else if (index >= 0) {
+ // THIS SHOULD NEVER HAPPEN
+ ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
+ }
+ }
+
+ if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
+ msg->post();
+ }
+}
+
+ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
+ ANativeWindowBuffer *buf;
+ CHECK(mNativeWindow.get() != NULL);
+
+ if (mTunneled) {
+ ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
+                " video playback mode!");
+ return NULL;
+ }
+
+ if (mFatalError) {
+ ALOGW("not dequeuing from native window due to fatal error");
+ return NULL;
+ }
+
+ int fenceFd = -1;
+ do {
+ status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+ if (err != 0) {
+ ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
+ return NULL;
+ }
+
+ bool stale = false;
+ for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
+ i--;
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+
+ if (info->mGraphicBuffer != NULL &&
+ info->mGraphicBuffer->handle == buf->handle) {
+ // Since consumers can attach buffers to BufferQueues, it is possible
+ // that a known yet stale buffer can return from a surface that we
+ // once used. We can simply ignore this as we have already dequeued
+ // this buffer properly. NOTE: this does not eliminate all cases,
+ // e.g. it is possible that we have queued the valid buffer to the
+ // NW, and a stale copy of the same buffer gets dequeued - which will
+ // be treated as the valid buffer by ACodec.
+ if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGI("dequeued stale buffer %p. discarding", buf);
+ stale = true;
+ break;
+ }
+
+ ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
+ updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
+ return info;
+ }
+ }
+
+ // It is also possible to receive a previously unregistered buffer
+ // in non-meta mode. These should be treated as stale buffers. The
+ // same is possible in meta mode, in which case, it will be treated
+ // as a normal buffer, which is not desirable.
+ // TODO: fix this.
+ if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
+ ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
+ stale = true;
+ }
+ if (stale) {
+ // TODO: detach stale buffer, but there is no API yet to do it.
+ buf = NULL;
+ }
+ } while (buf == NULL);
+
+ // get oldest undequeued buffer
+ BufferInfo *oldest = NULL;
+ for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
+ i--;
+ BufferInfo *info =
+ &mBuffers[kPortIndexOutput].editItemAt(i);
+ if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
+ (oldest == NULL ||
+ // avoid potential issues from counter rolling over
+ mDequeueCounter - info->mDequeuedAt >
+ mDequeueCounter - oldest->mDequeuedAt)) {
+ oldest = info;
+ }
+ }
+
+    // it is impossible to dequeue a buffer when there are no buffers with ANW
+ CHECK(oldest != NULL);
+ // it is impossible to dequeue an unknown buffer in non-meta mode, as the
+ // while loop above does not complete
+ CHECK(storingMetadataInDecodedBuffers());
+
+ // discard buffer in LRU info and replace with new buffer
+ oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
+ oldest->mStatus = BufferInfo::OWNED_BY_US;
+ oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
+ mRenderTracker.untrackFrame(oldest->mRenderInfo);
+ oldest->mRenderInfo = NULL;
+
+ mOMX->updateGraphicBufferInMeta(
+ mNode, kPortIndexOutput, oldest->mGraphicBuffer,
+ oldest->mBufferID);
+
+ if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
+ VideoGrallocMetadata *grallocMeta =
+ reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
+ ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
+ (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
+ mDequeueCounter - oldest->mDequeuedAt,
+ (void *)(uintptr_t)grallocMeta->pHandle,
+ oldest->mGraphicBuffer->handle, oldest->mData->base());
+ } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
+ VideoNativeMetadata *nativeMeta =
+ reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
+ ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
+ (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
+ mDequeueCounter - oldest->mDequeuedAt,
+ (void *)(uintptr_t)nativeMeta->pBuffer,
+ oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
+ }
+
+ updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
+ return oldest;
+}
+
+status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
+ status_t err = OK;
+ for (size_t i = mBuffers[portIndex].size(); i > 0;) {
+ i--;
+ status_t err2 = freeBuffer(portIndex, i);
+ if (err == OK) {
+ err = err2;
+ }
+ }
+
+ // clear mDealer even on an error
+ mDealer[portIndex].clear();
+ return err;
+}
+
+status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
+ status_t err = OK;
+ for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
+ i--;
+ BufferInfo *info =
+ &mBuffers[kPortIndexOutput].editItemAt(i);
+
+ // At this time some buffers may still be with the component
+ // or being drained.
+ if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
+ info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
+ status_t err2 = freeBuffer(kPortIndexOutput, i);
+ if (err == OK) {
+ err = err2;
+ }
+ }
+ }
+
+ return err;
+}
+
+status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
+ BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+ status_t err = OK;
+
+ // there should not be any fences in the metadata
+ MetadataBufferType type =
+ portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
+ if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
+ && info->mData->size() >= sizeof(VideoNativeMetadata)) {
+ int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
+ if (fenceFd >= 0) {
+ ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
+ fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
+ }
+ }
+
+ switch (info->mStatus) {
+ case BufferInfo::OWNED_BY_US:
+ if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
+ (void)cancelBufferToNativeWindow(info);
+ }
+ // fall through
+
+ case BufferInfo::OWNED_BY_NATIVE_WINDOW:
+ err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
+ break;
+
+ default:
+ ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
+ err = FAILED_TRANSACTION;
+ break;
+ }
+
+ if (info->mFenceFd >= 0) {
+ ::close(info->mFenceFd);
+ }
+
+ if (portIndex == kPortIndexOutput) {
+ mRenderTracker.untrackFrame(info->mRenderInfo, i);
+ info->mRenderInfo = NULL;
+ }
+
+ // remove buffer even if mOMX->freeBuffer fails
+ mBuffers[portIndex].removeAt(i);
+ return err;
+}
+
+ACodec::BufferInfo *ACodec::findBufferByID(
+ uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+ if (info->mBufferID == bufferID) {
+ if (index != NULL) {
+ *index = i;
+ }
+ return info;
+ }
+ }
+
+ ALOGE("Could not find buffer with ID %u", bufferID);
+ return NULL;
+}
+
+status_t ACodec::setComponentRole(
+ bool isEncoder, const char *mime) {
+ struct MimeToRole {
+ const char *mime;
+ const char *decoderRole;
+ const char *encoderRole;
+ };
+
+ static const MimeToRole kMimeToRole[] = {
+ { MEDIA_MIMETYPE_AUDIO_MPEG,
+ "audio_decoder.mp3", "audio_encoder.mp3" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+ "audio_decoder.mp1", "audio_encoder.mp1" },
+ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+ "audio_decoder.mp2", "audio_encoder.mp2" },
+ { MEDIA_MIMETYPE_AUDIO_AMR_NB,
+ "audio_decoder.amrnb", "audio_encoder.amrnb" },
+ { MEDIA_MIMETYPE_AUDIO_AMR_WB,
+ "audio_decoder.amrwb", "audio_encoder.amrwb" },
+ { MEDIA_MIMETYPE_AUDIO_AAC,
+ "audio_decoder.aac", "audio_encoder.aac" },
+ { MEDIA_MIMETYPE_AUDIO_VORBIS,
+ "audio_decoder.vorbis", "audio_encoder.vorbis" },
+ { MEDIA_MIMETYPE_AUDIO_OPUS,
+ "audio_decoder.opus", "audio_encoder.opus" },
+ { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+ "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
+ { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+ "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
+ { MEDIA_MIMETYPE_VIDEO_AVC,
+ "video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_HEVC,
+ "video_decoder.hevc", "video_encoder.hevc" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4,
+ "video_decoder.mpeg4", "video_encoder.mpeg4" },
+ { MEDIA_MIMETYPE_VIDEO_H263,
+ "video_decoder.h263", "video_encoder.h263" },
+ { MEDIA_MIMETYPE_VIDEO_VP8,
+ "video_decoder.vp8", "video_encoder.vp8" },
+ { MEDIA_MIMETYPE_VIDEO_VP9,
+ "video_decoder.vp9", "video_encoder.vp9" },
+ { MEDIA_MIMETYPE_AUDIO_RAW,
+ "audio_decoder.raw", "audio_encoder.raw" },
+ { MEDIA_MIMETYPE_AUDIO_FLAC,
+ "audio_decoder.flac", "audio_encoder.flac" },
+ { MEDIA_MIMETYPE_AUDIO_MSGSM,
+ "audio_decoder.gsm", "audio_encoder.gsm" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2,
+ "video_decoder.mpeg2", "video_encoder.mpeg2" },
+ { MEDIA_MIMETYPE_AUDIO_AC3,
+ "audio_decoder.ac3", "audio_encoder.ac3" },
+ { MEDIA_MIMETYPE_AUDIO_EAC3,
+ "audio_decoder.eac3", "audio_encoder.eac3" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4_DP,
+ "video_decoder.mpeg4", NULL },
+#ifdef DOLBY_UDC
+ { MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+ "audio_decoder.eac3_joc", NULL },
+#endif // DOLBY_END
+#ifdef DTS_CODEC_M_
+ { MEDIA_MIMETYPE_AUDIO_DTS,
+ "audio_decoder.dts", "audio_encoder.dts" },
+#endif
+ };
+
+ static const size_t kNumMimeToRole =
+ sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
+
+ size_t i;
+ for (i = 0; i < kNumMimeToRole; ++i) {
+ if (!strcasecmp(mime, kMimeToRole[i].mime)) {
+ break;
+ }
+ }
+
+ if (i == kNumMimeToRole) {
+ return FFMPEGSoftCodec::setSupportedRole(mOMX, mNode, isEncoder, mime);
+ }
+
+ const char *role =
+ isEncoder ? kMimeToRole[i].encoderRole
+ : kMimeToRole[i].decoderRole;
+
+ if (role != NULL) {
+ OMX_PARAM_COMPONENTROLETYPE roleParams;
+ InitOMXParams(&roleParams);
+
+ strncpy((char *)roleParams.cRole,
+ role, OMX_MAX_STRINGNAME_SIZE - 1);
+
+ roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+ status_t err = mOMX->setParameter(
+ mNode, OMX_IndexParamStandardComponentRole,
+ &roleParams, sizeof(roleParams));
+
+ if (err != OK) {
+ ALOGW("[%s] Failed to set standard component role '%s'.",
+ mComponentName.c_str(), role);
+
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t ACodec::configureCodec(
+ const char *mime, const sp<AMessage> &msg) {
+ int32_t encoder;
+ if (!msg->findInt32("encoder", &encoder)) {
+ encoder = false;
+ }
+
+ sp<AMessage> inputFormat = new AMessage();
+ sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged
+
+ mIsEncoder = encoder;
+
+ mInputMetadataType = kMetadataBufferTypeInvalid;
+ mOutputMetadataType = kMetadataBufferTypeInvalid;
+
+ status_t err = setComponentRole(encoder /* isEncoder */, mime);
+
+ if (err != OK) {
+ return err;
+ }
+
+ enableCustomAllocationMode(msg);
+
+ int32_t bitRate = 0;
+ // FLAC encoder doesn't need a bitrate, other encoders do
+ if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
+ && !msg->findInt32("bitrate", &bitRate)) {
+ return INVALID_OPERATION;
+ }
+
+ int32_t storeMeta;
+ if (encoder
+ && msg->findInt32("store-metadata-in-buffers", &storeMeta)
+ && storeMeta != 0) {
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
+ if (err != OK) {
+ ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ return err;
+ }
+ // For this specific case we could be using camera source even if storeMetaDataInBuffers
+ // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize.
+ if (mInputMetadataType == kMetadataBufferTypeGrallocSource) {
+ mInputMetadataType = kMetadataBufferTypeCameraSource;
+ }
+
+ uint32_t usageBits;
+ if (mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ &usageBits, sizeof(usageBits)) == OK) {
+ inputFormat->setInt32(
+ "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
+ }
+ }
+
+ int32_t prependSPSPPS = 0;
+ if (encoder
+ && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
+ && prependSPSPPS != 0) {
+ OMX_INDEXTYPE index;
+ err = mOMX->getExtensionIndex(
+ mNode,
+ "OMX.google.android.index.prependSPSPPSToIDRFrames",
+ &index);
+
+ if (err == OK) {
+ PrependSPSPPSToIDRFramesParams params;
+ InitOMXParams(&params);
+ params.bEnable = OMX_TRUE;
+
+ err = mOMX->setParameter(
+ mNode, index, &params, sizeof(params));
+ }
+
+ if (err != OK) {
+ ALOGE("Encoder could not be configured to emit SPS/PPS before "
+ "IDR frames. (err %d)", err);
+
+ return err;
+ }
+ }
+
+ // Only enable metadata mode on encoder output if encoder can prepend
+ // sps/pps to idr frames, since in metadata mode the bitstream is in an
+ // opaque handle, to which we don't have access.
+ int32_t video = !strncasecmp(mime, "video/", 6);
+ mIsVideo = video;
+ if (encoder && video) {
+ OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
+ && msg->findInt32("store-metadata-in-buffers-output", &storeMeta)
+ && storeMeta != 0);
+
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
+ if (err != OK) {
+ ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
+ mComponentName.c_str(), err);
+ }
+
+ if (!msg->findInt64(
+ "repeat-previous-frame-after",
+ &mRepeatFrameDelayUs)) {
+ mRepeatFrameDelayUs = -1ll;
+ }
+
+ if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
+ mMaxPtsGapUs = -1ll;
+ }
+
+ if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
+ mMaxFps = -1;
+ }
+
+ if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
+ mTimePerCaptureUs = -1ll;
+ }
+
+ if (!msg->findInt32(
+ "create-input-buffers-suspended",
+ (int32_t*)&mCreateInputBuffersSuspended)) {
+ mCreateInputBuffersSuspended = false;
+ }
+ }
+
+ // NOTE: we only use native window for video decoders
+ sp<RefBase> obj;
+ bool haveNativeWindow = msg->findObject("native-window", &obj)
+ && obj != NULL && video && !encoder;
+ mLegacyAdaptiveExperiment = false;
+ if (video && !encoder) {
+ inputFormat->setInt32("adaptive-playback", false);
+
+ int32_t usageProtected;
+ if (msg->findInt32("protected", &usageProtected) && usageProtected) {
+ if (!haveNativeWindow) {
+ ALOGE("protected output buffers must be sent to an ANativeWindow");
+ return PERMISSION_DENIED;
+ }
+ mFlags |= kFlagIsGrallocUsageProtected;
+ mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ }
+ }
+ if (haveNativeWindow) {
+ sp<ANativeWindow> nativeWindow =
+ static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));
+
+ // START of temporary support for automatic FRC - THIS WILL BE REMOVED
+ int32_t autoFrc;
+ if (msg->findInt32("auto-frc", &autoFrc)) {
+ bool enabled = autoFrc;
+ OMX_CONFIG_BOOLEANTYPE config;
+ InitOMXParams(&config);
+ config.bEnabled = (OMX_BOOL)enabled;
+ status_t temp = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
+ &config, sizeof(config));
+ if (temp == OK) {
+ outputFormat->setInt32("auto-frc", enabled);
+ } else if (enabled) {
+ ALOGI("codec does not support requested auto-frc (err %d)", temp);
+ }
+ }
+ // END of temporary support for automatic FRC
+
+ int32_t tunneled;
+ if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
+ tunneled != 0) {
+ ALOGI("Configuring TUNNELED video playback.");
+ mTunneled = true;
+
+ int32_t audioHwSync = 0;
+ if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
+ ALOGW("No Audio HW Sync provided for video tunnel");
+ }
+ err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
+ if (err != OK) {
+ ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
+ audioHwSync, nativeWindow.get());
+ return err;
+ }
+
+ int32_t maxWidth = 0, maxHeight = 0;
+ if (msg->findInt32("max-width", &maxWidth) &&
+ msg->findInt32("max-height", &maxHeight)) {
+
+ err = mOMX->prepareForAdaptivePlayback(
+ mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
+ if (err != OK) {
+ ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
+ mComponentName.c_str(), err);
+ // allow failure
+ err = OK;
+ } else {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+ }
+ } else {
+ ALOGV("Configuring CPU controlled video playback.");
+ mTunneled = false;
+
+            // Explicitly reset the sideband handle of the window for
+ // non-tunneled video in case the window was previously used
+ // for a tunneled video playback.
+ err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
+ if (err != OK) {
+ ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
+ return err;
+ }
+
+ // Always try to enable dynamic output buffers on native surface
+ err = mOMX->storeMetaDataInBuffers(
+ mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
+ if (err != OK) {
+ ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ // if adaptive playback has been requested, try JB fallback
+ // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
+ // LARGE MEMORY REQUIREMENT
+
+ // we will not do adaptive playback on software accessed
+ // surfaces as they never had to respond to changes in the
+ // crop window, and we don't trust that they will be able to.
+ int usageBits = 0;
+ bool canDoAdaptivePlayback;
+
+ if (nativeWindow->query(
+ nativeWindow.get(),
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+ &usageBits) != OK) {
+ canDoAdaptivePlayback = false;
+ } else {
+ canDoAdaptivePlayback =
+ (usageBits &
+ (GRALLOC_USAGE_SW_READ_MASK |
+ GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+ }
+
+ int32_t maxWidth = 0, maxHeight = 0;
+ if (canDoAdaptivePlayback &&
+ msg->findInt32("max-width", &maxWidth) &&
+ msg->findInt32("max-height", &maxHeight)) {
+ ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
+ mComponentName.c_str(), maxWidth, maxHeight);
+
+ err = mOMX->prepareForAdaptivePlayback(
+ mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
+ maxHeight);
+ ALOGW_IF(err != OK,
+ "[%s] prepareForAdaptivePlayback failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ if (err == OK) {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+ }
+ // allow failure
+ err = OK;
+ } else {
+ ALOGV("[%s] storeMetaDataInBuffers succeeded",
+ mComponentName.c_str());
+ CHECK(storingMetadataInDecodedBuffers());
+ mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled(
+ "legacy-adaptive", !msg->contains("no-experiments"));
+
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+
+ int32_t push;
+ if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
+ && push != 0) {
+ mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ }
+
+ int32_t val;
+ if (msg->findInt32("push-blank-buffers-on-switch", &val)
+ && val != 0) {
+ mFlags |= kFlagPushBlankBuffersToNativeWindowOnSwitch;
+ }
+ }
+
+ int32_t rotationDegrees;
+ if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
+ mRotationDegrees = rotationDegrees;
+ } else {
+ mRotationDegrees = 0;
+ }
+ }
+
+ if (video) {
+ // determine need for software renderer
+ bool usingSwRenderer = false;
+ if (haveNativeWindow && (mComponentName.startsWith("OMX.google.") ||
+ mComponentName.startsWith("OMX.ffmpeg."))) {
+ usingSwRenderer = true;
+ haveNativeWindow = false;
+ }
+
+ if (encoder) {
+ err = setupVideoEncoder(mime, msg);
+ } else {
+ err = setupVideoDecoder(mime, msg, haveNativeWindow);
+ }
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (haveNativeWindow) {
+ mNativeWindow = static_cast<Surface *>(obj.get());
+ }
+
+ // initialize native window now to get actual output format
+ // TODO: this is needed for some encoders even though they don't use native window
+ err = initNativeWindow();
+ if (err != OK) {
+ return err;
+ }
+
+ // fallback for devices that do not handle flex-YUV for native buffers
+ if (haveNativeWindow) {
+ int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
+ if (msg->findInt32("color-format", &requestedColorFormat) &&
+ requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
+ status_t err = getPortFormat(kPortIndexOutput, outputFormat);
+ if (err != OK) {
+ return err;
+ }
+ int32_t colorFormat = OMX_COLOR_FormatUnused;
+ OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
+ if (!outputFormat->findInt32("color-format", &colorFormat)) {
+                ALOGE("output port did not have a color format (wrong domain?)");
+ return BAD_VALUE;
+ }
+ ALOGD("[%s] Requested output format %#x and got %#x.",
+ mComponentName.c_str(), requestedColorFormat, colorFormat);
+ if (!isFlexibleColorFormat(
+ mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
+ || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
+ // device did not handle flex-YUV request for native window, fall back
+ // to SW renderer
+ ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
+ mNativeWindow.clear();
+ mNativeWindowUsageBits = 0;
+ haveNativeWindow = false;
+ usingSwRenderer = true;
+ if (storingMetadataInDecodedBuffers()) {
+ err = mOMX->storeMetaDataInBuffers(
+ mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
+ mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case
+ // TODO: implement adaptive-playback support for bytebuffer mode.
+ // This is done by SW codecs, but most HW codecs don't support it.
+ inputFormat->setInt32("adaptive-playback", false);
+ }
+ if (err == OK) {
+ err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+ }
+ if (mFlags & kFlagIsGrallocUsageProtected) {
+ // fallback is not supported for protected playback
+ err = PERMISSION_DENIED;
+ } else if (err == OK) {
+ err = setupVideoDecoder(mime, msg, false);
+ }
+ }
+ }
+ }
+
+ if (usingSwRenderer) {
+ outputFormat->setInt32("using-sw-renderer", 1);
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+ int32_t numChannels, sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ // Since we did not always check for these, leave them optional
+ // and have the decoder figure it all out.
+ err = OK;
+ } else {
+ int32_t bitsPerSample = 16;
+ msg->findInt32("bits-per-sample", &bitsPerSample);
+ err = setupRawAudioFormatInternal(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ sampleRate,
+ numChannels, bitsPerSample);
+ }
+#ifdef DTS_CODEC_M_
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_DTS)) {
+ ALOGV(" (DTS) mime == MEDIA_MIMETYPE_AUDIO_DTS");
+ int32_t numChannels, sampleRate;
+ int32_t bitWidth = 24;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ ALOGE(" (DTS) missing channel count or sample rate for DTS decoder");
+ err = INVALID_OPERATION;
+ } else {
+ ALOGI(" (DTS) bit width to setup decoder %d", bitWidth);
+ err = DTSUtils::setupDecoder(mOMX, mNode, sampleRate, bitWidth);
+ // Also update output format bit-width so ACodec client too gets to know
+ outputFormat->setInt32("bit-width", bitWidth);
+ }
+ if (err != OK) {
+ return err;
+ }
+#endif
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+ int32_t numChannels, sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ int32_t isADTS, aacProfile;
+ int32_t sbrMode;
+ int32_t maxOutputChannelCount;
+ int32_t pcmLimiterEnable;
+ int32_t bitsPerSample = 16;
+ drcParams_t drc;
+ if (!msg->findInt32("is-adts", &isADTS)) {
+ isADTS = 0;
+ }
+ if (!msg->findInt32("aac-profile", &aacProfile)) {
+ aacProfile = OMX_AUDIO_AACObjectNull;
+ }
+ if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
+ sbrMode = -1;
+ }
+
+ if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
+ maxOutputChannelCount = -1;
+ }
+ if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
+ // value is unknown
+ pcmLimiterEnable = -1;
+ }
+ if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
+ // value is unknown
+ drc.encodedTargetLevel = -1;
+ }
+ if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
+ // value is unknown
+ drc.drcCut = -1;
+ }
+ if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
+ // value is unknown
+ drc.drcBoost = -1;
+ }
+ if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
+ // value is unknown
+ drc.heavyCompression = -1;
+ }
+ if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
+ // value is unknown
+ drc.targetRefLevel = -1;
+ }
+ msg->findInt32("bits-per-sample", &bitsPerSample);
+
+ err = setupAACCodec(
+ encoder, numChannels, sampleRate, bitRate, aacProfile,
+ isADTS != 0, sbrMode, maxOutputChannelCount, drc,
+ pcmLimiterEnable, bitsPerSample);
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
+ err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
+ err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
+ || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
+ // These are PCM-like formats with a fixed sample rate but
+ // a variable number of channels.
+
+ int32_t numChannels;
+ if (!msg->findInt32("channel-count", &numChannels)) {
+ err = INVALID_OPERATION;
+ } else {
+ int32_t sampleRate;
+ if (!msg->findInt32("sample-rate", &sampleRate)) {
+ sampleRate = 8000;
+ }
+ err = setupG711Codec(encoder, sampleRate, numChannels);
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) && encoder) {
+ int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1;
+ if (encoder &&
+ (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate))) {
+ ALOGE("missing channel count or sample rate for FLAC encoder");
+ err = INVALID_OPERATION;
+ } else {
+ if (encoder) {
+ if (!msg->findInt32(
+ "complexity", &compressionLevel) &&
+ !msg->findInt32(
+ "flac-compression-level", &compressionLevel)) {
+ compressionLevel = 5; // default FLAC compression level
+ } else if (compressionLevel < 0) {
+ ALOGW("compression level %d outside [0..8] range, "
+ "using 0",
+ compressionLevel);
+ compressionLevel = 0;
+ } else if (compressionLevel > 8) {
+ ALOGW("compression level %d outside [0..8] range, "
+ "using 8",
+ compressionLevel);
+ compressionLevel = 8;
+ }
+ }
+ err = setupFlacCodec(
+ encoder, numChannels, sampleRate, compressionLevel);
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
+ int32_t numChannels, sampleRate;
+ if (encoder
+ || !msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ int32_t bitsPerSample = 16;
+ msg->findInt32("bits-per-sample", &bitsPerSample);
+ err = setupRawAudioFormatInternal(kPortIndexInput, sampleRate, numChannels, bitsPerSample);
+ }
+ } else if (!strncmp(mComponentName.c_str(), "OMX.google.", 11)
+ && !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
+ int32_t numChannels;
+ int32_t sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ int32_t bitsPerSample = 16;
+ msg->findInt32("bits-per-sample", &bitsPerSample);
+ err = setupAC3Codec(encoder, numChannels, sampleRate, bitsPerSample);
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
+ int32_t numChannels;
+ int32_t sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ int32_t bitsPerSample = 16;
+ msg->findInt32("bits-per-sample", &bitsPerSample);
+ err = setupEAC3Codec(encoder, numChannels, sampleRate, bitsPerSample);
+ }
+ } else {
+ if (!strncmp(mComponentName.c_str(), "OMX.ffmpeg.", 11) && !mIsEncoder) {
+ err = FFMPEGSoftCodec::setAudioFormat(
+ msg, mime, mOMX, mNode);
+ } else {
+ err = setupCustomCodec(err, mime, msg);
+ }
+ }
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
+ mEncoderDelay = 0;
+ }
+
+ if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
+ mEncoderPadding = 0;
+ }
+
+ if (msg->findInt32("channel-mask", &mChannelMask)) {
+ mChannelMaskPresent = true;
+ } else {
+ mChannelMaskPresent = false;
+ }
+
+ int32_t maxInputSize;
+ if (msg->findInt32("max-input-size", &maxInputSize)) {
+ err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
+ } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
+ err = setMinBufferSize(kPortIndexInput, 8192); // XXX
+ }
+
+ int32_t priority;
+ if (msg->findInt32("priority", &priority)) {
+ err = setPriority(priority);
+ }
+
+ int32_t rateInt = -1;
+ float rateFloat = -1;
+ if (!msg->findFloat("operating-rate", &rateFloat)) {
+ msg->findInt32("operating-rate", &rateInt);
+ rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
+ }
+ if (rateFloat > 0) {
+ err = setOperatingRate(rateFloat, video);
+ }
+
+ mBaseOutputFormat = outputFormat;
+
+ err = getPortFormat(kPortIndexInput, inputFormat);
+ if (err == OK) {
+ err = getPortFormat(kPortIndexOutput, outputFormat);
+ if (err == OK) {
+ mInputFormat = inputFormat;
+ mOutputFormat = outputFormat;
+ }
+ }
+ return err;
+}
+
+status_t ACodec::setPriority(int32_t priority) {
+ if (priority < 0) {
+ return BAD_VALUE;
+ }
+ OMX_PARAM_U32TYPE config;
+ InitOMXParams(&config);
+ config.nU32 = (OMX_U32)priority;
+ status_t temp = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
+ &config, sizeof(config));
+ if (temp != OK) {
+ ALOGI("codec does not support config priority (err %d)", temp);
+ }
+ return OK;
+}
+
+status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
+ if (rateFloat < 0) {
+ return BAD_VALUE;
+ }
+ OMX_U32 rate;
+ if (isVideo) {
+ if (rateFloat > 65535) {
+ return BAD_VALUE;
+ }
+ rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
+ } else {
+ if (rateFloat > UINT_MAX) {
+ return BAD_VALUE;
+ }
+ rate = (OMX_U32)(rateFloat);
+ }
+ OMX_PARAM_U32TYPE config;
+ InitOMXParams(&config);
+ config.nU32 = rate;
+ status_t err = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
+ &config, sizeof(config));
+ if (err != OK) {
+ ALOGI("codec does not support config operating rate (err %d)", err);
+ }
+ return OK;
+}
+
+status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = portIndex;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (def.nBufferSize >= size) {
+ return OK;
+ }
+
+ def.nBufferSize = size;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (def.nBufferSize < size) {
+ ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
+ return FAILED_TRANSACTION;
+ }
+
+ return OK;
+}
+
+status_t ACodec::selectAudioPortFormat(
+ OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
+ OMX_AUDIO_PARAM_PORTFORMATTYPE format;
+ InitOMXParams(&format);
+
+ format.nPortIndex = portIndex;
+ for (OMX_U32 index = 0;; ++index) {
+ format.nIndex = index;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPortFormat,
+ &format, sizeof(format));
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (format.eEncoding == desiredFormat) {
+ break;
+ }
+ }
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
+}
+
+status_t ACodec::setupAACCodec(
+ bool encoder, int32_t numChannels, int32_t sampleRate,
+ int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
+ int32_t maxOutputChannelCount, const drcParams_t& drc,
+ int32_t pcmLimiterEnable, int32_t bitsPerSample) {
+ if (encoder && isADTS) {
+ return -EINVAL;
+ }
+
+ status_t err = setupRawAudioFormatInternal(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ sampleRate,
+ numChannels,
+ bitsPerSample);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (encoder) {
+ err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.format.audio.bFlagErrorConcealment = OMX_TRUE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+ InitOMXParams(&profile);
+ profile.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+ if (err != OK) {
+ return err;
+ }
+
+ profile.nChannels = numChannels;
+
+ profile.eChannelMode =
+ (numChannels == 1)
+ ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;
+
+ profile.nSampleRate = sampleRate;
+ profile.nBitRate = bitRate;
+ profile.nAudioBandWidth = 0;
+ profile.nFrameLength = 0;
+ profile.nAACtools = OMX_AUDIO_AACToolAll;
+ profile.nAACERtools = OMX_AUDIO_AACERNone;
+ profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
+ profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
+ switch (sbrMode) {
+ case 0:
+ // disable sbr
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 1:
+ // enable single-rate sbr
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 2:
+ // enable dual-rate sbr
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case -1:
+ // enable both modes -> the codec will decide which mode should be used
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ default:
+ // unsupported sbr mode
+ return BAD_VALUE;
+ }
+
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+ if (err != OK) {
+ return err;
+ }
+
+ return err;
+ }
+
+ OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+ InitOMXParams(&profile);
+ profile.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+ if (err != OK) {
+ return err;
+ }
+
+ profile.nChannels = numChannels;
+ profile.nSampleRate = sampleRate;
+
+ profile.eAACStreamFormat =
+ isADTS
+ ? OMX_AUDIO_AACStreamFormatMP4ADTS
+ : OMX_AUDIO_AACStreamFormatMP4FF;
+
+ OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
+ InitOMXParams(&presentation);
+ presentation.nMaxOutputChannels = maxOutputChannelCount;
+ presentation.nDrcCut = drc.drcCut;
+ presentation.nDrcBoost = drc.drcBoost;
+ presentation.nHeavyCompression = drc.heavyCompression;
+ presentation.nTargetReferenceLevel = drc.targetRefLevel;
+ presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
+ presentation.nPCMLimiterEnable = pcmLimiterEnable;
+
+ status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ if (res == OK) {
+ // optional parameters, will not cause configuration failure
+ mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
+ &presentation, sizeof(presentation));
+ } else {
+ ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
+ }
+ return res;
+}
+
+status_t ACodec::setupAC3Codec(
+ bool encoder, int32_t numChannels, int32_t sampleRate, int32_t bitsPerSample) {
+ status_t err = setupRawAudioFormatInternal(
+ encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels, bitsPerSample);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (encoder) {
+ ALOGW("AC3 encoding is not supported.");
+ return INVALID_OPERATION;
+ }
+
+ OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &def,
+ sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.nChannels = numChannels;
+ def.nSampleRate = sampleRate;
+
+ return mOMX->setParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &def,
+ sizeof(def));
+}
+
+status_t ACodec::setupEAC3Codec(
+ bool encoder, int32_t numChannels, int32_t sampleRate, int32_t bitsPerSample) {
+ status_t err = setupRawAudioFormatInternal(
+ encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels, bitsPerSample);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (encoder) {
+ ALOGW("EAC3 encoding is not supported.");
+ return INVALID_OPERATION;
+ }
+
+ OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &def,
+ sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.nChannels = numChannels;
+ def.nSampleRate = sampleRate;
+
+ return mOMX->setParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &def,
+ sizeof(def));
+}
+
+static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
+ bool isAMRWB, int32_t bps) {
+ if (isAMRWB) {
+ if (bps <= 6600) {
+ return OMX_AUDIO_AMRBandModeWB0;
+ } else if (bps <= 8850) {
+ return OMX_AUDIO_AMRBandModeWB1;
+ } else if (bps <= 12650) {
+ return OMX_AUDIO_AMRBandModeWB2;
+ } else if (bps <= 14250) {
+ return OMX_AUDIO_AMRBandModeWB3;
+ } else if (bps <= 15850) {
+ return OMX_AUDIO_AMRBandModeWB4;
+ } else if (bps <= 18250) {
+ return OMX_AUDIO_AMRBandModeWB5;
+ } else if (bps <= 19850) {
+ return OMX_AUDIO_AMRBandModeWB6;
+ } else if (bps <= 23050) {
+ return OMX_AUDIO_AMRBandModeWB7;
+ }
+
+ // 23850 bps
+ return OMX_AUDIO_AMRBandModeWB8;
+ } else { // AMRNB
+ if (bps <= 4750) {
+ return OMX_AUDIO_AMRBandModeNB0;
+ } else if (bps <= 5150) {
+ return OMX_AUDIO_AMRBandModeNB1;
+ } else if (bps <= 5900) {
+ return OMX_AUDIO_AMRBandModeNB2;
+ } else if (bps <= 6700) {
+ return OMX_AUDIO_AMRBandModeNB3;
+ } else if (bps <= 7400) {
+ return OMX_AUDIO_AMRBandModeNB4;
+ } else if (bps <= 7950) {
+ return OMX_AUDIO_AMRBandModeNB5;
+ } else if (bps <= 10200) {
+ return OMX_AUDIO_AMRBandModeNB6;
+ }
+
+ // 12200 bps
+ return OMX_AUDIO_AMRBandModeNB7;
+ }
+}
+
+status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
+ OMX_AUDIO_PARAM_AMRTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;
+
+ status_t err =
+ mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
+ def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ return setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ isWAMR ? 16000 : 8000 /* sampleRate */,
+ 1 /* numChannels */);
+}
+
+status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
+ if (encoder) {
+ return INVALID_OPERATION;
+ }
+
+ return setupRawAudioFormat(
+ kPortIndexInput, sampleRate, numChannels);
+}
+
+status_t ACodec::setupFlacCodec(
+ bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {
+
+ if (encoder) {
+ OMX_AUDIO_PARAM_FLACTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ // configure compression level
+ status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
+ if (err != OK) {
+ ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
+ return err;
+ }
+ def.nCompressionLevel = compressionLevel;
+ err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
+ if (err != OK) {
+ ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
+ return err;
+ }
+ }
+
+ return setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ sampleRate,
+ numChannels);
+}
+
+status_t ACodec::setupRawAudioFormat(
+ OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
+ return setupRawAudioFormatInternal(portIndex, sampleRate, numChannels, 16);
+}
+
+status_t ACodec::setupRawAudioFormatInternal(
+ OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, int32_t bitsPerSample) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = portIndex;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
+ InitOMXParams(&pcmParams);
+ pcmParams.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+
+ if (err != OK) {
+ return err;
+ }
+
+ pcmParams.nChannels = numChannels;
+ pcmParams.eNumData = OMX_NumericalDataSigned;
+ pcmParams.bInterleaved = OMX_TRUE;
+ pcmParams.nBitPerSample = bitsPerSample;
+ pcmParams.nSamplingRate = sampleRate;
+ pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;
+
+ if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
+ return OMX_ErrorNone;
+ }
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+}
+
+status_t ACodec::configureTunneledVideoPlayback(
+ int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
+ native_handle_t* sidebandHandle;
+
+ status_t err = mOMX->configureVideoTunnelMode(
+ mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
+ if (err != OK) {
+ ALOGE("configureVideoTunnelMode failed! (err %d).", err);
+ return err;
+ }
+
+ err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
+ if (err != OK) {
+ ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
+ sidebandHandle, err);
+ return err;
+ }
+
+ return OK;
+}
+
+status_t ACodec::setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat,
+ bool usingNativeBuffers) {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+ InitOMXParams(&format);
+ format.nPortIndex = portIndex;
+ format.nIndex = 0;
+ bool found = false;
+
+ OMX_U32 index = 0;
+ for (;;) {
+ format.nIndex = index;
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoPortFormat,
+ &format, sizeof(format));
+
+ if (err != OK) {
+ return err;
+ }
+
+ // substitute back flexible color format to codec supported format
+ OMX_U32 flexibleEquivalent;
+ if (compressionFormat == OMX_VIDEO_CodingUnused
+ && isFlexibleColorFormat(
+ mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
+ && colorFormat == flexibleEquivalent) {
+ ALOGI("[%s] using color format %#x in place of %#x",
+ mComponentName.c_str(), format.eColorFormat, colorFormat);
+ colorFormat = format.eColorFormat;
+ }
+
+ // The following assertion is violated by TI's video decoder.
+ // CHECK_EQ(format.nIndex, index);
+
+ if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
+ if (portIndex == kPortIndexInput
+ && colorFormat == format.eColorFormat) {
+ // eCompressionFormat does not seem right.
+ found = true;
+ break;
+ }
+ if (portIndex == kPortIndexOutput
+ && compressionFormat == format.eCompressionFormat) {
+ // eColorFormat does not seem right.
+ found = true;
+ break;
+ }
+ }
+
+ if (format.eCompressionFormat == compressionFormat
+ && format.eColorFormat == colorFormat) {
+ found = true;
+ break;
+ }
+
+ ++index;
+ }
+
+ if (!found) {
+ return UNKNOWN_ERROR;
+ }
+
+ status_t err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoPortFormat,
+ &format, sizeof(format));
+
+ return err;
+}
+
+// Set optimal output format. OMX component lists output formats in the order
+// of preference, but this got more complicated since the introduction of flexible
+// YUV formats. We support a legacy behavior for applications that do not use
+// surface output, do not specify an output format, but expect a "usable" standard
+// OMX format. SW readable and standard formats must be flex-YUV.
+//
+// Suggested preference order:
+// - optimal format for texture rendering (mediaplayer behavior)
+// - optimal SW readable & texture renderable format (flex-YUV support)
+// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
+// - legacy "usable" standard formats
+//
+// For legacy support, we prefer a standard format, but will settle for a SW readable
+// flex-YUV format.
+status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
+ InitOMXParams(&format);
+ format.nPortIndex = kPortIndexOutput;
+
+ InitOMXParams(&legacyFormat);
+ // this field will change when we find a suitable legacy format
+ legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;
+
+ for (OMX_U32 index = 0; ; ++index) {
+ format.nIndex = index;
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoPortFormat,
+ &format, sizeof(format));
+ if (err != OK) {
+ // no more formats, pick legacy format if found
+ if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
+ memcpy(&format, &legacyFormat, sizeof(format));
+ break;
+ }
+ return err;
+ }
+ if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
+ return OMX_ErrorBadParameter;
+ }
+ if (!getLegacyFlexibleFormat) {
+ break;
+ }
+ // standard formats that were exposed to users before
+ if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
+ || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
+ || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
+ || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
+ || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
+ break;
+ }
+ // find best legacy non-standard format
+ OMX_U32 flexibleEquivalent;
+ if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
+ && isFlexibleColorFormat(
+ mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
+ &flexibleEquivalent)
+ && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
+ memcpy(&legacyFormat, &format, sizeof(format));
+ }
+ }
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamVideoPortFormat,
+ &format, sizeof(format));
+}
+
+static const struct VideoCodingMapEntry {
+ const char *mMime;
+ OMX_VIDEO_CODINGTYPE mVideoCodingType;
+} kVideoCodingMapEntry[] = {
+ { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
+ { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4_DP, OMX_VIDEO_CodingMPEG4 },
+ { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
+ { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
+ { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
+};
+
+status_t ACodec::GetVideoCodingTypeFromMime(
+ const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
+ for (size_t i = 0;
+ i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+ ++i) {
+ if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
+ *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
+ return OK;
+ }
+ }
+
+ *codingType = OMX_VIDEO_CodingUnused;
+
+ return ERROR_UNSUPPORTED;
+}
+
+static status_t GetMimeTypeForVideoCoding(
+ OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
+ for (size_t i = 0;
+ i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+ ++i) {
+ if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
+ *mime = kVideoCodingMapEntry[i].mMime;
+ return OK;
+ }
+ }
+
+ mime->clear();
+
+ return ERROR_UNSUPPORTED;
+}
+
+status_t ACodec::setupVideoDecoder(
+ const char *mime, const sp<AMessage> &msg, bool haveNativeWindow) {
+ int32_t width, height;
+ if (!msg->findInt32("width", &width)
+ || !msg->findInt32("height", &height)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_VIDEO_CODINGTYPE compressionFormat;
+ status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
+
+ err = FFMPEGSoftCodec::setVideoFormat(err,
+ msg, mime, mOMX, mNode, mIsEncoder, &compressionFormat,
+ mComponentName.c_str());
+ if (err != OK) {
+ return err;
+ }
+
+ err = setVideoPortFormatType(
+ kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t tmp;
+ if (msg->findInt32("color-format", &tmp)) {
+ OMX_COLOR_FORMATTYPE colorFormat =
+ static_cast<OMX_COLOR_FORMATTYPE>(tmp);
+ err = setVideoPortFormatType(
+ kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
+ if (err != OK) {
+ ALOGW("[%s] does not support color format %d",
+ mComponentName.c_str(), colorFormat);
+ err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
+ }
+ } else {
+ err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
+ }
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t frameRateInt;
+ float frameRateFloat;
+ if (!msg->findFloat("frame-rate", &frameRateFloat)) {
+ if (!msg->findInt32("frame-rate", &frameRateInt)) {
+ frameRateInt = -1;
+ }
+ frameRateFloat = (float)frameRateInt;
+ }
+
+ err = setVideoFormatOnPort(
+ kPortIndexInput, width, height, compressionFormat, frameRateFloat);
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = setVideoFormatOnPort(
+ kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return OK;
+}
+
+status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
+ int32_t tmp;
+ if (!msg->findInt32("color-format", &tmp)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_COLOR_FORMATTYPE colorFormat =
+ static_cast<OMX_COLOR_FORMATTYPE>(tmp);
+
+ status_t err = setVideoPortFormatType(
+ kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);
+
+ if (err != OK) {
+ ALOGE("[%s] does not support color format %d",
+ mComponentName.c_str(), colorFormat);
+
+ return err;
+ }
+
+ /* Input port configuration */
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ def.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t width, height, bitrate;
+ if (!msg->findInt32("width", &width)
+ || !msg->findInt32("height", &height)
+ || !msg->findInt32("bitrate", &bitrate)) {
+ return INVALID_OPERATION;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+
+ int32_t stride;
+ if (!msg->findInt32("stride", &stride)) {
+ stride = width;
+ }
+
+ video_def->nStride = stride;
+
+ int32_t sliceHeight;
+ if (!msg->findInt32("slice-height", &sliceHeight)) {
+ sliceHeight = height;
+ }
+
+ video_def->nSliceHeight = sliceHeight;
+
+ def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
+ }
+
+ video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
+ video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ // this is redundant as it was already set up in setVideoPortFormatType
+ // FIXME for now skip this only for flexible YUV formats
+ if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
+ video_def->eColorFormat = colorFormat;
+ }
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ ALOGE("[%s] failed to set input port definition parameters.",
+ mComponentName.c_str());
+
+ return err;
+ }
+
+ /* Output port configuration */
+
+ OMX_VIDEO_CODINGTYPE compressionFormat;
+ err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
+
+ err = FFMPEGSoftCodec::setVideoFormat(err,
+ msg, mime, mOMX, mNode, mIsEncoder, &compressionFormat,
+ mComponentName.c_str());
+ if (err != OK) {
+ ALOGE("Not a supported video mime type: %s", mime);
+ return err;
+ }
+
+ err = setVideoPortFormatType(
+ kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);
+
+ if (err != OK) {
+ ALOGE("[%s] does not support compression format %d",
+ mComponentName.c_str(), compressionFormat);
+
+ return err;
+ }
+
+ def.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+ video_def->xFramerate = 0;
+ video_def->nBitrate = bitrate;
+ video_def->eCompressionFormat = compressionFormat;
+ video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ if (err != OK) {
+ ALOGE("[%s] failed to set output port definition parameters.",
+ mComponentName.c_str());
+
+ return err;
+ }
+
+ switch (compressionFormat) {
+ case OMX_VIDEO_CodingMPEG4:
+ err = setupMPEG4EncoderParameters(msg);
+ break;
+
+ case OMX_VIDEO_CodingH263:
+ err = setupH263EncoderParameters(msg);
+ break;
+
+ case OMX_VIDEO_CodingAVC:
+ err = setupAVCEncoderParameters(msg);
+ break;
+
+ case OMX_VIDEO_CodingHEVC:
+ err = setupHEVCEncoderParameters(msg);
+ break;
+
+ case OMX_VIDEO_CodingVP8:
+ case OMX_VIDEO_CodingVP9:
+ err = setupVPXEncoderParameters(msg);
+ break;
+
+ default:
+ break;
+ }
+
+ if (err == OK) {
+ ALOGI("setupVideoEncoder succeeded");
+ }
+
+ return err;
+}
+
+status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
+ OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);
+
+ if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
+ params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
+ int32_t mbs;
+ if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
+ return INVALID_OPERATION;
+ }
+ params.nCirMBs = mbs;
+ }
+
+ if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
+ params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
+ int32_t mbs;
+ if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
+ return INVALID_OPERATION;
+ }
+ params.nAirMBs = mbs;
+
+ int32_t ref;
+ if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
+ return INVALID_OPERATION;
+ }
+ params.nAirRef = ref;
+ }
+
+ status_t err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoIntraRefresh,
+ &params, sizeof(params));
+ return err;
+}
+
+static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
+ if (iFramesInterval < 0) {
+ return 0xFFFFFFFF;
+ } else if (iFramesInterval == 0) {
+ return 0;
+ }
+ OMX_U32 ret = frameRate * iFramesInterval;
+ return ret;
+}
+
+static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
+ int32_t tmp;
+ if (!msg->findInt32("bitrate-mode", &tmp)) {
+ return OMX_Video_ControlRateVariable;
+ }
+
+ return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
+}
+
+status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
+ InitOMXParams(&mpeg4type);
+ mpeg4type.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ mpeg4type.nSliceHeaderSpacing = 0;
+ mpeg4type.bSVH = OMX_FALSE;
+ mpeg4type.bGov = OMX_FALSE;
+
+ mpeg4type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+ if (mpeg4type.nPFrames == 0) {
+ mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ mpeg4type.nBFrames = 0;
+ mpeg4type.nIDCVLCThreshold = 0;
+ mpeg4type.bACPred = OMX_TRUE;
+ mpeg4type.nMaxPacketSize = 256;
+ mpeg4type.nTimeIncRes = 1000;
+ mpeg4type.nHeaderExtension = 0;
+ mpeg4type.bReversibleVLC = OMX_FALSE;
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
+ mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
+ }
+
+ setBFrames(&mpeg4type);
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = configureBitrate(bitrate, bitrateMode);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return setupErrorCorrectionParameters();
+}
+
+status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ OMX_VIDEO_PARAM_H263TYPE h263type;
+ InitOMXParams(&h263type);
+ h263type.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ h263type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+ if (h263type.nPFrames == 0) {
+ h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ h263type.nBFrames = 0;
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+
+ if (err != OK) {
+ return err;
+ }
+
+ h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
+ h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
+ }
+
+ h263type.bPLUSPTYPEAllowed = OMX_FALSE;
+ h263type.bForceRoundingTypeToZero = OMX_FALSE;
+ h263type.nPictureHeaderRepetition = 0;
+ h263type.nGOBHeaderInterval = 0;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = configureBitrate(bitrate, bitrateMode);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return setupErrorCorrectionParameters();
+}
+
+// static
+int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
+ int width, int height, int rate, int bitrate,
+ OMX_VIDEO_AVCPROFILETYPE profile) {
+ // convert bitrate to main/baseline profile kbps equivalent
+ switch (profile) {
+ case OMX_VIDEO_AVCProfileHigh10:
+ bitrate = divUp(bitrate, 3000); break;
+ case OMX_VIDEO_AVCProfileHigh:
+ bitrate = divUp(bitrate, 1250); break;
+ default:
+ bitrate = divUp(bitrate, 1000); break;
+ }
+
+ // convert size and rate to MBs
+ width = divUp(width, 16);
+ height = divUp(height, 16);
+ int mbs = width * height;
+ rate *= mbs;
+ int maxDimension = max(width, height);
+
+ static const int limits[][5] = {
+ /* MBps MB dim bitrate level */
+ { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 },
+ { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b },
+ { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 },
+ { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 },
+ { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 },
+ { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 },
+ { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 },
+ { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 },
+ { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 },
+ { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 },
+ { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 },
+ { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 },
+ { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 },
+ { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 },
+ { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 },
+ { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
+ { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
+ };
+
+ for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
+ const int (&limit)[5] = limits[i];
+ if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
+ && bitrate <= limit[3]) {
+ return limit[4];
+ }
+ }
+ return 0;
+}
+
+status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ status_t err = OK;
+ int32_t intraRefreshMode = 0;
+ if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
+ err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
+ if (err != OK) {
+ ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x",
+ err, intraRefreshMode);
+ return err;
+ }
+ }
+
+ OMX_VIDEO_PARAM_AVCTYPE h264type;
+ InitOMXParams(&h264type);
+ h264type.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ h264type.nAllowedPictureTypes =
+ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+
+ if (err != OK) {
+ ALOGE("%s does not support profile %x @ level %x",
+ mComponentName.c_str(), profile, level);
+ return err;
+ }
+
+ h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
+ h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
+ }
+
+ // XXX
+ // Allow higher profiles to be set since the encoder seems to support
+#if 0
+ if (h264type.eProfile != OMX_VIDEO_AVCProfileBaseline) {
+ ALOGW("Use baseline profile instead of %d for AVC recording",
+ h264type.eProfile);
+ h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
+ }
+#endif
+
+ if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
+ h264type.nSliceHeaderSpacing = 0;
+ h264type.bUseHadamard = OMX_TRUE;
+ h264type.nRefFrames = 1;
+ h264type.nBFrames = 0;
+ h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+ if (h264type.nPFrames == 0) {
+ h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+ }
+ h264type.nRefIdx10ActiveMinus1 = 0;
+ h264type.nRefIdx11ActiveMinus1 = 0;
+ h264type.bEntropyCodingCABAC = OMX_FALSE;
+ h264type.bWeightedPPrediction = OMX_FALSE;
+ h264type.bconstIpred = OMX_FALSE;
+ h264type.bDirect8x8Inference = OMX_FALSE;
+ h264type.bDirectSpatialTemporal = OMX_FALSE;
+ h264type.nCabacInitIdc = 0;
+ }
+
+ setBFrames(&h264type, iFrameInterval, frameRate);
+ if (h264type.nBFrames != 0) {
+ h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
+ }
+
+ h264type.bEnableUEP = OMX_FALSE;
+ h264type.bEnableFMO = OMX_FALSE;
+ h264type.bEnableASO = OMX_FALSE;
+ h264type.bEnableRS = OMX_FALSE;
+ h264type.bFrameMBsOnly = OMX_TRUE;
+ h264type.bMBAFF = OMX_FALSE;
+ h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+
+ if (err != OK) {
+ return err;
+ }
+
+ return configureBitrate(bitrate, bitrateMode);
+}
+
+status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ AVUtils::get()->setIntraPeriod(setPFramesSpacing(iFrameInterval, frameRate), 0, mOMX, mNode);
+
+ OMX_VIDEO_PARAM_HEVCTYPE hevcType;
+ InitOMXParams(&hevcType);
+ hevcType.nPortIndex = kPortIndexOutput;
+
+ status_t err = OK;
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+ if (err != OK) {
+ return err;
+ }
+
+ hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
+ hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
+ }
+
+ // TODO: Need OMX structure definition for setting iFrameInterval
+
+ err = mOMX->setParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
+ if (err != OK) {
+ return err;
+ }
+
+ return configureBitrate(bitrate, bitrateMode);
+}
+
+status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate;
+ int32_t iFrameInterval = 0;
+ size_t tsLayers = 0;
+ OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
+ OMX_VIDEO_VPXTemporalLayerPatternNone;
+ static const uint32_t kVp8LayerRateAlloction
+ [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
+ [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
+ {100, 100, 100}, // 1 layer
+ { 60, 100, 100}, // 2 layers {60%, 40%}
+ { 40, 60, 100}, // 3 layers {40%, 20%, 40%}
+ };
+ if (!msg->findInt32("bitrate", &bitrate)) {
+ return INVALID_OPERATION;
+ }
+ msg->findInt32("i-frame-interval", &iFrameInterval);
+
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ AString tsSchema;
+ if (msg->findString("ts-schema", &tsSchema)) {
+ if (tsSchema == "webrtc.vp8.1-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 1;
+ } else if (tsSchema == "webrtc.vp8.2-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 2;
+ } else if (tsSchema == "webrtc.vp8.3-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 3;
+ } else {
+ ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str());
+ }
+ }
+
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
+ InitOMXParams(&vp8type);
+ vp8type.nPortIndex = kPortIndexOutput;
+ status_t err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type, sizeof(vp8type));
+
+ if (err == OK) {
+ if (iFrameInterval > 0) {
+ vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);
+ }
+ vp8type.eTemporalPattern = pattern;
+ vp8type.nTemporalLayerCount = tsLayers;
+ if (tsLayers > 0) {
+ for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
+ vp8type.nTemporalLayerBitrateRatio[i] =
+ kVp8LayerRateAlloction[tsLayers - 1][i];
+ }
+ }
+ if (bitrateMode == OMX_Video_ControlRateConstant) {
+ vp8type.nMinQuantizer = 2;
+ vp8type.nMaxQuantizer = 63;
+ }
+
+ err = mOMX->setParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type, sizeof(vp8type));
+ if (err != OK) {
+ ALOGW("Extended VP8 parameters set failed: %d", err);
+ }
+ }
+
+ return configureBitrate(bitrate, bitrateMode);
+}
+
+status_t ACodec::verifySupportForProfileAndLevel(
+ int32_t profile, int32_t level) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ for (params.nProfileIndex = 0;; ++params.nProfileIndex) {
+ status_t err = mOMX->getParameter(
+ mNode,
+ OMX_IndexParamVideoProfileLevelQuerySupported,
+ &params,
+ sizeof(params));
+
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
+ int32_t supportedLevel = static_cast<int32_t>(params.eLevel);
+
+ if (profile == supportedProfile && level <= supportedLevel) {
+ return OK;
+ }
+ }
+}
+
+status_t ACodec::configureBitrate(
+ int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
+ OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
+ InitOMXParams(&bitrateType);
+ bitrateType.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoBitrate,
+ &bitrateType, sizeof(bitrateType));
+
+ if (err != OK) {
+ return err;
+ }
+
+ bitrateType.eControlRate = bitrateMode;
+ bitrateType.nTargetBitrate = bitrate;
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamVideoBitrate,
+ &bitrateType, sizeof(bitrateType));
+}
+
+status_t ACodec::setupErrorCorrectionParameters() {
+ OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
+ InitOMXParams(&errorCorrectionType);
+ errorCorrectionType.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoErrorCorrection,
+ &errorCorrectionType, sizeof(errorCorrectionType));
+
+ if (err != OK) {
+ return OK; // Optional feature. Ignore this failure
+ }
+
+ errorCorrectionType.bEnableHEC = OMX_FALSE;
+ errorCorrectionType.bEnableResync = OMX_TRUE;
+ errorCorrectionType.nResynchMarkerSpacing = 0;
+ errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
+ errorCorrectionType.bEnableRVLC = OMX_FALSE;
+
+ return mOMX->setParameter(
+ mNode, OMX_IndexParamVideoErrorCorrection,
+ &errorCorrectionType, sizeof(errorCorrectionType));
+}
+
+status_t ACodec::setVideoFormatOnPort(
+ OMX_U32 portIndex,
+ int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
+ float frameRate) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = portIndex;
+
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ if (portIndex == kPortIndexInput) {
+ // XXX Need a (much) better heuristic to compute input buffer sizes.
+ const size_t X = 64 * 1024;
+ if (def.nBufferSize < X) {
+ def.nBufferSize = X;
+ }
+ }
+
+ if (def.eDomain != OMX_PortDomainVideo) {
+ ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
+ return FAILED_TRANSACTION;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+
+ if (portIndex == kPortIndexInput) {
+ video_def->eCompressionFormat = compressionFormat;
+ video_def->eColorFormat = OMX_COLOR_FormatUnused;
+ if (frameRate >= 0) {
+ video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
+ }
+ }
+
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ return err;
+}
+
+status_t ACodec::initNativeWindow() {
+ if (mNativeWindow != NULL) {
+ return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+ }
+
+ mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+ return OK;
+}
+
+size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
+ size_t n = 0;
+
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ const BufferInfo &info = mBuffers[portIndex].itemAt(i);
+
+ if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
+ ++n;
+ }
+ }
+
+ return n;
+}
+
+size_t ACodec::countBuffersOwnedByNativeWindow() const {
+ size_t n = 0;
+
+ for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
+ const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);
+
+ if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ++n;
+ }
+ }
+
+ return n;
+}
+
+void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
+ if (mNativeWindow == NULL) {
+ return;
+ }
+
+ while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
+ && dequeueBufferFromNativeWindow() != NULL) {
+ // these buffers will be submitted as regular buffers; account for this
+ if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
+ --mMetadataBuffersToSubmit;
+ }
+ }
+}
+
+bool ACodec::allYourBuffersAreBelongToUs(
+ OMX_U32 portIndex) {
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+ if (info->mStatus != BufferInfo::OWNED_BY_US
+ && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGV("[%s] Buffer %u on port %u still has status %d",
+ mComponentName.c_str(),
+ info->mBufferID, portIndex, info->mStatus);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool ACodec::allYourBuffersAreBelongToUs() {
+ return allYourBuffersAreBelongToUs(kPortIndexInput)
+ && allYourBuffersAreBelongToUs(kPortIndexOutput);
+}
+
+void ACodec::deferMessage(const sp<AMessage> &msg) {
+ mDeferredQueue.push_back(msg);
+}
+
+void ACodec::processDeferredMessages() {
+ List<sp<AMessage> > queue = mDeferredQueue;
+ mDeferredQueue.clear();
+
+ List<sp<AMessage> >::iterator it = queue.begin();
+ while (it != queue.end()) {
+ onMessageReceived(*it++);
+ }
+}
+
+// static
+bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams &params) {
+ MediaImage &image = params.sMediaImage;
+ memset(&image, 0, sizeof(image));
+
+ image.mType = MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
+ image.mNumPlanes = 0;
+
+ const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
+ image.mWidth = params.nFrameWidth;
+ image.mHeight = params.nFrameHeight;
+
+ // only supporting YUV420
+ if (fmt != OMX_COLOR_FormatYUV420Planar &&
+ fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
+ fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
+ fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
+ fmt != OMX_TI_COLOR_FormatYUV420PackedSemiPlanar &&
+ fmt != HAL_PIXEL_FORMAT_YV12) {
+ ALOGW("do not know color format 0x%x = %d", fmt, fmt);
+ return false;
+ }
+
+ // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
+ if (params.nStride != 0 && params.nSliceHeight == 0) {
+ ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
+ params.nFrameHeight);
+ params.nSliceHeight = params.nFrameHeight;
+ }
+
+ // we need stride and slice-height to be non-zero
+ if (params.nStride == 0 || params.nSliceHeight == 0) {
+ ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
+ fmt, fmt, params.nStride, params.nSliceHeight);
+ return false;
+ }
+
+ // set-up YUV format
+ image.mType = MediaImage::MEDIA_IMAGE_TYPE_YUV;
+ image.mNumPlanes = 3;
+ image.mBitDepth = 8;
+ image.mPlane[image.Y].mOffset = 0;
+ image.mPlane[image.Y].mColInc = 1;
+ image.mPlane[image.Y].mRowInc = params.nStride;
+ image.mPlane[image.Y].mHorizSubsampling = 1;
+ image.mPlane[image.Y].mVertSubsampling = 1;
+
+ switch ((int)fmt) {
+ case HAL_PIXEL_FORMAT_YV12:
+ if (params.bUsingNativeBuffers) {
+ size_t ystride = align(params.nStride, 16);
+ size_t cstride = align(params.nStride / 2, 16);
+ image.mPlane[image.Y].mRowInc = ystride;
+
+ image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
+ image.mPlane[image.V].mColInc = 1;
+ image.mPlane[image.V].mRowInc = cstride;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+
+ image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
+ + (cstride * params.nSliceHeight / 2);
+ image.mPlane[image.U].mColInc = 1;
+ image.mPlane[image.U].mRowInc = cstride;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+ break;
+ } else {
+ // fall through as YV12 is used for YUV420Planar by some codecs
+ }
+
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatYUV420PackedPlanar:
+ image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
+ image.mPlane[image.U].mColInc = 1;
+ image.mPlane[image.U].mRowInc = params.nStride / 2;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+
+ image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
+ + (params.nStride * params.nSliceHeight / 4);
+ image.mPlane[image.V].mColInc = 1;
+ image.mPlane[image.V].mRowInc = params.nStride / 2;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+ break;
+
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
+ case OMX_COLOR_FormatYUV420PackedSemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ // NV12
+ image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
+ image.mPlane[image.U].mColInc = 2;
+ image.mPlane[image.U].mRowInc = params.nStride;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+
+ image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
+ image.mPlane[image.V].mColInc = 2;
+ image.mPlane[image.V].mRowInc = params.nStride;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+ break;
+
+ default:
+ TRESPASS();
+ }
+ return true;
+}
+
+// static
+bool ACodec::describeColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ DescribeColorFormatParams &describeParams)
+{
+ OMX_INDEXTYPE describeColorFormatIndex;
+ if (omx->getExtensionIndex(
+ node, "OMX.google.android.index.describeColorFormat",
+ &describeColorFormatIndex) != OK ||
+ omx->getParameter(
+ node, describeColorFormatIndex,
+ &describeParams, sizeof(describeParams)) != OK) {
+ return describeDefaultColorFormat(describeParams);
+ }
+ return describeParams.sMediaImage.mType !=
+ MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
+}
+
+// static
+bool ACodec::isFlexibleColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
+ DescribeColorFormatParams describeParams;
+ InitOMXParams(&describeParams);
+ describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
+ // reasonable dummy values
+ describeParams.nFrameWidth = 128;
+ describeParams.nFrameHeight = 128;
+ describeParams.nStride = 128;
+ describeParams.nSliceHeight = 128;
+ describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
+
+ CHECK(flexibleEquivalent != NULL);
+
+ if (!describeColorFormat(omx, node, describeParams)) {
+ return false;
+ }
+
+ const MediaImage &img = describeParams.sMediaImage;
+ if (img.mType == MediaImage::MEDIA_IMAGE_TYPE_YUV) {
+ if (img.mNumPlanes != 3 ||
+ img.mPlane[img.Y].mHorizSubsampling != 1 ||
+ img.mPlane[img.Y].mVertSubsampling != 1) {
+ return false;
+ }
+
+ // YUV 420
+ if (img.mPlane[img.U].mHorizSubsampling == 2
+ && img.mPlane[img.U].mVertSubsampling == 2
+ && img.mPlane[img.V].mHorizSubsampling == 2
+ && img.mPlane[img.V].mVertSubsampling == 2) {
+ // possible flexible YUV420 format
+ if (img.mBitDepth <= 8) {
+ *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
+ const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = portIndex;
+
+ status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
+ ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
+ return BAD_VALUE;
+ }
+
+ switch (def.eDomain) {
+ case OMX_PortDomainVideo:
+ {
+ OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
+ switch ((int)videoDef->eCompressionFormat) {
+ case OMX_VIDEO_CodingUnused:
+ {
+ CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
+ notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+
+ notify->setInt32("stride", videoDef->nStride);
+ notify->setInt32("slice-height", videoDef->nSliceHeight);
+ notify->setInt32("color-format", videoDef->eColorFormat);
+
+ if (mNativeWindow == NULL) {
+ DescribeColorFormatParams describeParams;
+ InitOMXParams(&describeParams);
+ describeParams.eColorFormat = videoDef->eColorFormat;
+ describeParams.nFrameWidth = videoDef->nFrameWidth;
+ describeParams.nFrameHeight = videoDef->nFrameHeight;
+ describeParams.nStride = videoDef->nStride;
+ describeParams.nSliceHeight = videoDef->nSliceHeight;
+ describeParams.bUsingNativeBuffers = OMX_FALSE;
+
+ if (describeColorFormat(mOMX, mNode, describeParams)) {
+ notify->setBuffer(
+ "image-data",
+ ABuffer::CreateAsCopy(
+ &describeParams.sMediaImage,
+ sizeof(describeParams.sMediaImage)));
+
+ MediaImage *img = &describeParams.sMediaImage;
+ ALOGV("[%s] MediaImage { F(%ux%u) @%u+%u+%u @%u+%u+%u @%u+%u+%u }",
+ mComponentName.c_str(), img->mWidth, img->mHeight,
+ img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc,
+ img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc,
+ img->mPlane[2].mOffset, img->mPlane[2].mColInc, img->mPlane[2].mRowInc);
+ }
+ }
+
+ if (portIndex != kPortIndexOutput) {
+ // TODO: also get input crop
+ break;
+ }
+
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = portIndex;
+
+ if (mOMX->getConfig(
+ mNode,
+ (portIndex == kPortIndexOutput ?
+ OMX_IndexConfigCommonOutputCrop :
+ OMX_IndexConfigCommonInputCrop),
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
+#ifdef MTK_HARDWARE
+ if (!strncmp(mComponentName.c_str(), "OMX.MTK.", 8) && mOMX->getConfig(
+ mNode, (OMX_INDEXTYPE) 0x7f00001c /* OMX_IndexVendorMtkOmxVdecGetCropInfo */,
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
+#endif
+
+ if (rect.nLeft < 0 ||
+ rect.nTop < 0 ||
+ rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
+ rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
+ ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
+ rect.nLeft, rect.nTop,
+ rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
+ videoDef->nFrameWidth, videoDef->nFrameHeight);
+ return BAD_VALUE;
+ }
+
+ notify->setRect(
+ "crop",
+ rect.nLeft,
+ rect.nTop,
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ break;
+ }
+
+ case OMX_VIDEO_CodingVP8:
+ case OMX_VIDEO_CodingVP9:
+ {
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
+ InitOMXParams(&vp8type);
+ vp8type.nPortIndex = kPortIndexOutput;
+ status_t err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type,
+ sizeof(vp8type));
+
+ if (err == OK) {
+ AString tsSchema = "none";
+ if (vp8type.eTemporalPattern
+ == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ switch (vp8type.nTemporalLayerCount) {
+ case 1:
+ {
+ tsSchema = "webrtc.vp8.1-layer";
+ break;
+ }
+ case 2:
+ {
+ tsSchema = "webrtc.vp8.2-layer";
+ break;
+ }
+ case 3:
+ {
+ tsSchema = "webrtc.vp8.3-layer";
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ }
+ }
+ notify->setString("ts-schema", tsSchema);
+ }
+ // Fall through to set up mime.
+ }
+
+ default:
+ {
+ if (!strncmp(mComponentName.c_str(), "OMX.ffmpeg.", 11)) {
+ err = FFMPEGSoftCodec::getVideoPortFormat(portIndex,
+ (int)videoDef->eCompressionFormat, notify, mOMX, mNode);
+ if (err == OK) {
+ break;
+ }
+ }
+
+ if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
+ // should be CodingUnused
+ ALOGE("Raw port video compression format is %s(%d)",
+ asString(videoDef->eCompressionFormat),
+ videoDef->eCompressionFormat);
+ return BAD_VALUE;
+ }
+ AString mime;
+ if (GetMimeTypeForVideoCoding(
+ videoDef->eCompressionFormat, &mime) != OK) {
+ notify->setString("mime", "application/octet-stream");
+ } else {
+ notify->setString("mime", mime.c_str());
+ }
+ break;
+ }
+ }
+ notify->setInt32("width", videoDef->nFrameWidth);
+ notify->setInt32("height", videoDef->nFrameHeight);
+ ALOGV("[%s] %s format is %s", mComponentName.c_str(),
+ portIndex == kPortIndexInput ? "input" : "output",
+ notify->debugString().c_str());
+
+ break;
+ }
+
+ case OMX_PortDomainAudio:
+ {
+ OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
+
+ switch ((int)audioDef->eEncoding) {
+ case OMX_AUDIO_CodingPCM:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ if (params.nChannels <= 0
+ || (params.nChannels != 1 && !params.bInterleaved)
+ || (params.nBitPerSample != 16u
+ && params.nBitPerSample != 24u
+ && params.nBitPerSample != 32u
+ && params.nBitPerSample != 8u)// we support 8/16/24/32 bit s/w decoding
+ || params.eNumData != OMX_NumericalDataSigned
+ || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
+ ALOGE("unsupported PCM port: %u channels%s, %u-bit, %s(%d), %s(%d) mode ",
+ params.nChannels,
+ params.bInterleaved ? " interleaved" : "",
+ params.nBitPerSample,
+ asString(params.eNumData), params.eNumData,
+ asString(params.ePCMMode), params.ePCMMode);
+ return FAILED_TRANSACTION;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSamplingRate);
+ notify->setInt32("bits-per-sample", params.nBitPerSample);
+ notify->setInt32("pcm-format", getPCMFormat(notify));
+
+ if (mChannelMaskPresent) {
+ notify->setInt32("channel-mask", mChannelMask);
+ }
+ break;
+ }
+
+ case OMX_AUDIO_CodingAAC:
+ {
+ OMX_AUDIO_PARAM_AACPROFILETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAMR:
+ {
+ OMX_AUDIO_PARAM_AMRTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setInt32("channel-count", 1);
+ if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
+ notify->setInt32("sample-rate", 16000);
+ } else {
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
+ notify->setInt32("sample-rate", 8000);
+ }
+ break;
+ }
+
+ case OMX_AUDIO_CodingFLAC:
+ {
+ if (!strncmp(mComponentName.c_str(), "OMX.ffmpeg.", 11)) {
+ err = FFMPEGSoftCodec::getAudioPortFormat(portIndex,
+ (int)audioDef->eEncoding, notify, mOMX, mNode);
+ if (err != OK) {
+ return err;
+ }
+ } else {
+ OMX_AUDIO_PARAM_FLACTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ }
+ break;
+ }
+
+ case OMX_AUDIO_CodingMP3:
+ {
+ OMX_AUDIO_PARAM_MP3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingVORBIS:
+ {
+ OMX_AUDIO_PARAM_VORBISTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAndroidAC3:
+ {
+ OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAndroidEAC3:
+ {
+ OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAndroidOPUS:
+ {
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
+ &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingG711:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ const char *mime = NULL;
+ if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
+ mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
+ } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
+ mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
+ } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
+ mime = MEDIA_MIMETYPE_AUDIO_RAW;
+ }
+ notify->setString("mime", mime);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSamplingRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingGSMFR:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSamplingRate);
+ break;
+ }
+#ifdef DTS_CODEC_M_
+ case OMX_AUDIO_CodingDTSHD:
+ {
+ OMX_AUDIO_PARAM_DTSDECTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ((status_t)OK, mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioDTSDec,
+ &params,
+ sizeof(params)));
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_DTS);
+ break;
+ }
+#endif
+
+ default:
+ if (!strncmp(mComponentName.c_str(), "OMX.ffmpeg.", 11)) {
+ err = FFMPEGSoftCodec::getAudioPortFormat(portIndex,
+ (int)audioDef->eEncoding, notify, mOMX, mNode);
+ }
+ if (err == OK) {
+ break;
+ }
+
+ ALOGE("Unsupported audio coding: %s(%d)\n",
+ asString(audioDef->eEncoding), audioDef->eEncoding);
+ return BAD_TYPE;
+ }
+ break;
+ }
+
+ default:
+ ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
+ return BAD_TYPE;
+ }
+
+ return OK;
+}
+
+void ACodec::sendFormatChange(const sp<AMessage> &reply) {
+ sp<AMessage> notify = mBaseOutputFormat->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+
+ if (getPortFormat(kPortIndexOutput, notify) != OK) {
+ ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str());
+ return;
+ }
+
+ AString mime;
+ CHECK(notify->findString("mime", &mime));
+
+ int32_t left, top, right, bottom;
+ if (mime == MEDIA_MIMETYPE_VIDEO_RAW &&
+ mNativeWindow != NULL &&
+ notify->findRect("crop", &left, &top, &right, &bottom)) {
+ // notify renderer of the crop change
+ // NOTE: native window uses extended right-bottom coordinate
+ reply->setRect("crop", left, top, right + 1, bottom + 1);
+ } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW &&
+ (mEncoderDelay || mEncoderPadding)) {
+ int32_t channelCount;
+ CHECK(notify->findInt32("channel-count", &channelCount));
+ if (mSkipCutBuffer != NULL) {
+ size_t prevbufsize = mSkipCutBuffer->size();
+ if (prevbufsize != 0) {
+ ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
+ }
+ }
+ mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
+ }
+
+ getVQZIPInfo(notify);
+
+ notify->post();
+
+ mSentFormat = true;
+}
+
+void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatError);
+ ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);
+
+ if (internalError == UNKNOWN_ERROR) { // find better error code
+ const status_t omxStatus = statusFromOMXError(error);
+ if (omxStatus != 0) {
+ internalError = omxStatus;
+ } else {
+ ALOGW("Invalid OMX error %#x", error);
+ }
+ }
+
+ mFatalError = true;
+
+ notify->setInt32("err", internalError);
+ notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
+ notify->post();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::PortDescription::PortDescription() {
+}
+
+status_t ACodec::requestIDRFrame() {
+ if (!mIsEncoder) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ OMX_CONFIG_INTRAREFRESHVOPTYPE params;
+ InitOMXParams(&params);
+
+ params.nPortIndex = kPortIndexOutput;
+ params.IntraRefreshVOP = OMX_TRUE;
+
+ return mOMX->setConfig(
+ mNode,
+ OMX_IndexConfigVideoIntraVOPRefresh,
+ &params,
+ sizeof(params));
+}
+
+void ACodec::PortDescription::addBuffer(
+ IOMX::buffer_id id, const sp<ABuffer> &buffer) {
+ mBufferIDs.push_back(id);
+ mBuffers.push_back(buffer);
+}
+
+size_t ACodec::PortDescription::countBuffers() {
+ return mBufferIDs.size();
+}
+
+IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
+ return mBufferIDs.itemAt(index);
+}
+
+sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
+ return mBuffers.itemAt(index);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
+ : AState(parentState),
+ mCodec(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
+ OMX_U32 /* portIndex */) {
+ return KEEP_BUFFERS;
+}
+
+bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatInputBufferFilled:
+ {
+ onInputBufferFilled(msg);
+ break;
+ }
+
+ case kWhatOutputBufferDrained:
+ {
+ onOutputBufferDrained(msg);
+ break;
+ }
+
+ case ACodec::kWhatOMXMessageList:
+ {
+ return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
+ }
+
+ case ACodec::kWhatOMXMessageItem:
+ {
+ // no need to check as we already did it for kWhatOMXMessageList
+ return onOMXMessage(msg);
+ }
+
+ case ACodec::kWhatOMXMessage:
+ {
+ return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
+ }
+
+ case ACodec::kWhatSetSurface:
+ {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<RefBase> obj;
+ CHECK(msg->findObject("surface", &obj));
+
+ status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case ACodec::kWhatCreateInputSurface:
+ case ACodec::kWhatSetInputSurface:
+ case ACodec::kWhatSignalEndOfInputStream:
+ {
+ // This may result in an app illegal state exception.
+ ALOGE("Message 0x%x was not handled", msg->what());
+ mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
+ return true;
+ }
+
+ case ACodec::kWhatOMXDied:
+ {
+ // This will result in kFlagSawMediaServerDie handling in MediaCodec.
+ ALOGE("OMX/mediaserver died, signalling error!");
+ mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
+ break;
+ }
+
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ ALOGI("[%s] forcing the release of codec",
+ mCodec->mComponentName.c_str());
+ status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
+ mCodec->changeState(mCodec->mUninitializedState);
+        ALOGE_IF(err != OK, "[%s] failed to release codec instance: err=%d",
+                 mCodec->mComponentName.c_str(), err);
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+ break;
+ }
+
+ default:
+ return false;
+ }
+
+ return true;
+}
+
+bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) {
+ // there is a possibility that this is an outstanding message for a
+ // codec that we have already destroyed
+ if (mCodec->mNode == 0) {
+ ALOGI("ignoring message as already freed component: %s",
+ msg->debugString().c_str());
+ return false;
+ }
+
+ IOMX::node_id nodeID;
+ CHECK(msg->findInt32("node", (int32_t*)&nodeID));
+ if (nodeID != mCodec->mNode) {
+ ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode);
+ return false;
+ }
+ return true;
+}
+
+bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("messages", &obj));
+ sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
+
+ bool receivedRenderedEvents = false;
+ for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
+ it != msgList->getList().cend(); ++it) {
+ (*it)->setWhat(ACodec::kWhatOMXMessageItem);
+ mCodec->handleMessage(*it);
+ int32_t type;
+ CHECK((*it)->findInt32("type", &type));
+ if (type == omx_message::FRAME_RENDERED) {
+ receivedRenderedEvents = true;
+ }
+ }
+
+ if (receivedRenderedEvents) {
+ // NOTE: all buffers are rendered in this case
+ mCodec->notifyOfRenderedFrames();
+ }
+ return true;
+}
+
+bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
+ int32_t type;
+ CHECK(msg->findInt32("type", &type));
+
+ switch (type) {
+ case omx_message::EVENT:
+ {
+ int32_t event, data1, data2;
+ CHECK(msg->findInt32("event", &event));
+ CHECK(msg->findInt32("data1", &data1));
+ CHECK(msg->findInt32("data2", &data2));
+
+ if (event == OMX_EventCmdComplete
+ && data1 == OMX_CommandFlush
+ && data2 == (int32_t)OMX_ALL) {
+ // Use of this notification is not consistent across
+ // implementations. We'll drop this notification and rely
+ // on flush-complete notifications on the individual port
+ // indices instead.
+
+ return true;
+ }
+
+ return onOMXEvent(
+ static_cast<OMX_EVENTTYPE>(event),
+ static_cast<OMX_U32>(data1),
+ static_cast<OMX_U32>(data2));
+ }
+
+ case omx_message::EMPTY_BUFFER_DONE:
+ {
+ IOMX::buffer_id bufferID;
+ int32_t fenceFd;
+
+ CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
+ CHECK(msg->findInt32("fence_fd", &fenceFd));
+
+ return onOMXEmptyBufferDone(bufferID, fenceFd);
+ }
+
+ case omx_message::FILL_BUFFER_DONE:
+ {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
+
+ int32_t rangeOffset, rangeLength, flags, fenceFd;
+ int64_t timeUs;
+
+ CHECK(msg->findInt32("range_offset", &rangeOffset));
+ CHECK(msg->findInt32("range_length", &rangeLength));
+ CHECK(msg->findInt32("flags", &flags));
+ CHECK(msg->findInt64("timestamp", &timeUs));
+ CHECK(msg->findInt32("fence_fd", &fenceFd));
+
+ return onOMXFillBufferDone(
+ bufferID,
+ (size_t)rangeOffset, (size_t)rangeLength,
+ (OMX_U32)flags,
+ timeUs,
+ fenceFd);
+ }
+
+ case omx_message::FRAME_RENDERED:
+ {
+ int64_t mediaTimeUs, systemNano;
+
+ CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
+ CHECK(msg->findInt64("system_nano", &systemNano));
+
+ return onOMXFrameRendered(
+ mediaTimeUs, systemNano);
+ }
+
+ default:
+ ALOGE("Unexpected message type: %d", type);
+ return false;
+ }
+}
+
+bool ACodec::BaseState::onOMXFrameRendered(
+ int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
+ // ignore outside of Executing and PortSettingsChanged states
+ return true;
+}
+
+bool ACodec::BaseState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ if (event != OMX_EventError) {
+ ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
+ mCodec->mComponentName.c_str(), event, data1, data2);
+
+ return false;
+ }
+
+ ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);
+
+ // verify OMX component sends back an error we expect.
+ OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
+ if (!isOMXError(omxError)) {
+ ALOGW("Invalid OMX error %#x", omxError);
+ omxError = OMX_ErrorUndefined;
+ }
+ mCodec->signalError(omxError);
+
+ return true;
+}
+
+bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
+ ALOGV("[%s] onOMXEmptyBufferDone %u",
+ mCodec->mComponentName.c_str(), bufferID);
+
+ BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_COMPONENT) {
+ ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexInput);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return false;
+ }
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ // input buffers cannot take fences, so wait for any fence now
+ (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
+ fenceFd = -1;
+
+ // still save fence for completeness
+ info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");
+
+ // We're in "store-metadata-in-buffers" mode, the underlying
+ // OMX component had access to data that's implicitly refcounted
+ // by this "MediaBuffer" object. Now that the OMX component has
+ // told us that it's done with the input buffer, we can decrement
+ // the mediaBuffer's reference count.
+ info->mData->setMediaBufferBase(NULL);
+
+ PortMode mode = getPortMode(kPortIndexInput);
+
+ switch (mode) {
+ case KEEP_BUFFERS:
+ break;
+
+ case RESUBMIT_BUFFERS:
+ postFillThisBuffer(info);
+ break;
+
+ case FREE_BUFFERS:
+ default:
+ ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
+ return false;
+ }
+
+ return true;
+}
+
+void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
+ if (mCodec->mPortEOS[kPortIndexInput]) {
+ return;
+ }
+
+ CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
+
+ info->mData->meta()->clear();
+ notify->setBuffer("buffer", info->mData);
+
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
+ reply->setInt32("buffer-id", info->mBufferID);
+
+ notify->setMessage("reply", reply);
+
+ notify->post();
+
+ info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+}
+
+void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ sp<ABuffer> buffer;
+ int32_t err = OK;
+ bool eos = false;
+ PortMode mode = getPortMode(kPortIndexInput);
+
+ if (!msg->findBuffer("buffer", &buffer)) {
+ /* these are unfilled buffers returned by client */
+ CHECK(msg->findInt32("err", &err));
+
+ if (err == OK) {
+ /* buffers with no errors are returned on MediaCodec.flush */
+ mode = KEEP_BUFFERS;
+ } else {
+ ALOGV("[%s] saw error %d instead of an input buffer",
+ mCodec->mComponentName.c_str(), err);
+ eos = true;
+ }
+
+ buffer.clear();
+ }
+
+ int32_t tmp;
+ if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
+ eos = true;
+ err = ERROR_END_OF_STREAM;
+ }
+
+ BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_UPSTREAM) {
+ ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexInput);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
+
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ switch (mode) {
+ case KEEP_BUFFERS:
+ {
+ if (eos) {
+ if (!mCodec->mPortEOS[kPortIndexInput]) {
+ mCodec->mPortEOS[kPortIndexInput] = true;
+ mCodec->mInputEOSResult = err;
+ }
+ }
+ break;
+ }
+
+ case RESUBMIT_BUFFERS:
+ {
+ if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
+ // Do not send empty input buffer w/o EOS to the component.
+ if (buffer->size() == 0 && !eos) {
+ postFillThisBuffer(info);
+ break;
+ }
+
+ int64_t timeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;
+
+ int32_t isCSD;
+ if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
+ flags |= OMX_BUFFERFLAG_CODECCONFIG;
+ }
+
+ if (eos) {
+ flags |= OMX_BUFFERFLAG_EOS;
+ }
+
+ if (buffer != info->mData) {
+ ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
+ mCodec->mComponentName.c_str(),
+ bufferID,
+ buffer.get(), info->mData.get());
+
+ if (buffer->size() > info->mData->capacity()) {
+                        ALOGE("data size (%zu) is greater than buffer capacity (%zu)",
+ buffer->size(), // this is the data received
+ info->mData->capacity()); // this is out buffer size
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
+ memcpy(info->mData->data(), buffer->data(), buffer->size());
+ }
+
+ if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
+ ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
+ mCodec->mComponentName.c_str(), bufferID);
+ } else if (flags & OMX_BUFFERFLAG_EOS) {
+ ALOGV("[%s] calling emptyBuffer %u w/ EOS",
+ mCodec->mComponentName.c_str(), bufferID);
+ } else {
+#if TRACK_BUFFER_TIMING
+ ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
+ mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
+#else
+ ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
+ mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
+#endif
+ }
+
+#if TRACK_BUFFER_TIMING
+ ACodec::BufferStats stats;
+ stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
+ stats.mFillBufferDoneTimeUs = -1ll;
+ mCodec->mBufferStats.add(timeUs, stats);
+#endif
+
+ if (mCodec->storingMetadataInDecodedBuffers()) {
+ // try to submit an output buffer for each input buffer
+ PortMode outputMode = getPortMode(kPortIndexOutput);
+
+ ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
+ mCodec->mMetadataBuffersToSubmit,
+ (outputMode == FREE_BUFFERS ? "FREE" :
+ outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
+ if (outputMode == RESUBMIT_BUFFERS) {
+ mCodec->submitOutputMetadataBuffer();
+ }
+ }
+ info->checkReadFence("onInputBufferFilled");
+ status_t err2 = mCodec->mOMX->emptyBuffer(
+ mCodec->mNode,
+ bufferID,
+ 0,
+ buffer->size(),
+ flags,
+ timeUs,
+ info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err2 != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
+ return;
+ }
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+
+ if (!eos && err == OK) {
+ getMoreInputDataIfPossible();
+ } else {
+ ALOGV("[%s] Signalled EOS (%d) on the input port",
+ mCodec->mComponentName.c_str(), err);
+
+ mCodec->mPortEOS[kPortIndexInput] = true;
+ mCodec->mInputEOSResult = err;
+ }
+ } else if (!mCodec->mPortEOS[kPortIndexInput]) {
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ ALOGV("[%s] Signalling EOS on the input port due to error %d",
+ mCodec->mComponentName.c_str(), err);
+ } else {
+ ALOGV("[%s] Signalling EOS on the input port",
+ mCodec->mComponentName.c_str());
+ }
+
+ ALOGV("[%s] calling emptyBuffer %u signalling EOS",
+ mCodec->mComponentName.c_str(), bufferID);
+
+ info->checkReadFence("onInputBufferFilled");
+ status_t err2 = mCodec->mOMX->emptyBuffer(
+ mCodec->mNode,
+ bufferID,
+ 0,
+ 0,
+ OMX_BUFFERFLAG_EOS,
+ 0,
+ info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err2 != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
+ return;
+ }
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+
+ mCodec->mPortEOS[kPortIndexInput] = true;
+ mCodec->mInputEOSResult = err;
+ }
+ break;
+ }
+
+ case FREE_BUFFERS:
+ break;
+
+ default:
+ ALOGE("invalid port mode: %d", mode);
+ break;
+ }
+}
+
+void ACodec::BaseState::getMoreInputDataIfPossible() {
+ if (mCodec->mPortEOS[kPortIndexInput]) {
+ return;
+ }
+
+ BufferInfo *eligible = NULL;
+
+ for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+
+#if 0
+ if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
+ // There's already a "read" pending.
+ return;
+ }
+#endif
+
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ eligible = info;
+ }
+ }
+
+ if (eligible == NULL) {
+ return;
+ }
+
+ postFillThisBuffer(eligible);
+}
+
+bool ACodec::BaseState::onOMXFillBufferDone(
+ IOMX::buffer_id bufferID,
+ size_t rangeOffset, size_t rangeLength,
+ OMX_U32 flags,
+ int64_t timeUs,
+ int fenceFd) {
+ ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
+ mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
+
+ ssize_t index;
+ status_t err= OK;
+
+#if TRACK_BUFFER_TIMING
+ index = mCodec->mBufferStats.indexOfKey(timeUs);
+ if (index >= 0) {
+ ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
+ stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();
+
+ ALOGI("frame PTS %lld: %lld",
+ timeUs,
+ stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);
+
+ mCodec->mBufferStats.removeItemsAt(index);
+ stats = NULL;
+ }
+#endif
+
+ BufferInfo *info =
+ mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_COMPONENT) {
+ ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexOutput);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return true;
+ }
+
+ info->mDequeuedAt = ++mCodec->mDequeueCounter;
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ if (info->mRenderInfo != NULL) {
+ // The fence for an emptied buffer must have signaled, but there still could be queued
+ // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
+ // as we will soon requeue this buffer to the surface. While in theory we could still keep
+ // track of buffers that are requeued to the surface, it is better to add support to the
+ // buffer-queue to notify us of released buffers and their fences (in the future).
+ mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
+ }
+
+ // byte buffers cannot take fences, so wait for any fence now
+ if (mCodec->mNativeWindow == NULL) {
+ (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
+ fenceFd = -1;
+ }
+ info->setReadFence(fenceFd, "onOMXFillBufferDone");
+
+ PortMode mode = getPortMode(kPortIndexOutput);
+
+ switch (mode) {
+ case KEEP_BUFFERS:
+ break;
+
+ case RESUBMIT_BUFFERS:
+ {
+ if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
+ || mCodec->mPortEOS[kPortIndexOutput])) {
+ ALOGV("[%s] calling fillBuffer %u",
+ mCodec->mComponentName.c_str(), info->mBufferID);
+
+ err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ return true;
+ }
+
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ break;
+ }
+
+ sp<AMessage> reply =
+ new AMessage(kWhatOutputBufferDrained, mCodec);
+
+ if (!mCodec->mSentFormat && rangeLength > 0) {
+ mCodec->sendFormatChange(reply);
+ }
+ if (mCodec->usingMetadataOnEncoderOutput()) {
+ native_handle_t *handle = NULL;
+ VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
+ VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
+ if (info->mData->size() >= sizeof(grallocMeta)
+ && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
+ handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle;
+ } else if (info->mData->size() >= sizeof(nativeMeta)
+ && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
+#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+ // ANativeWindowBuffer is only valid on 32-bit/mediaserver process
+ handle = NULL;
+#else
+ handle = (native_handle_t *)nativeMeta.pBuffer->handle;
+#endif
+ }
+ info->mData->meta()->setPointer("handle", handle);
+ info->mData->meta()->setInt32("rangeOffset", rangeOffset);
+ info->mData->meta()->setInt32("rangeLength", rangeLength);
+ } else {
+ info->mData->setRange(rangeOffset, rangeLength);
+ }
+#if 0
+ if (mCodec->mNativeWindow == NULL) {
+ if (IsIDR(info->mData)) {
+ ALOGI("IDR frame");
+ }
+ }
+#endif
+
+ if (mCodec->mSkipCutBuffer != NULL) {
+ mCodec->mSkipCutBuffer->submit(info->mData);
+ }
+ info->mData->meta()->setInt64("timeUs", timeUs);
+ info->mData->meta()->setObject("graphic-buffer", info->mGraphicBuffer);
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
+ notify->setBuffer("buffer", info->mData);
+ notify->setInt32("flags", flags);
+
+ reply->setInt32("buffer-id", info->mBufferID);
+
+ (void)mCodec->setDSModeHint(reply, flags, timeUs);
+
+ notify->setMessage("reply", reply);
+
+ notify->post();
+
+ info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+
+ if (flags & OMX_BUFFERFLAG_EOS) {
+ ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatEOS);
+ notify->setInt32("err", mCodec->mInputEOSResult);
+ notify->post();
+
+ mCodec->mPortEOS[kPortIndexOutput] = true;
+ }
+ break;
+ }
+
+ case FREE_BUFFERS:
+ err = mCodec->freeBuffer(kPortIndexOutput, index);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ return true;
+ }
+ break;
+
+ default:
+ ALOGE("Invalid port mode: %d", mode);
+ return false;
+ }
+
+ return true;
+}
+
+void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ ssize_t index;
+ BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
+ ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexOutput);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
+
+ android_native_rect_t crop;
+ if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
+ status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
+ ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
+ }
+
+ bool skip = mCodec->getDSModeHint(msg);
+ int32_t render;
+ if (!skip && mCodec->mNativeWindow != NULL
+ && msg->findInt32("render", &render) && render != 0
+ && info->mData != NULL && info->mData->size() != 0) {
+ ATRACE_NAME("render");
+ // The client wants this buffer to be rendered.
+
+ // save buffers sent to the surface so we can get render time when they return
+ int64_t mediaTimeUs = -1;
+ info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
+ if (mediaTimeUs >= 0) {
+ mCodec->mRenderTracker.onFrameQueued(
+ mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
+ }
+
+ int64_t timestampNs = 0;
+ if (!msg->findInt64("timestampNs", &timestampNs)) {
+ // use media timestamp if client did not request a specific render timestamp
+ if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
+ ALOGV("using buffer PTS of %lld", (long long)timestampNs);
+ timestampNs *= 1000;
+ }
+ }
+
+ status_t err;
+ err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
+ ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
+
+ info->checkReadFence("onOutputBufferDrained before queueBuffer");
+ err = mCodec->mNativeWindow->queueBuffer(
+ mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err == OK) {
+ info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+ } else {
+ ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ // keeping read fence as write fence to avoid clobbering
+ info->mIsReadFence = false;
+ }
+ } else {
+ if (mCodec->mNativeWindow != NULL &&
+ (info->mData == NULL || info->mData->size() != 0)) {
+ // move read fence into write fence to avoid clobbering
+ info->mIsReadFence = false;
+ ATRACE_NAME("frame-drop");
+ }
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ }
+
+ PortMode mode = getPortMode(kPortIndexOutput);
+
+ switch (mode) {
+ case KEEP_BUFFERS:
+ {
+ // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
+
+ if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ // We cannot resubmit the buffer we just rendered, dequeue
+ // the spare instead.
+
+ info = mCodec->dequeueBufferFromNativeWindow();
+ }
+ break;
+ }
+
+ case RESUBMIT_BUFFERS:
+ {
+ if (!mCodec->mPortEOS[kPortIndexOutput]) {
+ if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ // We cannot resubmit the buffer we just rendered, dequeue
+ // the spare instead.
+
+ info = mCodec->dequeueBufferFromNativeWindow();
+ }
+
+ if (info != NULL) {
+ ALOGV("[%s] calling fillBuffer %u",
+ mCodec->mComponentName.c_str(), info->mBufferID);
+ info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
+ status_t err = mCodec->mOMX->fillBuffer(
+ mCodec->mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err == OK) {
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ } else {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ }
+ }
+ }
+ break;
+ }
+
+ case FREE_BUFFERS:
+ {
+ status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ }
+ break;
+ }
+
+ default:
+ ALOGE("Invalid port mode: %d", mode);
+ return;
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::UninitializedState::UninitializedState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+void ACodec::UninitializedState::stateEntered() {
+ ALOGV("Now uninitialized");
+
+ if (mDeathNotifier != NULL) {
+ IInterface::asBinder(mCodec->mOMX)->unlinkToDeath(mDeathNotifier);
+ mDeathNotifier.clear();
+ }
+
+ mCodec->mNativeWindow.clear();
+ mCodec->mNativeWindowUsageBits = 0;
+ mCodec->mNode = 0;
+ mCodec->mOMX.clear();
+ mCodec->mQuirks = 0;
+ mCodec->mFlags = 0;
+ mCodec->mEncoderComponent = 0;
+ mCodec->mComponentAllocByName = 0;
+ mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
+ mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
+ mCodec->mComponentName.clear();
+}
+
+bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case ACodec::kWhatSetup:
+ {
+ onSetup(msg);
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatAllocateComponent:
+ {
+ onAllocateComponent(msg);
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatShutdown:
+ {
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+ ALOGW_IF(keepComponentAllocated,
+ "cannot keep component allocated on shutdown in Uninitialized state");
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatFlush:
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ // nothing to do, as we have already signaled shutdown
+ handled = true;
+ break;
+ }
+
+ default:
+ return BaseState::onMessageReceived(msg);
+ }
+
+ return handled;
+}
+
+void ACodec::UninitializedState::onSetup(
+ const sp<AMessage> &msg) {
+ if (onAllocateComponent(msg)
+ && mCodec->mLoadedState->onConfigureComponent(msg)) {
+ mCodec->mLoadedState->onStart();
+ }
+}
+
+bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
+ ALOGV("onAllocateComponent");
+
+ CHECK(mCodec->mNode == 0);
+
+ OMXClient client;
+ if (client.connect() != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
+ return false;
+ }
+
+ sp<IOMX> omx = client.interface();
+
+ sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);
+
+ mDeathNotifier = new DeathNotifier(notify);
+ if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) {
+ // This was a local binder, if it dies so do we, we won't care
+ // about any notifications in the afterlife.
+ mDeathNotifier.clear();
+ }
+
+ Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
+
+ AString mime;
+
+ AString componentName;
+ uint32_t quirks = 0;
+ int32_t encoder = false;
+ if (msg->findString("componentName", &componentName)) {
+ ssize_t index = matchingCodecs.add();
+ OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index);
+ entry->mName = String8(componentName.c_str());
+ mCodec->mComponentAllocByName = true;
+
+ if (!OMXCodec::findCodecQuirks(
+ componentName.c_str(), &entry->mQuirks)) {
+ entry->mQuirks = 0;
+ }
+ } else {
+ CHECK(msg->findString("mime", &mime));
+
+ if (!msg->findInt32("encoder", &encoder)) {
+ encoder = false;
+ }
+
+ if (encoder == true) {
+ mCodec->mEncoderComponent = true;
+ }
+
+ OMXCodec::findMatchingCodecs(
+ mime.c_str(),
+ encoder, // createEncoder
+ NULL, // matchComponentName
+ 0, // flags
+ &matchingCodecs);
+ }
+
+ sp<CodecObserver> observer = new CodecObserver;
+ IOMX::node_id node = 0;
+
+ status_t err = NAME_NOT_FOUND;
+ for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
+ ++matchIndex) {
+ componentName = matchingCodecs.itemAt(matchIndex).mName.string();
+ quirks = matchingCodecs.itemAt(matchIndex).mQuirks;
+
+ pid_t tid = gettid();
+ int prevPriority = androidGetThreadPriority(tid);
+ androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
+ err = omx->allocateNode(componentName.c_str(), observer, &node);
+ androidSetThreadPriority(tid, prevPriority);
+
+ if (err == OK) {
+ break;
+ } else {
+ ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
+ }
+
+ node = 0;
+ }
+
+ if (node == 0) {
+ if (!mime.empty()) {
+ ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
+ encoder ? "en" : "de", mime.c_str(), err);
+ } else {
+ ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
+ }
+
+ mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
+ return false;
+ }
+
+ notify = new AMessage(kWhatOMXMessageList, mCodec);
+ observer->setNotificationMessage(notify);
+
+ mCodec->mComponentName = componentName;
+ mCodec->mRenderTracker.setComponentName(componentName);
+ mCodec->mFlags = 0;
+
+ if (componentName.endsWith(".secure")) {
+ mCodec->mFlags |= kFlagIsSecure;
+ mCodec->mFlags |= kFlagIsGrallocUsageProtected;
+ mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ }
+
+ mCodec->mQuirks = quirks;
+ mCodec->mOMX = omx;
+ mCodec->mNode = node;
+
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatComponentAllocated);
+ notify->setString("componentName", mCodec->mComponentName.c_str());
+ notify->post();
+ }
+
+ mCodec->changeState(mCodec->mLoadedState);
+
+ return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::LoadedState::LoadedState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+void ACodec::LoadedState::stateEntered() {
+ ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
+
+ mCodec->mPortEOS[kPortIndexInput] =
+ mCodec->mPortEOS[kPortIndexOutput] = false;
+
+ mCodec->mInputEOSResult = OK;
+
+ mCodec->mDequeueCounter = 0;
+ mCodec->mMetadataBuffersToSubmit = 0;
+ mCodec->mRepeatFrameDelayUs = -1ll;
+ mCodec->mInputFormat.clear();
+ mCodec->mOutputFormat.clear();
+ mCodec->mBaseOutputFormat.clear();
+
+ if (mCodec->mShutdownInProgress) {
+ bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
+
+ mCodec->mShutdownInProgress = false;
+ mCodec->mKeepComponentAllocated = false;
+
+ onShutdown(keepComponentAllocated);
+ }
+ mCodec->mExplicitShutdown = false;
+
+ mCodec->processDeferredMessages();
+}
+
+void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
+ if (!keepComponentAllocated) {
+ (void)mCodec->mOMX->freeNode(mCodec->mNode);
+
+ mCodec->changeState(mCodec->mUninitializedState);
+ }
+
+ if (mCodec->mExplicitShutdown) {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+ mCodec->mExplicitShutdown = false;
+ }
+}
+
+bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case ACodec::kWhatConfigureComponent:
+ {
+ onConfigureComponent(msg);
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatCreateInputSurface:
+ {
+ onCreateInputSurface(msg);
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatSetInputSurface:
+ {
+ onSetInputSurface(msg);
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatStart:
+ {
+ onStart();
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatShutdown:
+ {
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+
+ mCodec->mExplicitShutdown = true;
+ onShutdown(keepComponentAllocated);
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatFlush:
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+
+ handled = true;
+ break;
+ }
+#ifdef DOLBY_ENABLE
+ case ACodec::kWhatSetParameters:
+ {
+ mCodec->setDolbyParameter(msg);
+
+ handled = true;
+ break;
+ }
+#endif // DOLBY_END
+
+ default:
+ return BaseState::onMessageReceived(msg);
+ }
+
+ return handled;
+}
+
+bool ACodec::LoadedState::onConfigureComponent(
+ const sp<AMessage> &msg) {
+ ALOGV("onConfigureComponent");
+
+ CHECK(mCodec->mNode != 0);
+
+ status_t err = OK;
+ AString mime;
+
+ if (!msg->findString("mime", &mime)) {
+ err = BAD_VALUE;
+ } else {
+ err = mCodec->configureCodec(mime.c_str(), msg);
+ }
+ if (err != OK) {
+ ALOGE("[%s] configureCodec returning error %d",
+ mCodec->mComponentName.c_str(), err);
+
+ if (!mCodec->mEncoderComponent && !mCodec->mComponentAllocByName &&
+ !strncmp(mime.c_str(), "video/", strlen("video/"))) {
+ Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
+
+ OMXCodec::findMatchingCodecs(
+ mime.c_str(),
+ false, // createEncoder
+ NULL, // matchComponentName
+ 0, // flags
+ &matchingCodecs);
+
+ err = mCodec->mOMX->freeNode(mCodec->mNode);
+
+ if (err != OK) {
+ ALOGE("Failed to freeNode");
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ return false;
+ }
+
+ mCodec->mNode = 0;
+ AString componentName;
+ sp<CodecObserver> observer = new CodecObserver;
+
+ err = NAME_NOT_FOUND;
+ for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
+ ++matchIndex) {
+ componentName = matchingCodecs.itemAt(matchIndex).mName.string();
+ if (!strcmp(mCodec->mComponentName.c_str(), componentName.c_str())) {
+ continue;
+ }
+
+ pid_t tid = gettid();
+ int prevPriority = androidGetThreadPriority(tid);
+ androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
+ err = mCodec->mOMX->allocateNode(componentName.c_str(), observer, &mCodec->mNode);
+ androidSetThreadPriority(tid, prevPriority);
+
+ if (err == OK) {
+ break;
+ } else {
+ ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
+ }
+
+ mCodec->mNode = 0;
+ }
+
+ if (mCodec->mNode == 0) {
+ if (!mime.empty()) {
+ ALOGE("Unable to instantiate a decoder for type '%s'", mime.c_str());
+ } else {
+ ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
+ }
+
+ mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
+ return false;
+ }
+
+ sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
+ observer->setNotificationMessage(notify);
+ mCodec->mComponentName = componentName;
+
+ err = mCodec->configureCodec(mime.c_str(), msg);
+ }
+
+ if (err != OK) {
+ mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
+ return false;
+ }
+ }
+
+ {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatComponentConfigured);
+ notify->setString("componentName", mCodec->mComponentName.c_str());
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
+ notify->post();
+ }
+
+ return true;
+}
+
+status_t ACodec::LoadedState::setupInputSurface() {
+ status_t err = OK;
+
+ if (mCodec->mRepeatFrameDelayUs > 0ll) {
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
+ &mCodec->mRepeatFrameDelayUs,
+ sizeof(mCodec->mRepeatFrameDelayUs));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure option to repeat previous "
+ "frames (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ return err;
+ }
+ }
+
+ if (mCodec->mMaxPtsGapUs > 0ll) {
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
+ &mCodec->mMaxPtsGapUs,
+ sizeof(mCodec->mMaxPtsGapUs));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ return err;
+ }
+ }
+
+ if (mCodec->mMaxFps > 0) {
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_MAX_FPS,
+ &mCodec->mMaxFps,
+ sizeof(mCodec->mMaxFps));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure max fps (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ return err;
+ }
+ }
+
+ if (mCodec->mTimePerCaptureUs > 0ll
+ && mCodec->mTimePerFrameUs > 0ll) {
+ int64_t timeLapse[2];
+ timeLapse[0] = mCodec->mTimePerFrameUs;
+ timeLapse[1] = mCodec->mTimePerCaptureUs;
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_TIME_LAPSE,
+ &timeLapse[0],
+ sizeof(timeLapse));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure time lapse (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ return err;
+ }
+ }
+
+ if (mCodec->mCreateInputBuffersSuspended) {
+ bool suspend = true;
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_SUSPEND,
+ &suspend,
+ sizeof(suspend));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure option to suspend (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ return err;
+ }
+ }
+
+ uint32_t usageBits;
+ if (mCodec->mOMX->getParameter(
+ mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ &usageBits, sizeof(usageBits)) == OK) {
+ mCodec->mInputFormat->setInt32(
+ "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
+ }
+
+ return OK;
+}
+
+void ACodec::LoadedState::onCreateInputSurface(
+ const sp<AMessage> & /* msg */) {
+ ALOGV("onCreateInputSurface");
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ status_t err = mCodec->mOMX->createInputSurface(
+ mCodec->mNode, kPortIndexInput, &bufferProducer, &mCodec->mInputMetadataType);
+
+ if (err == OK) {
+ err = setupInputSurface();
+ }
+
+ if (err == OK) {
+ notify->setObject("input-surface",
+ new BufferProducerWrapper(bufferProducer));
+ } else {
+ // Can't use mCodec->signalError() here -- MediaCodec won't forward
+ // the error through because it's in the "configured" state. We
+ // send a kWhatInputSurfaceCreated with an error value instead.
+ ALOGE("[%s] onCreateInputSurface returning error %d",
+ mCodec->mComponentName.c_str(), err);
+ notify->setInt32("err", err);
+ }
+ notify->post();
+}
+
+void ACodec::LoadedState::onSetInputSurface(
+ const sp<AMessage> &msg) {
+ ALOGV("onSetInputSurface");
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);
+
+ sp<RefBase> obj;
+ CHECK(msg->findObject("input-surface", &obj));
+ sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
+
+ status_t err = mCodec->mOMX->setInputSurface(
+ mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
+ &mCodec->mInputMetadataType);
+
+ if (err == OK) {
+ err = setupInputSurface();
+ }
+
+ if (err != OK) {
+ // Can't use mCodec->signalError() here -- MediaCodec won't forward
+ // the error through because it's in the "configured" state. We
+ // send a kWhatInputSurfaceAccepted with an error value instead.
+ ALOGE("[%s] onSetInputSurface returning error %d",
+ mCodec->mComponentName.c_str(), err);
+ notify->setInt32("err", err);
+ }
+ notify->post();
+}
+
+void ACodec::LoadedState::onStart() {
+ ALOGV("onStart");
+
+ status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ } else {
+#ifdef DOLBY_ENABLE
+ mCodec->setDolbyParameterOnEndpChange();
+#endif // DOLBY_END
+ mCodec->changeState(mCodec->mLoadedToIdleState);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+void ACodec::LoadedToIdleState::stateEntered() {
+ ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());
+
+ status_t err;
+ if ((err = allocateBuffers()) != OK) {
+ ALOGE("Failed to allocate buffers after transitioning to IDLE state "
+ "(error 0x%08x)",
+ err);
+
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+
+ mCodec->changeState(mCodec->mLoadedState);
+ }
+}
+
+status_t ACodec::LoadedToIdleState::allocateBuffers() {
+ status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return mCodec->allocateBuffersOnPort(kPortIndexOutput);
+}
+
+bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatSetParameters:
+ case kWhatShutdown:
+ {
+ mCodec->deferMessage(msg);
+ return true;
+ }
+
+ case kWhatSignalEndOfInputStream:
+ {
+ mCodec->onSignalEndOfInputStream();
+ return true;
+ }
+
+ case kWhatResume:
+ {
+ // We'll be active soon enough.
+ return true;
+ }
+
+ case kWhatFlush:
+ {
+ // We haven't even started yet, so we're flushed alright...
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+ return true;
+ }
+
+ default:
+ return BaseState::onMessageReceived(msg);
+ }
+}
+
+bool ACodec::LoadedToIdleState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ status_t err = OK;
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateIdle) {
+ ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ err = FAILED_TRANSACTION;
+ }
+
+ if (err == OK) {
+ err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
+ }
+
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ } else {
+ mCodec->changeState(mCodec->mIdleToExecutingState);
+ }
+
+ return true;
+ }
+
+ default:
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+void ACodec::IdleToExecutingState::stateEntered() {
+ ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatSetParameters:
+ case kWhatShutdown:
+ {
+ mCodec->deferMessage(msg);
+ return true;
+ }
+
+ case kWhatResume:
+ {
+ // We'll be active soon enough.
+ return true;
+ }
+
+ case kWhatFlush:
+ {
+ // We haven't even started yet, so we're flushed alright...
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+
+ return true;
+ }
+
+ case kWhatSignalEndOfInputStream:
+ {
+ mCodec->onSignalEndOfInputStream();
+ return true;
+ }
+
+ default:
+ return BaseState::onMessageReceived(msg);
+ }
+}
+
+bool ACodec::IdleToExecutingState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateExecuting) {
+ ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
+
+ mCodec->mExecutingState->resume();
+ mCodec->changeState(mCodec->mExecutingState);
+
+ return true;
+ }
+
+ default:
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ExecutingState::ExecutingState(ACodec *codec)
+ : BaseState(codec),
+ mActive(false) {
+}
+
+ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
+ OMX_U32 /* portIndex */) {
+ return RESUBMIT_BUFFERS;
+}
+
+void ACodec::ExecutingState::submitOutputMetaBuffers() {
+ // submit as many buffers as there are input buffers with the codec
+ // in case we are in port reconfiguring
+ for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+
+ if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
+ if (mCodec->submitOutputMetadataBuffer() != OK)
+ break;
+ }
+ }
+
+ // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+ mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
+}
+
+void ACodec::ExecutingState::submitRegularOutputBuffers() {
+ bool failed = false;
+ for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
+
+ if (mCodec->mNativeWindow != NULL) {
+ if (info->mStatus != BufferInfo::OWNED_BY_US
+ && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGE("buffers should be owned by us or the surface");
+ failed = true;
+ break;
+ }
+
+ if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ continue;
+ }
+ } else {
+ if (info->mStatus != BufferInfo::OWNED_BY_US) {
+ ALOGE("buffers should be owned by us");
+ failed = true;
+ break;
+ }
+ }
+
+ ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);
+
+ info->checkWriteFence("submitRegularOutputBuffers");
+ status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err != OK) {
+ failed = true;
+ break;
+ }
+
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ }
+
+ if (failed) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ }
+}
+
+void ACodec::ExecutingState::submitOutputBuffers() {
+ submitRegularOutputBuffers();
+ if (mCodec->storingMetadataInDecodedBuffers()) {
+ submitOutputMetaBuffers();
+ }
+}
+
+void ACodec::ExecutingState::resume() {
+ if (mActive) {
+ ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
+ return;
+ }
+
+ submitOutputBuffers();
+
+ // Post all available input buffers
+ if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
+ ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
+ }
+
+ for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ postFillThisBuffer(info);
+ }
+ }
+
+ mActive = true;
+}
+
+void ACodec::ExecutingState::stateEntered() {
+ ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
+
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+ mCodec->processDeferredMessages();
+}
+
+bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case kWhatShutdown:
+ {
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+
+ mCodec->mShutdownInProgress = true;
+ mCodec->mExplicitShutdown = true;
+ mCodec->mKeepComponentAllocated = keepComponentAllocated;
+
+ mActive = false;
+
+ status_t err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ if (err != OK) {
+ if (keepComponentAllocated) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ }
+ // TODO: do some recovery here.
+ } else {
+ mCodec->changeState(mCodec->mExecutingToIdleState);
+ }
+
+ handled = true;
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ ALOGV("[%s] ExecutingState flushing now "
+ "(codec owns %zu/%zu input, %zu/%zu output).",
+ mCodec->mComponentName.c_str(),
+ mCodec->countBuffersOwnedByComponent(kPortIndexInput),
+ mCodec->mBuffers[kPortIndexInput].size(),
+ mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
+ mCodec->mBuffers[kPortIndexOutput].size());
+
+ mActive = false;
+
+ status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ } else {
+ mCodec->changeState(mCodec->mFlushingState);
+ }
+
+ handled = true;
+ break;
+ }
+
+ case kWhatResume:
+ {
+ resume();
+
+ handled = true;
+ break;
+ }
+
+ case kWhatRequestIDRFrame:
+ {
+ status_t err = mCodec->requestIDRFrame();
+ if (err != OK) {
+ ALOGW("Requesting an IDR frame failed.");
+ }
+
+ handled = true;
+ break;
+ }
+
+ case kWhatSetParameters:
+ {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ status_t err = mCodec->setParameters(params);
+
+ sp<AMessage> reply;
+ if (msg->findMessage("reply", &reply)) {
+ reply->setInt32("err", err);
+ reply->post();
+ }
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatSignalEndOfInputStream:
+ {
+ mCodec->onSignalEndOfInputStream();
+ handled = true;
+ break;
+ }
+
+ // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+ case kWhatSubmitOutputMetadataBufferIfEOS:
+ {
+ if (mCodec->mPortEOS[kPortIndexInput] &&
+ !mCodec->mPortEOS[kPortIndexOutput]) {
+ status_t err = mCodec->submitOutputMetadataBuffer();
+ if (err == OK) {
+ mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
+ }
+ }
+ return true;
+ }
+
+ default:
+ handled = BaseState::onMessageReceived(msg);
+ break;
+ }
+
+ return handled;
+}
+
+status_t ACodec::setParameters(const sp<AMessage> &params) {
+ int32_t videoBitrate;
+ if (params->findInt32("video-bitrate", &videoBitrate)) {
+ OMX_VIDEO_CONFIG_BITRATETYPE configParams;
+ InitOMXParams(&configParams);
+ configParams.nPortIndex = kPortIndexOutput;
+ configParams.nEncodeBitrate = videoBitrate;
+
+ status_t err = mOMX->setConfig(
+ mNode,
+ OMX_IndexConfigVideoBitrate,
+ &configParams,
+ sizeof(configParams));
+
+ if (err != OK) {
+ ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
+ videoBitrate, err);
+
+ return err;
+ }
+ }
+
+ int64_t skipFramesBeforeUs;
+ if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
+ status_t err =
+ mOMX->setInternalOption(
+ mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_START_TIME,
+ &skipFramesBeforeUs,
+ sizeof(skipFramesBeforeUs));
+
+ if (err != OK) {
+ ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
+ return err;
+ }
+ }
+
+ int32_t dropInputFrames;
+ if (params->findInt32("drop-input-frames", &dropInputFrames)) {
+ bool suspend = dropInputFrames != 0;
+
+ status_t err =
+ mOMX->setInternalOption(
+ mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_SUSPEND,
+ &suspend,
+ sizeof(suspend));
+
+ if (err != OK) {
+ ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
+ return err;
+ }
+ }
+
+ int32_t dummy;
+ if (params->findInt32("request-sync", &dummy)) {
+ status_t err = requestIDRFrame();
+
+ if (err != OK) {
+ ALOGE("Requesting a sync frame failed w/ err %d", err);
+ return err;
+ }
+ }
+
+ float rate;
+ if (params->findFloat("operating-rate", &rate) && rate > 0) {
+ status_t err = setOperatingRate(rate, mIsVideo);
+ if (err != OK) {
+ ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
+ return err;
+ }
+ }
+#ifdef DOLBY_ENABLE
+ return setDolbyParameterOnProcessedAudio(params);
+#endif // DOLBY_END
+
+ return OK;
+}
+
+void ACodec::onSignalEndOfInputStream() {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);
+
+ status_t err = mOMX->signalEndOfInputStream(mNode);
+ if (err != OK) {
+ notify->setInt32("err", err);
+ }
+ notify->post();
+}
+
+sp<IOMXObserver> ACodec::createObserver() {
+ sp<CodecObserver> observer = new CodecObserver;
+ sp<AMessage> notify = new AMessage(kWhatOMXMessageList, this);
+ observer->setNotificationMessage(notify);
+ return observer;
+}
+
+status_t ACodec::allocateBuffer(
+        OMX_U32 portIndex, size_t bufSize, BufferInfo &info) {
+    void *ptr = NULL;  // stays NULL if the component rejects the allocation
+    status_t err = mOMX->allocateBuffer(
+            mNode, portIndex, bufSize, &info.mBufferID, &ptr);
+    // Only wrap the backing store on success: on error 'ptr' is not a valid
+    // buffer and must not be handed to ABuffer. Callers check 'err' first.
+    if (err == OK) info.mData = new ABuffer(ptr, bufSize);
+    return err;
+}
+
+bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+ mCodec->onFrameRendered(mediaTimeUs, systemNano);
+ return true;
+}
+
+bool ACodec::ExecutingState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventPortSettingsChanged:
+ {
+ CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+ if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+ mCodec->mMetadataBuffersToSubmit = 0;
+ CHECK_EQ(mCodec->mOMX->sendCommand(
+ mCodec->mNode,
+ OMX_CommandPortDisable, kPortIndexOutput),
+ (status_t)OK);
+
+ mCodec->freeOutputBuffersNotOwnedByComponent();
+
+ mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
+ } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
+ mCodec->mSentFormat = false;
+
+ if (mCodec->mTunneled) {
+ sp<AMessage> dummy = new AMessage(kWhatOutputBufferDrained, mCodec);
+ mCodec->sendFormatChange(dummy);
+ }
+ } else {
+ ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
+ mCodec->mComponentName.c_str(), data2);
+ }
+
+ return true;
+ }
+
+ case OMX_EventBufferFlag:
+ {
+ return true;
+ }
+
+ default:
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
+ ACodec *codec)
+ : BaseState(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
+ OMX_U32 portIndex) {
+ if (portIndex == kPortIndexOutput) {
+ return FREE_BUFFERS;
+ }
+
+ CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);
+
+ return RESUBMIT_BUFFERS;
+}
+
+bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
+ const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case kWhatShutdown:
+ {
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32(
+ "keepComponentAllocated", &keepComponentAllocated));
+
+ mCodec->mShutdownInProgress = true;
+ mCodec->mExplicitShutdown = true;
+ mCodec->mKeepComponentAllocated = keepComponentAllocated;
+
+ status_t err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ if (err != OK) {
+ if (keepComponentAllocated) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ }
+ // TODO: do some recovery here.
+ } else {
+ // This is technically not correct, but appears to be
+ // the only way to free the component instance using
+                // ExecutingToIdleState.
+ mCodec->changeState(mCodec->mExecutingToIdleState);
+ }
+
+ handled = true;
+ break;
+ }
+ case kWhatFlush:
+ case kWhatResume:
+ case kWhatSetParameters:
+ {
+ if (msg->what() == kWhatResume) {
+ ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
+ }
+
+ mCodec->deferMessage(msg);
+ handled = true;
+ break;
+ }
+
+ default:
+ handled = BaseState::onMessageReceived(msg);
+ break;
+ }
+
+ return handled;
+}
+
+void ACodec::OutputPortSettingsChangedState::stateEntered() {
+ ALOGV("[%s] Now handling output port settings change",
+ mCodec->mComponentName.c_str());
+}
+
+bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
+ int64_t mediaTimeUs, nsecs_t systemNano) {
+ mCodec->onFrameRendered(mediaTimeUs, systemNano);
+ return true;
+}
+
+bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ if (data1 == (OMX_U32)OMX_CommandPortDisable) {
+ if (data2 != (OMX_U32)kPortIndexOutput) {
+ ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
+ return false;
+ }
+
+ ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());
+
+ status_t err = OK;
+ if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
+ ALOGE("disabled port should be empty, but has %zu buffers",
+ mCodec->mBuffers[kPortIndexOutput].size());
+ err = FAILED_TRANSACTION;
+ } else {
+ mCodec->mDealer[kPortIndexOutput].clear();
+ }
+
+ if (err == OK) {
+ err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
+ }
+
+ /* Clear the RenderQueue in which queued GraphicBuffers hold the
+ * actual buffer references in order to free them early.
+ */
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
+ if (err == OK) {
+ err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
+ ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
+ "reconfiguration: (%d)", err);
+ }
+
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ }
+
+ return true;
+ } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
+ if (data2 != (OMX_U32)kPortIndexOutput) {
+ ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
+ return false;
+ }
+
+ mCodec->mSentFormat = false;
+
+ if (mCodec->mTunneled) {
+ sp<AMessage> dummy = new AMessage(kWhatOutputBufferDrained, mCodec);
+ mCodec->sendFormatChange(dummy);
+ }
+
+ ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());
+
+ if (mCodec->mExecutingState->active()) {
+ mCodec->mExecutingState->submitOutputBuffers();
+ }
+
+ mCodec->changeState(mCodec->mExecutingState);
+
+ return true;
+ }
+
+ return false;
+ }
+
+ default:
+ return false;
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
+ : BaseState(codec),
+ mComponentNowIdle(false) {
+}
+
+bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case kWhatFlush:
+ {
+ // Don't send me a flush request if you previously wanted me
+ // to shutdown.
+ ALOGW("Ignoring flush request in ExecutingToIdleState");
+ break;
+ }
+
+ case kWhatShutdown:
+ {
+ // We're already doing that...
+
+ handled = true;
+ break;
+ }
+
+ default:
+ handled = BaseState::onMessageReceived(msg);
+ break;
+ }
+
+ return handled;
+}
+
+void ACodec::ExecutingToIdleState::stateEntered() {
+ ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
+
+ mComponentNowIdle = false;
+ mCodec->mSentFormat = false;
+}
+
+bool ACodec::ExecutingToIdleState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateIdle) {
+ ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
+
+ mComponentNowIdle = true;
+
+ changeStateIfWeOwnAllBuffers();
+
+ return true;
+ }
+
+ case OMX_EventPortSettingsChanged:
+ case OMX_EventBufferFlag:
+ {
+ // We're shutting down and don't care about this anymore.
+ return true;
+ }
+
+ default:
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+}
+
+void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
+ if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
+ status_t err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
+ if (err == OK) {
+ err = mCodec->freeBuffersOnPort(kPortIndexInput);
+ status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
+ if (err == OK) {
+ err = err2;
+ }
+ }
+
+ if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
+ && mCodec->mNativeWindow != NULL) {
+ // We push enough 1x1 blank buffers to ensure that one of
+ // them has made it to the display. This allows the OMX
+ // component teardown to zero out any protected buffers
+ // without the risk of scanning out one of those buffers.
+ pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
+ }
+
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
+
+ mCodec->changeState(mCodec->mIdleToLoadedState);
+ }
+}
+
+void ACodec::ExecutingToIdleState::onInputBufferFilled(
+ const sp<AMessage> &msg) {
+ BaseState::onInputBufferFilled(msg);
+
+ changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::ExecutingToIdleState::onOutputBufferDrained(
+ const sp<AMessage> &msg) {
+ BaseState::onOutputBufferDrained(msg);
+
+ changeStateIfWeOwnAllBuffers();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case kWhatShutdown:
+ {
+ // We're already doing that...
+
+ handled = true;
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ // Don't send me a flush request if you previously wanted me
+ // to shutdown.
+ ALOGE("Got flush request in IdleToLoadedState");
+ break;
+ }
+
+ default:
+ handled = BaseState::onMessageReceived(msg);
+ break;
+ }
+
+ return handled;
+}
+
+void ACodec::IdleToLoadedState::stateEntered() {
+ ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::IdleToLoadedState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateLoaded) {
+ ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
+
+ mCodec->changeState(mCodec->mLoadedState);
+
+ return true;
+ }
+
+ default:
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::FlushingState::FlushingState(ACodec *codec)
+ : BaseState(codec) {
+}
+
+void ACodec::FlushingState::stateEntered() {
+ ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());
+
+ mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
+}
+
+bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
+ bool handled = false;
+
+ switch (msg->what()) {
+ case kWhatShutdown:
+ {
+ mCodec->deferMessage(msg);
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ // We're already doing this right now.
+ handled = true;
+ break;
+ }
+
+ default:
+ handled = BaseState::onMessageReceived(msg);
+ break;
+ }
+
+ return handled;
+}
+
+bool ACodec::FlushingState::onOMXEvent(
+ OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ ALOGV("[%s] FlushingState onOMXEvent(%u,%d)",
+ mCodec->mComponentName.c_str(), event, (OMX_S32)data1);
+
+ switch (event) {
+ case OMX_EventCmdComplete:
+ {
+ if (data1 != (OMX_U32)OMX_CommandFlush) {
+ ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState",
+ asString((OMX_COMMANDTYPE)data1), data1, data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
+
+ if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
+ if (mFlushComplete[data2]) {
+ ALOGW("Flush already completed for %s port",
+ data2 == kPortIndexInput ? "input" : "output");
+ return true;
+ }
+ mFlushComplete[data2] = true;
+
+ if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {
+ changeStateIfWeOwnAllBuffers();
+ }
+ } else if (data2 == OMX_ALL) {
+ if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) {
+ ALOGW("received flush complete event for OMX_ALL before ports have been"
+ "flushed (%d/%d)",
+ mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]);
+ return false;
+ }
+
+ changeStateIfWeOwnAllBuffers();
+ } else {
+ ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);
+ }
+
+ return true;
+ }
+
+ case OMX_EventPortSettingsChanged:
+ {
+ sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
+ msg->setInt32("type", omx_message::EVENT);
+ msg->setInt32("node", mCodec->mNode);
+ msg->setInt32("event", event);
+ msg->setInt32("data1", data1);
+ msg->setInt32("data2", data2);
+
+ ALOGV("[%s] Deferring OMX_EventPortSettingsChanged",
+ mCodec->mComponentName.c_str());
+
+ mCodec->deferMessage(msg);
+
+ return true;
+ }
+
+ default:
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+
+ return true;
+}
+
+void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
+ BaseState::onOutputBufferDrained(msg);
+
+ changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
+ BaseState::onInputBufferFilled(msg);
+
+ changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
+ if (mFlushComplete[kPortIndexInput]
+ && mFlushComplete[kPortIndexOutput]
+ && mCodec->allYourBuffersAreBelongToUs()) {
+ // We now own all buffers except possibly those still queued with
+ // the native window for rendering. Let's get those back as well.
+ mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
+
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+
+ mCodec->mPortEOS[kPortIndexInput] =
+ mCodec->mPortEOS[kPortIndexOutput] = false;
+
+ mCodec->mInputEOSResult = OK;
+
+ if (mCodec->mSkipCutBuffer != NULL) {
+ mCodec->mSkipCutBuffer->clear();
+ }
+
+ mCodec->changeState(mCodec->mExecutingState);
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 056da53..5fddacf 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -233,6 +233,17 @@ ifeq ($(DTS_CODEC_M_), true)
LOCAL_CFLAGS += -DDTS_CODEC_M_
endif
+# Mediatek
+ifeq ($(strip $(BOARD_HAS_MTK_HARDWARE)),true)
+LOCAL_CFLAGS += -DMTK_HARDWARE
+
+LOCAL_C_INCLUDES += \
+ $(TOP)/hardware/mediatek/dpframework/inc
+
+LOCAL_SHARED_LIBRARIES += \
+ libdpframework
+endif
+
LOCAL_MODULE:= libstagefright
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/Android.mk.orig b/media/libstagefright/Android.mk.orig
new file mode 100644
index 0000000..056da53
--- /dev/null
+++ b/media/libstagefright/Android.mk.orig
@@ -0,0 +1,242 @@
+#
+# This file was modified by DTS, Inc. The portions of the
+# code that are surrounded by "DTS..." are copyrighted and
+# licensed separately, as follows:
+#
+# (C) 2015 DTS, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+#
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+
+LOCAL_SRC_FILES:= \
+ ACodec.cpp \
+ AACExtractor.cpp \
+ AACWriter.cpp \
+ AMRExtractor.cpp \
+ AMRWriter.cpp \
+ AudioPlayer.cpp \
+ AudioSource.cpp \
+ AwesomePlayer.cpp \
+ CallbackDataSource.cpp \
+ CameraSource.cpp \
+ CameraSourceTimeLapse.cpp \
+ ClockEstimator.cpp \
+ CodecBase.cpp \
+ DataSource.cpp \
+ DataURISource.cpp \
+ DRMExtractor.cpp \
+ ESDS.cpp \
+ FileSource.cpp \
+ FLACExtractor.cpp \
+ FrameRenderTracker.cpp \
+ HTTPBase.cpp \
+ JPEGSource.cpp \
+ MP3Extractor.cpp \
+ MPEG2TSWriter.cpp \
+ MPEG4Extractor.cpp \
+ MPEG4Writer.cpp \
+ MediaAdapter.cpp \
+ MediaBuffer.cpp \
+ MediaBufferGroup.cpp \
+ MediaClock.cpp \
+ MediaCodec.cpp \
+ MediaCodecList.cpp \
+ MediaCodecListOverrides.cpp \
+ MediaCodecSource.cpp \
+ MediaDefs.cpp \
+ MediaExtractor.cpp \
+ MediaSync.cpp \
+ MidiExtractor.cpp \
+ http/MediaHTTP.cpp \
+ MediaMuxer.cpp \
+ MediaSource.cpp \
+ MetaData.cpp \
+ NuCachedSource2.cpp \
+ NuMediaExtractor.cpp \
+ OMXClient.cpp \
+ OMXCodec.cpp \
+ OggExtractor.cpp \
+ ProcessInfo.cpp \
+ SampleIterator.cpp \
+ SampleTable.cpp \
+ SkipCutBuffer.cpp \
+ StagefrightMediaScanner.cpp \
+ StagefrightMetadataRetriever.cpp \
+ SurfaceMediaSource.cpp \
+ SurfaceUtils.cpp \
+ ThrottledSource.cpp \
+ TimeSource.cpp \
+ TimedEventQueue.cpp \
+ Utils.cpp \
+ VBRISeeker.cpp \
+ VideoFrameScheduler.cpp \
+ WAVExtractor.cpp \
+ WAVEWriter.cpp \
+ WVMExtractor.cpp \
+ XINGSeeker.cpp \
+ avc_utils.cpp \
+ APE.cpp \
+ FFMPEGSoftCodec.cpp \
+
+LOCAL_C_INCLUDES:= \
+ $(TOP)/frameworks/av/include/media/ \
+ $(TOP)/frameworks/av/media/libavextensions \
+ $(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
+ $(TOP)/frameworks/av/include/media/stagefright/timedtext \
+ $(TOP)/frameworks/native/include/media/hardware \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/external/flac/include \
+ $(TOP)/external/tremolo \
+ $(TOP)/external/libvpx/libwebm \
+ $(TOP)/system/netd/include \
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libcamera_client \
+ libcutils \
+ libdl \
+ libdrmframework \
+ libexpat \
+ libgui \
+ libicui18n \
+ libicuuc \
+ liblog \
+ libmedia \
+ libmediautils \
+ libnetd_client \
+ libopus \
+ libsonivox \
+ libssl \
+ libstagefright_omx \
+ libstagefright_yuv \
+ libsync \
+ libui \
+ libutils \
+ libvorbisidec \
+ libz \
+ libpowermanager \
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_color_conversion \
+ libstagefright_aacenc \
+ libstagefright_matroska \
+ libstagefright_mediafilter \
+ libstagefright_webm \
+ libstagefright_timedtext \
+ libvpx \
+ libwebm \
+ libstagefright_mpeg2ts \
+ libstagefright_id3 \
+ libFLAC \
+ libmedia_helper \
+
+LOCAL_WHOLE_STATIC_LIBRARIES := libavextensions
+
+ifeq ($(BOARD_USE_S3D_SUPPORT), true)
+ifeq ($(BOARD_USES_HWC_SERVICES), true)
+LOCAL_CFLAGS += -DUSE_S3D_SUPPORT -DHWC_SERVICES
+LOCAL_C_INCLUDES += \
+ $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/openmax/include/exynos \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/$(TARGET_BOARD_PLATFORM)/libhwcService \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/$(TARGET_BOARD_PLATFORM)/libhwc \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/$(TARGET_BOARD_PLATFORM)/include \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/$(TARGET_SOC)/libhwcmodule \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/$(TARGET_SOC)/include \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/exynos/libexynosutils \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/exynos/include \
+ $(TOP)/hardware/samsung_slsi-$(TARGET_SLSI_VARIANT)/exynos/libhwc
+
+LOCAL_ADDITIONAL_DEPENDENCIES := \
+ $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_SHARED_LIBRARIES += \
+ libExynosHWCService
+endif
+endif
+
+LOCAL_SHARED_LIBRARIES += \
+ libstagefright_enc_common \
+ libstagefright_avc_common \
+ libstagefright_foundation \
+ libdl \
+ libRScpp \
+
+LOCAL_CFLAGS += -Werror -Wno-multichar -Wno-error=deprecated-declarations
+
+ifeq ($(TARGET_USES_QCOM_BSP), true)
+ LOCAL_C_INCLUDES += $(call project-path-for,qcom-display)/libgralloc
+ LOCAL_CFLAGS += -DQTI_BSP
+endif
+
+LOCAL_C_INCLUDES += $(call project-path-for,qcom-media)/mm-core/inc
+
+# enable experiments only in userdebug and eng builds
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+LOCAL_CFLAGS += -DENABLE_STAGEFRIGHT_EXPERIMENTS
+endif
+
+ifeq ($(TARGET_BOARD_PLATFORM),omap4)
+LOCAL_CFLAGS += -DBOARD_CANT_REALLOCATE_OMX_BUFFERS
+endif
+
+ifeq ($(call is-vendor-board-platform,QCOM),true)
+LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS)/mm-audio
+ifeq ($(strip $(AUDIO_FEATURE_ENABLED_EXTN_FLAC_DECODER)),true)
+ LOCAL_CFLAGS += -DQTI_FLAC_DECODER
+endif
+endif
+
+ifeq ($(strip $(AUDIO_FEATURE_ENABLED_FLAC_OFFLOAD)),true)
+ LOCAL_CFLAGS += -DFLAC_OFFLOAD_ENABLED
+endif
+
+LOCAL_CLANG := true
+
+ifeq ($(BOARD_USE_SAMSUNG_CAMERAFORMAT_NV21), true)
+# This needs flag requires the following string constant in
+# CameraParametersExtra.h:
+#
+# const char CameraParameters::PIXEL_FORMAT_YUV420SP_NV21[] = "nv21";
+LOCAL_CFLAGS += -DUSE_SAMSUNG_CAMERAFORMAT_NV21
+endif
+
+# FFMPEG plugin
+LOCAL_C_INCLUDES += $(TOP)/external/stagefright-plugins/include
+
+#LOCAL_CFLAGS += -DLOG_NDEBUG=0
+
+ifeq ($(BOARD_USE_SAMSUNG_COLORFORMAT), true)
+LOCAL_CFLAGS += -DUSE_SAMSUNG_COLORFORMAT
+
+# Include native color format header path
+LOCAL_C_INCLUDES += \
+ $(TOP)/hardware/samsung/exynos4/hal/include \
+ $(TOP)/hardware/samsung/exynos4/include
+endif
+
+ifeq ($(DTS_CODEC_M_), true)
+ LOCAL_SRC_FILES+= DTSUtils.cpp
+ LOCAL_CFLAGS += -DDTS_CODEC_M_
+endif
+
+LOCAL_MODULE:= libstagefright
+
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index f6b4741..0013c0b 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -43,6 +43,10 @@
#define UNUSED_UNLESS_VERBOSE(x)
#endif
+#ifdef MTK_HARDWARE
+#define OMX_MTK_COLOR_FormatYV12 0x7F000200
+#endif
+
namespace android {
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
@@ -108,7 +112,11 @@ static int32_t getColorFormat(const char* colorFormat) {
}
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+#ifdef MTK_HARDWARE
+ return OMX_MTK_COLOR_FormatYV12;
+#else
return OMX_COLOR_FormatYUV420Planar;
+#endif
}
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
diff --git a/media/libstagefright/CameraSource.cpp.orig b/media/libstagefright/CameraSource.cpp.orig
new file mode 100644
index 0000000..f6b4741
--- /dev/null
+++ b/media/libstagefright/CameraSource.cpp.orig
@@ -0,0 +1,1045 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSource"
+#include <utils/Log.h>
+
+#include <OMX_Component.h>
+#include <binder/IPCThreadState.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/hardware/HardwareAPI.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <camera/ICameraRecordingProxy.h>
+#include <gui/Surface.h>
+#include <utils/String8.h>
+#include <cutils/properties.h>
+
+#include <stagefright/AVExtensions.h>
+
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
+namespace android {
+
+static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
+
+struct CameraSourceListener : public CameraListener {
+ CameraSourceListener(const sp<CameraSource> &source);
+
+ virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+ virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
+ camera_frame_metadata_t *metadata);
+
+ virtual void postDataTimestamp(
+ nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
+
+protected:
+ virtual ~CameraSourceListener();
+
+private:
+ wp<CameraSource> mSource;
+
+ CameraSourceListener(const CameraSourceListener &);
+ CameraSourceListener &operator=(const CameraSourceListener &);
+};
+
+CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
+ : mSource(source) {
+}
+
+CameraSourceListener::~CameraSourceListener() {
+}
+
+void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
+ UNUSED_UNLESS_VERBOSE(msgType);
+ UNUSED_UNLESS_VERBOSE(ext1);
+ UNUSED_UNLESS_VERBOSE(ext2);
+ ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
+}
+
+void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
+ camera_frame_metadata_t * /* metadata */) {
+ ALOGV("postData(%d, ptr:%p, size:%zu)",
+ msgType, dataPtr->pointer(), dataPtr->size());
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallback(msgType, dataPtr);
+ }
+}
+
+void CameraSourceListener::postDataTimestamp(
+ nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
+ }
+}
+
+static int32_t getColorFormat(const char* colorFormat) {
+ if (!colorFormat) {
+ ALOGE("Invalid color format");
+ return -1;
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+ return OMX_COLOR_FormatYUV420Planar;
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
+ return OMX_COLOR_FormatYUV422SemiPlanar;
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
+#ifdef USE_SAMSUNG_COLORFORMAT
+ static const int OMX_SEC_COLOR_FormatNV12LPhysicalAddress = 0x7F000002;
+ return OMX_SEC_COLOR_FormatNV12LPhysicalAddress;
+#else
+ return OMX_COLOR_FormatYUV420SemiPlanar;
+#endif
+ }
+
+#ifdef USE_SAMSUNG_CAMERAFORMAT_NV21
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP_NV21)) {
+ static const int OMX_SEC_COLOR_FormatNV21Linear = 0x7F000011;
+ return OMX_SEC_COLOR_FormatNV21Linear;
+ }
+#endif /* USE_SAMSUNG_CAMERAFORMAT_NV21 */
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
+ return OMX_COLOR_FormatYCbYCr;
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
+ return OMX_COLOR_Format16bitRGB565;
+ }
+
+ if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
+ return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
+ return OMX_COLOR_FormatAndroidOpaque;
+ }
+
+ if (!strcmp(colorFormat, "YVU420SemiPlanar")) {
+ return OMX_QCOM_COLOR_FormatYVU420SemiPlanar;
+ }
+
+ ALOGE("Uknown color format (%s), please add it to "
+ "CameraSource::getColorFormat", colorFormat);
+
+ CHECK(!"Unknown color format");
+ return -1;
+}
+
+CameraSource *CameraSource::Create(const String16 &clientName) {
+ Size size;
+ size.width = -1;
+ size.height = -1;
+
+ sp<ICamera> camera;
+ return new CameraSource(camera, NULL, 0, clientName, -1,
+ size, -1, NULL, false);
+}
+
+// static
+CameraSource *CameraSource::CreateFromCamera(
+ const sp<ICamera>& camera,
+ const sp<ICameraRecordingProxy>& proxy,
+ int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<IGraphicBufferProducer>& surface,
+ bool storeMetaDataInVideoBuffers) {
+
+ CameraSource *source = new CameraSource(camera, proxy, cameraId,
+ clientName, clientUid, videoSize, frameRate, surface,
+ storeMetaDataInVideoBuffers);
+ return source;
+}
+
+CameraSource::CameraSource(
+ const sp<ICamera>& camera,
+ const sp<ICameraRecordingProxy>& proxy,
+ int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<IGraphicBufferProducer>& surface,
+ bool storeMetaDataInVideoBuffers)
+ : mCameraFlags(0),
+ mNumInputBuffers(0),
+ mVideoFrameRate(-1),
+ mCamera(0),
+ mSurface(surface),
+ mNumFramesReceived(0),
+ mLastFrameTimestampUs(0),
+ mStarted(false),
+ mNumFramesEncoded(0),
+ mTimeBetweenFrameCaptureUs(0),
+ mFirstFrameTimeUs(0),
+ mNumFramesDropped(0),
+ mNumGlitches(0),
+ mGlitchDurationThresholdUs(200000),
+ mCollectStats(false),
+ mPauseAdjTimeUs(0),
+ mPauseStartTimeUs(0),
+ mPauseEndTimeUs(0),
+ mRecPause(false) {
+ mVideoSize.width = -1;
+ mVideoSize.height = -1;
+
+ mInitCheck = init(camera, proxy, cameraId,
+ clientName, clientUid,
+ videoSize, frameRate,
+ storeMetaDataInVideoBuffers);
+ if (mInitCheck != OK) releaseCamera();
+}
+
+status_t CameraSource::initCheck() const {
+ return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+ const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
+ int32_t cameraId, const String16& clientName, uid_t clientUid) {
+
+ if (camera == 0) {
+ mCamera = Camera::connect(cameraId, clientName, clientUid);
+ if (mCamera == 0) return -EBUSY;
+ mCameraFlags &= ~FLAGS_HOT_CAMERA;
+ } else {
+ // We get the proxy from Camera, not ICamera. We need to get the proxy
+ // to the remote Camera owned by the application. Here mCamera is a
+ // local Camera object created by us. We cannot use the proxy from
+ // mCamera here.
+ mCamera = Camera::create(camera);
+ if (mCamera == 0) return -EBUSY;
+ mCameraRecordingProxy = proxy;
+ mCameraFlags |= FLAGS_HOT_CAMERA;
+ mDeathNotifier = new DeathNotifier();
+ // isBinderAlive needs linkToDeath to work.
+ IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
+ }
+
+ mCamera->lock();
+
+ return OK;
+}
+
+
+/*
+ * Check to see whether the requested video width and height is one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param suppportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+ int32_t width, int32_t height,
+ const Vector<Size>& supportedSizes) {
+
+ ALOGV("isVideoSizeSupported");
+ for (size_t i = 0; i < supportedSizes.size(); ++i) {
+ if (width == supportedSizes[i].width &&
+ height == supportedSizes[i].height) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*
+ * If the preview and video output is separate, we only set the
+ * the video size, and applications should set the preview size
+ * to some proper value, and the recording framework will not
+ * change the preview size; otherwise, if the video and preview
+ * output is the same, we need to set the preview to be the same
+ * as the requested video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @@param isSetVideoSizeSupported retunrs whether method
+ * CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ * supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+ const CameraParameters& params,
+ bool *isSetVideoSizeSupported,
+ Vector<Size>& sizes) {
+
+ *isSetVideoSizeSupported = true;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ ALOGD("Camera does not support setVideoSize()");
+ params.getSupportedPreviewSizes(sizes);
+ *isSetVideoSizeSupported = false;
+ }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+ const CameraParameters& params) {
+ mColorFormat = getColorFormat(params.get(
+ CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ if (mColorFormat == -1) {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration on the video size is skipped.
+ * if frameRate is -1, configuration on the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+ CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate) {
+ ALOGV("configureCamera");
+ Vector<Size> sizes;
+ bool isSetVideoSizeSupportedByCamera = true;
+ getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+ bool isCameraParamChanged = false;
+ if (width != -1 && height != -1) {
+ if (!isVideoSizeSupported(width, height, sizes)) {
+ ALOGE("Video dimension (%dx%d) is unsupported", width, height);
+ return BAD_VALUE;
+ }
+ if (isSetVideoSizeSupportedByCamera) {
+ params->setVideoSize(width, height);
+ } else {
+ params->setPreviewSize(width, height);
+ }
+ isCameraParamChanged = true;
+ } else if ((width == -1 && height != -1) ||
+ (width != -1 && height == -1)) {
+ // If one and only one of the width and height is -1
+ // we reject such a request.
+ ALOGE("Requested video size (%dx%d) is not supported", width, height);
+ return BAD_VALUE;
+ } else { // width == -1 && height == -1
+ // Do not configure the camera.
+ // Use the current width and height value setting from the camera.
+ }
+
+ if (frameRate != -1) {
+ CHECK(frameRate > 0 && frameRate <= 120);
+ const char* supportedFrameRates =
+ params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+ CHECK(supportedFrameRates != NULL);
+ ALOGV("Supported frame rates: %s", supportedFrameRates);
+ char buf[4];
+ snprintf(buf, 4, "%d", frameRate);
+ if (strstr(supportedFrameRates, buf) == NULL) {
+ ALOGE("Requested frame rate (%d) is not supported: %s",
+ frameRate, supportedFrameRates);
+ return BAD_VALUE;
+ }
+
+ // The frame rate is supported, set the camera to the requested value.
+ params->setPreviewFrameRate(frameRate);
+ isCameraParamChanged = true;
+ } else { // frameRate == -1
+ // Do not configure the camera.
+ // Use the current frame rate value setting from the camera
+ }
+
+ if (isCameraParamChanged) {
+ // Either frame rate or frame size needs to be changed.
+ String8 s = params->flatten();
+ if (OK != mCamera->setParameters(s)) {
+ ALOGE("Could not change settings."
+ " Someone else is using camera %p?", mCamera.get());
+ return -EBUSY;
+ }
+ }
+ return OK;
+}
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, check on the current width and height value setting
+ * is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param the target video frame width in pixels to check against
+ * @param the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+ const CameraParameters& params,
+ int32_t width, int32_t height) {
+
+ ALOGV("checkVideoSize");
+ // The actual video size is the same as the preview size
+ // if the camera hal does not support separate video and
+ // preview output. In this case, we retrieve the video
+ // size from preview.
+ int32_t frameWidthActual = -1;
+ int32_t frameHeightActual = -1;
+ Vector<Size> sizes;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ // video size is the same as preview size
+ params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+ } else {
+ // video size may not be the same as preview
+ params.getVideoSize(&frameWidthActual, &frameHeightActual);
+ }
+ if (frameWidthActual < 0 || frameHeightActual < 0) {
+ ALOGE("Failed to retrieve video frame size (%dx%d)",
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame size against the target/requested
+ // video frame size.
+ if (width != -1 && height != -1) {
+ if (frameWidthActual != width || frameHeightActual != height) {
+ ALOGE("Failed to set video frame size to %dx%d. "
+ "The actual video size is %dx%d ", width, height,
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Good now.
+ mVideoSize.width = frameWidthActual;
+ mVideoSize.height = frameHeightActual;
+ return OK;
+}
+
+/*
+ * Check the requested frame rate has been successfully configured or not.
+ * If the target frameRate is -1, check on the current frame rate value
+ * setting is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+ const CameraParameters& params,
+ int32_t frameRate) {
+
+ ALOGV("checkFrameRate");
+ int32_t frameRateActual = params.getPreviewFrameRate();
+ if (frameRateActual < 0) {
+ ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame rate against the target/requested
+ // video frame rate.
+ if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+ ALOGE("Failed to set preview frame rate to %d fps. The actual "
+ "frame rate is %d", frameRate, frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Good now.
+ mVideoFrameRate = frameRateActual;
+ return OK;
+}
+
+/*
+ * Initialize the CameraSource to so that it becomes
+ * ready for providing the video input streams as requested.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ * as the video source
+ * @param videoSize the target video frame size. If both
+ * width and height in videoSize is -1, use the current
+ * width and heigth settings by the camera
+ * @param frameRate the target frame rate in frames per second.
+ * if it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ * data or real YUV data in video buffers. Request to
+ * store meta data in video buffers may not be honored
+ * if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+ const sp<ICamera>& camera,
+ const sp<ICameraRecordingProxy>& proxy,
+ int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers) {
+
+ ALOGV("init");
+ status_t err = OK;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
+ videoSize, frameRate,
+ storeMetaDataInVideoBuffers);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return err;
+}
+
+status_t CameraSource::initWithCameraAccess(
+ const sp<ICamera>& camera,
+ const sp<ICameraRecordingProxy>& proxy,
+ int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers) {
+ ALOGV("initWithCameraAccess");
+ status_t err = OK;
+
+ if ((err = isCameraAvailable(camera, proxy, cameraId,
+ clientName, clientUid)) != OK) {
+ ALOGE("Camera connection could not be established.");
+ return err;
+ }
+ CameraParameters params(mCamera->getParameters());
+ if ((err = isCameraColorFormatSupported(params)) != OK) {
+ return err;
+ }
+
+ // Set the camera to use the requested video frame size
+ // and/or frame rate.
+ if ((err = configureCamera(&params,
+ videoSize.width, videoSize.height,
+ frameRate))) {
+ return err;
+ }
+
+ // Check on video frame size and frame rate.
+ CameraParameters newCameraParams(mCamera->getParameters());
+ if ((err = checkVideoSize(newCameraParams,
+ videoSize.width, videoSize.height)) != OK) {
+ return err;
+ }
+ if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+ return err;
+ }
+
+ // Set the preview display. Skip this if mSurface is null because
+ // applications may already set a surface to the camera.
+ if (mSurface != NULL) {
+ // This CHECK is good, since we just passed the lock/unlock
+ // check earlier by calling mCamera->setParameters().
+ CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
+ }
+
+ // By default, do not store metadata in video buffers
+ mIsMetaDataStoredInVideoBuffers = false;
+ mCamera->storeMetaDataInBuffers(false);
+ if (storeMetaDataInVideoBuffers) {
+ if (OK == mCamera->storeMetaDataInBuffers(true)) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
+ }
+
+ int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
+ if (glitchDurationUs > mGlitchDurationThresholdUs) {
+ mGlitchDurationThresholdUs = glitchDurationUs;
+ }
+
+ // XXX: query camera for the stride and slice height
+ // when the capability becomes available.
+ mMeta = new MetaData;
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ mMeta->setInt32(kKeyColorFormat, mColorFormat);
+ mMeta->setInt32(kKeyWidth, mVideoSize.width);
+ mMeta->setInt32(kKeyHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyStride, mVideoSize.width);
+ mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
+ AVUtils::get()->extractCustomCameraKeys(params, mMeta);
+
+ return OK;
+}
+
+CameraSource::~CameraSource() {
+ if (mStarted) {
+ reset();
+ } else if (mInitCheck == OK) {
+ // Camera is initialized but because start() is never called,
+ // the lock on Camera is never released(). This makes sure
+ // Camera's lock is released in this case.
+ releaseCamera();
+ }
+}
+
+status_t CameraSource::startCameraRecording() {
+ ALOGV("startCameraRecording");
+ // Reset the identity to the current thread because media server owns the
+ // camera and recording is started by the applications. The applications
+ // will connect to the camera in ICameraRecordingProxy::startRecording.
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ status_t err;
+ if (mNumInputBuffers > 0) {
+ err = mCamera->sendCommand(
+ CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
+
+ // This could happen for CameraHAL1 clients; thus the failure is
+ // not a fatal error
+ if (err != OK) {
+ ALOGW("Failed to set video buffer count to %d due to %d",
+ mNumInputBuffers, err);
+ }
+ }
+
+ err = mCamera->sendCommand(
+ CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
+
+ // This could happen for CameraHAL1 clients; thus the failure is
+ // not a fatal error
+ if (err != OK) {
+ ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
+ mEncoderFormat, mEncoderDataSpace, err);
+ }
+
+ err = OK;
+ if (mCameraFlags & FLAGS_HOT_CAMERA) {
+ mCamera->unlock();
+ mCamera.clear();
+ if ((err = mCameraRecordingProxy->startRecording(
+ new ProxyListener(this))) != OK) {
+ ALOGE("Failed to start recording, received error: %s (%d)",
+ strerror(-err), err);
+ }
+ } else {
+ mCamera->setListener(new CameraSourceListener(this));
+ mCamera->startRecording();
+ if (!mCamera->recordingEnabled()) {
+ err = -EINVAL;
+ ALOGE("Failed to start recording");
+ }
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return err;
+}
+
+status_t CameraSource::start(MetaData *meta) {
+ ALOGV("start");
+ if(mRecPause) {
+ mRecPause = false;
+ mPauseAdjTimeUs = mPauseEndTimeUs - mPauseStartTimeUs;
+ ALOGV("resume : mPause Adj / End / Start : %" PRId64 " / %" PRId64 " / %" PRId64" us",
+ mPauseAdjTimeUs, mPauseEndTimeUs, mPauseStartTimeUs);
+ return OK;
+ }
+
+ CHECK(!mStarted);
+ if (mInitCheck != OK) {
+ ALOGE("CameraSource is not initialized yet");
+ return mInitCheck;
+ }
+
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("media.stagefright.record-stats", value, NULL)
+ && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
+ mCollectStats = true;
+ }
+
+ mStartTimeUs = 0;
+ mRecPause = false;
+ mPauseAdjTimeUs = 0;
+ mPauseStartTimeUs = 0;
+ mPauseEndTimeUs = 0;
+ mNumInputBuffers = 0;
+ mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ mEncoderDataSpace = HAL_DATASPACE_BT709;
+
+ if (meta) {
+ int64_t startTimeUs;
+
+ auto key = kKeyTime;
+ if (!property_get_bool("media.camera.ts.monotonic", true)) {
+ key = kKeyTimeBoot;
+ }
+
+ if (meta->findInt64(key, &startTimeUs)) {
+ mStartTimeUs = startTimeUs;
+ }
+
+ int32_t nBuffers;
+ if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
+ CHECK_GT(nBuffers, 0);
+ mNumInputBuffers = nBuffers;
+ }
+
+ // apply encoder color format if specified
+ if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
+ ALOGV("Using encoder format: %#x", mEncoderFormat);
+ }
+ if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
+ ALOGV("Using encoder data space: %#x", mEncoderDataSpace);
+ }
+ }
+
+ status_t err;
+ if ((err = startCameraRecording()) == OK) {
+ mStarted = true;
+ }
+
+ return err;
+}
+
+status_t CameraSource::pause() {
+ mRecPause = true;
+ mPauseStartTimeUs = mLastFrameTimestampUs;
+ //record the end time too, or there is a risk the end time is 0
+ mPauseEndTimeUs = mLastFrameTimestampUs;
+ ALOGV("pause : mPauseStart %" PRId64 " us, #Queued Frames : %zd",
+ mPauseStartTimeUs, mFramesReceived.size());
+ return OK;
+}
+
+void CameraSource::stopCameraRecording() {
+ ALOGV("stopCameraRecording");
+ if (mCameraFlags & FLAGS_HOT_CAMERA) {
+ mCameraRecordingProxy->stopRecording();
+ } else {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+ }
+}
+
+void CameraSource::releaseCamera() {
+ ALOGV("releaseCamera");
+ sp<Camera> camera;
+ bool coldCamera = false;
+ {
+ Mutex::Autolock autoLock(mLock);
+ // get a local ref and clear ref to mCamera now
+ camera = mCamera;
+ mCamera.clear();
+ coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
+ }
+
+ if (camera != 0) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (coldCamera) {
+ ALOGV("Camera was cold when we started, stopping preview");
+ camera->stopPreview();
+ camera->disconnect();
+ }
+ camera->unlock();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mCameraRecordingProxy != 0) {
+ IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
+ mCameraRecordingProxy.clear();
+ }
+ mCameraFlags = 0;
+ }
+}
+
+status_t CameraSource::reset() {
+ ALOGD("reset: E");
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
+ int64_t token;
+ bool isTokenValid = false;
+ if (mCamera != 0) {
+ token = IPCThreadState::self()->clearCallingIdentity();
+ isTokenValid = true;
+ }
+ releaseQueuedFrames();
+ while (!mFramesBeingEncoded.empty()) {
+ if (NO_ERROR !=
+ mFrameCompleteCondition.waitRelative(mLock,
+ mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+ ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
+ mFramesBeingEncoded.size());
+ }
+ }
+ stopCameraRecording();
+ if (isTokenValid) {
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+
+ if (mCollectStats) {
+ ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
+ mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
+ mLastFrameTimestampUs - mFirstFrameTimeUs);
+ }
+
+ if (mNumGlitches > 0) {
+ ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ }
+
+ CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+ }
+
+ releaseCamera();
+
+ ALOGD("reset: X");
+ return OK;
+}
+
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+ ALOGV("releaseRecordingFrame");
+ if (mCameraRecordingProxy != NULL) {
+ mCameraRecordingProxy->releaseRecordingFrame(frame);
+ } else if (mCamera != NULL) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mCamera->releaseRecordingFrame(frame);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+}
+
+void CameraSource::releaseQueuedFrames() {
+ List<sp<IMemory> >::iterator it;
+ while (!mFramesReceived.empty()) {
+ it = mFramesReceived.begin();
+ // b/28466701
+ adjustOutgoingANWBuffer(it->get());
+ releaseRecordingFrame(*it);
+ mFramesReceived.erase(it);
+ ++mNumFramesDropped;
+ }
+}
+
+sp<MetaData> CameraSource::getFormat() {
+ return mMeta;
+}
+
+void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
+ releaseRecordingFrame(frame);
+}
+
+void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
+ ALOGV("signalBufferReturned: %p", buffer->data());
+ Mutex::Autolock autoLock(mLock);
+ for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
+ it != mFramesBeingEncoded.end(); ++it) {
+ if ((*it)->pointer() == buffer->data()) {
+ // b/28466701
+ adjustOutgoingANWBuffer(it->get());
+
+ releaseOneRecordingFrame((*it));
+ mFramesBeingEncoded.erase(it);
+ ++mNumFramesEncoded;
+ buffer->setObserver(0);
+ buffer->release();
+ mFrameCompleteCondition.signal();
+ return;
+ }
+ }
+ CHECK(!"signalBufferReturned: bogus buffer");
+}
+
+status_t CameraSource::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ ALOGV("read");
+
+ *buffer = NULL;
+
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode mode;
+ if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ sp<IMemory> frame;
+ int64_t frameTime;
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ while (mStarted && mFramesReceived.empty()) {
+ if (NO_ERROR !=
+ mFrameAvailableCondition.waitRelative(mLock,
+ mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+ if (mCameraRecordingProxy != 0 &&
+ !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
+ ALOGW("camera recording proxy is gone");
+ return ERROR_END_OF_STREAM;
+ }
+ ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
+ mLastFrameTimestampUs);
+ }
+ }
+ if (!mStarted) {
+ return OK;
+ }
+ frame = *mFramesReceived.begin();
+ mFramesReceived.erase(mFramesReceived.begin());
+
+ frameTime = *mFrameTimes.begin();
+ mFrameTimes.erase(mFrameTimes.begin());
+ mFramesBeingEncoded.push_back(frame);
+ *buffer = new MediaBuffer(frame->pointer(), frame->size());
+ (*buffer)->setObserver(this);
+ (*buffer)->add_ref();
+ (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+ }
+ return OK;
+}
+
+void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
+ int32_t msgType __unused, const sp<IMemory> &data) {
+ ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
+ Mutex::Autolock autoLock(mLock);
+ if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
+ ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
+ releaseOneRecordingFrame(data);
+ return;
+ }
+
+ // May need to skip frame or modify timestamp. Currently implemented
+ // by the subclass CameraSourceTimeLapse.
+ if (skipCurrentFrame(timestampUs)) {
+ releaseOneRecordingFrame(data);
+ return;
+ }
+
+ if (mRecPause == true) {
+ if(!mFramesReceived.empty()) {
+ ALOGV("releaseQueuedFrames - #Queued Frames : %zd", mFramesReceived.size());
+ releaseQueuedFrames();
+ }
+ ALOGV("release One Video Frame for Pause : %" PRId64 "us", timestampUs);
+ releaseOneRecordingFrame(data);
+ mPauseEndTimeUs = timestampUs;
+ return;
+ }
+ timestampUs -= mPauseAdjTimeUs;
+ ALOGV("dataCallbackTimestamp: AdjTimestamp %" PRId64 "us", timestampUs);
+
+ if (mNumFramesReceived > 0) {
+ if (timestampUs <= mLastFrameTimestampUs) {
+ ALOGW("Dropping frame with backward timestamp %" PRId64 " (last %" PRId64 ")",
+ timestampUs, mLastFrameTimestampUs);
+ releaseOneRecordingFrame(data);
+ return;
+ }
+ if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
+ ++mNumGlitches;
+ }
+ }
+
+ mLastFrameTimestampUs = timestampUs;
+ if (mNumFramesReceived == 0) {
+ mFirstFrameTimeUs = timestampUs;
+ // Initial delay
+ if (mStartTimeUs > 0) {
+ if (timestampUs < mStartTimeUs) {
+ // Frame was captured before recording was started
+ // Drop it without updating the statistical data.
+ releaseOneRecordingFrame(data);
+ return;
+ }
+ mStartTimeUs = timestampUs - mStartTimeUs;
+ }
+ }
+ ++mNumFramesReceived;
+
+ CHECK(data != NULL && data->size() > 0);
+
+ // b/28466701
+ adjustIncomingANWBuffer(data.get());
+
+ mFramesReceived.push_back(data);
+ int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
+ mFrameTimes.push_back(timeUs);
+ ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
+ mStartTimeUs, timeUs);
+ mFrameAvailableCondition.signal();
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+ ALOGV("isMetaDataStoredInVideoBuffers");
+ return mIsMetaDataStoredInVideoBuffers;
+}
+
+void CameraSource::adjustIncomingANWBuffer(IMemory* data) {
+ VideoNativeMetadata *payload =
+ reinterpret_cast<VideoNativeMetadata*>(data->pointer());
+ if (payload->eType == kMetadataBufferTypeANWBuffer) {
+ payload->pBuffer = (ANativeWindowBuffer*)(((uint8_t*)payload->pBuffer) +
+ ICameraRecordingProxy::getCommonBaseAddress());
+ }
+}
+
+void CameraSource::adjustOutgoingANWBuffer(IMemory* data) {
+ VideoNativeMetadata *payload =
+ reinterpret_cast<VideoNativeMetadata*>(data->pointer());
+ if (payload->eType == kMetadataBufferTypeANWBuffer) {
+ payload->pBuffer = (ANativeWindowBuffer*)(((uint8_t*)payload->pBuffer) -
+ ICameraRecordingProxy::getCommonBaseAddress());
+ }
+}
+
+CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
+ mSource = source;
+}
+
+void CameraSource::ProxyListener::dataCallbackTimestamp(
+ nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
+ mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
+}
+
+void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
+ ALOGI("Camera recording proxy died");
+}
+
+} // namespace android
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 14a3c0d..3fe82c2 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -411,6 +411,12 @@ status_t MediaCodecSource::initEncoder() {
mCodecLooper, outputMIME.c_str(), true /* encoder */);
}
+ // remove camera tag from mime
+ if (outputMIME.endsWith("_cam")) {
+ outputMIME.erase(outputMIME.size() - 4, 4);
+ mOutputFormat->setString("mime", outputMIME);
+ }
+
if (mEncoder == NULL) {
return NO_INIT;
}
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 3a45e25..c5dcd3a 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -346,7 +346,7 @@ void MediaSync::flush() {
mFrameScheduler->restart();
}
while (!mBufferItems.empty()) {
- BufferItem *bufferItem = &*mBufferItems.begin();
+ BufferQueue::BufferItem *bufferItem = &*mBufferItems.begin();
returnBufferToInput_l(bufferItem->mGraphicBuffer, bufferItem->mFence);
mBufferItems.erase(mBufferItems.begin());
}
@@ -552,7 +552,7 @@ void MediaSync::onDrainVideo_l() {
while (!mBufferItems.empty()) {
int64_t nowUs = ALooper::GetNowUs();
- BufferItem *bufferItem = &*mBufferItems.begin();
+ BufferQueue::BufferItem *bufferItem = &*mBufferItems.begin();
int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);
@@ -625,7 +625,7 @@ void MediaSync::onFrameAvailableFromInput() {
}
// Acquire and detach the buffer from the input.
- BufferItem bufferItem;
+ BufferQueue::BufferItem bufferItem;
status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */);
if (status != NO_ERROR) {
ALOGE("acquiring buffer from input failed (%d)", status);
@@ -669,7 +669,7 @@ void MediaSync::onFrameAvailableFromInput() {
}
}
-void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem) {
+void MediaSync::renderOneBufferItem_l(const BufferQueue::BufferItem &bufferItem) {
IGraphicBufferProducer::QueueBufferInput queueInput(
bufferItem.mTimestamp,
bufferItem.mIsAutoTimestamp,
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 05c1c9e..03cb198 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -264,6 +264,17 @@ void OMXCodec::findMatchingCodecs(
return;
}
+ AString *tempMime = new AString(mime);
+ if (tempMime->endsWith("_cam")) {
+ // remove camera tag from mime
+ tempMime->erase(tempMime->size() - 4, 4);
+ mime = tempMime->c_str();
+ }
+ else {
+ // no camera recording, prefer software codecs
+ flags |= kPreferSoftwareCodecs;
+ }
+
size_t index = 0;
for (;;) {
ssize_t matchIndex =
diff --git a/media/libstagefright/OMXCodec.cpp.orig b/media/libstagefright/OMXCodec.cpp.orig
new file mode 100644
index 0000000..05c1c9e
--- /dev/null
+++ b/media/libstagefright/OMXCodec.cpp.orig
@@ -0,0 +1,4629 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * This file was modified by Dolby Laboratories, Inc. The portions of the
+ * code that are surrounded by "DOLBY..." are copyrighted and
+ * licensed separately, as follows:
+ *
+ * (C) 2011-2015 Dolby Laboratories, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ **
+ ** This file was modified by DTS, Inc. The portions of the
+ ** code that are surrounded by "DTS..." are copyrighted and
+ ** licensed separately, as follows:
+ **
+ ** (C) 2015 DTS, Inc.
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ ** http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXCodec"
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
+#include <utils/Log.h>
+
+#include "include/AACEncoder.h"
+
+#include "include/ESDS.h"
+
+#include <binder/IServiceManager.h>
+#include <binder/MemoryDealer.h>
+#include <binder/ProcessState.h>
+#include <HardwareAPI.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/IMediaPlayerService.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/stagefright/SurfaceUtils.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <utils/Vector.h>
+
+#include <OMX_AudioExt.h>
+#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include <OMX_VideoExt.h>
+#include <OMX_AsString.h>
+
+#include "include/avc_utils.h"
+#ifdef DOLBY_ENABLE
+#include "DolbyOMXCodecExtImpl.h"
+#endif // DOLBY_END
+
+#ifdef USE_S3D_SUPPORT
+#include "Exynos_OMX_Def.h"
+#include "ExynosHWCService.h"
+#endif
+
+#ifdef DTS_CODEC_M_
+#include "include/DTSUtils.h"
+#include "include/OMX_Audio_DTS.h"
+#endif
+
+namespace android {
+
+// Treat time out as an error if we have not received any output
+// buffers after 3 seconds.
+const static int64_t kBufferFilledEventTimeOutNs = 3000000000LL;
+
+// OMX Spec defines less than 50 color formats. If the query for
+// color format is executed for more than kMaxColorFormatSupported,
+// the query will fail to avoid looping forever.
+// 1000 is more than enough for us to tell whether the omx
+// component in question is buggy or not.
+const static uint32_t kMaxColorFormatSupported = 1000;
+
+#define FACTORY_CREATE_ENCODER(name) \
+static sp<MediaSource> Make##name(const sp<MediaSource> &source, const sp<MetaData> &meta) { \
+ return new name(source, meta); \
+}
+
+#define FACTORY_REF(name) { #name, Make##name },
+
+FACTORY_CREATE_ENCODER(AACEncoder)
+
+static sp<MediaSource> InstantiateSoftwareEncoder(
+ const char *name, const sp<MediaSource> &source,
+ const sp<MetaData> &meta) {
+ struct FactoryInfo {
+ const char *name;
+ sp<MediaSource> (*CreateFunc)(const sp<MediaSource> &, const sp<MetaData> &);
+ };
+
+ static const FactoryInfo kFactoryInfo[] = {
+ FACTORY_REF(AACEncoder)
+ };
+ for (size_t i = 0;
+ i < sizeof(kFactoryInfo) / sizeof(kFactoryInfo[0]); ++i) {
+ if (!strcmp(name, kFactoryInfo[i].name)) {
+ return (*kFactoryInfo[i].CreateFunc)(source, meta);
+ }
+ }
+
+ return NULL;
+}
+
+#undef FACTORY_CREATE_ENCODER
+#undef FACTORY_REF
+
+#define CODEC_LOGI(x, ...) ALOGI("[%s] " x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGV(x, ...) ALOGV("[%s] " x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGW(x, ...) ALOGW("[%s] " x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGE(x, ...) ALOGE("[%s] " x, mComponentName, ##__VA_ARGS__)
+
+struct OMXCodecObserver : public BnOMXObserver {
+ OMXCodecObserver() {
+ }
+
+ void setCodec(const sp<OMXCodec> &target) {
+ mTarget = target;
+ }
+
+ // from IOMXObserver
+ virtual void onMessages(const std::list<omx_message> &messages) {
+ sp<OMXCodec> codec = mTarget.promote();
+
+ if (codec.get() != NULL) {
+ Mutex::Autolock autoLock(codec->mLock);
+ for (std::list<omx_message>::const_iterator it = messages.cbegin();
+ it != messages.cend(); ++it) {
+ codec->on_message(*it);
+ }
+ codec.clear();
+ }
+ }
+
+protected:
+ virtual ~OMXCodecObserver() {}
+
+private:
+ wp<OMXCodec> mTarget;
+
+ OMXCodecObserver(const OMXCodecObserver &);
+ OMXCodecObserver &operator=(const OMXCodecObserver &);
+};
+
+template<class T>
+static void InitOMXParams(T *params) {
+ COMPILE_TIME_ASSERT_FUNCTION_SCOPE(sizeof(OMX_PTR) == 4); // check OMX_PTR is 4 bytes.
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+static bool IsSoftwareCodec(const char *componentName) {
+#ifdef DOLBY_ENABLE
+ if (!strncmp("OMX.dolby.", componentName, 10)) {
+ return true;
+ }
+#endif // DOLBY_END
+ if (!strncmp("OMX.google.", componentName, 11)
+ || !strncmp("OMX.ffmpeg.", componentName, 11)) {
+ return true;
+ }
+
+ if (!strncmp("OMX.", componentName, 4)) {
+ return false;
+ }
+
+ return true;
+}
+
+// A sort order in which OMX software codecs are first, followed
+// by other (non-OMX) software codecs, followed by everything else.
+static int CompareSoftwareCodecsFirst(
+ const OMXCodec::CodecNameAndQuirks *elem1,
+ const OMXCodec::CodecNameAndQuirks *elem2) {
+ bool isOMX1 = !strncmp(elem1->mName.string(), "OMX.", 4);
+ bool isOMX2 = !strncmp(elem2->mName.string(), "OMX.", 4);
+
+ bool isSoftwareCodec1 = IsSoftwareCodec(elem1->mName.string());
+ bool isSoftwareCodec2 = IsSoftwareCodec(elem2->mName.string());
+
+ if (isSoftwareCodec1) {
+ if (!isSoftwareCodec2) { return -1; }
+
+ if (isOMX1) {
+ if (isOMX2) { return 0; }
+
+ return -1;
+ } else {
+ if (isOMX2) { return 0; }
+
+ return 1;
+ }
+
+ return -1;
+ }
+
+ if (isSoftwareCodec2) {
+ return 1;
+ }
+
+ return 0;
+}
+
+// static
+void OMXCodec::findMatchingCodecs(
+ const char *mime,
+ bool createEncoder, const char *matchComponentName,
+ uint32_t flags,
+ Vector<CodecNameAndQuirks> *matchingCodecs) {
+ matchingCodecs->clear();
+
+ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ if (list == NULL) {
+ return;
+ }
+
+ size_t index = 0;
+ for (;;) {
+ ssize_t matchIndex =
+ list->findCodecByType(mime, createEncoder, index);
+
+ if (matchIndex < 0) {
+ break;
+ }
+
+ index = matchIndex + 1;
+
+ const sp<MediaCodecInfo> info = list->getCodecInfo(matchIndex);
+ CHECK(info != NULL);
+ const char *componentName = info->getCodecName();
+
+ // If a specific codec is requested, skip the non-matching ones.
+ if (matchComponentName && strcmp(componentName, matchComponentName)) {
+ continue;
+ }
+
+ // When requesting software-only codecs, only push software codecs
+ // When requesting hardware-only codecs, only push hardware codecs
+ // When there is request neither for software-only nor for
+ // hardware-only codecs, push all codecs
+ if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) ||
+ ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) ||
+ (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+ ssize_t index = matchingCodecs->add();
+ CodecNameAndQuirks *entry = &matchingCodecs->editItemAt(index);
+ entry->mName = String8(componentName);
+ entry->mQuirks = getComponentQuirks(info);
+
+ ALOGV("matching '%s' quirks 0x%08x",
+ entry->mName.string(), entry->mQuirks);
+ }
+ }
+
+ if (flags & kPreferSoftwareCodecs) {
+ matchingCodecs->sort(CompareSoftwareCodecsFirst);
+ }
+}
+
+// static
+uint32_t OMXCodec::getComponentQuirks(
+ const sp<MediaCodecInfo> &info) {
+ uint32_t quirks = 0;
+ if (info->hasQuirk("requires-allocate-on-input-ports")) {
+ quirks |= kRequiresAllocateBufferOnInputPorts;
+ }
+ if (info->hasQuirk("requires-allocate-on-output-ports")) {
+ quirks |= kRequiresAllocateBufferOnOutputPorts;
+ }
+ if (info->hasQuirk("output-buffers-are-unreadable")) {
+ quirks |= kOutputBuffersAreUnreadable;
+ }
+#ifdef DOLBY_ENABLE
+ quirks |= getDolbyComponentQuirks(info);
+#endif // DOLBY_END
+
+ return quirks;
+}
+
+// static
+bool OMXCodec::findCodecQuirks(const char *componentName, uint32_t *quirks) {
+ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ if (list == NULL) {
+ return false;
+ }
+
+ ssize_t index = list->findCodecByName(componentName);
+
+ if (index < 0) {
+ return false;
+ }
+
+ const sp<MediaCodecInfo> info = list->getCodecInfo(index);
+ CHECK(info != NULL);
+ *quirks = getComponentQuirks(info);
+
+ return true;
+}
+
+// static
+sp<MediaSource> OMXCodec::Create(
+ const sp<IOMX> &omx,
+ const sp<MetaData> &meta, bool createEncoder,
+ const sp<MediaSource> &source,
+ const char *matchComponentName,
+ uint32_t flags,
+ const sp<ANativeWindow> &nativeWindow) {
+ int32_t requiresSecureBuffers;
+ if (source->getFormat()->findInt32(
+ kKeyRequiresSecureBuffers,
+ &requiresSecureBuffers)
+ && requiresSecureBuffers) {
+ flags |= kIgnoreCodecSpecificData;
+ flags |= kUseSecureInputBuffers;
+ }
+
+ const char *mime;
+ bool success = meta->findCString(kKeyMIMEType, &mime);
+ CHECK(success);
+
+ Vector<CodecNameAndQuirks> matchingCodecs;
+ findMatchingCodecs(
+ mime, createEncoder, matchComponentName, flags, &matchingCodecs);
+
+ if (matchingCodecs.isEmpty()) {
+ ALOGV("No matching codecs! (mime: %s, createEncoder: %s, "
+ "matchComponentName: %s, flags: 0x%x)",
+ mime, createEncoder ? "true" : "false", matchComponentName, flags);
+ return NULL;
+ }
+
+ sp<OMXCodecObserver> observer = new OMXCodecObserver;
+ IOMX::node_id node = 0;
+
+ for (size_t i = 0; i < matchingCodecs.size(); ++i) {
+ const char *componentNameBase = matchingCodecs[i].mName.string();
+ uint32_t quirks = matchingCodecs[i].mQuirks;
+ const char *componentName = componentNameBase;
+
+ AString tmp;
+ if (flags & kUseSecureInputBuffers) {
+ tmp = componentNameBase;
+ tmp.append(".secure");
+
+ componentName = tmp.c_str();
+ }
+
+ if (createEncoder) {
+ sp<MediaSource> softwareCodec =
+ InstantiateSoftwareEncoder(componentName, source, meta);
+
+ if (softwareCodec != NULL) {
+ ALOGV("Successfully allocated software codec '%s'", componentName);
+
+ return softwareCodec;
+ }
+ }
+
+ ALOGV("Attempting to allocate OMX node '%s'", componentName);
+
+ status_t err = omx->allocateNode(componentName, observer, &node);
+ if (err == OK) {
+ ALOGV("Successfully allocated OMX node '%s'", componentName);
+
+ sp<OMXCodec> codec = new OMXCodec(
+ omx, node, quirks, flags,
+ createEncoder, mime, componentName,
+ source, nativeWindow);
+
+ observer->setCodec(codec);
+
+ err = codec->configureCodec(meta);
+ if (err == OK) {
+ return codec;
+ }
+
+ ALOGV("Failed to configure codec '%s'", componentName);
+ }
+ }
+
+ return NULL;
+}
+
+status_t OMXCodec::parseHEVCCodecSpecificData(
+ const void *data, size_t size,
+ unsigned *profile, unsigned *level) {
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ // verify minimum size and configurationVersion == 1.
+ if (size < 23 || ptr[0] != 1) {
+ return ERROR_MALFORMED;
+ }
+
+ *profile = (ptr[1] & 31);
+ *level = ptr[12];
+
+ ptr += 22;
+ size -= 22;
+
+ size_t numofArrays = (char)ptr[0];
+ ptr += 1;
+ size -= 1;
+ size_t j = 0, i = 0;
+ for (i = 0; i < numofArrays; i++) {
+ if (size < 3) {
+ return ERROR_MALFORMED;
+ }
+ ptr += 1;
+ size -= 1;
+
+ // Num of nals
+ size_t numofNals = U16_AT(ptr);
+ ptr += 2;
+ size -= 2;
+
+ for (j = 0;j < numofNals;j++) {
+ if (size < 2) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ if (size < length) {
+ return ERROR_MALFORMED;
+ }
+ addCodecSpecificData(ptr, length);
+
+ ptr += length;
+ size -= length;
+ }
+ }
+ return OK;
+}
+
+status_t OMXCodec::parseAVCCodecSpecificData(
+ const void *data, size_t size,
+ unsigned *profile, unsigned *level) {
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ // verify minimum size and configurationVersion == 1.
+ if (size < 7 || ptr[0] != 1) {
+ return ERROR_MALFORMED;
+ }
+
+ *profile = ptr[1];
+ *level = ptr[3];
+
+ // There is decodable content out there that fails the following
+ // assertion, let's be lenient for now...
+ // CHECK((ptr[4] >> 2) == 0x3f); // reserved
+
+ size_t lengthSize __unused = 1 + (ptr[4] & 3);
+
+ // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
+ // violates it...
+ // CHECK((ptr[5] >> 5) == 7); // reserved
+
+ size_t numSeqParameterSets = ptr[5] & 31;
+
+ ptr += 6;
+ size -= 6;
+
+ for (size_t i = 0; i < numSeqParameterSets; ++i) {
+ if (size < 2) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ if (size < length) {
+ return ERROR_MALFORMED;
+ }
+
+ addCodecSpecificData(ptr, length);
+
+ ptr += length;
+ size -= length;
+ }
+
+ if (size < 1) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t numPictureParameterSets = *ptr;
+ ++ptr;
+ --size;
+
+ for (size_t i = 0; i < numPictureParameterSets; ++i) {
+ if (size < 2) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ if (size < length) {
+ return ERROR_MALFORMED;
+ }
+
+ addCodecSpecificData(ptr, length);
+
+ ptr += length;
+ size -= length;
+ }
+
+ return OK;
+}
+
+// Pushes container metadata into the freshly-allocated OMX component:
+//  - queues codec specific data (ESDS / AVCC / HVCC / Vorbis / Opus),
+//  - programs the audio ports for the negotiated audio MIME type,
+//  - programs video input (encoder) or output (decoder) port formats,
+//  - enforces the container's max-input-size hint on the input port,
+//  - binds the native window for hardware video decoders.
+// Returns OK, or the first hard configuration failure.
+status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
+    ALOGV("configureCodec protected=%d",
+            (mFlags & kEnableGrallocUsageProtected) ? 1 : 0);
+
+    // Extract codec specific data (CSD) from the container metadata unless
+    // the caller asked us to ignore it (e.g. config arrives in-band).
+    if (!(mFlags & kIgnoreCodecSpecificData)) {
+        uint32_t type;
+        const void *data;
+        size_t size;
+        if (meta->findData(kKeyESDS, &type, &data, &size)) {
+            ESDS esds((const char *)data, size);
+            CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+            const void *codec_specific_data;
+            size_t codec_specific_data_size;
+            esds.getCodecSpecificInfo(
+                    &codec_specific_data, &codec_specific_data_size);
+
+            addCodecSpecificData(
+                    codec_specific_data, codec_specific_data_size);
+        } else if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+            // Parse the AVCDecoderConfigurationRecord.  The parser queues
+            // SPS/PPS as CSD; profile/level are only logged here.
+
+            unsigned profile, level;
+            status_t err;
+            if ((err = parseAVCCodecSpecificData(
+                            data, size, &profile, &level)) != OK) {
+                ALOGE("Malformed AVC codec specific data.");
+                return err;
+            }
+
+            CODEC_LOGI(
+                    "AVC profile = %u (%s), level = %u",
+                    profile, AVCProfileToString(profile), level);
+        } else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
+            // Parse the HEVCDecoderConfigurationRecord
+
+            unsigned profile, level;
+            status_t err;
+            if ((err = parseHEVCCodecSpecificData(
+                            data, size, &profile, &level)) != OK) {
+                ALOGE("Malformed HEVC codec specific data.");
+                return err;
+            }
+
+            CODEC_LOGI(
+                    "HEVC profile = %u , level = %u",
+                    profile, level);
+        } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
+            // Vorbis carries two CSD buffers: identification and codebooks.
+            addCodecSpecificData(data, size);
+
+            CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size));
+            addCodecSpecificData(data, size);
+        } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
+            // Opus carries three CSD buffers: header, codec delay, pre-roll.
+            addCodecSpecificData(data, size);
+
+            CHECK(meta->findData(kKeyOpusCodecDelay, &type, &data, &size));
+            addCodecSpecificData(data, size);
+            CHECK(meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size));
+            addCodecSpecificData(data, size);
+        }
+    }
+
+    // Encoders must be handed an explicit target bitrate.
+    int32_t bitRate = 0;
+    if (mIsEncoder) {
+        CHECK(meta->findInt32(kKeyBitRate, &bitRate));
+    }
+    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mMIME)) {
+        setAMRFormat(false /* isWAMR */, bitRate);
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mMIME)) {
+        setAMRFormat(true /* isWAMR */, bitRate);
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mMIME)) {
+        int32_t numChannels, sampleRate, aacProfile;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        // Profile and ADTS framing are optional; fall back to defaults.
+        if (!meta->findInt32(kKeyAACProfile, &aacProfile)) {
+            aacProfile = OMX_AUDIO_AACObjectNull;
+        }
+
+        int32_t isADTS;
+        if (!meta->findInt32(kKeyIsADTS, &isADTS)) {
+            isADTS = false;
+        }
+
+        status_t err = setAACFormat(numChannels, sampleRate, bitRate, aacProfile, isADTS);
+        if (err != OK) {
+            CODEC_LOGE("setAACFormat() failed (err = %d)", err);
+            return err;
+        }
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_MPEG, mMIME)) {
+        int32_t numChannels, sampleRate;
+        if (meta->findInt32(kKeyChannelCount, &numChannels)
+                && meta->findInt32(kKeySampleRate, &sampleRate)) {
+            // Since we did not always check for these, leave them optional
+            // and have the decoder figure it all out.
+            setRawAudioFormat(
+                    mIsEncoder ? kPortIndexInput : kPortIndexOutput,
+                    sampleRate,
+                    numChannels);
+        }
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AC3, mMIME)) {
+        int32_t numChannels;
+        int32_t sampleRate;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        status_t err = setAC3Format(numChannels, sampleRate);
+        if (err != OK) {
+            CODEC_LOGE("setAC3Format() failed (err = %d)", err);
+            return err;
+        }
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME)
+            || !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) {
+        // These are PCM-like formats with a fixed sample rate but
+        // a variable number of channels.
+
+        int32_t sampleRate;
+        int32_t numChannels;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        if (!meta->findInt32(kKeySampleRate, &sampleRate)) {
+            // G.711's canonical rate when the container does not say.
+            sampleRate = 8000;
+        }
+
+        setG711Format(sampleRate, numChannels);
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mMIME)) {
+        // Raw PCM passthrough is decode-only.
+        CHECK(!mIsEncoder);
+
+        int32_t numChannels, sampleRate;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
+#ifdef DTS_CODEC_M_
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_DTS, mMIME)) {
+        ALOGV(" (DTS) mime == MEDIA_MIMETYPE_AUDIO_DTS");
+        int32_t numChannels, sampleRate;
+        int32_t bitWidth = 24;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        status_t err = DTSUtils::setupDecoder(mOMX, mNode, sampleRate, bitWidth);
+
+        if (err != OK) {
+            return err;
+        }
+#endif
+    }
+
+    if (!strncasecmp(mMIME, "video/", 6)) {
+
+        if (mIsEncoder) {
+            setVideoInputFormat(mMIME, meta);
+        } else {
+            status_t err = setVideoOutputFormat(
+                    mMIME, meta);
+
+            if (err != OK) {
+                return err;
+            }
+        }
+    }
+
+    // Honour the container's largest-sample hint so input buffers can hold
+    // a whole access unit.
+    int32_t maxInputSize;
+    if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+        setMinBufferSize(kPortIndexInput, (OMX_U32)maxInputSize);
+    }
+
+    initOutputFormat(meta);
+
+    // Only real OMX video decoders render into the native window; software
+    // (OMX.google.*) codecs already had mNativeWindow nulled in the ctor.
+    if (mNativeWindow != NULL
+        && !mIsEncoder
+        && !strncasecmp(mMIME, "video/", 6)
+        && !strncmp(mComponentName, "OMX.", 4)) {
+        status_t err = initNativeWindow();
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    return OK;
+}
+
+// Raises the buffer size of |portIndex| to at least |size| bytes, then
+// re-reads the port definition to verify the component honoured it.  For
+// components with the kInputBufferSizesAreBogus quirk the requested size
+// is applied unconditionally and must stick exactly.
+void OMXCodec::setMinBufferSize(OMX_U32 portIndex, OMX_U32 size) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    if ((portIndex == kPortIndexInput && (mQuirks & kInputBufferSizesAreBogus))
+            || (def.nBufferSize < size)) {
+        def.nBufferSize = size;
+    }
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    // Re-query: some components silently clamp or reject the new size.
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    // Make sure the setting actually stuck.
+    if (portIndex == kPortIndexInput
+            && (mQuirks & kInputBufferSizesAreBogus)) {
+        CHECK_EQ(def.nBufferSize, size);
+    } else {
+        CHECK(def.nBufferSize >= size);
+    }
+}
+
+// Enumerates the video formats a port advertises (via
+// OMX_IndexParamVideoPortFormat) until one matches the requested
+// compression/color pair, then selects it with setParameter.  Returns
+// UNKNOWN_ERROR if the component never advertises a match within
+// kMaxColorFormatSupported entries.
+status_t OMXCodec::setVideoPortFormatType(
+        OMX_U32 portIndex,
+        OMX_VIDEO_CODINGTYPE compressionFormat,
+        OMX_COLOR_FORMATTYPE colorFormat) {
+    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+    format.nPortIndex = portIndex;
+    format.nIndex = 0;
+    bool found = false;
+
+    OMX_U32 index = 0;
+    for (;;) {
+        format.nIndex = index;
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamVideoPortFormat,
+                &format, sizeof(format));
+
+        if (err != OK) {
+            return err;
+        }
+
+        // The following assertion is violated by TI's video decoder.
+        // CHECK_EQ(format.nIndex, index);
+
+#if 1
+        CODEC_LOGV("portIndex: %u, index: %u, eCompressionFormat=%d eColorFormat=%d",
+             portIndex,
+             index, format.eCompressionFormat, format.eColorFormat);
+#endif
+
+        if (format.eCompressionFormat == compressionFormat
+                && format.eColorFormat == colorFormat) {
+            found = true;
+            break;
+        }
+
+        // Bound the enumeration: every non-match path inside the loop
+        // either returns or increments toward this cap.
+        ++index;
+        if (index >= kMaxColorFormatSupported) {
+            CODEC_LOGE("color format %d or compression format %d is not supported",
+                colorFormat, compressionFormat);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Defensive: the loop only exits via break after setting found=true,
+    // so this branch is effectively unreachable.
+    if (!found) {
+        return UNKNOWN_ERROR;
+    }
+
+    CODEC_LOGV("found a match.");
+    status_t err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+
+    return err;
+}
+
+// Returns the byte size of one uncompressed frame of |width| x |height|
+// in the given color format: 2 bytes/pixel for packed 4:2:2 formats,
+// 1.5 bytes/pixel for the planar/semiplanar 4:2:0 families.  Aborts on
+// any other format.
+static size_t getFrameSize(
+        OMX_COLOR_FORMATTYPE colorFormat, int32_t width, int32_t height) {
+    switch (colorFormat) {
+        case OMX_COLOR_FormatYCbYCr:
+        case OMX_COLOR_FormatCbYCrY:
+            return width * height * 2;
+
+        case OMX_COLOR_FormatYUV420Planar:
+        case OMX_COLOR_FormatYUV420SemiPlanar:
+        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+        /*
+        * FIXME: For the Opaque color format, the frame size does not
+        * need to be (w*h*3)/2. It just needs to
+        * be larger than certain minimum buffer size. However,
+        * currently, this opaque foramt has been tested only on
+        * YUV420 formats. If that is changed, then we need to revisit
+        * this part in the future
+        */
+        case OMX_COLOR_FormatAndroidOpaque:
+            return (width * height * 3) / 2;
+
+        default:
+            CHECK(!"Should not be here. Unsupported color format.");
+            break;
+    }
+    return 0;  // not reached; CHECK above aborts
+}
+
+// Encoder-only: chooses the input color format — the metadata's
+// kKeyColorFormat if present, otherwise YUV420SemiPlanar — and verifies
+// the component's input port actually supports it.
+status_t OMXCodec::findTargetColorFormat(
+        const sp<MetaData>& meta, OMX_COLOR_FORMATTYPE *colorFormat) {
+    ALOGV("findTargetColorFormat");
+    CHECK(mIsEncoder);
+
+    *colorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+    int32_t targetColorFormat;
+    if (meta->findInt32(kKeyColorFormat, &targetColorFormat)) {
+        *colorFormat = (OMX_COLOR_FORMATTYPE) targetColorFormat;
+    }
+
+    // Check whether the target color format is supported.
+    return isColorFormatSupported(*colorFormat, kPortIndexInput);
+}
+
+// Returns OK iff the component advertises |colorFormat| on |portIndex|,
+// enumerating at most kMaxColorFormatSupported entries.
+status_t OMXCodec::isColorFormatSupported(
+        OMX_COLOR_FORMATTYPE colorFormat, int portIndex) {
+    ALOGV("isColorFormatSupported: %d", static_cast<int>(colorFormat));
+
+    // Enumerate all the color formats supported by
+    // the omx component to see whether the given
+    // color format is supported.
+    OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+    InitOMXParams(&portFormat);
+    portFormat.nPortIndex = portIndex;
+    OMX_U32 index = 0;
+    portFormat.nIndex = index;
+    while (true) {
+        if (OMX_ErrorNone != mOMX->getParameter(
+                mNode, OMX_IndexParamVideoPortFormat,
+                &portFormat, sizeof(portFormat))) {
+            break;
+        }
+        // Make sure that omx component does not overwrite
+        // the incremented index (bug 2897413).
+        CHECK_EQ(index, portFormat.nIndex);
+        if (portFormat.eColorFormat == colorFormat) {
+            CODEC_LOGV("Found supported color format: %d", portFormat.eColorFormat);
+            return OK;  // colorFormat is supported!
+        }
+        ++index;
+        portFormat.nIndex = index;
+
+        // Cap the enumeration in case the component never returns an error.
+        if (index >= kMaxColorFormatSupported) {
+            CODEC_LOGE("More than %u color formats are supported???", index);
+            break;
+        }
+    }
+
+    CODEC_LOGE("color format %d is not supported", colorFormat);
+    return UNKNOWN_ERROR;
+}
+
+// Encoder path: configures the input port (raw frames in the target color
+// format) and the output port (compressed bitstream for |mime|), then
+// applies the codec-specific encoder parameters.  Aborts (CHECK) on any
+// missing metadata or configuration failure.
+void OMXCodec::setVideoInputFormat(
+        const char *mime, const sp<MetaData>& meta) {
+
+    int32_t width, height, frameRate, bitRate, stride, sliceHeight;
+    bool success = meta->findInt32(kKeyWidth, &width);
+    success = success && meta->findInt32(kKeyHeight, &height);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
+    success = success && meta->findInt32(kKeyBitRate, &bitRate);
+    success = success && meta->findInt32(kKeyStride, &stride);
+    success = success && meta->findInt32(kKeySliceHeight, &sliceHeight);
+    CHECK(success);
+    CHECK(stride != 0);
+
+    OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
+    if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
+        compressionFormat = OMX_VIDEO_CodingAVC;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+        compressionFormat = OMX_VIDEO_CodingHEVC;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
+            !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4_DP, mime)) {
+        compressionFormat = OMX_VIDEO_CodingMPEG4;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
+        compressionFormat = OMX_VIDEO_CodingH263;
+    } else {
+        ALOGE("Not a supported video mime type: %s", mime);
+        CHECK(!"Should not be here. Not a supported video mime type.");
+    }
+
+    OMX_COLOR_FORMATTYPE colorFormat;
+    CHECK_EQ((status_t)OK, findTargetColorFormat(meta, &colorFormat));
+
+    status_t err;
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+    //////////////////////// Input port /////////////////////////
+    CHECK_EQ(setVideoPortFormatType(
+            kPortIndexInput, OMX_VIDEO_CodingUnused,
+            colorFormat), (status_t)OK);
+
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexInput;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    // Stride may be negative for bottom-up layouts; the frame size only
+    // cares about its magnitude.
+    def.nBufferSize = getFrameSize(colorFormat,
+            stride > 0? stride: -stride, sliceHeight);
+
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+    video_def->nStride = stride;
+    video_def->nSliceHeight = sliceHeight;
+    video_def->xFramerate = (frameRate << 16);  // Q16 format
+    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
+    video_def->eColorFormat = colorFormat;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    //////////////////////// Output port /////////////////////////
+    CHECK_EQ(setVideoPortFormatType(
+            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused),
+            (status_t)OK);
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+    video_def->xFramerate = 0;  // No need for output port
+    video_def->nBitrate = bitRate;  // in bits per second (not Q16)
+    video_def->eCompressionFormat = compressionFormat;
+    video_def->eColorFormat = OMX_COLOR_FormatUnused;
+    if (mQuirks & kRequiresLargerEncoderOutputBuffer) {
+        // Increases the output buffer size
+        def.nBufferSize = ((def.nBufferSize * 3) >> 1);
+    }
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    /////////////////// Codec-specific ////////////////////////
+    switch (compressionFormat) {
+        case OMX_VIDEO_CodingMPEG4:
+        {
+            CHECK_EQ(setupMPEG4EncoderParameters(meta), (status_t)OK);
+            break;
+        }
+
+        case OMX_VIDEO_CodingH263:
+            CHECK_EQ(setupH263EncoderParameters(meta), (status_t)OK);
+            break;
+
+        case OMX_VIDEO_CodingAVC:
+        {
+            CHECK_EQ(setupAVCEncoderParameters(meta), (status_t)OK);
+            break;
+        }
+
+        // NOTE(review): HEVC is mapped above but has no parameter setup
+        // case here, so an HEVC encoder aborts below — confirm intended.
+        default:
+            CHECK(!"Support for this compressionFormat to be implemented.");
+            break;
+    }
+}
+
+// Converts an I-frame interval (seconds) and frame rate into the number
+// of P frames between consecutive I frames: negative interval means a
+// single I frame ever (max spacing), 0 means all-I-frames.
+// NOTE(review): frameRate * iFramesInterval can overflow int32 for large
+// inputs before the subtraction — values come from container metadata.
+static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
+    if (iFramesInterval < 0) {
+        return 0xFFFFFFFF;
+    } else if (iFramesInterval == 0) {
+        return 0;
+    }
+    OMX_U32 ret = frameRate * iFramesInterval - 1;
+    return ret;
+}
+
+// Best-effort: enables resync markers (spacing 256) and disables HEC,
+// data partitioning and RVLC on the encoder output port.  Error
+// correction is an optional OMX feature, so every failure is logged and
+// swallowed — this function always returns OK.
+status_t OMXCodec::setupErrorCorrectionParameters() {
+    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
+    InitOMXParams(&errorCorrectionType);
+    errorCorrectionType.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoErrorCorrection,
+            &errorCorrectionType, sizeof(errorCorrectionType));
+    if (err != OK) {
+        ALOGW("Error correction param query is not supported");
+        return OK;  // Optional feature. Ignore this failure
+    }
+
+    errorCorrectionType.bEnableHEC = OMX_FALSE;
+    errorCorrectionType.bEnableResync = OMX_TRUE;
+    errorCorrectionType.nResynchMarkerSpacing = 256;
+    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
+    errorCorrectionType.bEnableRVLC = OMX_FALSE;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoErrorCorrection,
+            &errorCorrectionType, sizeof(errorCorrectionType));
+    if (err != OK) {
+        ALOGW("Error correction param configuration is not supported");
+    }
+
+    // Optional feature. Ignore the failure.
+    return OK;
+}
+
+// Programs variable-bitrate (VBR) rate control with |bitRate| (bits/sec)
+// on the encoder output port.  Aborts (CHECK) on failure; returns OK.
+status_t OMXCodec::setupBitRate(int32_t bitRate) {
+    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
+    InitOMXParams(&bitrateType);
+    bitrateType.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoBitrate,
+            &bitrateType, sizeof(bitrateType));
+    CHECK_EQ(err, (status_t)OK);
+
+    bitrateType.eControlRate = OMX_Video_ControlRateVariable;
+    bitrateType.nTargetBitrate = bitRate;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoBitrate,
+            &bitrateType, sizeof(bitrateType));
+    CHECK_EQ(err, (status_t)OK);
+    return OK;
+}
+
+// Resolves the encoder profile/level: the metadata's kKeyVideoProfile /
+// kKeyVideoLevel if present, otherwise |defaultProfileLevel|, then walks
+// the component's supported profile/level list to confirm the pair is
+// achievable (exact profile match, level at most the supported one).
+// Returns OK with |profileLevel| filled in, or BAD_VALUE if unsupported.
+status_t OMXCodec::getVideoProfileLevel(
+        const sp<MetaData>& meta,
+        const CodecProfileLevel& defaultProfileLevel,
+        CodecProfileLevel &profileLevel) {
+    // NOTE(review): "#x%x" in this format string looks like a typo for
+    // "%#x"; left untouched here since it is runtime log output.
+    CODEC_LOGV("Default profile: %u, level #x%x",
+            defaultProfileLevel.mProfile, defaultProfileLevel.mLevel);
+
+    // Are the default profile and level overwriten?
+    int32_t profile, level;
+    if (!meta->findInt32(kKeyVideoProfile, &profile)) {
+        profile = defaultProfileLevel.mProfile;
+    }
+    if (!meta->findInt32(kKeyVideoLevel, &level)) {
+        level = defaultProfileLevel.mLevel;
+    }
+    CODEC_LOGV("Target profile: %d, level: %d", profile, level);
+
+    // Are the target profile and level supported by the encoder?
+    OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
+    InitOMXParams(&param);
+    param.nPortIndex = kPortIndexOutput;
+    for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamVideoProfileLevelQuerySupported,
+                &param, sizeof(param));
+
+        // Enumeration ends when the component runs out of entries.
+        if (err != OK) break;
+
+        int32_t supportedProfile = static_cast<int32_t>(param.eProfile);
+        int32_t supportedLevel = static_cast<int32_t>(param.eLevel);
+        CODEC_LOGV("Supported profile: %d, level %d",
+            supportedProfile, supportedLevel);
+
+        if (profile == supportedProfile &&
+            level <= supportedLevel) {
+            // We can further check whether the level is a valid
+            // value; but we will leave that to the omx encoder component
+            // via OMX_SetParameter call.
+            profileLevel.mProfile = profile;
+            profileLevel.mLevel = level;
+            return OK;
+        }
+    }
+
+    CODEC_LOGE("Target profile (%d) and level (%d) is not supported",
+            profile, level);
+    return BAD_VALUE;
+}
+
+// Configures the H.263 encoder: I/P picture pattern from the I-frame
+// interval, validated profile/level, no PLUSPTYPE/GOB extensions, then
+// bitrate and (best-effort) error-correction settings.
+status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
+    int32_t iFramesInterval, frameRate, bitRate;
+    bool success = meta->findInt32(kKeyBitRate, &bitRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
+    success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
+    CHECK(success);
+    OMX_VIDEO_PARAM_H263TYPE h263type;
+    InitOMXParams(&h263type);
+    h263type.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+    CHECK_EQ(err, (status_t)OK);
+
+    h263type.nAllowedPictureTypes =
+        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+    // Zero P frames means an all-I-frame stream.
+    h263type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
+    if (h263type.nPFrames == 0) {
+        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+    }
+    h263type.nBFrames = 0;
+
+    // Check profile and level parameters
+    CodecProfileLevel defaultProfileLevel, profileLevel;
+    defaultProfileLevel.mProfile = h263type.eProfile;
+    defaultProfileLevel.mLevel = h263type.eLevel;
+    err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
+    if (err != OK) return err;
+    h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profileLevel.mProfile);
+    h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(profileLevel.mLevel);
+
+    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
+    h263type.bForceRoundingTypeToZero = OMX_FALSE;
+    h263type.nPictureHeaderRepetition = 0;
+    h263type.nGOBHeaderInterval = 0;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+    CHECK_EQ(err, (status_t)OK);
+
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+    CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
+
+    return OK;
+}
+
+// Configures the MPEG-4 encoder: plain (non-short-header) streams with
+// I/P pictures only, AC prediction on, validated profile/level, then
+// bitrate and (best-effort) error-correction settings.
+status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
+    int32_t iFramesInterval, frameRate, bitRate;
+    bool success = meta->findInt32(kKeyBitRate, &bitRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
+    success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
+    CHECK(success);
+    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
+    InitOMXParams(&mpeg4type);
+    mpeg4type.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+    CHECK_EQ(err, (status_t)OK);
+
+    mpeg4type.nSliceHeaderSpacing = 0;
+    mpeg4type.bSVH = OMX_FALSE;   // no short video header (H.263-style) mode
+    mpeg4type.bGov = OMX_FALSE;
+
+    mpeg4type.nAllowedPictureTypes =
+        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+    // Zero P frames means an all-I-frame stream.
+    mpeg4type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
+    if (mpeg4type.nPFrames == 0) {
+        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+    }
+    mpeg4type.nBFrames = 0;
+    mpeg4type.nIDCVLCThreshold = 0;
+    mpeg4type.bACPred = OMX_TRUE;
+    mpeg4type.nMaxPacketSize = 256;
+    mpeg4type.nTimeIncRes = 1000;
+    mpeg4type.nHeaderExtension = 0;
+    mpeg4type.bReversibleVLC = OMX_FALSE;
+
+    // Check profile and level parameters
+    CodecProfileLevel defaultProfileLevel, profileLevel;
+    defaultProfileLevel.mProfile = mpeg4type.eProfile;
+    defaultProfileLevel.mLevel = mpeg4type.eLevel;
+    err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
+    if (err != OK) return err;
+    mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profileLevel.mProfile);
+    mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(profileLevel.mLevel);
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+    CHECK_EQ(err, (status_t)OK);
+
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+    CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
+
+    return OK;
+}
+
+// Configures the AVC/H.264 encoder.  Any requested profile is forcibly
+// downgraded to Baseline (no CABAC, no B frames, single reference frame),
+// then the bitrate is applied.
+status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
+    int32_t iFramesInterval, frameRate, bitRate;
+    bool success = meta->findInt32(kKeyBitRate, &bitRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
+    success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
+    CHECK(success);
+
+    OMX_VIDEO_PARAM_AVCTYPE h264type;
+    InitOMXParams(&h264type);
+    h264type.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+    CHECK_EQ(err, (status_t)OK);
+
+    h264type.nAllowedPictureTypes =
+        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+    // Check profile and level parameters
+    CodecProfileLevel defaultProfileLevel, profileLevel;
+    defaultProfileLevel.mProfile = h264type.eProfile;
+    defaultProfileLevel.mLevel = h264type.eLevel;
+    err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
+    if (err != OK) return err;
+    h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profileLevel.mProfile);
+    h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(profileLevel.mLevel);
+
+    // XXX  Recording always uses Baseline, regardless of what was asked.
+    if (h264type.eProfile != OMX_VIDEO_AVCProfileBaseline) {
+        ALOGW("Use baseline profile instead of %d for AVC recording",
+            h264type.eProfile);
+        h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
+    }
+
+    if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
+        h264type.nSliceHeaderSpacing = 0;
+        h264type.bUseHadamard = OMX_TRUE;
+        h264type.nRefFrames = 1;
+        h264type.nBFrames = 0;
+        // Zero P frames means an all-I-frame stream.
+        h264type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
+        if (h264type.nPFrames == 0) {
+            h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+        }
+        h264type.nRefIdx10ActiveMinus1 = 0;
+        h264type.nRefIdx11ActiveMinus1 = 0;
+        h264type.bEntropyCodingCABAC = OMX_FALSE;
+        h264type.bWeightedPPrediction = OMX_FALSE;
+        h264type.bconstIpred = OMX_FALSE;
+        h264type.bDirect8x8Inference = OMX_FALSE;
+        h264type.bDirectSpatialTemporal = OMX_FALSE;
+        h264type.nCabacInitIdc = 0;
+    }
+
+    // Defensive: nBFrames was just forced to 0 above (profile is always
+    // Baseline at this point), so this branch should never trigger.
+    if (h264type.nBFrames != 0) {
+        h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
+    }
+
+    h264type.bEnableUEP = OMX_FALSE;
+    h264type.bEnableFMO = OMX_FALSE;
+    h264type.bEnableASO = OMX_FALSE;
+    h264type.bEnableRS = OMX_FALSE;
+    h264type.bFrameMBsOnly = OMX_TRUE;
+    h264type.bMBAFF = OMX_FALSE;
+    h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+    CHECK_EQ(err, (status_t)OK);
+
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+
+    return OK;
+}
+
+// Decoder path: selects the compressed input format for |mime|, optionally
+// negotiates a caller-requested output color format, and sets the frame
+// dimensions on both ports.  Also enforces a 64 KiB minimum input buffer
+// size as a heuristic for compressed access units.
+status_t OMXCodec::setVideoOutputFormat(
+        const char *mime, const sp<MetaData>& meta) {
+
+    int32_t width, height;
+    bool success = meta->findInt32(kKeyWidth, &width);
+    success = success && meta->findInt32(kKeyHeight, &height);
+    CHECK(success);
+
+    CODEC_LOGV("setVideoOutputFormat width=%d, height=%d", width, height);
+
+    OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
+    if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
+        compressionFormat = OMX_VIDEO_CodingAVC;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
+            !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4_DP, mime)) {
+        compressionFormat = OMX_VIDEO_CodingMPEG4;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+        compressionFormat = OMX_VIDEO_CodingHEVC;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
+        compressionFormat = OMX_VIDEO_CodingH263;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP8, mime)) {
+        compressionFormat = OMX_VIDEO_CodingVP8;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP9, mime)) {
+        compressionFormat = OMX_VIDEO_CodingVP9;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
+        compressionFormat = OMX_VIDEO_CodingMPEG2;
+    } else {
+        ALOGE("Not a supported video mime type: %s", mime);
+        CHECK(!"Should not be here. Not a supported video mime type.");
+    }
+
+    status_t err = setVideoPortFormatType(
+            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+
+    if (err != OK) {
+        return err;
+    }
+
+#if 1
+    {
+        // Output color negotiation: start from the component's first
+        // advertised format and, if the caller requested a specific color
+        // format, enumerate until it is found.
+        OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+        InitOMXParams(&format);
+        format.nPortIndex = kPortIndexOutput;
+        format.nIndex = 0;
+
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamVideoPortFormat,
+                &format, sizeof(format));
+        CHECK_EQ(err, (status_t)OK);
+        CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
+
+        int32_t colorFormat;
+        if (meta->findInt32(kKeyColorFormat, &colorFormat)
+                && colorFormat != OMX_COLOR_FormatUnused
+                && colorFormat != format.eColorFormat) {
+
+            // NOTE(review): this compares a status_t against the
+            // OMX_ERRORTYPE value OMX_ErrorNoMore — it relies on IOMX
+            // propagating raw OMX error codes; confirm for this branch.
+            while (OMX_ErrorNoMore != err) {
+                format.nIndex++;
+                err = mOMX->getParameter(
+                        mNode, OMX_IndexParamVideoPortFormat,
+                        &format, sizeof(format));
+                if (format.eColorFormat == colorFormat) {
+                    break;
+                }
+            }
+            if (format.eColorFormat != colorFormat) {
+                CODEC_LOGE("Color format %d is not supported", colorFormat);
+                return ERROR_UNSUPPORTED;
+            }
+        }
+
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamVideoPortFormat,
+                &format, sizeof(format));
+
+        if (err != OK) {
+            return err;
+        }
+    }
+#endif
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexInput;
+
+    // video_def aliases def.format.video and stays valid when def is
+    // re-fetched for the output port below.
+    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    CHECK_EQ(err, (status_t)OK);
+
+#if 1
+    // XXX Need a (much) better heuristic to compute input buffer sizes.
+    const size_t X = 64 * 1024;
+    if (def.nBufferSize < X) {
+        def.nBufferSize = X;
+    }
+#endif
+
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+
+    video_def->eCompressionFormat = compressionFormat;
+    video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    ////////////////////////////////////////////////////////////////////////////
+
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+
+#if 0
+    def.nBufferSize =
+        (((width + 15) & -16) * ((height + 15) & -16) * 3) / 2;  // YUV420
+#endif
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    return err;
+}
+
+// Constructor: records the OMX node and its quirks/flags, duplicates the
+// MIME and component name strings (freed in the destructor), enables both
+// ports, and registers the component's standard role.  Software
+// (OMX.google.*) codecs deliberately get no native window — they render
+// via buffer copies instead.
+OMXCodec::OMXCodec(
+        const sp<IOMX> &omx, IOMX::node_id node,
+        uint32_t quirks, uint32_t flags,
+        bool isEncoder,
+        const char *mime,
+        const char *componentName,
+        const sp<MediaSource> &source,
+        const sp<ANativeWindow> &nativeWindow)
+    : mOMX(omx),
+      mOMXLivesLocally(omx->livesLocally(node, getpid())),
+      mNode(node),
+      mQuirks(quirks),
+      mFlags(flags),
+      mIsEncoder(isEncoder),
+      mIsVideo(!strncasecmp("video/", mime, 6)),
+      mMIME(strdup(mime)),
+      mComponentName(strdup(componentName)),
+      mSource(source),
+      mCodecSpecificDataIndex(0),
+      mState(LOADED),
+      mInitialBufferSubmit(true),
+      mSignalledEOS(false),
+      mNoMoreOutputData(false),
+      mOutputPortSettingsHaveChanged(false),
+      mSeekTimeUs(-1),
+      mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC),
+      mTargetTimeUs(-1),
+      mOutputPortSettingsChangedPending(false),
+      mSkipCutBuffer(NULL),
+      mLeftOverBuffer(NULL),
+      mPaused(false),
+      mNativeWindow(
+              (!strncmp(componentName, "OMX.google.", 11))
+                        ? NULL : nativeWindow) {
+    mPortStatus[kPortIndexInput] = ENABLED;
+    mPortStatus[kPortIndexOutput] = ENABLED;
+
+    setComponentRole();
+}
+
+// static
+// Maps a MIME type to the standard OMX IL component role
+// ("audio_decoder.aac", "video_encoder.avc", ...) and applies it via
+// OMX_IndexParamStandardComponentRole.  Unknown MIME types and role-set
+// failures are tolerated: the former returns silently, the latter only
+// logs a warning.
+void OMXCodec::setComponentRole(
+        const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
+        const char *mime) {
+    struct MimeToRole {
+        const char *mime;
+        const char *decoderRole;   // NULL when no decoder role exists
+        const char *encoderRole;   // NULL when no encoder role exists
+    };
+
+    static const MimeToRole kMimeToRole[] = {
+        { MEDIA_MIMETYPE_AUDIO_MPEG,
+            "audio_decoder.mp3", "audio_encoder.mp3" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+            "audio_decoder.mp1", "audio_encoder.mp1" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+            "audio_decoder.mp2", "audio_encoder.mp2" },
+        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
+            "audio_decoder.amrnb", "audio_encoder.amrnb" },
+        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
+            "audio_decoder.amrwb", "audio_encoder.amrwb" },
+        { MEDIA_MIMETYPE_AUDIO_AAC,
+            "audio_decoder.aac", "audio_encoder.aac" },
+        { MEDIA_MIMETYPE_AUDIO_VORBIS,
+            "audio_decoder.vorbis", "audio_encoder.vorbis" },
+        { MEDIA_MIMETYPE_AUDIO_OPUS,
+            "audio_decoder.opus", "audio_encoder.opus" },
+        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
+        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
+        { MEDIA_MIMETYPE_VIDEO_AVC,
+            "video_decoder.avc", "video_encoder.avc" },
+        { MEDIA_MIMETYPE_VIDEO_HEVC,
+            "video_decoder.hevc", "video_encoder.hevc" },
+        { MEDIA_MIMETYPE_VIDEO_MPEG4,
+            "video_decoder.mpeg4", "video_encoder.mpeg4" },
+        { MEDIA_MIMETYPE_VIDEO_MPEG4_DP,
+            "video_decoder.mpeg4", NULL },
+        { MEDIA_MIMETYPE_VIDEO_H263,
+            "video_decoder.h263", "video_encoder.h263" },
+        { MEDIA_MIMETYPE_VIDEO_VP8,
+            "video_decoder.vp8", "video_encoder.vp8" },
+        { MEDIA_MIMETYPE_VIDEO_VP9,
+            "video_decoder.vp9", "video_encoder.vp9" },
+        { MEDIA_MIMETYPE_AUDIO_RAW,
+            "audio_decoder.raw", "audio_encoder.raw" },
+        { MEDIA_MIMETYPE_AUDIO_FLAC,
+            "audio_decoder.flac", "audio_encoder.flac" },
+        { MEDIA_MIMETYPE_AUDIO_MSGSM,
+            "audio_decoder.gsm", "audio_encoder.gsm" },
+        { MEDIA_MIMETYPE_VIDEO_MPEG2,
+            "video_decoder.mpeg2", "video_encoder.mpeg2" },
+        { MEDIA_MIMETYPE_AUDIO_AC3,
+            "audio_decoder.ac3", "audio_encoder.ac3" },
+#ifdef DOLBY_ENABLE
+        { MEDIA_MIMETYPE_AUDIO_EAC3,
+            "audio_decoder.eac3", NULL },
+        { MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+            "audio_decoder.eac3_joc", NULL },
+#endif // DOLBY_END
+#ifdef DTS_CODEC_M_
+        { MEDIA_MIMETYPE_AUDIO_DTS,
+            "audio_decoder.dts", "audio_encoder.dts" },
+#endif
+    };
+
+    static const size_t kNumMimeToRole =
+        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
+
+    size_t i;
+    for (i = 0; i < kNumMimeToRole; ++i) {
+        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
+            break;
+        }
+    }
+
+    // Unknown MIME type: nothing to configure.
+    if (i == kNumMimeToRole) {
+        return;
+    }
+
+    const char *role =
+        isEncoder ? kMimeToRole[i].encoderRole
+                  : kMimeToRole[i].decoderRole;
+
+    if (role != NULL) {
+        OMX_PARAM_COMPONENTROLETYPE roleParams;
+        InitOMXParams(&roleParams);
+
+        // Bounded copy with guaranteed NUL termination.
+        strncpy((char *)roleParams.cRole,
+                role, OMX_MAX_STRINGNAME_SIZE - 1);
+
+        roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+        status_t err = omx->setParameter(
+                node, OMX_IndexParamStandardComponentRole,
+                &roleParams, sizeof(roleParams));
+
+        if (err != OK) {
+            ALOGW("Failed to set standard component role '%s'.", role);
+        }
+    }
+}
+
+// Convenience wrapper: applies the standard role for this instance's own
+// node, encoder flag and MIME type.
+void OMXCodec::setComponentRole() {
+    setComponentRole(mOMX, mNode, mIsEncoder, mMIME);
+}
+
+// Destructor: requires the codec to already be in a terminal state
+// (LOADED, ERROR, or LOADED_TO_IDLE); frees the OMX node, the queued
+// codec-specific data, and the strdup'd MIME/component-name strings.
+OMXCodec::~OMXCodec() {
+    mSource.clear();
+
+    CHECK(mState == LOADED || mState == ERROR || mState == LOADED_TO_IDLE);
+
+    status_t err = mOMX->freeNode(mNode);
+    CHECK_EQ(err, (status_t)OK);
+
+    mNode = 0;
+    setState(DEAD);
+
+    clearCodecSpecificData();
+
+    // Release the strings duplicated in the constructor.
+    free(mComponentName);
+    mComponentName = NULL;
+
+    free(mMIME);
+    mMIME = NULL;
+}
+
+// Drives the component from LOADED to EXECUTING: issues the
+// Loaded->Idle transition either before or after buffer allocation
+// depending on the kRequiresLoadedToIdleAfterAllocation quirk, then
+// blocks on mAsyncCompletion until the async state machine reaches
+// EXECUTING (or ERROR).  Caller must hold mLock.
+status_t OMXCodec::init() {
+    // mLock is held.
+
+    CHECK_EQ((int)mState, (int)LOADED);
+
+    status_t err;
+    if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) {
+        err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
+        CHECK_EQ(err, (status_t)OK);
+        setState(LOADED_TO_IDLE);
+    }
+
+    err = allocateBuffers();
+    if (err != (status_t)OK) {
+        return err;
+    }
+
+    // Quirky components need their buffers allocated before they accept
+    // the Loaded->Idle command.
+    if (mQuirks & kRequiresLoadedToIdleAfterAllocation) {
+        err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
+        CHECK_EQ(err, (status_t)OK);
+
+        setState(LOADED_TO_IDLE);
+    }
+
+    // Wait for the OMX event callbacks to finish the Idle->Executing leg.
+    while (mState != EXECUTING && mState != ERROR) {
+        mAsyncCompletion.wait(mLock);
+    }
+
+    return mState == ERROR ? UNKNOWN_ERROR : OK;
+}
+
+// static
+// True for transitional states, i.e. while an asynchronous OMX state
+// change or port reconfiguration is still in flight.
+bool OMXCodec::isIntermediateState(State state) {
+    return state == LOADED_TO_IDLE
+        || state == IDLE_TO_EXECUTING
+        || state == EXECUTING_TO_IDLE
+        || state == PAUSING
+        || state == FLUSHING
+        || state == IDLE_TO_LOADED
+        || state == RECONFIGURING;
+}
+
+// Allocates buffers for both ports, input first; stops and returns the
+// error if the input port allocation fails.
+status_t OMXCodec::allocateBuffers() {
+    status_t err = allocateBuffersOnPort(kPortIndexInput);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return allocateBuffersOnPort(kPortIndexOutput);
+}
+
+// Allocates the buffers for one port (input or output):
+//  - output + native window  -> delegate to the ANativeWindow path;
+//  - otherwise carve nBufferCountActual buffers of nBufferSize out of a
+//    MemoryDealer and hand them to the component via allocateBuffer /
+//    allocateBufferWithBackup / useBuffer depending on quirks and flags.
+// For the output port, also wraps each buffer in a MediaBuffer and sets up
+// the SkipCutBuffer from encoder delay/padding; for secure input, forwards
+// the buffers to the upstream source via setBuffers().
+// Returns OK on success or the first error encountered.
+status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+        return allocateOutputBuffersFromNativeWindow();
+    }
+
+    // Protected buffers can only live in gralloc memory behind a surface.
+    if ((mFlags & kEnableGrallocUsageProtected) && portIndex == kPortIndexOutput) {
+        ALOGE("protected output buffers must be sent to an ANativeWindow");
+        return PERMISSION_DENIED;
+    }
+
+    status_t err = OK;
+    if ((mFlags & kStoreMetaDataInVideoBuffers)
+            && portIndex == kPortIndexInput) {
+        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
+        if (err != OK) {
+            ALOGE("Storing meta data in video buffers is not supported");
+            return err;
+        }
+    }
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    CODEC_LOGV("allocating %u buffers of size %u on %s port",
+            def.nBufferCountActual, def.nBufferSize,
+            portIndex == kPortIndexInput ? "input" : "output");
+
+    // Guard the count*size multiplication against overflow before sizing the
+    // dealer.
+    if (def.nBufferSize != 0 && def.nBufferCountActual > SIZE_MAX / def.nBufferSize) {
+        return BAD_VALUE;
+    }
+    size_t totalSize = def.nBufferCountActual * def.nBufferSize;
+    mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec");
+
+    for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
+        sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
+        if (mem == NULL || mem->pointer() == NULL) {
+            return NO_MEMORY;
+        }
+
+        BufferInfo info;
+        info.mData = NULL;
+        info.mSize = def.nBufferSize;
+
+        IOMX::buffer_id buffer;
+        if (portIndex == kPortIndexInput
+                && ((mQuirks & kRequiresAllocateBufferOnInputPorts)
+                    || (mFlags & kUseSecureInputBuffers))) {
+            if (mOMXLivesLocally) {
+                // In-process OMX: let the component allocate; no shared
+                // memory backup is needed.
+                mem.clear();
+
+                err = mOMX->allocateBuffer(
+                        mNode, portIndex, def.nBufferSize, &buffer,
+                        &info.mData);
+            } else {
+                err = mOMX->allocateBufferWithBackup(
+                        mNode, portIndex, mem, &buffer, mem->size());
+            }
+        } else if (portIndex == kPortIndexOutput
+                && (mQuirks & kRequiresAllocateBufferOnOutputPorts)) {
+            if (mOMXLivesLocally) {
+                mem.clear();
+
+                err = mOMX->allocateBuffer(
+                        mNode, portIndex, def.nBufferSize, &buffer,
+                        &info.mData);
+            } else {
+                err = mOMX->allocateBufferWithBackup(
+                        mNode, portIndex, mem, &buffer, mem->size());
+            }
+        } else {
+            err = mOMX->useBuffer(mNode, portIndex, mem, &buffer, mem->size());
+        }
+
+        if (err != OK) {
+            ALOGE("allocate_buffer_with_backup failed");
+            return err;
+        }
+
+        if (mem != NULL) {
+            info.mData = mem->pointer();
+        }
+
+        info.mBuffer = buffer;
+        info.mStatus = OWNED_BY_US;
+        info.mMem = mem;
+        info.mMediaBuffer = NULL;
+
+        if (portIndex == kPortIndexOutput) {
+            // Fail deferred MediaBuffer creation until FILL_BUFFER_DONE;
+            // this legacy mode is no longer supported.
+            LOG_ALWAYS_FATAL_IF((mOMXLivesLocally
+                    && (mQuirks & kRequiresAllocateBufferOnOutputPorts)
+                    && (mQuirks & kDefersOutputBufferAllocation)),
+                    "allocateBuffersOnPort cannot defer buffer allocation");
+
+            info.mMediaBuffer = new MediaBuffer(info.mData, info.mSize);
+            info.mMediaBuffer->setObserver(this);
+        }
+
+        mPortBuffers[portIndex].push(info);
+
+        CODEC_LOGV("allocated buffer %u on %s port", buffer,
+             portIndex == kPortIndexInput ? "input" : "output");
+    }
+
+    if (portIndex == kPortIndexOutput) {
+
+        // Configure SkipCutBuffer from the encoder delay/padding advertised
+        // by the source format (used to trim priming/trailing audio samples).
+        sp<MetaData> meta = mSource->getFormat();
+        int32_t delay = 0;
+        if (!meta->findInt32(kKeyEncoderDelay, &delay)) {
+            delay = 0;
+        }
+        int32_t padding = 0;
+        if (!meta->findInt32(kKeyEncoderPadding, &padding)) {
+            padding = 0;
+        }
+        int32_t numchannels = 0;
+        if (delay + padding) {
+            if (mOutputFormat->findInt32(kKeyChannelCount, &numchannels)) {
+                if (mSkipCutBuffer != NULL) {
+                    size_t prevbuffersize = mSkipCutBuffer->size();
+                    if (prevbuffersize != 0) {
+                        ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbuffersize);
+                    }
+                }
+                mSkipCutBuffer = new SkipCutBuffer(delay, padding, numchannels);
+            }
+        }
+    }
+
+    // dumpPortStatus(portIndex);
+
+    if (portIndex == kPortIndexInput && (mFlags & kUseSecureInputBuffers)) {
+        // Secure input: the upstream source must write directly into the
+        // component-allocated buffers; hand it MediaBuffer wrappers.
+        Vector<MediaBuffer *> buffers;
+        for (size_t i = 0; i < def.nBufferCountActual; ++i) {
+            const BufferInfo &info = mPortBuffers[kPortIndexInput].itemAt(i);
+
+            MediaBuffer *mbuf = new MediaBuffer(info.mData, info.mSize);
+            buffers.push(mbuf);
+        }
+
+        status_t err = mSource->setBuffers(buffers);
+
+        if (err != OK) {
+            for (size_t i = 0; i < def.nBufferCountActual; ++i) {
+                buffers.editItemAt(i)->release();
+            }
+            buffers.clear();
+
+            CODEC_LOGE(
+                    "Codec requested to use secure input buffers but "
+                    "upstream source didn't support that.");
+
+            return err;
+        }
+    }
+
+    return OK;
+}
+
+// Allocates output buffers backed by an ANativeWindow: configures the
+// window geometry/usage from the port definition, negotiates the buffer
+// count (component min + MIN_UNDEQUEUED + up to 2+1 extra), dequeues each
+// buffer, registers it with the component via useGraphicBuffer(), and
+// finally cancels back the buffers the consumer must keep (or all of them
+// on failure). Returns the first error, or OK.
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+    // Get the number of buffers needed.
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    if (err != OK) {
+        CODEC_LOGE("getParameter failed: %d", err);
+        return err;
+    }
+
+    sp<MetaData> meta = mSource->getFormat();
+
+    int32_t rotationDegrees;
+    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    // Set up the native window.
+    OMX_U32 usage = 0;
+    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
+    if (err != 0) {
+        ALOGW("querying usage flags from OMX IL component failed: %d", err);
+        // XXX: Currently this error is logged, but not fatal.
+        usage = 0;
+    }
+
+    if (mFlags & kEnableGrallocUsageProtected) {
+        usage |= GRALLOC_USAGE_PROTECTED;
+#ifdef GRALLOC_USAGE_PRIVATE_NONSECURE
+        if (!(mFlags & kUseSecureInputBuffers))
+            usage |= GRALLOC_USAGE_PRIVATE_NONSECURE;
+#endif
+    }
+
+    err = setNativeWindowSizeFormatAndUsage(
+            mNativeWindow.get(),
+            def.format.video.nFrameWidth,
+            def.format.video.nFrameHeight,
+            def.format.video.eColorFormat,
+            rotationDegrees,
+            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+    if (err != 0) {
+        return err;
+    }
+
+    int minUndequeuedBufs = 0;
+    err = mNativeWindow->query(mNativeWindow.get(),
+            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
+    if (err != 0) {
+        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+    // FIXME: assume that surface is controlled by app (native window
+    // returns the number for the case when surface is not controlled by app)
+    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
+    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
+
+    // Use conservative allocation while also trying to reduce starvation
+    //
+    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
+    //    minimum needed for the consumer to be able to work
+    // 2. try to allocate two (2) additional buffers to reduce starvation from
+    //    the consumer
+    //    plus an extra buffer to account for incorrect minUndequeuedBufs
+    CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
+            def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
+#ifdef BOARD_CANT_REALLOCATE_OMX_BUFFERS
+    // Some devices don't like to set OMX_IndexParamPortDefinition at this
+    // point (even with an unmodified def), so skip it if possible.
+    // This check was present in KitKat.
+    if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) {
+#endif
+    // Retry with progressively fewer extra buffers (3, 2, 1, 0) until the
+    // component accepts the count; fail only if even +0 is rejected.
+    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
+        OMX_U32 newBufferCount =
+            def.nBufferCountMin + minUndequeuedBufs + extraBuffers;
+        def.nBufferCountActual = newBufferCount;
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+        if (err == OK) {
+            minUndequeuedBufs += extraBuffers;
+            break;
+        }
+
+        CODEC_LOGW("setting nBufferCountActual to %u failed: %d",
+                newBufferCount, err);
+        /* exit condition */
+        if (extraBuffers == 0) {
+            return err;
+        }
+    }
+    CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
+            def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
+#ifdef BOARD_CANT_REALLOCATE_OMX_BUFFERS
+    }
+#endif
+
+    err = native_window_set_buffer_count(
+            mNativeWindow.get(), def.nBufferCountActual);
+    if (err != 0) {
+        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+                -err);
+        return err;
+    }
+
+    CODEC_LOGV("allocating %u buffers from a native window of size %u on "
+            "output port", def.nBufferCountActual, def.nBufferSize);
+
+    // Dequeue buffers and send them to OMX
+    for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
+        ANativeWindowBuffer* buf;
+        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
+        if (err != 0) {
+            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+            break;
+        }
+
+        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+        BufferInfo info;
+        info.mData = NULL;
+        info.mSize = def.nBufferSize;
+        info.mStatus = OWNED_BY_US;
+        info.mMem = NULL;
+        info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+        info.mMediaBuffer->setObserver(this);
+        mPortBuffers[kPortIndexOutput].push(info);
+
+        IOMX::buffer_id bufferId;
+        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+                &bufferId);
+        if (err != 0) {
+            CODEC_LOGE("registering GraphicBuffer with OMX IL component "
+                    "failed: %d", err);
+            break;
+        }
+
+        mPortBuffers[kPortIndexOutput].editItemAt(i).mBuffer = bufferId;
+
+        CODEC_LOGV("registered graphic buffer with ID %u (pointer = %p)",
+                bufferId, graphicBuffer.get());
+    }
+
+    OMX_U32 cancelStart;
+    OMX_U32 cancelEnd;
+    if (err != 0) {
+        // If an error occurred while dequeuing we need to cancel any buffers
+        // that were dequeued.
+        cancelStart = 0;
+        cancelEnd = mPortBuffers[kPortIndexOutput].size();
+    } else {
+        // Return the last two buffers to the native window.
+        cancelStart = def.nBufferCountActual - minUndequeuedBufs;
+        cancelEnd = def.nBufferCountActual;
+    }
+
+    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+        BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+        cancelBufferToNativeWindow(info);
+    }
+
+    // Note: on the partial-failure paths above, 'err' is returned after the
+    // cancel loop so callers see the original failure.
+    return err;
+}
+
+// Returns a buffer we own back to the native window without displaying it
+// (cancelBuffer). On success ownership moves to the native window; on
+// failure the codec enters ERROR state.
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    CODEC_LOGV("Calling cancelBuffer on buffer %u", info->mBuffer);
+    int err = mNativeWindow->cancelBuffer(
+        mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get(), -1);
+    if (err != 0) {
+        CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+        setState(ERROR);
+        return err;
+    }
+    info->mStatus = OWNED_BY_NATIVE_WINDOW;
+    return OK;
+}
+
+// Dequeues the next buffer from the native window and maps it back to our
+// BufferInfo by matching gralloc handles. Transfers ownership from the
+// native window to us. Returns NULL (and sets ERROR state) if the dequeue
+// fails or the handle is unknown.
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+    // Dequeue the next buffer from the native window.
+    ANativeWindowBuffer* buf;
+    int err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
+    if (err != 0) {
+        CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+        setState(ERROR);
+        return 0;
+    }
+
+    // Determine which buffer we just dequeued.
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+    BufferInfo *bufInfo = 0;
+    for (size_t i = 0; i < buffers->size(); i++) {
+      sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+          mMediaBuffer->graphicBuffer();
+      if (graphicBuffer->handle == buf->handle) {
+        bufInfo = &buffers->editItemAt(i);
+        break;
+      }
+    }
+
+    if (bufInfo == 0) {
+        CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+        setState(ERROR);
+        return 0;
+    }
+
+    // The native window no longer owns the buffer.
+    CHECK_EQ((int)bufInfo->mStatus, (int)OWNED_BY_NATIVE_WINDOW);
+    bufInfo->mStatus = OWNED_BY_US;
+
+    return bufInfo;
+}
+
+// Pops and returns the oldest queued decoding timestamp (us) for a video
+// encoder. Only valid for video encoders; returns 0 when the list is empty,
+// which is asserted to only happen at/after EOS.
+int64_t OMXCodec::getDecodingTimeUs() {
+    CHECK(mIsEncoder && mIsVideo);
+
+    if (mDecodingTimeList.empty()) {
+        CHECK(mSignalledEOS || mNoMoreOutputData);
+        // No corresponding input frame available.
+        // This could happen when EOS is reached.
+        return 0;
+    }
+
+    List<int64_t>::iterator it = mDecodingTimeList.begin();
+    int64_t timeUs = *it;
+    mDecodingTimeList.erase(it);
+    return timeUs;
+}
+
+// Central OMX callback dispatcher. Handles three message types:
+//  - EVENT: forwarded to onEvent() (dropped entirely while in ERROR state);
+//  - EMPTY_BUFFER_DONE: input buffer returned by the component — release its
+//    MediaBuffer, then free/refill it depending on port status;
+//  - FILL_BUFFER_DONE: output buffer produced — stamp metadata (timestamp,
+//    sync/config flags, buffer id), track EOS, and queue it for read().
+void OMXCodec::on_message(const omx_message &msg) {
+    if (mState == ERROR) {
+        /*
+         * only drop EVENT messages, EBD and FBD are still
+         * processed for bookkeeping purposes
+         */
+        if (msg.type == omx_message::EVENT) {
+            ALOGW("Dropping OMX EVENT message - we're in ERROR state.");
+            return;
+        }
+    }
+
+    switch (msg.type) {
+        case omx_message::EVENT:
+        {
+            onEvent(
+                    msg.u.event_data.event, msg.u.event_data.data1,
+                    msg.u.event_data.data2);
+
+            break;
+        }
+
+        case omx_message::EMPTY_BUFFER_DONE:
+        {
+            IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
+
+            CODEC_LOGV("EMPTY_BUFFER_DONE(buffer: %u)", buffer);
+
+            // Linear search for the BufferInfo matching this buffer id.
+            Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
+            size_t i = 0;
+            while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) {
+                ++i;
+            }
+
+            CHECK(i < buffers->size());
+            if ((*buffers)[i].mStatus != OWNED_BY_COMPONENT) {
+                ALOGW("We already own input buffer %u, yet received "
+                     "an EMPTY_BUFFER_DONE.", buffer);
+            }
+
+            BufferInfo* info = &buffers->editItemAt(i);
+            info->mStatus = OWNED_BY_US;
+
+            // Buffer could not be released until empty buffer done is called.
+            if (info->mMediaBuffer != NULL) {
+                info->mMediaBuffer->release();
+                info->mMediaBuffer = NULL;
+            }
+
+            if (mPortStatus[kPortIndexInput] == DISABLING) {
+                CODEC_LOGV("Port is disabled, freeing buffer %u", buffer);
+
+                status_t err = freeBuffer(kPortIndexInput, i);
+                CHECK_EQ(err, (status_t)OK);
+            } else if (mState != ERROR
+                    && mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
+                CHECK_EQ((int)mPortStatus[kPortIndexInput], (int)ENABLED);
+
+                // Immediately refill the freed input slot from the source.
+                if (mFlags & kUseSecureInputBuffers) {
+                    drainAnyInputBuffer();
+                } else {
+                    drainInputBuffer(&buffers->editItemAt(i));
+                }
+            }
+            break;
+        }
+
+        case omx_message::FILL_BUFFER_DONE:
+        {
+            IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
+            OMX_U32 flags = msg.u.extended_buffer_data.flags;
+
+            CODEC_LOGV("FILL_BUFFER_DONE(buffer: %u, size: %u, flags: 0x%08x, timestamp: %lld us (%.2f secs))",
+                 buffer,
+                 msg.u.extended_buffer_data.range_length,
+                 flags,
+                 msg.u.extended_buffer_data.timestamp,
+                 msg.u.extended_buffer_data.timestamp / 1E6);
+
+            Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+            size_t i = 0;
+            while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) {
+                ++i;
+            }
+
+            CHECK(i < buffers->size());
+            BufferInfo *info = &buffers->editItemAt(i);
+
+            if (info->mStatus != OWNED_BY_COMPONENT) {
+                ALOGW("We already own output buffer %u, yet received "
+                     "a FILL_BUFFER_DONE.", buffer);
+            }
+
+            info->mStatus = OWNED_BY_US;
+
+            if (mPortStatus[kPortIndexOutput] == DISABLING) {
+                CODEC_LOGV("Port is disabled, freeing buffer %u", buffer);
+
+                status_t err = freeBuffer(kPortIndexOutput, i);
+                CHECK_EQ(err, (status_t)OK);
+
+#if 0
+            } else if (mPortStatus[kPortIndexOutput] == ENABLED
+                       && (flags & OMX_BUFFERFLAG_EOS)) {
+                CODEC_LOGV("No more output data.");
+                mNoMoreOutputData = true;
+                mBufferFilled.signal();
+#endif
+            } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
+                CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
+
+                // NOTE(review): this MediaBuffer* shadows the IOMX::buffer_id
+                // 'buffer' declared at the top of this case.
+                MediaBuffer *buffer = info->mMediaBuffer;
+                bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
+
+                if (!isGraphicBuffer
+                    && msg.u.extended_buffer_data.range_offset
+                        + msg.u.extended_buffer_data.range_length
+                            > buffer->size()) {
+                    CODEC_LOGE(
+                            "Codec lied about its buffer size requirements, "
+                            "sending a buffer larger than the originally "
+                            "advertised size in FILL_BUFFER_DONE!");
+                }
+                buffer->set_range(
+                        msg.u.extended_buffer_data.range_offset,
+                        msg.u.extended_buffer_data.range_length);
+
+                buffer->meta_data()->clear();
+
+                buffer->meta_data()->setInt64(
+                        kKeyTime, msg.u.extended_buffer_data.timestamp);
+
+                if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_SYNCFRAME) {
+                    buffer->meta_data()->setInt32(kKeyIsSyncFrame, true);
+                }
+                bool isCodecSpecific = false;
+                if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_CODECCONFIG) {
+                    buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
+                    isCodecSpecific = true;
+                }
+
+                if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
+                    buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
+                }
+
+                buffer->meta_data()->setInt32(
+                        kKeyBufferID,
+                        msg.u.extended_buffer_data.buffer);
+
+                if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_EOS) {
+                    CODEC_LOGV("No more output data.");
+                    mNoMoreOutputData = true;
+                }
+
+                if (mIsEncoder && mIsVideo) {
+                    // Codec-config buffers carry no frame, so no decode time.
+                    int64_t decodingTimeUs = isCodecSpecific? 0: getDecodingTimeUs();
+                    buffer->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
+                }
+
+                if (mTargetTimeUs >= 0) {
+                    CHECK(msg.u.extended_buffer_data.timestamp <= mTargetTimeUs);
+
+                    if (msg.u.extended_buffer_data.timestamp < mTargetTimeUs) {
+                        // Seeking to an exact frame: drop and re-queue buffers
+                        // until the target timestamp is reached.
+                        CODEC_LOGV(
+                                "skipping output buffer at timestamp %lld us",
+                                msg.u.extended_buffer_data.timestamp);
+
+                        fillOutputBuffer(info);
+                        break;
+                    }
+
+                    CODEC_LOGV(
+                            "returning output buffer at target timestamp "
+                            "%lld us",
+                            msg.u.extended_buffer_data.timestamp);
+
+                    mTargetTimeUs = -1;
+                }
+
+                mFilledBuffers.push_back(i);
+                mBufferFilled.signal();
+                if (mIsEncoder) {
+                    sched_yield();
+                }
+            }
+
+            break;
+        }
+
+        default:
+        {
+            CHECK(!"should not be here.");
+            break;
+        }
+    }
+}
+
+// Has the format changed in any way that the client would have to be aware of?
+// Compares two output formats field-by-field: MIME type always; for raw
+// video also color format, width/height and crop rect; for raw audio also
+// channel count and sample rate. NULL-vs-NULL is "no change"; NULL vs
+// non-NULL is a change.
+static bool formatHasNotablyChanged(
+        const sp<MetaData> &from, const sp<MetaData> &to) {
+    if (from.get() == NULL && to.get() == NULL) {
+        return false;
+    }
+
+    if ((from.get() == NULL && to.get() != NULL)
+        || (from.get() != NULL && to.get() == NULL)) {
+        return true;
+    }
+
+    const char *mime_from, *mime_to;
+    CHECK(from->findCString(kKeyMIMEType, &mime_from));
+    CHECK(to->findCString(kKeyMIMEType, &mime_to));
+
+    if (strcasecmp(mime_from, mime_to)) {
+        return true;
+    }
+
+    if (!strcasecmp(mime_from, MEDIA_MIMETYPE_VIDEO_RAW)) {
+        int32_t colorFormat_from, colorFormat_to;
+        CHECK(from->findInt32(kKeyColorFormat, &colorFormat_from));
+        CHECK(to->findInt32(kKeyColorFormat, &colorFormat_to));
+
+        if (colorFormat_from != colorFormat_to) {
+            return true;
+        }
+
+        int32_t width_from, width_to;
+        CHECK(from->findInt32(kKeyWidth, &width_from));
+        CHECK(to->findInt32(kKeyWidth, &width_to));
+
+        if (width_from != width_to) {
+            return true;
+        }
+
+        int32_t height_from, height_to;
+        CHECK(from->findInt32(kKeyHeight, &height_from));
+        CHECK(to->findInt32(kKeyHeight, &height_to));
+
+        if (height_from != height_to) {
+            return true;
+        }
+
+        int32_t left_from, top_from, right_from, bottom_from;
+        CHECK(from->findRect(
+                    kKeyCropRect,
+                    &left_from, &top_from, &right_from, &bottom_from));
+
+        int32_t left_to, top_to, right_to, bottom_to;
+        CHECK(to->findRect(
+                    kKeyCropRect,
+                    &left_to, &top_to, &right_to, &bottom_to));
+
+        if (left_to != left_from || top_to != top_from
+                || right_to != right_from || bottom_to != bottom_from) {
+            return true;
+        }
+    } else if (!strcasecmp(mime_from, MEDIA_MIMETYPE_AUDIO_RAW)) {
+        int32_t numChannels_from, numChannels_to;
+        CHECK(from->findInt32(kKeyChannelCount, &numChannels_from));
+        CHECK(to->findInt32(kKeyChannelCount, &numChannels_to));
+
+        if (numChannels_from != numChannels_to) {
+            return true;
+        }
+
+        int32_t sampleRate_from, sampleRate_to;
+        CHECK(from->findInt32(kKeySampleRate, &sampleRate_from));
+        CHECK(to->findInt32(kKeySampleRate, &sampleRate_to));
+
+        if (sampleRate_from != sampleRate_to) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+// Handles OMX component events:
+//  - CmdComplete -> onCmdComplete();
+//  - Error       -> ERROR state;
+//  - PortSettingsChanged -> either full reconfiguration or, for output
+//    crop/scale changes, just refresh the output format and flag the change;
+//  - (optional) vendor S3D event when USE_S3D_SUPPORT is built in.
+void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            onCmdComplete((OMX_COMMANDTYPE)data1, data2);
+            break;
+        }
+
+        case OMX_EventError:
+        {
+            CODEC_LOGE("OMX_EventError(0x%08x, %u)", data1, data2);
+
+            setState(ERROR);
+            break;
+        }
+
+        case OMX_EventPortSettingsChanged:
+        {
+            CODEC_LOGV("OMX_EventPortSettingsChanged(port=%u, data2=0x%08x)",
+                       data1, data2);
+
+            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+                // Full port reconfiguration (buffer geometry changed).
+                onPortSettingsChanged(data1);
+            } else if (data1 == kPortIndexOutput &&
+                        (data2 == OMX_IndexConfigCommonOutputCrop ||
+                         data2 == OMX_IndexConfigCommonScale)) {
+
+                sp<MetaData> oldOutputFormat = mOutputFormat;
+                initOutputFormat(mSource->getFormat());
+
+                if (data2 == OMX_IndexConfigCommonOutputCrop &&
+                    formatHasNotablyChanged(oldOutputFormat, mOutputFormat)) {
+                    mOutputPortSettingsHaveChanged = true;
+
+                } else if (data2 == OMX_IndexConfigCommonScale) {
+                    OMX_CONFIG_SCALEFACTORTYPE scale;
+                    InitOMXParams(&scale);
+                    scale.nPortIndex = kPortIndexOutput;
+
+                    // Change display dimension only when necessary.
+                    if (OK == mOMX->getConfig(
+                                        mNode,
+                                        OMX_IndexConfigCommonScale,
+                                        &scale, sizeof(scale))) {
+                        int32_t left, top, right, bottom;
+                        CHECK(mOutputFormat->findRect(kKeyCropRect,
+                                                      &left, &top,
+                                                      &right, &bottom));
+
+                        // The scale is in 16.16 format.
+                        // scale 1.0 = 0x010000. When there is no
+                        // need to change the display, skip it.
+                        ALOGV("Get OMX_IndexConfigScale: 0x%x/0x%x",
+                                scale.xWidth, scale.xHeight);
+
+                        if (scale.xWidth != 0x010000) {
+                            mOutputFormat->setInt32(kKeyDisplayWidth,
+                                    ((right - left +  1) * scale.xWidth)  >> 16);
+                            mOutputPortSettingsHaveChanged = true;
+                        }
+
+                        if (scale.xHeight != 0x010000) {
+                            mOutputFormat->setInt32(kKeyDisplayHeight,
+                                    ((bottom  - top + 1) * scale.xHeight) >> 16);
+                            mOutputPortSettingsHaveChanged = true;
+                        }
+                    }
+                }
+            }
+            break;
+        }
+
+#if 0
+        case OMX_EventBufferFlag:
+        {
+            CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
+
+            if (data1 == kPortIndexOutput) {
+                mNoMoreOutputData = true;
+            }
+            break;
+        }
+#endif
+#ifdef USE_S3D_SUPPORT
+        // Exynos-specific: forward stereoscopic (S3D) frame-packing info to
+        // the HWC service so HDMI can switch into the matching 3D mode.
+        case (OMX_EVENTTYPE)OMX_EventS3DInformation:
+        {
+            sp<IServiceManager> sm = defaultServiceManager();
+            sp<android::IExynosHWCService> hwc = interface_cast<android::IExynosHWCService>(
+                    sm->getService(String16("Exynos.HWCService")));
+            if (hwc != NULL) {
+                if (data1 == OMX_TRUE) {
+                    int eS3DMode;
+                    switch (data2) {
+                    case OMX_SEC_FPARGMT_SIDE_BY_SIDE:
+                        eS3DMode = S3D_SBS;
+                        break;
+                    case OMX_SEC_FPARGMT_TOP_BOTTOM:
+                        eS3DMode = S3D_TB;
+                        break;
+                    case OMX_SEC_FPARGMT_CHECKERBRD_INTERL: // unsupport format at HDMI
+                    case OMX_SEC_FPARGMT_COLUMN_INTERL:
+                    case OMX_SEC_FPARGMT_ROW_INTERL:
+                    case OMX_SEC_FPARGMT_TEMPORAL_INTERL:
+                    default:
+                        eS3DMode = S3D_NONE;
+                    }
+
+                    hwc->setHdmiResolution(0, eS3DMode);
+                }
+            } else {
+                ALOGE("Exynos.HWCService is unavailable");
+            }
+            break;
+        }
+#endif
+        default:
+        {
+            CODEC_LOGV("EVENT(%d, %u, %u)", event, data1, data2);
+            break;
+        }
+    }
+}
+
+// Completion handler for OMX commands previously sent via sendCommand():
+//  - StateSet     -> onStateChange();
+//  - PortDisable  -> during RECONFIGURING, refresh output format, re-enable
+//                    the port and reallocate its buffers;
+//  - PortEnable   -> during RECONFIGURING, return to EXECUTING and refill;
+//  - Flush        -> either continue the EXECUTING->IDLE shutdown, continue
+//                    reconfiguration, or resume after a seek.
+void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
+    switch (cmd) {
+        case OMX_CommandStateSet:
+        {
+            onStateChange((OMX_STATETYPE)data);
+            break;
+        }
+
+        case OMX_CommandPortDisable:
+        {
+            OMX_U32 portIndex = data;
+            CODEC_LOGV("PORT_DISABLED(%u)", portIndex);
+
+            CHECK(mState == EXECUTING || mState == RECONFIGURING);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING);
+            CHECK_EQ(mPortBuffers[portIndex].size(), 0u);
+
+            mPortStatus[portIndex] = DISABLED;
+
+            if (mState == RECONFIGURING) {
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+
+                sp<MetaData> oldOutputFormat = mOutputFormat;
+                initOutputFormat(mSource->getFormat());
+
+                // Don't notify clients if the output port settings change
+                // wasn't of importance to them, i.e. it may be that just the
+                // number of buffers has changed and nothing else.
+                bool formatChanged = formatHasNotablyChanged(oldOutputFormat, mOutputFormat);
+                if (!mOutputPortSettingsHaveChanged) {
+                    mOutputPortSettingsHaveChanged = formatChanged;
+                }
+
+                status_t err = enablePortAsync(portIndex);
+                if (err != OK) {
+                    CODEC_LOGE("enablePortAsync(%u) failed (err = %d)", portIndex, err);
+                    setState(ERROR);
+                } else {
+                    err = allocateBuffersOnPort(portIndex);
+                    if (err != OK) {
+                        CODEC_LOGE("allocateBuffersOnPort (%s) failed "
+                                   "(err = %d)",
+                                   portIndex == kPortIndexInput
+                                        ? "input" : "output",
+                                   err);
+
+                        setState(ERROR);
+                    }
+                }
+            }
+            break;
+        }
+
+        case OMX_CommandPortEnable:
+        {
+            OMX_U32 portIndex = data;
+            CODEC_LOGV("PORT_ENABLED(%u)", portIndex);
+
+            CHECK(mState == EXECUTING || mState == RECONFIGURING);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING);
+
+            mPortStatus[portIndex] = ENABLED;
+
+            if (mState == RECONFIGURING) {
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+
+                setState(EXECUTING);
+
+                fillOutputBuffers();
+            }
+            break;
+        }
+
+        case OMX_CommandFlush:
+        {
+            OMX_U32 portIndex = data;
+
+            CODEC_LOGV("FLUSH_DONE(%u)", portIndex);
+
+            CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN);
+            mPortStatus[portIndex] = ENABLED;
+
+            // After a flush the component must have returned every buffer.
+            CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]),
+                     mPortBuffers[portIndex].size());
+
+            if (mSkipCutBuffer != NULL && mPortStatus[kPortIndexOutput] == ENABLED) {
+                mSkipCutBuffer->clear();
+            }
+
+            if (mState == RECONFIGURING) {
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+
+                disablePortAsync(portIndex);
+            } else if (mState == EXECUTING_TO_IDLE) {
+                if (mPortStatus[kPortIndexInput] == ENABLED
+                    && mPortStatus[kPortIndexOutput] == ENABLED) {
+                    CODEC_LOGV("Finished flushing both ports, now completing "
+                         "transition from EXECUTING to IDLE.");
+
+                    mPortStatus[kPortIndexInput] = SHUTTING_DOWN;
+                    mPortStatus[kPortIndexOutput] = SHUTTING_DOWN;
+
+                    status_t err =
+                        mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
+                    CHECK_EQ(err, (status_t)OK);
+                }
+            } else {
+                // We're flushing both ports in preparation for seeking.
+
+                if (mPortStatus[kPortIndexInput] == ENABLED
+                    && mPortStatus[kPortIndexOutput] == ENABLED) {
+                    CODEC_LOGV("Finished flushing both ports, now continuing from"
+                         " seek-time.");
+
+                    // We implicitly resume pulling on our upstream source.
+                    mPaused = false;
+                    mNoMoreOutputData = false;
+
+                    drainInputBuffers();
+                    fillOutputBuffers();
+                }
+
+                if (mOutputPortSettingsChangedPending) {
+                    CODEC_LOGV(
+                            "Honoring deferred output port settings change.");
+
+                    mOutputPortSettingsChangedPending = false;
+                    onPortSettingsChanged(kPortIndexOutput);
+                }
+            }
+
+            break;
+        }
+
+        default:
+        {
+            CODEC_LOGV("CMD_COMPLETE(%d, %u)", cmd, data);
+            break;
+        }
+    }
+}
+
+// Completion handler for OMX_CommandStateSet. Advances the codec's own
+// state machine to mirror the component:
+//  - Idle: either continue LOADED->IDLE->EXECUTING startup, or complete
+//    EXECUTING->IDLE->LOADED teardown (verify all buffers returned, free
+//    them, optionally scrub protected surface buffers);
+//  - Executing / Loaded / Pause: finalize the corresponding transition;
+//  - Invalid: enter ERROR.
+void OMXCodec::onStateChange(OMX_STATETYPE newState) {
+    CODEC_LOGV("onStateChange %d", newState);
+
+    switch (newState) {
+        case OMX_StateIdle:
+        {
+            CODEC_LOGV("Now Idle.");
+            if (mState == LOADED_TO_IDLE) {
+                status_t err = mOMX->sendCommand(
+                        mNode, OMX_CommandStateSet, OMX_StateExecuting);
+
+                CHECK_EQ(err, (status_t)OK);
+
+                setState(IDLE_TO_EXECUTING);
+            } else {
+                CHECK_EQ((int)mState, (int)EXECUTING_TO_IDLE);
+
+                // The component must have returned every buffer on both
+                // ports before entering Idle; anything else is fatal.
+                if (countBuffersWeOwn(mPortBuffers[kPortIndexInput]) !=
+                    mPortBuffers[kPortIndexInput].size()) {
+                    ALOGE("Codec did not return all input buffers "
+                         "(received %zu / %zu)",
+                            countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
+                            mPortBuffers[kPortIndexInput].size());
+                    TRESPASS();
+                }
+
+                if (countBuffersWeOwn(mPortBuffers[kPortIndexOutput]) !=
+                    mPortBuffers[kPortIndexOutput].size()) {
+                    ALOGE("Codec did not return all output buffers "
+                         "(received %zu / %zu)",
+                            countBuffersWeOwn(mPortBuffers[kPortIndexOutput]),
+                            mPortBuffers[kPortIndexOutput].size());
+                    TRESPASS();
+                }
+
+                status_t err = mOMX->sendCommand(
+                        mNode, OMX_CommandStateSet, OMX_StateLoaded);
+
+                CHECK_EQ(err, (status_t)OK);
+
+                err = freeBuffersOnPort(kPortIndexInput);
+                CHECK_EQ(err, (status_t)OK);
+
+                err = freeBuffersOnPort(kPortIndexOutput);
+                CHECK_EQ(err, (status_t)OK);
+
+                mPortStatus[kPortIndexInput] = ENABLED;
+                mPortStatus[kPortIndexOutput] = ENABLED;
+
+                if ((mFlags & kEnableGrallocUsageProtected) &&
+                        mNativeWindow != NULL) {
+                    // We push enough 1x1 blank buffers to ensure that one of
+                    // them has made it to the display.  This allows the OMX
+                    // component teardown to zero out any protected buffers
+                    // without the risk of scanning out one of those buffers.
+                    pushBlankBuffersToNativeWindow(mNativeWindow.get());
+                }
+
+                setState(IDLE_TO_LOADED);
+            }
+            break;
+        }
+
+        case OMX_StateExecuting:
+        {
+            CHECK_EQ((int)mState, (int)IDLE_TO_EXECUTING);
+
+            CODEC_LOGV("Now Executing.");
+
+            mOutputPortSettingsChangedPending = false;
+
+            setState(EXECUTING);
+
+            // Buffers will be submitted to the component in the first
+            // call to OMXCodec::read as mInitialBufferSubmit is true at
+            // this point. This ensures that this on_message call returns,
+            // releases the lock and ::init can notice the state change and
+            // itself return.
+            break;
+        }
+
+        case OMX_StateLoaded:
+        {
+            CHECK_EQ((int)mState, (int)IDLE_TO_LOADED);
+
+            CODEC_LOGV("Now Loaded.");
+
+            setState(LOADED);
+            break;
+        }
+
+        case OMX_StatePause:
+        {
+            CODEC_LOGV("Now paused.");
+            CHECK_EQ((int)mState, (int)PAUSING);
+            setState(PAUSED);
+            break;
+        }
+
+        case OMX_StateInvalid:
+        {
+            setState(ERROR);
+            break;
+        }
+
+        default:
+        {
+            CHECK(!"should not be here.");
+            break;
+        }
+    }
+}
+
+// static
+// Counts the buffers in |buffers| currently NOT held by the component —
+// i.e. owned by us, the native window, or the client.
+size_t OMXCodec::countBuffersWeOwn(const Vector<BufferInfo> &buffers) {
+    size_t n = 0;
+    for (size_t i = 0; i < buffers.size(); ++i) {
+        if (buffers[i].mStatus != OWNED_BY_COMPONENT) {
+            ++n;
+        }
+    }
+
+    return n;
+}
+
+// Frees the buffers on |portIndex|, iterating in reverse so freeBuffer()'s
+// removeAt() doesn't disturb pending indices. With |onlyThoseWeOwn| set,
+// buffers held by the component are skipped; otherwise every buffer must be
+// owned by us or the native window (asserted). Returns the last error seen
+// (sticky), or OK.
+status_t OMXCodec::freeBuffersOnPort(
+        OMX_U32 portIndex, bool onlyThoseWeOwn) {
+    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+    status_t stickyErr = OK;
+
+    for (size_t i = buffers->size(); i > 0;) {
+        i--;
+        BufferInfo *info = &buffers->editItemAt(i);
+
+        if (onlyThoseWeOwn && info->mStatus == OWNED_BY_COMPONENT) {
+            continue;
+        }
+
+        CHECK(info->mStatus == OWNED_BY_US
+                || info->mStatus == OWNED_BY_NATIVE_WINDOW);
+
+        CODEC_LOGV("freeing buffer %u on port %u", info->mBuffer, portIndex);
+
+        status_t err = freeBuffer(portIndex, i);
+
+        if (err != OK) {
+            stickyErr = err;
+        }
+
+    }
+
+    CHECK(onlyThoseWeOwn || buffers->isEmpty());
+
+    return stickyErr;
+}
+
+// Frees a single buffer at |bufIndex| on |portIndex|: releases it in the
+// component, tears down the associated MediaBuffer (cancelling it back to
+// the native window first if it is a graphic buffer we own), and removes
+// the entry from mPortBuffers on success.
+status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
+    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+    BufferInfo *info = &buffers->editItemAt(bufIndex);
+
+    status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+
+    if (err == OK && info->mMediaBuffer != NULL) {
+        // Only output-port buffers carry a MediaBuffer wrapper.
+        CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+        info->mMediaBuffer->setObserver(NULL);
+
+        // Make sure nobody but us owns this buffer at this point.
+        CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+
+        // Cancel the buffer if it belongs to an ANativeWindow.
+        sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+        if (info->mStatus == OWNED_BY_US && graphicBuffer != 0) {
+            err = cancelBufferToNativeWindow(info);
+        }
+
+        info->mMediaBuffer->release();
+        info->mMediaBuffer = NULL;
+    }
+
+    if (err == OK) {
+        buffers->removeAt(bufIndex);
+    }
+
+    return err;
+}
+
+// Begins output-port reconfiguration after the component signals a port
+// settings change. Defers if the port isn't currently ENABLED; otherwise
+// enters RECONFIGURING and flushes first when the component's quirk
+// requires it (falling through to disable if no flush is needed).
+void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
+    CODEC_LOGV("PORT_SETTINGS_CHANGED(%u)", portIndex);
+
+    CHECK(mState == EXECUTING || mState == EXECUTING_TO_IDLE);
+    CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+    CHECK(!mOutputPortSettingsChangedPending);
+
+    if (mPortStatus[kPortIndexOutput] != ENABLED) {
+        CODEC_LOGV("Deferring output port settings change.");
+        mOutputPortSettingsChangedPending = true;
+        return;
+    }
+
+    setState(RECONFIGURING);
+
+    if (mQuirks & kNeedsFlushBeforeDisable) {
+        if (!flushPortAsync(portIndex)) {
+            // No flush event will arrive; synthesize the completion.
+            onCmdComplete(OMX_CommandFlush, portIndex);
+        }
+    } else {
+        disablePortAsync(portIndex);
+    }
+}
+
+// Initiates an asynchronous flush of |portIndex|, marking it SHUTTING_DOWN.
+// Returns true if a flush command was actually sent (a FLUSH_DONE event
+// will follow), false if the component already holds no buffers and the
+// kRequiresFlushCompleteEmulation quirk means no event would ever arrive —
+// in which case the caller must emulate the completion itself.
+bool OMXCodec::flushPortAsync(OMX_U32 portIndex) {
+    CHECK(mState == EXECUTING || mState == RECONFIGURING
+            || mState == EXECUTING_TO_IDLE || mState == FLUSHING);
+
+    CODEC_LOGV("flushPortAsync(%u): we own %zu out of %zu buffers already.",
+         portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),
+         mPortBuffers[portIndex].size());
+
+    CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
+    mPortStatus[portIndex] = SHUTTING_DOWN;
+
+    if ((mQuirks & kRequiresFlushCompleteEmulation)
+        && countBuffersWeOwn(mPortBuffers[portIndex])
+                == mPortBuffers[portIndex].size()) {
+        // No flush is necessary and this component fails to send a
+        // flush-complete event in this case.
+
+        return false;
+    }
+
+    status_t err =
+        mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex);
+    CHECK_EQ(err, (status_t)OK);
+
+    return true;
+}
+
+// Starts disabling |portIndex|: marks it DISABLING, sends the disable
+// command, then immediately frees the buffers we own (the rest are freed
+// as the component returns them; completion arrives as PortDisable in
+// onCmdComplete()).
+void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
+    CHECK(mState == EXECUTING || mState == RECONFIGURING);
+
+    CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
+    mPortStatus[portIndex] = DISABLING;
+
+    CODEC_LOGV("sending OMX_CommandPortDisable(%u)", portIndex);
+    status_t err =
+        mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
+    CHECK_EQ(err, (status_t)OK);
+
+    freeBuffersOnPort(portIndex, true);
+}
+
+// Asynchronously re-enables a previously disabled port: marks it ENABLING
+// and sends OMX_CommandPortEnable. Completion arrives via onCmdComplete.
+status_t OMXCodec::enablePortAsync(OMX_U32 portIndex) {
+    CHECK(mState == EXECUTING || mState == RECONFIGURING);
+
+    CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED);
+    mPortStatus[portIndex] = ENABLING;
+
+    CODEC_LOGV("sending OMX_CommandPortEnable(%u)", portIndex);
+    return mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
+}
+
+// Submits every output buffer we currently own to the component via
+// fillBuffer. Also contains a workaround that synthesizes end-of-output when
+// EOS was signalled on input and all buffers on both ports are back with us.
+void OMXCodec::fillOutputBuffers() {
+    CHECK(mState == EXECUTING || mState == FLUSHING);
+
+    // This is a workaround for some decoders not properly reporting
+    // end-of-output-stream. If we own all input buffers and also own
+    // all output buffers and we already signalled end-of-input-stream,
+    // the end-of-output-stream is implied.
+    if (mSignalledEOS
+            && countBuffersWeOwn(mPortBuffers[kPortIndexInput])
+                == mPortBuffers[kPortIndexInput].size()
+            && countBuffersWeOwn(mPortBuffers[kPortIndexOutput])
+                == mPortBuffers[kPortIndexOutput].size()) {
+        mNoMoreOutputData = true;
+        mBufferFilled.signal();
+
+        return;
+    }
+
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+    for (size_t i = 0; i < buffers->size(); ++i) {
+        BufferInfo *info = &buffers->editItemAt(i);
+        if (info->mStatus == OWNED_BY_US) {
+            fillOutputBuffer(&buffers->editItemAt(i));
+        }
+    }
+}
+
+// Feeds as many input buffers as possible to the component. In the secure
+// input-buffer mode the specific buffer is chosen later (by data pointer), so
+// drainAnyInputBuffer() is used; otherwise each buffer we own is drained in
+// order. Either way, the kOnlySubmitOneInputBufferAtOneTime flag limits
+// submission to a single buffer per call.
+void OMXCodec::drainInputBuffers() {
+    CHECK(mState == EXECUTING || mState == RECONFIGURING || mState == FLUSHING);
+
+    if (mFlags & kUseSecureInputBuffers) {
+        Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
+        for (size_t i = 0; i < buffers->size(); ++i) {
+            if (!drainAnyInputBuffer()
+                    || (mFlags & kOnlySubmitOneInputBufferAtOneTime)) {
+                break;
+            }
+        }
+    } else {
+        Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
+        for (size_t i = 0; i < buffers->size(); ++i) {
+            BufferInfo *info = &buffers->editItemAt(i);
+
+            if (info->mStatus != OWNED_BY_US) {
+                continue;
+            }
+
+            if (!drainInputBuffer(info)) {
+                break;
+            }
+
+            if (mFlags & kOnlySubmitOneInputBufferAtOneTime) {
+                break;
+            }
+        }
+    }
+}
+
+// Secure-input-buffer path: lets drainInputBuffer pick the buffer itself
+// (it locates the BufferInfo from the source buffer's data pointer).
+bool OMXCodec::drainAnyInputBuffer() {
+    return drainInputBuffer((BufferInfo *)NULL);
+}
+
+// Looks up the input BufferInfo whose data pointer matches `ptr`.
+// Aborts (TRESPASS) if no input buffer has that pointer — callers rely on a
+// match always existing in the secure-buffer mode.
+OMXCodec::BufferInfo *OMXCodec::findInputBufferByDataPointer(void *ptr) {
+    Vector<BufferInfo> *infos = &mPortBuffers[kPortIndexInput];
+    for (size_t i = 0; i < infos->size(); ++i) {
+        BufferInfo *info = &infos->editItemAt(i);
+
+        if (info->mData == ptr) {
+            CODEC_LOGV(
+                    "input buffer data ptr = %p, buffer_id = %u",
+                    ptr,
+                    info->mBuffer);
+
+            return info;
+        }
+    }
+
+    TRESPASS();
+}
+
+// Returns the first input buffer currently owned by us.
+// Aborts (TRESPASS) if none is available — callers only use this when at
+// least one buffer is known to be free (e.g. to signal EOS).
+OMXCodec::BufferInfo *OMXCodec::findEmptyInputBuffer() {
+    Vector<BufferInfo> *infos = &mPortBuffers[kPortIndexInput];
+    for (size_t i = 0; i < infos->size(); ++i) {
+        BufferInfo *info = &infos->editItemAt(i);
+
+        if (info->mStatus == OWNED_BY_US) {
+            return info;
+        }
+    }
+
+    TRESPASS();
+}
+
+// Fills one input buffer from the source and submits it to the component.
+//
+// `info` may be NULL only in the secure-input-buffer mode, in which case the
+// actual BufferInfo is resolved from the source buffer's data pointer (or,
+// for EOS, any empty buffer is used). Handles, in order: pending codec
+// specific data (CSD), seek requests, left-over data from a previous
+// oversized read, optional coalescing of multiple source frames into one
+// buffer, the Vorbis numPageSamples trailer, and EOS signalling.
+//
+// Returns true if a buffer was handed to the component, false if draining
+// should stop (EOS already signalled, paused, or an error put us in ERROR
+// state).
+bool OMXCodec::drainInputBuffer(BufferInfo *info) {
+    if (info != NULL) {
+        CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    }
+
+    if (mSignalledEOS) {
+        return false;
+    }
+
+    if (mCodecSpecificDataIndex < mCodecSpecificData.size()) {
+        CHECK(!(mFlags & kUseSecureInputBuffers));
+
+        const CodecSpecificData *specific =
+            mCodecSpecificData[mCodecSpecificDataIndex];
+
+        size_t size = specific->mSize;
+
+        if ((!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME) ||
+             !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mMIME))
+                && !(mQuirks & kWantsNALFragments)) {
+            // For AVC/HEVC in full-frame mode, prepend the Annex-B start
+            // code before the codec specific data.
+            static const uint8_t kNALStartCode[4] =
+                    { 0x00, 0x00, 0x00, 0x01 };
+
+            CHECK(info->mSize >= specific->mSize + 4);
+
+            size += 4;
+
+            memcpy(info->mData, kNALStartCode, 4);
+            memcpy((uint8_t *)info->mData + 4,
+                   specific->mData, specific->mSize);
+        } else {
+            CHECK(info->mSize >= specific->mSize);
+            memcpy(info->mData, specific->mData, specific->mSize);
+        }
+
+        mNoMoreOutputData = false;
+
+        CODEC_LOGV("calling emptyBuffer with codec specific data");
+
+        status_t err = mOMX->emptyBuffer(
+                mNode, info->mBuffer, 0, size,
+                OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
+                0);
+        CHECK_EQ(err, (status_t)OK);
+
+        info->mStatus = OWNED_BY_COMPONENT;
+
+        ++mCodecSpecificDataIndex;
+        return true;
+    }
+
+    if (mPaused) {
+        return false;
+    }
+
+    status_t err;
+
+    bool signalEOS = false;
+    int64_t timestampUs = 0;
+
+    size_t offset = 0;
+    int32_t n = 0;
+
+
+    // Read from the source, possibly coalescing several source buffers into
+    // one input buffer (kSupportsMultipleFramesPerInputBuffer).
+    for (;;) {
+        MediaBuffer *srcBuffer;
+        if (mSeekTimeUs >= 0) {
+            // A seek is pending: drop any left-over data and read with the
+            // requested seek options.
+            if (mLeftOverBuffer) {
+                mLeftOverBuffer->release();
+                mLeftOverBuffer = NULL;
+            }
+
+            MediaSource::ReadOptions options;
+            options.setSeekTo(mSeekTimeUs, mSeekMode);
+
+            mSeekTimeUs = -1;
+            mSeekMode = ReadOptions::SEEK_CLOSEST_SYNC;
+            mBufferFilled.signal();
+
+            err = mSource->read(&srcBuffer, &options);
+
+            if (err == OK) {
+                int64_t targetTimeUs;
+                if (srcBuffer->meta_data()->findInt64(
+                            kKeyTargetTime, &targetTimeUs)
+                        && targetTimeUs >= 0) {
+                    CODEC_LOGV("targetTimeUs = %lld us", (long long)targetTimeUs);
+                    mTargetTimeUs = targetTimeUs;
+                } else {
+                    mTargetTimeUs = -1;
+                }
+            }
+        } else if (mLeftOverBuffer) {
+            srcBuffer = mLeftOverBuffer;
+            mLeftOverBuffer = NULL;
+
+            err = OK;
+        } else {
+            err = mSource->read(&srcBuffer);
+        }
+
+        if (err != OK) {
+            // Any read error (including ERROR_END_OF_STREAM) becomes the
+            // final status and triggers EOS signalling below.
+            signalEOS = true;
+            mFinalStatus = err;
+            mSignalledEOS = true;
+            mBufferFilled.signal();
+            break;
+        }
+
+        if (mFlags & kUseSecureInputBuffers) {
+            info = findInputBufferByDataPointer(srcBuffer->data());
+            CHECK(info != NULL);
+        }
+
+        size_t remainingBytes = info->mSize - offset;
+
+        if (srcBuffer->range_length() > remainingBytes) {
+            if (offset == 0) {
+                // Even an empty input buffer cannot hold this source
+                // buffer — unrecoverable.
+                CODEC_LOGE(
+                     "Codec's input buffers are too small to accomodate "
+                     "buffer read from source (info->mSize = %zu, srcLength = %zu)",
+                     info->mSize, srcBuffer->range_length());
+
+                srcBuffer->release();
+                srcBuffer = NULL;
+
+                setState(ERROR);
+                return false;
+            }
+
+            // Doesn't fit alongside what we coalesced already; keep it for
+            // the next input buffer.
+            mLeftOverBuffer = srcBuffer;
+            break;
+        }
+
+        bool releaseBuffer = true;
+        if (mFlags & kStoreMetaDataInVideoBuffers) {
+            // Buffer holds metadata only; the component keeps a reference,
+            // so it is released later (when the buffer comes back).
+            releaseBuffer = false;
+            info->mMediaBuffer = srcBuffer;
+        }
+
+        if (mFlags & kUseSecureInputBuffers) {
+            // Data in "info" is already provided at this time.
+
+            releaseBuffer = false;
+
+            CHECK(info->mMediaBuffer == NULL);
+            info->mMediaBuffer = srcBuffer;
+        } else {
+            CHECK(srcBuffer->data() != NULL) ;
+            memcpy((uint8_t *)info->mData + offset,
+                    (const uint8_t *)srcBuffer->data()
+                        + srcBuffer->range_offset(),
+                    srcBuffer->range_length());
+        }
+
+        int64_t lastBufferTimeUs;
+        CHECK(srcBuffer->meta_data()->findInt64(kKeyTime, &lastBufferTimeUs));
+        CHECK(lastBufferTimeUs >= 0);
+        if (mIsEncoder && mIsVideo) {
+            mDecodingTimeList.push_back(lastBufferTimeUs);
+        }
+
+        if (offset == 0) {
+            timestampUs = lastBufferTimeUs;
+        }
+
+        offset += srcBuffer->range_length();
+
+        if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_VORBIS, mMIME)) {
+            // Vorbis decoders expect a trailing int32 "valid samples" count
+            // appended after the payload.
+            CHECK(!(mQuirks & kSupportsMultipleFramesPerInputBuffer));
+            CHECK_GE(info->mSize, offset + sizeof(int32_t));
+
+            int32_t numPageSamples;
+            if (!srcBuffer->meta_data()->findInt32(
+                        kKeyValidSamples, &numPageSamples)) {
+                numPageSamples = -1;
+            }
+
+            memcpy((uint8_t *)info->mData + offset,
+                   &numPageSamples,
+                   sizeof(numPageSamples));
+
+            offset += sizeof(numPageSamples);
+        }
+
+        if (releaseBuffer) {
+            srcBuffer->release();
+            srcBuffer = NULL;
+        }
+
+        ++n;
+
+        if (!(mQuirks & kSupportsMultipleFramesPerInputBuffer)) {
+            break;
+        }
+
+        int64_t coalescedDurationUs = lastBufferTimeUs - timestampUs;
+
+        if (coalescedDurationUs > 250000ll) {
+            // Don't coalesce more than 250ms worth of encoded data at once.
+            break;
+        }
+    }
+
+    if (n > 1) {
+        ALOGV("coalesced %d frames into one input buffer", n);
+    }
+
+    OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;
+
+    if (signalEOS) {
+        flags |= OMX_BUFFERFLAG_EOS;
+    } else {
+        mNoMoreOutputData = false;
+    }
+
+    if (info == NULL) {
+        CHECK(mFlags & kUseSecureInputBuffers);
+        CHECK(signalEOS);
+
+        // This is fishy, there's still a MediaBuffer corresponding to this
+        // info available to the source at this point even though we're going
+        // to use it to signal EOS to the codec.
+        info = findEmptyInputBuffer();
+    }
+
+    CODEC_LOGV("Calling emptyBuffer on buffer %u (length %zu), "
+               "timestamp %lld us (%.2f secs)",
+               info->mBuffer, offset,
+               (long long)timestampUs, timestampUs / 1E6);
+
+    err = mOMX->emptyBuffer(
+            mNode, info->mBuffer, 0, offset,
+            flags, timestampUs);
+
+    if (err != OK) {
+        setState(ERROR);
+        return false;
+    }
+
+    info->mStatus = OWNED_BY_COMPONENT;
+
+    return true;
+}
+
+// Hands one output buffer we own back to the component via fillBuffer.
+// No-op once mNoMoreOutputData is set; transitions to ERROR if fillBuffer
+// fails.
+void OMXCodec::fillOutputBuffer(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+
+    if (mNoMoreOutputData) {
+        CODEC_LOGV("There is no more output data available, not "
+             "calling fillOutputBuffer");
+        return;
+    }
+
+    CODEC_LOGV("Calling fillBuffer on buffer %u", info->mBuffer);
+    status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
+
+    if (err != OK) {
+        CODEC_LOGE("fillBuffer failed w/ error 0x%08x", err);
+
+        setState(ERROR);
+        return;
+    }
+
+    info->mStatus = OWNED_BY_COMPONENT;
+}
+
+// buffer_id overload: resolves the IOMX buffer id to its BufferInfo and
+// delegates to drainInputBuffer(BufferInfo*). Aborts if the id is unknown.
+bool OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) {
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
+    for (size_t i = 0; i < buffers->size(); ++i) {
+        if ((*buffers)[i].mBuffer == buffer) {
+            return drainInputBuffer(&buffers->editItemAt(i));
+        }
+    }
+
+    CHECK(!"should not be here.");
+
+    return false;
+}
+
+// buffer_id overload: resolves the IOMX buffer id to its BufferInfo and
+// delegates to fillOutputBuffer(BufferInfo*). Aborts if the id is unknown.
+void OMXCodec::fillOutputBuffer(IOMX::buffer_id buffer) {
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+    for (size_t i = 0; i < buffers->size(); ++i) {
+        if ((*buffers)[i].mBuffer == buffer) {
+            fillOutputBuffer(&buffers->editItemAt(i));
+            return;
+        }
+    }
+
+    CHECK(!"should not be here.");
+}
+
+// Updates mState and wakes both waiters: threads blocked on state
+// transitions (mAsyncCompletion) and readers blocked waiting for output
+// (mBufferFilled). Caller must hold mLock.
+void OMXCodec::setState(State newState) {
+    mState = newState;
+    mAsyncCompletion.signal();
+
+    // This may cause some spurious wakeups but is necessary to
+    // unblock the reader if we enter ERROR state.
+    mBufferFilled.signal();
+}
+
+// Blocks (with mLock held, hence the _l suffix) until an output buffer is
+// filled. Encoders wait indefinitely; decoders time out after
+// kBufferFilledEventTimeOutNs and log the buffer-ownership counts.
+status_t OMXCodec::waitForBufferFilled_l() {
+
+    if (mIsEncoder) {
+        // For timelapse video recording, the timelapse video recording may
+        // not send an input frame for a _long_ time. Do not use timeout
+        // for video encoding.
+        return mBufferFilled.wait(mLock);
+    }
+    status_t err = mBufferFilled.waitRelative(mLock, kBufferFilledEventTimeOutNs);
+    if (err != OK) {
+        CODEC_LOGE("Timed out waiting for output buffers: %zu/%zu",
+            countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
+            countBuffersWeOwn(mPortBuffers[kPortIndexOutput]));
+    }
+    return err;
+}
+
+// Configures a port for raw 16-bit signed interleaved linear PCM at the
+// given sample rate / channel count, and sets the channel mapping via
+// getOMXChannelMapping. Aborts on any OMX parameter failure.
+void OMXCodec::setRawAudioFormat(
+        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
+
+    // port definition
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+    CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
+            &def, sizeof(def)), (status_t)OK);
+
+    // pcm param
+    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
+    InitOMXParams(&pcmParams);
+    pcmParams.nPortIndex = portIndex;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+
+    CHECK_EQ(err, (status_t)OK);
+
+    pcmParams.nChannels = numChannels;
+    pcmParams.eNumData = OMX_NumericalDataSigned;
+    pcmParams.bInterleaved = OMX_TRUE;
+    pcmParams.nBitPerSample = 16;
+    pcmParams.nSamplingRate = sampleRate;
+    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;
+
+    CHECK_EQ(getOMXChannelMapping(
+                numChannels, pcmParams.eChannelMapping), (status_t)OK);
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+
+    CHECK_EQ(err, (status_t)OK);
+}
+
+// Maps a requested bitrate (bits/sec) to the nearest AMR band mode at or
+// above it: WB0..WB8 for AMR-WB, NB0..NB7 for AMR-NB. Bitrates above the
+// highest threshold fall through to the top mode (23850 / 12200 bps).
+static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(bool isAMRWB, int32_t bps) {
+    if (isAMRWB) {
+        if (bps <= 6600) {
+            return OMX_AUDIO_AMRBandModeWB0;
+        } else if (bps <= 8850) {
+            return OMX_AUDIO_AMRBandModeWB1;
+        } else if (bps <= 12650) {
+            return OMX_AUDIO_AMRBandModeWB2;
+        } else if (bps <= 14250) {
+            return OMX_AUDIO_AMRBandModeWB3;
+        } else if (bps <= 15850) {
+            return OMX_AUDIO_AMRBandModeWB4;
+        } else if (bps <= 18250) {
+            return OMX_AUDIO_AMRBandModeWB5;
+        } else if (bps <= 19850) {
+            return OMX_AUDIO_AMRBandModeWB6;
+        } else if (bps <= 23050) {
+            return OMX_AUDIO_AMRBandModeWB7;
+        }
+
+        // 23850 bps
+        return OMX_AUDIO_AMRBandModeWB8;
+    } else {  // AMRNB
+        if (bps <= 4750) {
+            return OMX_AUDIO_AMRBandModeNB0;
+        } else if (bps <= 5150) {
+            return OMX_AUDIO_AMRBandModeNB1;
+        } else if (bps <= 5900) {
+            return OMX_AUDIO_AMRBandModeNB2;
+        } else if (bps <= 6700) {
+            return OMX_AUDIO_AMRBandModeNB3;
+        } else if (bps <= 7400) {
+            return OMX_AUDIO_AMRBandModeNB4;
+        } else if (bps <= 7950) {
+            return OMX_AUDIO_AMRBandModeNB5;
+        } else if (bps <= 10200) {
+            return OMX_AUDIO_AMRBandModeNB6;
+        }
+
+        // 12200 bps
+        return OMX_AUDIO_AMRBandModeNB7;
+    }
+}
+
+// Configures AMR (NB or WB) on the compressed port — output for encoders,
+// input for decoders — using FSF frame format and a band mode derived from
+// the bitrate. For encoders, also sets raw PCM on the input port using the
+// source's sample rate and channel count.
+void OMXCodec::setAMRFormat(bool isWAMR, int32_t bitRate) {
+    OMX_U32 portIndex = mIsEncoder ? kPortIndexOutput : kPortIndexInput;
+
+    OMX_AUDIO_PARAM_AMRTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err =
+        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+
+    CHECK_EQ(err, (status_t)OK);
+
+    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
+
+    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitRate);
+    err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    ////////////////////////
+
+    if (mIsEncoder) {
+        sp<MetaData> format = mSource->getFormat();
+        int32_t sampleRate;
+        int32_t numChannels;
+        CHECK(format->findInt32(kKeySampleRate, &sampleRate));
+        CHECK(format->findInt32(kKeyChannelCount, &numChannels));
+
+        setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
+    }
+}
+
+// Configures AAC. Encoder path: rejects ADTS output, sets PCM on the input
+// port, then selects the AAC coding on the output port (enumerating port
+// formats until AAC is found) and programs the AAC profile/bitrate.
+// Decoder path: programs channels/sample rate on the input port and selects
+// ADTS vs MP4FF stream format. Returns the OMX error on profile-set failure,
+// OK otherwise; more than two channels is only warned about, not rejected.
+status_t OMXCodec::setAACFormat(
+        int32_t numChannels, int32_t sampleRate, int32_t bitRate, int32_t aacProfile, bool isADTS) {
+    if (numChannels > 2) {
+        ALOGW("Number of channels: (%d) \n", numChannels);
+    }
+
+    if (mIsEncoder) {
+        if (isADTS) {
+            return -EINVAL;
+        }
+
+        //////////////// input port ////////////////////
+        setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
+
+        //////////////// output port ////////////////////
+        // format
+        OMX_AUDIO_PARAM_PORTFORMATTYPE format;
+        InitOMXParams(&format);
+        format.nPortIndex = kPortIndexOutput;
+        format.nIndex = 0;
+        status_t err = OMX_ErrorNone;
+        while (OMX_ErrorNone == err) {
+            CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioPortFormat,
+                    &format, sizeof(format)), (status_t)OK);
+            if (format.eEncoding == OMX_AUDIO_CodingAAC) {
+                break;
+            }
+            format.nIndex++;
+        }
+        CHECK_EQ((status_t)OK, err);
+        CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioPortFormat,
+                &format, sizeof(format)), (status_t)OK);
+
+        // port definition
+        OMX_PARAM_PORTDEFINITIONTYPE def;
+        InitOMXParams(&def);
+        def.nPortIndex = kPortIndexOutput;
+        CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamPortDefinition,
+                &def, sizeof(def)), (status_t)OK);
+        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
+        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
+        CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
+                &def, sizeof(def)), (status_t)OK);
+
+        // profile
+        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+        InitOMXParams(&profile);
+        profile.nPortIndex = kPortIndexOutput;
+        CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioAac,
+                &profile, sizeof(profile)), (status_t)OK);
+        profile.nChannels = numChannels;
+        profile.eChannelMode = (numChannels == 1?
+                OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo);
+        profile.nSampleRate = sampleRate;
+        profile.nBitRate = bitRate;
+        profile.nAudioBandWidth = 0;
+        profile.nFrameLength = 0;
+        profile.nAACtools = OMX_AUDIO_AACToolAll;
+        profile.nAACERtools = OMX_AUDIO_AACERNone;
+        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
+        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
+        err = mOMX->setParameter(mNode, OMX_IndexParamAudioAac,
+                &profile, sizeof(profile));
+
+        if (err != OK) {
+            CODEC_LOGE("setParameter('OMX_IndexParamAudioAac') failed "
+                       "(err = %d)",
+                       err);
+            return err;
+        }
+    } else {
+        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+        InitOMXParams(&profile);
+        profile.nPortIndex = kPortIndexInput;
+
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+        CHECK_EQ(err, (status_t)OK);
+
+        profile.nChannels = numChannels;
+        profile.nSampleRate = sampleRate;
+
+        profile.eAACStreamFormat =
+            isADTS
+                ? OMX_AUDIO_AACStreamFormatMP4ADTS
+                : OMX_AUDIO_AACStreamFormatMP4FF;
+
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+        if (err != OK) {
+            CODEC_LOGE("setParameter('OMX_IndexParamAudioAac') failed "
+                       "(err = %d)",
+                       err);
+            return err;
+        }
+    }
+
+    return OK;
+}
+
+// Configures AC3 decoding on the input port via the Android vendor index
+// OMX_IndexParamAudioAndroidAc3. Propagates get/set errors to the caller
+// instead of aborting.
+status_t OMXCodec::setAC3Format(int32_t numChannels, int32_t sampleRate) {
+    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexInput;
+
+    status_t err = mOMX->getParameter(
+            mNode,
+            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+            &def,
+            sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    def.nChannels = numChannels;
+    def.nSampleRate = sampleRate;
+
+    return mOMX->setParameter(
+            mNode,
+            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+            &def,
+            sizeof(def));
+}
+
+// G.711 decode-only setup: the input port takes raw PCM parameters
+// (encoding G.711 is not supported here, hence the !mIsEncoder assert).
+void OMXCodec::setG711Format(int32_t sampleRate, int32_t numChannels) {
+    CHECK(!mIsEncoder);
+    setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
+}
+
+// Configures the (image-domain) output port for an uncompressed image of
+// the given color format and dimensions, computing nBufferSize from the
+// format's bytes-per-pixel and clamping the buffer count to the minimum.
+// Aborts on unsupported color formats or OMX errors.
+void OMXCodec::setImageOutputFormat(
+        OMX_COLOR_FORMATTYPE format, OMX_U32 width, OMX_U32 height) {
+    CODEC_LOGV("setImageOutputFormat(%u, %u)", width, height);
+
+#if 0
+    OMX_INDEXTYPE index;
+    status_t err = mOMX->get_extension_index(
+            mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index);
+    CHECK_EQ(err, (status_t)OK);
+
+    err = mOMX->set_config(mNode, index, &format, sizeof(format));
+    CHECK_EQ(err, (status_t)OK);
+#endif
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
+
+    OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
+
+    CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingUnused);
+    imageDef->eColorFormat = format;
+    imageDef->nFrameWidth = width;
+    imageDef->nFrameHeight = height;
+
+    switch (format) {
+        case OMX_COLOR_FormatYUV420PackedPlanar:
+        case OMX_COLOR_FormatYUV411Planar:
+        {
+            // 12 bits per pixel (planar YUV).
+            def.nBufferSize = (width * height * 3) / 2;
+            break;
+        }
+
+        case OMX_COLOR_FormatCbYCrY:
+        {
+            def.nBufferSize = width * height * 2;
+            break;
+        }
+
+        case OMX_COLOR_Format32bitARGB8888:
+        {
+            def.nBufferSize = width * height * 4;
+            break;
+        }
+
+        case OMX_COLOR_Format16bitARGB4444:
+        case OMX_COLOR_Format16bitARGB1555:
+        case OMX_COLOR_Format16bitRGB565:
+        case OMX_COLOR_Format16bitBGR565:
+        {
+            def.nBufferSize = width * height * 2;
+            break;
+        }
+
+        default:
+            CHECK(!"Should not be here. Unknown color format.");
+            break;
+    }
+
+    def.nBufferCountActual = def.nBufferCountMin;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+}
+
+// Configures the (image-domain) input port for JPEG-compressed input of the
+// given dimensions and compressed byte size; clamps buffer count to the
+// minimum. Aborts on OMX errors or if the port is not JPEG/image.
+void OMXCodec::setJPEGInputFormat(
+        OMX_U32 width, OMX_U32 height, OMX_U32 compressedSize) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexInput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
+    OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
+
+    CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingJPEG);
+    imageDef->nFrameWidth = width;
+    imageDef->nFrameHeight = height;
+
+    def.nBufferSize = compressedSize;
+    def.nBufferCountActual = def.nBufferCountMin;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+}
+
+// Copies a blob of codec specific data (CSD) into a heap-allocated
+// CodecSpecificData record and queues it in mCodecSpecificData.
+// "+ size - 1" accounts for the single-byte mData array already inside the
+// struct (C flexible-array idiom); freed later with free() in
+// clearCodecSpecificData.
+void OMXCodec::addCodecSpecificData(const void *data, size_t size) {
+    CodecSpecificData *specific =
+        (CodecSpecificData *)malloc(sizeof(CodecSpecificData) + size - 1);
+
+    specific->mSize = size;
+    memcpy(specific->mData, data, size);
+
+    mCodecSpecificData.push(specific);
+}
+
+// Frees every queued CSD record (malloc'd in addCodecSpecificData) and
+// resets the submission index.
+void OMXCodec::clearCodecSpecificData() {
+    for (size_t i = 0; i < mCodecSpecificData.size(); ++i) {
+        free(mCodecSpecificData.editItemAt(i));
+    }
+    mCodecSpecificData.clear();
+    mCodecSpecificDataIndex = 0;
+}
+
+// Starts the codec. If paused, resumes instead. Resets per-session state
+// (EOS, seek, filled-buffer queue), then: for encoders, initializes the OMX
+// component first so the source can be told the exact input buffer count
+// (kKeyNumBuffers); for decoders, starts the source first and then init()s.
+// Returns UNKNOWN_ERROR if called in any state other than LOADED.
+status_t OMXCodec::start(MetaData *meta) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mPaused) {
+        status_t err = resumeLocked(true);
+        return err;
+    }
+
+    if (mState != LOADED) {
+        CODEC_LOGE("called start in the unexpected state: %d", mState);
+        return UNKNOWN_ERROR;
+    }
+
+    sp<MetaData> params = new MetaData;
+    if (mQuirks & kWantsNALFragments) {
+        params->setInt32(kKeyWantsNALFragments, true);
+    }
+    if (meta) {
+        int64_t startTimeUs = 0;
+        int64_t timeUs;
+        if (meta->findInt64(kKeyTime, &timeUs)) {
+            startTimeUs = timeUs;
+        }
+        params->setInt64(kKeyTime, startTimeUs);
+    }
+
+    // Reset all per-playback-session bookkeeping.
+    mCodecSpecificDataIndex = 0;
+    mInitialBufferSubmit = true;
+    mSignalledEOS = false;
+    mNoMoreOutputData = false;
+    mOutputPortSettingsHaveChanged = false;
+    mSeekTimeUs = -1;
+    mSeekMode = ReadOptions::SEEK_CLOSEST_SYNC;
+    mTargetTimeUs = -1;
+    mFilledBuffers.clear();
+    mPaused = false;
+
+    status_t err;
+    if (mIsEncoder) {
+        // Calling init() before starting its source so that we can configure,
+        // if supported, the source to use exactly the same number of input
+        // buffers as requested by the encoder.
+        if ((err = init()) != OK) {
+            CODEC_LOGE("init failed: %d", err);
+            return err;
+        }
+
+        params->setInt32(kKeyNumBuffers, mPortBuffers[kPortIndexInput].size());
+        err = mSource->start(params.get());
+        if (err != OK) {
+            CODEC_LOGE("source failed to start: %d", err);
+            stopOmxComponent_l();
+        }
+        return err;
+    }
+
+    // Decoder case
+    if ((err = mSource->start(params.get())) != OK) {
+        CODEC_LOGE("source failed to start: %d", err);
+        return err;
+    }
+    return init();
+}
+
+// Stops the OMX component (under mLock) and then stops the source.
+// Returns the component-stop status; the source-stop result is ignored.
+status_t OMXCodec::stop() {
+    CODEC_LOGV("stop mState=%d", mState);
+    Mutex::Autolock autoLock(mLock);
+    status_t err = stopOmxComponent_l();
+    mSource->stop();
+
+    CODEC_LOGV("stopped in state %d", mState);
+    return err;
+}
+
+// Shuts the OMX component down to LOADED state (mLock held). Waits out any
+// intermediate state first. From ERROR, either salvages a wedged component
+// (output port stuck ENABLING) by freeing buffers and faking LOADED, or —
+// if the component is still executing — deliberately falls through into the
+// EXECUTING case to idle it, restoring ERROR afterwards. Releases any
+// left-over source buffer at the end. Always returns OK.
+status_t OMXCodec::stopOmxComponent_l() {
+    CODEC_LOGV("stopOmxComponent_l mState=%d", mState);
+
+    while (isIntermediateState(mState)) {
+        mAsyncCompletion.wait(mLock);
+    }
+
+    bool isError = false;
+    switch (mState) {
+        case LOADED:
+            break;
+
+        case ERROR:
+        {
+            if (mPortStatus[kPortIndexOutput] == ENABLING) {
+                // Codec is in a wedged state (technical term)
+                // We've seen an output port settings change from the codec,
+                // We've disabled the output port, then freed the output
+                // buffers, initiated re-enabling the output port but
+                // failed to reallocate the output buffers.
+                // There doesn't seem to be a way to orderly transition
+                // from executing->idle and idle->loaded now that the
+                // output port hasn't been reenabled yet...
+                // Simply free as many resources as we can and pretend
+                // that we're in LOADED state so that the destructor
+                // will free the component instance without asserting.
+                freeBuffersOnPort(kPortIndexInput, true /* onlyThoseWeOwn */);
+                freeBuffersOnPort(kPortIndexOutput, true /* onlyThoseWeOwn */);
+                setState(LOADED);
+                break;
+            } else {
+                OMX_STATETYPE state = OMX_StateInvalid;
+                status_t err = mOMX->getState(mNode, &state);
+                CHECK_EQ(err, (status_t)OK);
+
+                if (state != OMX_StateExecuting) {
+                    break;
+                }
+                // else fall through to the idling code
+            }
+
+            // Intentional fall-through into the EXECUTING case below.
+            isError = true;
+        }
+
+        case PAUSED:
+        case EXECUTING:
+        {
+            setState(EXECUTING_TO_IDLE);
+
+            if (mQuirks & kRequiresFlushBeforeShutdown) {
+                CODEC_LOGV("This component requires a flush before transitioning "
+                     "from EXECUTING to IDLE...");
+
+                bool emulateInputFlushCompletion =
+                    !flushPortAsync(kPortIndexInput);
+
+                bool emulateOutputFlushCompletion =
+                    !flushPortAsync(kPortIndexOutput);
+
+                if (emulateInputFlushCompletion) {
+                    onCmdComplete(OMX_CommandFlush, kPortIndexInput);
+                }
+
+                if (emulateOutputFlushCompletion) {
+                    onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
+                }
+            } else {
+                mPortStatus[kPortIndexInput] = SHUTTING_DOWN;
+                mPortStatus[kPortIndexOutput] = SHUTTING_DOWN;
+
+                status_t err =
+                    mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
+                CHECK_EQ(err, (status_t)OK);
+            }
+
+            while (mState != LOADED && mState != ERROR) {
+                mAsyncCompletion.wait(mLock);
+            }
+
+            if (isError) {
+                // We were in the ERROR state coming in, so restore that now
+                // that we've idled the OMX component.
+                setState(ERROR);
+            }
+
+            break;
+        }
+
+        default:
+        {
+            CHECK(!"should not be here.");
+            break;
+        }
+    }
+
+    if (mLeftOverBuffer) {
+        mLeftOverBuffer->release();
+        mLeftOverBuffer = NULL;
+    }
+
+    return OK;
+}
+
+// Returns the current output format metadata (thread-safe snapshot of
+// mOutputFormat under mLock).
+sp<MetaData> OMXCodec::getFormat() {
+    Mutex::Autolock autoLock(mLock);
+
+    return mOutputFormat;
+}
+
+// MediaSource::read() implementation: blocks until a decoded/encoded buffer
+// is available and hands ownership of it to the caller (OWNED_BY_CLIENT,
+// ref-counted). Handles: resume-from-pause, the very first buffer
+// submission, seeks (flushing both ports simultaneously via the FLUSHING
+// state), INFO_FORMAT_CHANGED after port reconfiguration, and EOS
+// (mFinalStatus / ERROR_END_OF_STREAM).
+status_t OMXCodec::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    status_t err = OK;
+    *buffer = NULL;
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mPaused) {
+        err = resumeLocked(false);
+        if(err != OK) {
+            CODEC_LOGE("Failed to restart codec err= %d", err);
+            return err;
+        }
+    }
+
+    if (mState != EXECUTING && mState != RECONFIGURING) {
+        return UNKNOWN_ERROR;
+    }
+
+    bool seeking = false;
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode seekMode;
+    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
+        seeking = true;
+    }
+
+    if (mInitialBufferSubmit) {
+        mInitialBufferSubmit = false;
+
+        if (seeking) {
+            CHECK(seekTimeUs >= 0);
+            mSeekTimeUs = seekTimeUs;
+            mSeekMode = seekMode;
+
+            // There's no reason to trigger the code below, there's
+            // nothing to flush yet.
+            seeking = false;
+            mPaused = false;
+        }
+
+        drainInputBuffers();
+
+        if (mState == EXECUTING) {
+            // Otherwise mState == RECONFIGURING and this code will trigger
+            // after the output port is reenabled.
+            fillOutputBuffers();
+        }
+    }
+
+    if (seeking) {
+        // Wait for any in-flight reconfiguration to finish before flushing.
+        while (mState == RECONFIGURING) {
+            if ((err = waitForBufferFilled_l()) != OK) {
+                return err;
+            }
+        }
+
+        if (mState != EXECUTING) {
+            return UNKNOWN_ERROR;
+        }
+
+        CODEC_LOGV("seeking to %" PRId64 " us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
+
+        mSignalledEOS = false;
+
+        CHECK(seekTimeUs >= 0);
+        mSeekTimeUs = seekTimeUs;
+        mSeekMode = seekMode;
+
+        mFilledBuffers.clear();
+
+        CHECK_EQ((int)mState, (int)EXECUTING);
+        //DSP supports flushing of ports simultaneously. Flushing individual port is not supported.
+        setState(FLUSHING);
+
+        bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
+        bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
+
+        if (emulateInputFlushCompletion) {
+            onCmdComplete(OMX_CommandFlush, kPortIndexInput);
+        }
+
+        if (emulateOutputFlushCompletion) {
+            onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
+        }
+
+        // Block until the seek request has been consumed by the drain loop.
+        while (mSeekTimeUs >= 0) {
+            if ((err = waitForBufferFilled_l()) != OK) {
+                return err;
+            }
+        }
+    }
+
+    while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
+        if ((err = waitForBufferFilled_l()) != OK) {
+            return err;
+        }
+    }
+
+    if (mState == ERROR) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (seeking) {
+        CHECK_EQ((int)mState, (int)FLUSHING);
+        setState(EXECUTING);
+    }
+
+    if (mFilledBuffers.empty()) {
+        return mSignalledEOS ? mFinalStatus : ERROR_END_OF_STREAM;
+    }
+
+    if (mOutputPortSettingsHaveChanged) {
+        mOutputPortSettingsHaveChanged = false;
+
+        return INFO_FORMAT_CHANGED;
+    }
+
+    size_t index = *mFilledBuffers.begin();
+    mFilledBuffers.erase(mFilledBuffers.begin());
+
+    BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    info->mStatus = OWNED_BY_CLIENT;
+
+    info->mMediaBuffer->add_ref();
+    if (mSkipCutBuffer != NULL) {
+        mSkipCutBuffer->submit(info->mMediaBuffer);
+    }
+    *buffer = info->mMediaBuffer;
+
+    return OK;
+}
+
+// Called when the client releases an output MediaBuffer. For ordinary
+// buffers it is resubmitted to the component immediately. For
+// GraphicBuffer-backed buffers: if the client never rendered it, it is
+// cancelled back to the native window; the buffer then becomes
+// OWNED_BY_NATIVE_WINDOW and a fresh buffer is dequeued from the window and
+// handed to the component instead. Aborts if the buffer is unknown.
+void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+    for (size_t i = 0; i < buffers->size(); ++i) {
+        BufferInfo *info = &buffers->editItemAt(i);
+
+        if (info->mMediaBuffer == buffer) {
+            CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
+            CHECK_EQ((int)info->mStatus, (int)OWNED_BY_CLIENT);
+
+            info->mStatus = OWNED_BY_US;
+
+            if (buffer->graphicBuffer() == 0) {
+                fillOutputBuffer(info);
+            } else {
+                sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+                int32_t rendered = 0;
+                if (!metaData->findInt32(kKeyRendered, &rendered)) {
+                    rendered = 0;
+                }
+                if (!rendered) {
+                    status_t err = cancelBufferToNativeWindow(info);
+                    if (err < 0) {
+                        return;
+                    }
+                }
+
+                info->mStatus = OWNED_BY_NATIVE_WINDOW;
+
+                // Dequeue the next buffer from the native window.
+                BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+                if (nextBufInfo == 0) {
+                    return;
+                }
+
+                // Give the buffer to the OMX node to fill.
+                fillOutputBuffer(nextBufInfo);
+            }
+            return;
+        }
+    }
+
+    CHECK(!"should not be here.");
+}
+
+// Debug helper: prints the port definition (buffer counts/size plus the
+// domain-specific image/video/audio details, including PCM/AMR parameters)
+// to stdout. Aborts if the port direction doesn't match the index.
+void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    CHECK_EQ(err, (status_t)OK);
+
+    printf("%s Port = {\n", portIndex == kPortIndexInput ? "Input" : "Output");
+
+    CHECK((portIndex == kPortIndexInput && def.eDir == OMX_DirInput)
+          || (portIndex == kPortIndexOutput && def.eDir == OMX_DirOutput));
+
+    printf("  nBufferCountActual = %" PRIu32 "\n", def.nBufferCountActual);
+    printf("  nBufferCountMin = %" PRIu32 "\n", def.nBufferCountMin);
+    printf("  nBufferSize = %" PRIu32 "\n", def.nBufferSize);
+
+    switch (def.eDomain) {
+        case OMX_PortDomainImage:
+        {
+            const OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
+
+            printf("\n");
+            printf("  // Image\n");
+            printf("  nFrameWidth = %" PRIu32 "\n", imageDef->nFrameWidth);
+            printf("  nFrameHeight = %" PRIu32 "\n", imageDef->nFrameHeight);
+            printf("  nStride = %" PRIu32 "\n", imageDef->nStride);
+
+            printf("  eCompressionFormat = %s\n",
+                   asString(imageDef->eCompressionFormat));
+
+            printf("  eColorFormat = %s\n",
+                   asString(imageDef->eColorFormat));
+
+            break;
+        }
+
+        case OMX_PortDomainVideo:
+        {
+            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
+
+            printf("\n");
+            printf("  // Video\n");
+            printf("  nFrameWidth = %" PRIu32 "\n", videoDef->nFrameWidth);
+            printf("  nFrameHeight = %" PRIu32 "\n", videoDef->nFrameHeight);
+            printf("  nStride = %" PRIu32 "\n", videoDef->nStride);
+
+            printf("  eCompressionFormat = %s\n",
+                   asString(videoDef->eCompressionFormat));
+
+            printf("  eColorFormat = %s\n",
+                   asString(videoDef->eColorFormat));
+
+            break;
+        }
+
+        case OMX_PortDomainAudio:
+        {
+            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
+
+            printf("\n");
+            printf("  // Audio\n");
+            printf("  eEncoding = %s\n",
+                   asString(audioDef->eEncoding));
+
+            if (audioDef->eEncoding == OMX_AUDIO_CodingPCM) {
+                OMX_AUDIO_PARAM_PCMMODETYPE params;
+                InitOMXParams(&params);
+                params.nPortIndex = portIndex;
+
+                err = mOMX->getParameter(
+                        mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+                CHECK_EQ(err, (status_t)OK);
+
+                printf("  nSamplingRate = %" PRIu32 "\n", params.nSamplingRate);
+                printf("  nChannels = %" PRIu32 "\n", params.nChannels);
+                printf("  bInterleaved = %d\n", params.bInterleaved);
+                printf("  nBitPerSample = %" PRIu32 "\n", params.nBitPerSample);
+
+                printf("  eNumData = %s\n",
+                       params.eNumData == OMX_NumericalDataSigned
+                        ? "signed" : "unsigned");
+
+                printf("  ePCMMode = %s\n", asString(params.ePCMMode));
+            } else if (audioDef->eEncoding == OMX_AUDIO_CodingAMR) {
+                OMX_AUDIO_PARAM_AMRTYPE amr;
+                InitOMXParams(&amr);
+                amr.nPortIndex = portIndex;
+
+                err = mOMX->getParameter(
+                        mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
+                CHECK_EQ(err, (status_t)OK);
+
+                printf("  nChannels = %" PRIu32 "\n", amr.nChannels);
+                printf("  eAMRBandMode = %s\n",
+                        asString(amr.eAMRBandMode));
+                printf("  eAMRFrameFormat = %s\n",
+                        asString(amr.eAMRFrameFormat));
+            }
+
+            break;
+        }
+
+        default:
+        {
+            printf("  // Unknown\n");
+            break;
+        }
+    }
+
+    printf("}\n");
+}
+
+status_t OMXCodec::initNativeWindow() {
+ // Enable use of a GraphicBuffer as the output for this node. This must
+ // happen before getting the IndexParamPortDefinition parameter because it
+ // will affect the pixel format that the node reports.
+ status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != 0) {
+ return err;
+ }
+
+ return OK;
+}
+
+void OMXCodec::initNativeWindowCrop() {
+ int32_t left, top, right, bottom;
+
+ CHECK(mOutputFormat->findRect(
+ kKeyCropRect,
+ &left, &top, &right, &bottom));
+
+ android_native_rect_t crop;
+ crop.left = left;
+ crop.top = top;
+ crop.right = right + 1;
+ crop.bottom = bottom + 1;
+
+ // We'll ignore any errors here, if the surface is
+ // already invalid, we'll know soon enough.
+ native_window_set_crop(mNativeWindow.get(), &crop);
+}
+
+void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
+ mOutputFormat = new MetaData;
+ mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
+ if (mIsEncoder) {
+ int32_t timeScale;
+ if (inputFormat->findInt32(kKeyTimeScale, &timeScale)) {
+ mOutputFormat->setInt32(kKeyTimeScale, timeScale);
+ }
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ CHECK_EQ(err, (status_t)OK);
+
+ switch (def.eDomain) {
+ case OMX_PortDomainImage:
+ {
+ OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
+ CHECK_EQ((int)imageDef->eCompressionFormat,
+ (int)OMX_IMAGE_CodingUnused);
+
+ mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat);
+ mOutputFormat->setInt32(kKeyWidth, imageDef->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, imageDef->nFrameHeight);
+ break;
+ }
+
+ case OMX_PortDomainAudio:
+ {
+ OMX_AUDIO_PORTDEFINITIONTYPE *audio_def = &def.format.audio;
+
+ if (audio_def->eEncoding == OMX_AUDIO_CodingPCM) {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ CHECK_EQ(err, (status_t)OK);
+
+ CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+ CHECK_EQ(params.nBitPerSample, 16u);
+ CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
+
+ int32_t numChannels, sampleRate;
+ inputFormat->findInt32(kKeyChannelCount, &numChannels);
+ inputFormat->findInt32(kKeySampleRate, &sampleRate);
+
+ if ((OMX_U32)numChannels != params.nChannels) {
+ ALOGV("Codec outputs a different number of channels than "
+ "the input stream contains (contains %d channels, "
+ "codec outputs %u channels).",
+ numChannels, params.nChannels);
+ }
+
+ if (sampleRate != (int32_t)params.nSamplingRate) {
+ ALOGV("Codec outputs at different sampling rate than "
+ "what the input stream contains (contains data at "
+ "%d Hz, codec outputs %u Hz)",
+ sampleRate, params.nSamplingRate);
+ }
+
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+
+ // Use the codec-advertised number of channels, as some
+ // codecs appear to output stereo even if the input data is
+ // mono. If we know the codec lies about this information,
+ // use the actual number of channels instead.
+ mOutputFormat->setInt32(
+ kKeyChannelCount,
+ (mQuirks & kDecoderLiesAboutNumberOfChannels)
+ ? numChannels : params.nChannels);
+
+ mOutputFormat->setInt32(kKeySampleRate, params.nSamplingRate);
+#ifdef DOLBY_ENABLE
+ setDolbyProcessedAudio();
+#endif // DOLBY_END
+ } else if (audio_def->eEncoding == OMX_AUDIO_CodingAMR) {
+ OMX_AUDIO_PARAM_AMRTYPE amr;
+ InitOMXParams(&amr);
+ amr.nPortIndex = kPortIndexOutput;
+
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
+ CHECK_EQ(err, (status_t)OK);
+
+ CHECK_EQ(amr.nChannels, 1u);
+ mOutputFormat->setInt32(kKeyChannelCount, 1);
+
+ if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0
+ && amr.eAMRBandMode <= OMX_AUDIO_AMRBandModeNB7) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_NB);
+ mOutputFormat->setInt32(kKeySampleRate, 8000);
+ } else if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0
+ && amr.eAMRBandMode <= OMX_AUDIO_AMRBandModeWB8) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_WB);
+ mOutputFormat->setInt32(kKeySampleRate, 16000);
+ } else {
+ CHECK(!"Unknown AMR band mode.");
+ }
+ } else if (audio_def->eEncoding == OMX_AUDIO_CodingAAC) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+ int32_t numChannels, sampleRate, bitRate;
+ inputFormat->findInt32(kKeyChannelCount, &numChannels);
+ inputFormat->findInt32(kKeySampleRate, &sampleRate);
+ inputFormat->findInt32(kKeyBitRate, &bitRate);
+ mOutputFormat->setInt32(kKeyChannelCount, numChannels);
+ mOutputFormat->setInt32(kKeySampleRate, sampleRate);
+ mOutputFormat->setInt32(kKeyBitRate, bitRate);
+ } else if (audio_def->eEncoding ==
+ (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidAC3) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AC3);
+ int32_t numChannels, sampleRate, bitRate;
+ inputFormat->findInt32(kKeyChannelCount, &numChannels);
+ inputFormat->findInt32(kKeySampleRate, &sampleRate);
+ inputFormat->findInt32(kKeyBitRate, &bitRate);
+ mOutputFormat->setInt32(kKeyChannelCount, numChannels);
+ mOutputFormat->setInt32(kKeySampleRate, sampleRate);
+ mOutputFormat->setInt32(kKeyBitRate, bitRate);
+ } else {
+ CHECK(!"Should not be here. Unknown audio encoding.");
+ }
+ break;
+ }
+
+ case OMX_PortDomainVideo:
+ {
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ if (video_def->eCompressionFormat == OMX_VIDEO_CodingUnused) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingMPEG4) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingH263) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingAVC) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ } else {
+ CHECK(!"Unknown compression format.");
+ }
+
+ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+ mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
+
+ if (!mIsEncoder) {
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = kPortIndexOutput;
+ status_t err =
+ mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect));
+
+ CODEC_LOGI("video dimensions are %u x %u",
+ video_def->nFrameWidth, video_def->nFrameHeight);
+
+ if (err == OK) {
+ CHECK_GE(rect.nLeft, 0);
+ CHECK_GE(rect.nTop, 0);
+ CHECK_GE(rect.nWidth, 0u);
+ CHECK_GE(rect.nHeight, 0u);
+ CHECK_LE(rect.nLeft + rect.nWidth - 1, video_def->nFrameWidth);
+ CHECK_LE(rect.nTop + rect.nHeight - 1, video_def->nFrameHeight);
+
+ mOutputFormat->setRect(
+ kKeyCropRect,
+ rect.nLeft,
+ rect.nTop,
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ CODEC_LOGI("Crop rect is %u x %u @ (%d, %d)",
+ rect.nWidth, rect.nHeight, rect.nLeft, rect.nTop);
+ } else {
+ mOutputFormat->setRect(
+ kKeyCropRect,
+ 0, 0,
+ video_def->nFrameWidth - 1,
+ video_def->nFrameHeight - 1);
+ }
+
+ if (mNativeWindow != NULL) {
+ initNativeWindowCrop();
+ }
+ }
+ break;
+ }
+
+ default:
+ {
+ CHECK(!"should not be here, neither audio nor video.");
+ break;
+ }
+ }
+
+ // If the input format contains rotation information, flag the output
+ // format accordingly.
+
+ int32_t rotationDegrees;
+ if (mSource->getFormat()->findInt32(kKeyRotation, &rotationDegrees)) {
+ mOutputFormat->setInt32(kKeyRotation, rotationDegrees);
+ }
+}
+
+status_t OMXCodec::pause() {
+ CODEC_LOGV("pause mState=%d", mState);
+
+ Mutex::Autolock autoLock(mLock);
+
+ if (mState != EXECUTING) {
+ return UNKNOWN_ERROR;
+ }
+
+ while (isIntermediateState(mState)) {
+ mAsyncCompletion.wait(mLock);
+ }
+ if (!strncmp(mComponentName, "OMX.qcom.", 9)) {
+ status_t err = mOMX->sendCommand(mNode,
+ OMX_CommandStateSet, OMX_StatePause);
+ CHECK_EQ(err, (status_t)OK);
+ setState(PAUSING);
+
+ mPaused = true;
+ while (mState != PAUSED && mState != ERROR) {
+ mAsyncCompletion.wait(mLock);
+ }
+ return mState == ERROR ? UNKNOWN_ERROR : OK;
+ } else {
+ mPaused = true;
+ return OK;
+ }
+
+}
+
+status_t OMXCodec::resumeLocked(bool drainInputBuf) {
+ CODEC_LOGV("resume mState=%d", mState);
+
+ if (!strncmp(mComponentName, "OMX.qcom.", 9)) {
+ while (isIntermediateState(mState)) {
+ mAsyncCompletion.wait(mLock);
+ }
+ CHECK_EQ(mState, (status_t)PAUSED);
+ status_t err = mOMX->sendCommand(mNode,
+ OMX_CommandStateSet, OMX_StateExecuting);
+ CHECK_EQ(err, (status_t)OK);
+ setState(IDLE_TO_EXECUTING);
+ mPaused = false;
+ while (mState != EXECUTING && mState != ERROR) {
+ mAsyncCompletion.wait(mLock);
+ }
+ if(drainInputBuf)
+ drainInputBuffers();
+ return mState == ERROR ? UNKNOWN_ERROR : OK;
+ } else { // SW Codec
+ mPaused = false;
+ if(drainInputBuf)
+ drainInputBuffers();
+ return OK;
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+status_t QueryCodecs(
+ const sp<IOMX> &omx,
+ const char *mime, bool queryDecoders, bool hwCodecOnly,
+ Vector<CodecCapabilities> *results) {
+ Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
+ results->clear();
+
+ OMXCodec::findMatchingCodecs(mime,
+ !queryDecoders /*createEncoder*/,
+ NULL /*matchComponentName*/,
+ hwCodecOnly ? OMXCodec::kHardwareCodecsOnly : 0 /*flags*/,
+ &matchingCodecs);
+
+ for (size_t c = 0; c < matchingCodecs.size(); c++) {
+ const char *componentName = matchingCodecs.itemAt(c).mName.string();
+
+ results->push();
+ CodecCapabilities *caps = &results->editItemAt(results->size() - 1);
+
+ status_t err =
+ QueryCodec(omx, componentName, mime, !queryDecoders, caps);
+
+ if (err != OK) {
+ results->removeAt(results->size() - 1);
+ }
+ }
+
+ return OK;
+}
+
+status_t QueryCodec(
+ const sp<IOMX> &omx,
+ const char *componentName, const char *mime,
+ bool isEncoder,
+ CodecCapabilities *caps) {
+ bool isVideo = !strncasecmp(mime, "video/", 6);
+
+ sp<OMXCodecObserver> observer = new OMXCodecObserver;
+ IOMX::node_id node;
+ status_t err = omx->allocateNode(componentName, observer, &node);
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMXCodec::setComponentRole(omx, node, isEncoder, mime);
+
+ caps->mFlags = 0;
+ caps->mComponentName = componentName;
+
+ // NOTE: OMX does not provide a way to query AAC profile support
+ if (isVideo) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
+ InitOMXParams(&param);
+
+ param.nPortIndex = !isEncoder ? 0 : 1;
+
+ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoProfileLevelQuerySupported,
+ &param, sizeof(param));
+
+ if (err != OK) {
+ break;
+ }
+
+ CodecProfileLevel profileLevel;
+ profileLevel.mProfile = param.eProfile;
+ profileLevel.mLevel = param.eLevel;
+
+ caps->mProfileLevels.push(profileLevel);
+ }
+
+ // Color format query
+ // return colors in the order reported by the OMX component
+ // prefix "flexible" standard ones with the flexible equivalent
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ InitOMXParams(&portFormat);
+ portFormat.nPortIndex = !isEncoder ? 1 : 0;
+ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoPortFormat,
+ &portFormat, sizeof(portFormat));
+ if (err != OK) {
+ break;
+ }
+
+ OMX_U32 flexibleEquivalent;
+ if (ACodec::isFlexibleColorFormat(
+ omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
+ &flexibleEquivalent)) {
+ bool marked = false;
+ for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
+ if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
+ marked = true;
+ break;
+ }
+ }
+ if (!marked) {
+ caps->mColorFormats.push(flexibleEquivalent);
+ }
+ }
+ caps->mColorFormats.push(portFormat.eColorFormat);
+ }
+ }
+
+ if (isVideo && !isEncoder) {
+ if (omx->storeMetaDataInBuffers(
+ node, 1 /* port index */, OMX_TRUE) == OK ||
+ omx->prepareForAdaptivePlayback(
+ node, 1 /* port index */, OMX_TRUE,
+ 1280 /* width */, 720 /* height */) == OK) {
+ caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback;
+ }
+ }
+
+ CHECK_EQ(omx->freeNode(node), (status_t)OK);
+
+ return OK;
+}
+
+status_t QueryCodecs(
+ const sp<IOMX> &omx,
+ const char *mimeType, bool queryDecoders,
+ Vector<CodecCapabilities> *results) {
+ return QueryCodecs(omx, mimeType, queryDecoders, false /*hwCodecOnly*/, results);
+}
+
+// These are supposed be equivalent to the logic in
+// "audio_channel_out_mask_from_count".
+status_t getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) {
+ switch (numChannels) {
+ case 1:
+ map[0] = OMX_AUDIO_ChannelCF;
+ break;
+ case 2:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ break;
+ case 3:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ map[2] = OMX_AUDIO_ChannelCF;
+ break;
+ case 4:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ map[2] = OMX_AUDIO_ChannelLR;
+ map[3] = OMX_AUDIO_ChannelRR;
+ break;
+ case 5:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ map[2] = OMX_AUDIO_ChannelCF;
+ map[3] = OMX_AUDIO_ChannelLR;
+ map[4] = OMX_AUDIO_ChannelRR;
+ break;
+ case 6:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ map[2] = OMX_AUDIO_ChannelCF;
+ map[3] = OMX_AUDIO_ChannelLFE;
+ map[4] = OMX_AUDIO_ChannelLR;
+ map[5] = OMX_AUDIO_ChannelRR;
+ break;
+ case 7:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ map[2] = OMX_AUDIO_ChannelCF;
+ map[3] = OMX_AUDIO_ChannelLFE;
+ map[4] = OMX_AUDIO_ChannelLR;
+ map[5] = OMX_AUDIO_ChannelRR;
+ map[6] = OMX_AUDIO_ChannelCS;
+ break;
+ case 8:
+ map[0] = OMX_AUDIO_ChannelLF;
+ map[1] = OMX_AUDIO_ChannelRF;
+ map[2] = OMX_AUDIO_ChannelCF;
+ map[3] = OMX_AUDIO_ChannelLFE;
+ map[4] = OMX_AUDIO_ChannelLR;
+ map[5] = OMX_AUDIO_ChannelRR;
+ map[6] = OMX_AUDIO_ChannelLS;
+ map[7] = OMX_AUDIO_ChannelRS;
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 147eb45..2fd3dab 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -26,7 +26,6 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
-#include <gui/BufferItem.h>
#include <gui/ISurfaceComposer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>
@@ -298,7 +297,7 @@ status_t SurfaceMediaSource::read(
// TODO: mCurrentSlot can be made a bufferstate since there
// can be more than one "current" slots.
- BufferItem item;
+ BufferQueue::BufferItem item;
// If the recording has started and the queue is empty, then just
// wait here till the frames come in from the client side
while (mStarted) {
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 4f7c48f..3f65c42 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -9,6 +9,13 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/native/include/media/openmax \
$(TOP)/hardware/msm7k
+ifeq ($(BOARD_HAS_MTK_HARDWARE),true)
+LOCAL_CFLAGS += -DMTK_HARDWARE
+
+LOCAL_C_INCLUDES += \
+ $(TOP)/frameworks/av/include/media/stagefright/dpframework
+endif
+
LOCAL_CFLAGS += -Werror
LOCAL_CLANG := true
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 597167f..b43a3b9 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -22,6 +22,14 @@
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaErrors.h>
+#ifdef MTK_HARDWARE
+#include <DpBlitStream.h>
+
+const OMX_COLOR_FORMATTYPE OMX_MTK_COLOR_FormatYV12 = (OMX_COLOR_FORMATTYPE)0x7F000200;
+const OMX_COLOR_FORMATTYPE OMX_COLOR_FormatVendorMTKYUV = (OMX_COLOR_FORMATTYPE)0x7F000001;
+const OMX_COLOR_FORMATTYPE OMX_COLOR_FormatVendorMTKYUV_FCM = (OMX_COLOR_FORMATTYPE)0x7F000002;
+#endif
+
namespace android {
ColorConverter::ColorConverter(
@@ -47,6 +55,11 @@ bool ColorConverter::isValid() const {
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
case OMX_COLOR_FormatYUV420SemiPlanar:
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+#ifdef MTK_HARDWARE
+ case OMX_MTK_COLOR_FormatYV12:
+ case OMX_COLOR_FormatVendorMTKYUV:
+ case OMX_COLOR_FormatVendorMTKYUV_FCM:
+#endif
return true;
default:
@@ -103,7 +116,11 @@ status_t ColorConverter::convert(
switch (mSrcFormat) {
case OMX_COLOR_FormatYUV420Planar:
+#ifdef MTK_HARDWARE
+ err = convertYUVToRGBHW(src, dst);
+#else
err = convertYUV420Planar(src, dst);
+#endif
break;
case OMX_COLOR_FormatCbYCrY:
@@ -122,6 +139,14 @@ status_t ColorConverter::convert(
err = convertTIYUV420PackedSemiPlanar(src, dst);
break;
+#ifdef MTK_HARDWARE
+ case OMX_MTK_COLOR_FormatYV12:
+ case OMX_COLOR_FormatVendorMTKYUV:
+ case OMX_COLOR_FormatVendorMTKYUV_FCM:
+ err = convertYUVToRGBHW(src, dst);
+ break;
+#endif
+
default:
{
CHECK(!"Should not be here. Unknown color conversion.");
@@ -521,4 +546,92 @@ uint8_t *ColorConverter::initClip() {
return &mClip[-kClipMin];
}
+#ifdef MTK_HARDWARE
+status_t ColorConverter::convertYUVToRGBHW(const BitmapParams &src, const BitmapParams &dst) {
+ DpBlitStream blitStream;
+ unsigned int srcWStride = src.mWidth;
+ unsigned int srcHStride = src.mHeight;
+
+ DpRect srcRoi;
+ srcRoi.x = src.mCropLeft;
+ srcRoi.y = src.mCropTop;
+ srcRoi.w = src.mCropRight - src.mCropLeft;
+ srcRoi.h = src.mCropBottom - src.mCropTop;
+
+ unsigned int dstWStride = dst.mWidth ;
+ unsigned int dstHStride = dst.mHeight ;
+ char name_yuv[100];
+ char retriever_yuv_propty[100];
+ char name_rgb[100];
+ char retriever_propty_rgb[100];
+
+ if (mSrcFormat == OMX_COLOR_FormatYUV420Planar) {
+ char* planar[3];
+ unsigned int length[3];
+ planar[0] = (char*)src.mBits;
+ length[0] = srcWStride*srcHStride;
+ planar[1] = planar[0] + length[0];
+ length[1] = srcWStride*srcHStride/4;
+ planar[2] = planar[1] + length[1];
+ length[2] = length[1];
+
+ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 3);
+ blitStream.setSrcConfig(srcWStride, srcHStride, eYUV_420_3P, eInterlace_None, &srcRoi);
+ }
+ else if (mSrcFormat == OMX_MTK_COLOR_FormatYV12) {
+ char* planar[3];
+ unsigned int length[3];
+ planar[0] = (char*)src.mBits;
+ length[0] = srcWStride*srcHStride;
+ planar[1] = planar[0] + length[0];
+ length[1] = srcWStride*srcHStride/4;
+ planar[2] = planar[1] + length[1];
+ length[2] = length[1];
+
+ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 3);
+ blitStream.setSrcConfig(srcWStride, srcHStride, eYV12, eInterlace_None, &srcRoi);
+ }
+ else if (mSrcFormat == OMX_COLOR_FormatVendorMTKYUV) {
+ char* planar[2];
+ unsigned int length[2];
+ planar[0] = (char*)src.mBits;
+ length[0] = srcWStride*srcHStride;
+ planar[1] = planar[0] + length[0];
+ length[1] = srcWStride*srcHStride/2;
+
+ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 2);
+ blitStream.setSrcConfig(srcWStride, srcHStride, srcWStride * 32, srcWStride * 16, eNV12_BLK, DP_PROFILE_BT601, eInterlace_None, &srcRoi);
+ }
+ else if (mSrcFormat == OMX_COLOR_FormatVendorMTKYUV_FCM) {
+ char* planar[2];
+ unsigned int length[2];
+ planar[0] = (char*)src.mBits;
+ length[0] = srcWStride*srcHStride;
+ planar[1] = planar[0] + length[0];
+ length[1] = srcWStride*srcHStride/2;
+
+ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 2);
+ blitStream.setSrcConfig(srcWStride, srcHStride, srcWStride * 32, srcWStride * 16, eNV12_BLK_FCM, DP_PROFILE_BT601, eInterlace_None, &srcRoi);
+ }
+
+ if (mDstFormat == OMX_COLOR_Format16bitRGB565) {
+ blitStream.setDstBuffer(dst.mBits, dst.mWidth * dst.mHeight * 2);
+ blitStream.setDstConfig(dst.mWidth, dst.mHeight, eRGB565);
+ }
+ else if (mDstFormat == OMX_COLOR_Format32bitARGB8888) {
+ blitStream.setDstBuffer(dst.mBits, dst.mWidth * dst.mHeight * 4);
+ blitStream.setDstConfig(dst.mWidth, dst.mHeight, eRGBA8888);
+ }
+
+ // Add Sharpness in Video Thumbnail
+ blitStream.setTdshp(1);
+ bool bRet = blitStream.invalidate();
+
+ if (!bRet)
+ return OK;
+ else
+ return UNKNOWN_ERROR;
+}
+#endif
+
} // namespace android
diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp
index a606315..952840b 100644
--- a/media/libstagefright/filters/GraphicBufferListener.cpp
+++ b/media/libstagefright/filters/GraphicBufferListener.cpp
@@ -21,8 +21,6 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
-#include <gui/BufferItem.h>
-
#include "GraphicBufferListener.h"
namespace android {
@@ -85,8 +83,8 @@ void GraphicBufferListener::onSidebandStreamChanged() {
// nothing to do
}
-BufferItem GraphicBufferListener::getBufferItem() {
- BufferItem item;
+BufferQueue::BufferItem GraphicBufferListener::getBufferItem() {
+ BufferQueue::BufferItem item;
{
Mutex::Autolock autoLock(mMutex);
@@ -126,7 +124,8 @@ BufferItem GraphicBufferListener::getBufferItem() {
return item;
}
-sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) {
+sp<GraphicBuffer> GraphicBufferListener::getBuffer(
+ BufferQueue::BufferItem item) {
sp<GraphicBuffer> buf;
if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) {
ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf);
@@ -139,7 +138,8 @@ sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) {
return buf;
}
-status_t GraphicBufferListener::releaseBuffer(BufferItem item) {
+status_t GraphicBufferListener::releaseBuffer(
+ BufferQueue::BufferItem item) {
if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) {
ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf);
return ERROR_OUT_OF_RANGE;
diff --git a/media/libstagefright/filters/GraphicBufferListener.h b/media/libstagefright/filters/GraphicBufferListener.h
index 586bf65..b3e0ee3 100644
--- a/media/libstagefright/filters/GraphicBufferListener.h
+++ b/media/libstagefright/filters/GraphicBufferListener.h
@@ -41,9 +41,9 @@ struct GraphicBufferListener : public BufferQueue::ConsumerListener {
return mProducer;
}
- BufferItem getBufferItem();
- sp<GraphicBuffer> getBuffer(BufferItem item);
- status_t releaseBuffer(BufferItem item);
+ BufferQueue::BufferItem getBufferItem();
+ sp<GraphicBuffer> getBuffer(BufferQueue::BufferItem item);
+ status_t releaseBuffer(BufferQueue::BufferItem item);
enum {
kWhatFrameAvailable = 'frav',
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
index 0cf6b06..1806454 100644
--- a/media/libstagefright/filters/MediaFilter.cpp
+++ b/media/libstagefright/filters/MediaFilter.cpp
@@ -31,8 +31,6 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaFilter.h>
-#include <gui/BufferItem.h>
-
#include "ColorConvert.h"
#include "GraphicBufferListener.h"
#include "IntrinsicBlurFilter.h"
@@ -756,7 +754,7 @@ void MediaFilter::onCreateInputSurface() {
}
void MediaFilter::onInputFrameAvailable() {
- BufferItem item = mGraphicBufferListener->getBufferItem();
+ BufferQueue::BufferItem item = mGraphicBufferListener->getBufferItem();
sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item);
// get pointer to graphic buffer
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 1a7dc9d..65a97da 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -57,7 +57,7 @@ void GraphicBufferSource::PersistentProxyListener::onFrameAvailable(
if (consumer == NULL) {
return;
}
- BufferItem bi;
+ BufferQueue::BufferItem bi;
status_t err = consumer->acquireBuffer(&bi, 0);
if (err != OK) {
ALOGE("PersistentProxyListener: acquireBuffer failed (%d)", err);
@@ -461,7 +461,7 @@ void GraphicBufferSource::suspend(bool suspend) {
mSuspended = true;
while (mNumFramesAvailable > 0) {
- BufferItem item;
+ BufferQueue::BufferItem item;
status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
@@ -512,7 +512,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%zu",
mNumFramesAvailable);
- BufferItem item;
+ BufferQueue::BufferItem item;
status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen
@@ -599,7 +599,7 @@ bool GraphicBufferSource::repeatLatestBuffer_l() {
return false;
}
- BufferItem item;
+ BufferQueue::BufferItem item;
item.mBuf = mLatestBufferId;
item.mFrameNumber = mLatestBufferFrameNum;
item.mTimestamp = mRepeatLastFrameTimestamp;
@@ -631,7 +631,7 @@ bool GraphicBufferSource::repeatLatestBuffer_l() {
}
void GraphicBufferSource::setLatestBuffer_l(
- const BufferItem &item, bool dropped) {
+ const BufferQueue::BufferItem &item, bool dropped) {
ALOGV("setLatestBuffer_l");
if (mLatestBufferId >= 0) {
@@ -685,7 +685,7 @@ status_t GraphicBufferSource::signalEndOfInputStream() {
return OK;
}
-int64_t GraphicBufferSource::getTimestamp(const BufferItem &item) {
+int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) {
int64_t timeUs = item.mTimestamp / 1000;
if (mTimePerCaptureUs > 0ll) {
@@ -745,7 +745,8 @@ int64_t GraphicBufferSource::getTimestamp(const BufferItem &item) {
return timeUs;
}
-status_t GraphicBufferSource::submitBuffer_l(const BufferItem &item, int cbi) {
+status_t GraphicBufferSource::submitBuffer_l(
+ const BufferQueue::BufferItem &item, int cbi) {
ALOGV("submitBuffer_l cbi=%d", cbi);
int64_t timeUs = getTimestamp(item);
@@ -872,7 +873,7 @@ void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
ALOGV("onFrameAvailable: suspended, ignoring frame");
}
- BufferItem item;
+ BufferQueue::BufferItem item;
status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == OK) {
mNumBufferAcquired++;
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 2f929d9..d603308 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -223,7 +223,7 @@ class GraphicBufferSource : public BufferQueue::ConsumerListener {
// Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer
// reference into the codec buffer, and submits the data to the codec.
- status_t submitBuffer_l(const BufferItem &item, int cbi);
+ status_t submitBuffer_l(const BufferQueue::BufferItem &item, int cbi);
// Submits an empty buffer, with the EOS flag set. Returns without
// doing anything if we don't have a codec buffer available.
@@ -234,9 +234,9 @@ class GraphicBufferSource : public BufferQueue::ConsumerListener {
int &id, uint64_t frameNum,
const sp<GraphicBuffer> buffer, const sp<Fence> &fence);
- void setLatestBuffer_l(const BufferItem &item, bool dropped);
+ void setLatestBuffer_l(const BufferQueue::BufferItem &item, bool dropped);
bool repeatLatestBuffer_l();
- int64_t getTimestamp(const BufferItem &item);
+ int64_t getTimestamp(const BufferQueue::BufferItem &item);
// Lock, covers all member variables.
mutable Mutex mMutex;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 1a0a110..3ef8854 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1003,6 +1003,18 @@ status_t AudioFlinger::setStreamVolume(audio_stream_type_t stream, float value,
thread->setStreamVolume(stream, value);
}
+#ifdef MTK_HARDWARE
+ // MTK FM Volume
+ if(stream == AUDIO_STREAM_MUSIC) {
+ sp<ThreadBase> thread;
+ thread = checkPlaybackThread_l(output);
+ if (thread == primaryPlaybackThread_l()) {
+ audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
+ dev->set_parameters (dev, String8::format("SetFmVolume=%f", value));
+ }
+ }
+#endif
+
return NO_ERROR;
}
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index af46d63..f56f8f3 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -742,6 +742,18 @@ void CameraClient::notifyCallback(int32_t msgType, int32_t ext1,
int32_t ext2, void* user) {
LOG2("notifyCallback(%d)", msgType);
+#ifdef MTK_HARDWARE
+ if (msgType == 0x40000000) { //MTK_CAMERA_MSG_EXT_NOTIFY
+ if (ext1 == 0x11) { //MTK_CAMERA_MSG_EXT_NOTIFY_SHUTTER
+ msgType = CAMERA_MSG_SHUTTER;
+ }
+ if (ext1 == 0x10) { //MTK_CAMERA_MSG_EXT_CAPTURE_DONE
+ return;
+ }
+ LOG2("MtknotifyCallback(0x%x, 0x%x)", ext1, ext2);
+ }
+#endif
+
sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
if (client.get() == nullptr) return;
@@ -772,6 +784,34 @@ void CameraClient::dataCallback(int32_t msgType,
return;
}
+#ifdef MTK_HARDWARE
+ if (msgType == 0x80000000) { //MTK_CAMERA_MSG_EXT_DATA
+ struct DataHeader {
+ uint32_t extMsgType;
+ } dataHeader;
+ sp<IMemoryHeap> heap = 0;
+ ssize_t offset = 0;
+ size_t size = 0;
+
+ if (dataPtr.get()) {
+
+ heap = dataPtr->getMemory(&offset, &size);
+ if ( NULL != heap.get() && NULL != heap->base() )
+ ::memcpy(&dataHeader, ((uint8_t*)heap->base()) + offset, sizeof(DataHeader));
+
+ if (dataHeader.extMsgType == 0x10) { //MTK_CAMERA_MSG_EXT_DATA_COMPRESSED_IMAGE
+ msgType = CAMERA_MSG_COMPRESSED_IMAGE;
+ sp<MemoryBase> image = new MemoryBase(heap,
+ (offset + sizeof(DataHeader) + sizeof(uint_t) * 1),
+ (size - sizeof(DataHeader) - sizeof(uint_t) * 1));
+ client->handleCompressedPicture(image);
+ return;
+ }
+ }
+ LOG2("MtkDataCallback(0x%x)", dataHeader.extMsgType);
+ }
+#endif
+
switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
case CAMERA_MSG_PREVIEW_FRAME:
client->handlePreviewData(msgType, dataPtr, metadata);
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp.orig b/services/camera/libcameraservice/api1/CameraClient.cpp.orig
new file mode 100644
index 0000000..f56f8f3
--- /dev/null
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp.orig
@@ -0,0 +1,1066 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraClient"
+//#define LOG_NDEBUG 0
+
+#include <cutils/properties.h>
+#include <gui/Surface.h>
+
+#include "api1/CameraClient.h"
+#include "device1/CameraHardwareInterface.h"
+#include "CameraService.h"
+
+namespace android {
+
+#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
+#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
+
+static int getCallingPid() {
+ return IPCThreadState::self()->getCallingPid();
+}
+
+CameraClient::CameraClient(const sp<CameraService>& cameraService,
+ const sp<ICameraClient>& cameraClient,
+ const String16& clientPackageName,
+ int cameraId, int cameraFacing,
+ int clientPid, int clientUid,
+ int servicePid, bool legacyMode):
+ Client(cameraService, cameraClient, clientPackageName,
+ cameraId, cameraFacing, clientPid, clientUid, servicePid)
+{
+ int callingPid = getCallingPid();
+ LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);
+
+ mHardware = NULL;
+ mMsgEnabled = 0;
+ mSurface = 0;
+ mPreviewWindow = 0;
+ mDestructionStarted = false;
+
+ // Callback is disabled by default
+ mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
+ mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
+ mLegacyMode = legacyMode;
+ mPlayShutterSound = true;
+
+ mLongshotEnabled = false;
+ mBurstCnt = 0;
+ LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
+}
+
+status_t CameraClient::initialize(CameraModule *module) {
+ int callingPid = getCallingPid();
+ status_t res;
+
+ LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
+
+ // Verify ops permissions
+ res = startCameraOps();
+ if (res != OK) {
+ return res;
+ }
+
+ char camera_device_name[10];
+ snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
+
+ mHardware = new CameraHardwareInterface(camera_device_name);
+ res = mHardware->initialize(module);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ mHardware.clear();
+ return res;
+ }
+
+ mHardware->setCallbacks(notifyCallback,
+ dataCallback,
+ dataCallbackTimestamp,
+ (void *)(uintptr_t)mCameraId);
+
+ // Enable zoom, error, focus, and metadata messages by default
+ enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
+ CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
+
+ LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
+ return OK;
+}
+
+
+// tear down the client
+CameraClient::~CameraClient() {
+ mDestructionStarted = true;
+ int callingPid = getCallingPid();
+ LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this);
+
+ disconnect();
+ LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this);
+}
+
+status_t CameraClient::dump(int fd, const Vector<String16>& args) {
+ return BasicClient::dump(fd, args);
+}
+
+status_t CameraClient::dumpClient(int fd, const Vector<String16>& args) {
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+
+ size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) with UID %d\n",
+ mCameraId,
+ (getRemoteCallback() != NULL ?
+ IInterface::asBinder(getRemoteCallback()).get() : NULL),
+ mClientUid);
+ len = (len > SIZE - 1) ? SIZE - 1 : len;
+ write(fd, buffer, len);
+
+ len = snprintf(buffer, SIZE, "Latest set parameters:\n");
+ len = (len > SIZE - 1) ? SIZE - 1 : len;
+ write(fd, buffer, len);
+
+ mLatestSetParameters.dump(fd, args);
+
+ const char *enddump = "\n\n";
+ write(fd, enddump, strlen(enddump));
+
+ return mHardware->dump(fd, args);
+}
+
+// ----------------------------------------------------------------------------
+
+status_t CameraClient::checkPid() const {
+ int callingPid = getCallingPid();
+ if (callingPid == mClientPid) return NO_ERROR;
+
+ ALOGW("attempt to use a locked camera from a different process"
+ " (old pid %d, new pid %d)", mClientPid, callingPid);
+ return EBUSY;
+}
+
+status_t CameraClient::checkPidAndHardware() const {
+ status_t result = checkPid();
+ if (result != NO_ERROR) return result;
+ if (mHardware == 0) {
+ ALOGE("attempt to use a camera after disconnect() (pid %d)", getCallingPid());
+ return INVALID_OPERATION;
+ }
+ return NO_ERROR;
+}
+
+status_t CameraClient::lock() {
+ int callingPid = getCallingPid();
+ LOG1("lock (pid %d)", callingPid);
+ Mutex::Autolock lock(mLock);
+
+ // lock camera to this client if the camera is unlocked
+ if (mClientPid == 0) {
+ mClientPid = callingPid;
+ return NO_ERROR;
+ }
+
+ // returns NO_ERROR if the client already owns the camera, EBUSY otherwise
+ return checkPid();
+}
+
+status_t CameraClient::unlock() {
+ int callingPid = getCallingPid();
+ LOG1("unlock (pid %d)", callingPid);
+ Mutex::Autolock lock(mLock);
+
+ // allow anyone to use camera (after they lock the camera)
+ status_t result = checkPid();
+ if (result == NO_ERROR) {
+ if (mHardware->recordingEnabled()) {
+ ALOGE("Not allowed to unlock camera during recording.");
+ return INVALID_OPERATION;
+ }
+ mClientPid = 0;
+ LOG1("clear mRemoteCallback (pid %d)", callingPid);
+ // we need to remove the reference to ICameraClient so that when the app
+ // goes away, the reference count goes to 0.
+ mRemoteCallback.clear();
+ }
+ return result;
+}
+
+// connect a new client to the camera
+status_t CameraClient::connect(const sp<ICameraClient>& client) {
+ int callingPid = getCallingPid();
+ LOG1("connect E (pid %d)", callingPid);
+ Mutex::Autolock lock(mLock);
+
+ if (mClientPid != 0 && checkPid() != NO_ERROR) {
+ ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)",
+ mClientPid, callingPid);
+ return EBUSY;
+ }
+
+ if (mRemoteCallback != 0 &&
+ (IInterface::asBinder(client) == IInterface::asBinder(mRemoteCallback))) {
+ LOG1("Connect to the same client");
+ return NO_ERROR;
+ }
+
+ mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
+ mClientPid = callingPid;
+ mRemoteCallback = client;
+
+ LOG1("connect X (pid %d)", callingPid);
+ return NO_ERROR;
+}
+
+static void disconnectWindow(const sp<ANativeWindow>& window) {
+ if (window != 0) {
+ status_t result = native_window_api_disconnect(window.get(),
+ NATIVE_WINDOW_API_CAMERA);
+ if (result != NO_ERROR) {
+ ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result),
+ result);
+ }
+ }
+}
+
+void CameraClient::disconnect() {
+ int callingPid = getCallingPid();
+ LOG1("disconnect E (pid %d)", callingPid);
+ Mutex::Autolock lock(mLock);
+
+ // Allow both client and the media server to disconnect at all times
+ if (callingPid != mClientPid && callingPid != mServicePid) {
+ ALOGW("different client - don't disconnect");
+ return;
+ }
+
+ // Make sure disconnect() is done once and once only, whether it is called
+ // from the user directly, or called by the destructor.
+ if (mHardware == 0) return;
+
+ LOG1("hardware teardown");
+ // Before destroying mHardware, we must make sure it's in the
+ // idle state.
+ // Turn off all messages.
+ disableMsgType(CAMERA_MSG_ALL_MSGS);
+ mHardware->stopPreview();
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
+ mHardware->cancelPicture();
+ // Release the hardware resources.
+ mHardware->release();
+
+ // Release the held ANativeWindow resources.
+ if (mPreviewWindow != 0) {
+ disconnectWindow(mPreviewWindow);
+ mPreviewWindow = 0;
+ mHardware->setPreviewWindow(mPreviewWindow);
+ }
+ mHardware.clear();
+
+ CameraService::Client::disconnect();
+
+ LOG1("disconnect X (pid %d)", callingPid);
+}
+
+// ----------------------------------------------------------------------------
+
+status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
+ const sp<ANativeWindow>& window) {
+ Mutex::Autolock lock(mLock);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ // return if no change in surface.
+ if (binder == mSurface) {
+ return NO_ERROR;
+ }
+
+ if (window != 0) {
+ result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
+ if (result != NO_ERROR) {
+ ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result),
+ result);
+ return result;
+ }
+ }
+
+ // If preview has been already started, register preview buffers now.
+ if (mHardware->previewEnabled()) {
+ if (window != 0) {
+ native_window_set_scaling_mode(window.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+ native_window_set_buffers_transform(window.get(), mOrientation);
+ result = mHardware->setPreviewWindow(window);
+ }
+ }
+
+ if (result == NO_ERROR) {
+ // Everything has succeeded. Disconnect the old window and remember the
+ // new window.
+ disconnectWindow(mPreviewWindow);
+ mSurface = binder;
+ mPreviewWindow = window;
+ } else {
+ // Something went wrong after we connected to the new window, so
+ // disconnect here.
+ disconnectWindow(window);
+ }
+
+ return result;
+}
+
+// set the buffer consumer that the preview will use
+status_t CameraClient::setPreviewTarget(
+ const sp<IGraphicBufferProducer>& bufferProducer) {
+ LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
+ getCallingPid());
+
+ sp<IBinder> binder;
+ sp<ANativeWindow> window;
+ if (bufferProducer != 0) {
+ binder = IInterface::asBinder(bufferProducer);
+ // Using controlledByApp flag to ensure that the buffer queue remains in
+ // async mode for the old camera API, where many applications depend
+ // on that behavior.
+ window = new Surface(bufferProducer, /*controlledByApp*/ true);
+ }
+ return setPreviewWindow(binder, window);
+}
+
+// set the preview callback flag to affect how the received frames from
+// preview are handled.
+void CameraClient::setPreviewCallbackFlag(int callback_flag) {
+ LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, getCallingPid());
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return;
+
+ mPreviewCallbackFlag = callback_flag;
+ if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+ enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+ }
+}
+
+status_t CameraClient::setPreviewCallbackTarget(
+ const sp<IGraphicBufferProducer>& callbackProducer) {
+ (void)callbackProducer;
+ ALOGE("%s: Unimplemented!", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+
+// start preview mode
+status_t CameraClient::startPreview() {
+ Mutex::Autolock lock(mLock);
+ LOG1("startPreview (pid %d)", getCallingPid());
+ return startCameraMode(CAMERA_PREVIEW_MODE);
+}
+
+// start recording mode
+status_t CameraClient::startRecording() {
+ Mutex::Autolock lock(mLock);
+ LOG1("startRecording (pid %d)", getCallingPid());
+ return startCameraMode(CAMERA_RECORDING_MODE);
+}
+
+// start preview or recording
+status_t CameraClient::startCameraMode(camera_mode mode) {
+ LOG1("startCameraMode(%d)", mode);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ switch(mode) {
+ case CAMERA_PREVIEW_MODE:
+ if (mSurface == 0 && mPreviewWindow == 0) {
+ LOG1("mSurface is not set yet.");
+ // still able to start preview in this case.
+ }
+ return startPreviewMode();
+ case CAMERA_RECORDING_MODE:
+ if (mSurface == 0 && mPreviewWindow == 0) {
+ ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
+ return INVALID_OPERATION;
+ }
+ return startRecordingMode();
+ default:
+ return UNKNOWN_ERROR;
+ }
+}
+
+status_t CameraClient::startPreviewMode() {
+ LOG1("startPreviewMode");
+ status_t result = NO_ERROR;
+
+ // if preview has been enabled, nothing needs to be done
+ if (mHardware->previewEnabled()) {
+ return NO_ERROR;
+ }
+
+ if (mPreviewWindow != 0) {
+ native_window_set_scaling_mode(mPreviewWindow.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+ native_window_set_buffers_transform(mPreviewWindow.get(),
+ mOrientation);
+ }
+ mHardware->setPreviewWindow(mPreviewWindow);
+ result = mHardware->startPreview();
+ if (result == NO_ERROR) {
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_ACTIVE,
+ String8::format("%d", mCameraId));
+ }
+ return result;
+}
+
+status_t CameraClient::startRecordingMode() {
+ LOG1("startRecordingMode");
+ status_t result = NO_ERROR;
+
+ // if recording has been enabled, nothing needs to be done
+ if (mHardware->recordingEnabled()) {
+ return NO_ERROR;
+ }
+
+ // if preview has not been started, start preview first
+ if (!mHardware->previewEnabled()) {
+ result = startPreviewMode();
+ if (result != NO_ERROR) {
+ return result;
+ }
+ }
+
+ // start recording mode
+ enableMsgType(CAMERA_MSG_VIDEO_FRAME);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_START);
+ result = mHardware->startRecording();
+ if (result != NO_ERROR) {
+ ALOGE("mHardware->startRecording() failed with status %d", result);
+ }
+ return result;
+}
+
+// stop preview mode
+void CameraClient::stopPreview() {
+ LOG1("stopPreview (pid %d)", getCallingPid());
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return;
+
+
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+ mHardware->stopPreview();
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
+ mPreviewBuffer.clear();
+}
+
+// stop recording mode
+void CameraClient::stopRecording() {
+ LOG1("stopRecording (pid %d)", getCallingPid());
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return;
+
+ disableMsgType(CAMERA_MSG_VIDEO_FRAME);
+ mHardware->stopRecording();
+ mCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
+
+ mPreviewBuffer.clear();
+}
+
+// release a recording frame
+void CameraClient::releaseRecordingFrame(const sp<IMemory>& mem) {
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return;
+ mHardware->releaseRecordingFrame(mem);
+}
+
+status_t CameraClient::storeMetaDataInBuffers(bool enabled)
+{
+ LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false");
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) {
+ return UNKNOWN_ERROR;
+ }
+ return mHardware->storeMetaDataInBuffers(enabled);
+}
+
+bool CameraClient::previewEnabled() {
+ LOG1("previewEnabled (pid %d)", getCallingPid());
+
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return false;
+ return mHardware->previewEnabled();
+}
+
+bool CameraClient::recordingEnabled() {
+ LOG1("recordingEnabled (pid %d)", getCallingPid());
+
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return false;
+ return mHardware->recordingEnabled();
+}
+
+status_t CameraClient::autoFocus() {
+ LOG1("autoFocus (pid %d)", getCallingPid());
+
+ Mutex::Autolock lock(mLock);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ return mHardware->autoFocus();
+}
+
+status_t CameraClient::cancelAutoFocus() {
+ LOG1("cancelAutoFocus (pid %d)", getCallingPid());
+
+ Mutex::Autolock lock(mLock);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ return mHardware->cancelAutoFocus();
+}
+
+// take a picture - image is returned in callback
+status_t CameraClient::takePicture(int msgType) {
+ LOG1("takePicture (pid %d): 0x%x", getCallingPid(), msgType);
+
+ Mutex::Autolock lock(mLock);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ if ((msgType & CAMERA_MSG_RAW_IMAGE) &&
+ (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
+ ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY"
+ " cannot be both enabled");
+ return BAD_VALUE;
+ }
+
+ // We only accept picture related message types
+ // and ignore other types of messages for takePicture().
+ int picMsgType = msgType
+ & (CAMERA_MSG_SHUTTER |
+ CAMERA_MSG_POSTVIEW_FRAME |
+ CAMERA_MSG_RAW_IMAGE |
+ CAMERA_MSG_RAW_IMAGE_NOTIFY |
+ CAMERA_MSG_COMPRESSED_IMAGE);
+
+ enableMsgType(picMsgType);
+ mBurstCnt = mHardware->getParameters().getInt("num-snaps-per-shutter");
+ if(mBurstCnt <= 0)
+ mBurstCnt = 1;
+ LOG1("mBurstCnt = %d", mBurstCnt);
+
+ return mHardware->takePicture();
+}
+
+// set preview/capture parameters - key/value pairs
+status_t CameraClient::setParameters(const String8& params) {
+ LOG1("setParameters (pid %d) (%s)", getCallingPid(), params.string());
+
+ Mutex::Autolock lock(mLock);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ mLatestSetParameters = CameraParameters(params);
+ CameraParameters p(params);
+ return mHardware->setParameters(p);
+}
+
+// get preview/capture parameters - key/value pairs
+String8 CameraClient::getParameters() const {
+ Mutex::Autolock lock(mLock);
+ // The camera service can unconditionally get the parameters at all times
+ if (getCallingPid() != mServicePid && checkPidAndHardware() != NO_ERROR) return String8();
+
+ String8 params(mHardware->getParameters().flatten());
+ LOG1("getParameters (pid %d) (%s)", getCallingPid(), params.string());
+ return params;
+}
+
+// enable shutter sound
+status_t CameraClient::enableShutterSound(bool enable) {
+ LOG1("enableShutterSound (pid %d)", getCallingPid());
+
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ if (enable) {
+ mPlayShutterSound = true;
+ return OK;
+ }
+
+ // the camera2 api legacy mode can unconditionally disable the shutter sound
+ if (mLegacyMode) {
+ ALOGV("%s: Disable shutter sound in legacy mode", __FUNCTION__);
+ mPlayShutterSound = false;
+ return OK;
+ }
+
+ // Disabling shutter sound may not be allowed. In that case only
+ // allow the mediaserver process to disable the sound.
+ char value[PROPERTY_VALUE_MAX];
+ property_get("ro.camera.sound.forced", value, "0");
+ if (strcmp(value, "0") != 0) {
+ // Disabling shutter sound is not allowed. Deny if the current
+ // process is not mediaserver.
+ if (getCallingPid() != getpid()) {
+ ALOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid());
+ return PERMISSION_DENIED;
+ }
+ }
+
+ mPlayShutterSound = false;
+ return OK;
+}
+
+status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
+ LOG1("sendCommand (pid %d)", getCallingPid());
+ int orientation;
+ Mutex::Autolock lock(mLock);
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) {
+ // Mirror the preview if the camera is front-facing.
+ orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT);
+ if (orientation == -1) return BAD_VALUE;
+
+ if (mOrientation != orientation) {
+ mOrientation = orientation;
+ if (mPreviewWindow != 0) {
+ native_window_set_buffers_transform(mPreviewWindow.get(),
+ mOrientation);
+ }
+ }
+ return OK;
+ } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
+ switch (arg1) {
+ case 0:
+ return enableShutterSound(false);
+ case 1:
+ return enableShutterSound(true);
+ default:
+ return BAD_VALUE;
+ }
+ return OK;
+ } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
+ mCameraService->playSound(CameraService::SOUND_RECORDING_START);
+ } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
+ // Not forwarded to the HAL; reject with INVALID_OPERATION.
+ return INVALID_OPERATION;
+ } else if (cmd == CAMERA_CMD_PING) {
+ // If mHardware is 0, checkPidAndHardware will return error.
+ return OK;
+ } else if (cmd == CAMERA_CMD_HISTOGRAM_ON) {
+ enableMsgType(CAMERA_MSG_STATS_DATA);
+ } else if (cmd == CAMERA_CMD_HISTOGRAM_OFF) {
+ disableMsgType(CAMERA_MSG_STATS_DATA);
+ } else if (cmd == CAMERA_CMD_METADATA_ON) {
+ enableMsgType(CAMERA_MSG_META_DATA);
+ } else if (cmd == CAMERA_CMD_METADATA_OFF) {
+ disableMsgType(CAMERA_MSG_META_DATA);
+ } else if ( cmd == CAMERA_CMD_LONGSHOT_ON ) {
+ mLongshotEnabled = true;
+ } else if ( cmd == CAMERA_CMD_LONGSHOT_OFF ) {
+ mLongshotEnabled = false;
+ disableMsgType(CAMERA_MSG_SHUTTER);
+ disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
+ }
+
+ return mHardware->sendCommand(cmd, arg1, arg2);
+}
+
+// ----------------------------------------------------------------------------
+
+void CameraClient::enableMsgType(int32_t msgType) {
+ android_atomic_or(msgType, &mMsgEnabled);
+ mHardware->enableMsgType(msgType);
+}
+
+void CameraClient::disableMsgType(int32_t msgType) {
+ android_atomic_and(~msgType, &mMsgEnabled);
+ mHardware->disableMsgType(msgType);
+}
+
+#define CHECK_MESSAGE_INTERVAL 10 // 10ms
+bool CameraClient::lockIfMessageWanted(int32_t msgType) {
+#ifdef MTK_HARDWARE
+ return true;
+#endif
+ int sleepCount = 0;
+ while (mMsgEnabled & msgType) {
+ if (mLock.tryLock() == NO_ERROR) {
+ if (sleepCount > 0) {
+ LOG1("lockIfMessageWanted(%d): waited for %d ms",
+ msgType, sleepCount * CHECK_MESSAGE_INTERVAL);
+ }
+
+ // If messages are no longer enabled after acquiring lock, release and drop message
+ if ((mMsgEnabled & msgType) == 0) {
+ mLock.unlock();
+ break;
+ }
+
+ return true;
+ }
+ if (sleepCount++ == 0) {
+ LOG1("lockIfMessageWanted(%d): enter sleep", msgType);
+ }
+ usleep(CHECK_MESSAGE_INTERVAL * 1000);
+ }
+ ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType);
+ return false;
+}
+
+// Callback messages can be dispatched to internal handlers or pass to our
+// client's callback functions, depending on the message type.
+//
+// notifyCallback:
+// CAMERA_MSG_SHUTTER handleShutter
+// (others) c->notifyCallback
+// dataCallback:
+// CAMERA_MSG_PREVIEW_FRAME handlePreviewData
+// CAMERA_MSG_POSTVIEW_FRAME handlePostview
+// CAMERA_MSG_RAW_IMAGE handleRawPicture
+// CAMERA_MSG_COMPRESSED_IMAGE handleCompressedPicture
+// (others) c->dataCallback
+// dataCallbackTimestamp
+// (others) c->dataCallbackTimestamp
+
+void CameraClient::notifyCallback(int32_t msgType, int32_t ext1,
+ int32_t ext2, void* user) {
+ LOG2("notifyCallback(%d)", msgType);
+
+#ifdef MTK_HARDWARE
+ if (msgType == 0x40000000) { //MTK_CAMERA_MSG_EXT_NOTIFY
+ if (ext1 == 0x11) { //MTK_CAMERA_MSG_EXT_NOTIFY_SHUTTER
+ msgType = CAMERA_MSG_SHUTTER;
+ }
+ if (ext1 == 0x10) { //MTK_CAMERA_MSG_EXT_CAPTURE_DONE
+ return;
+ }
+ LOG2("MtknotifyCallback(0x%x, 0x%x)", ext1, ext2);
+ }
+#endif
+
+ sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
+ if (client.get() == nullptr) return;
+
+ if (!client->lockIfMessageWanted(msgType)) return;
+
+ switch (msgType) {
+ case CAMERA_MSG_SHUTTER:
+ // ext1 is the dimension of the yuv picture.
+ client->handleShutter();
+ break;
+ default:
+ client->handleGenericNotify(msgType, ext1, ext2);
+ break;
+ }
+}
+
+void CameraClient::dataCallback(int32_t msgType,
+ const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
+ LOG2("dataCallback(%d)", msgType);
+
+ sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
+ if (client.get() == nullptr) return;
+
+ if (!client->lockIfMessageWanted(msgType)) return;
+ if (dataPtr == 0 && metadata == NULL) {
+ ALOGE("Null data returned in data callback");
+ client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ return;
+ }
+
+#ifdef MTK_HARDWARE
+ if (msgType == 0x80000000) { //MTK_CAMERA_MSG_EXT_DATA
+ struct DataHeader {
+ uint32_t extMsgType;
+ } dataHeader;
+ sp<IMemoryHeap> heap = 0;
+ ssize_t offset = 0;
+ size_t size = 0;
+
+ if (dataPtr.get()) {
+
+ heap = dataPtr->getMemory(&offset, &size);
+ if ( NULL != heap.get() && NULL != heap->base() )
+ ::memcpy(&dataHeader, ((uint8_t*)heap->base()) + offset, sizeof(DataHeader));
+
+ if (dataHeader.extMsgType == 0x10) { //MTK_CAMERA_MSG_EXT_DATA_COMPRESSED_IMAGE
+ msgType = CAMERA_MSG_COMPRESSED_IMAGE;
+ sp<MemoryBase> image = new MemoryBase(heap,
+ (offset + sizeof(DataHeader) + sizeof(uint_t) * 1),
+ (size - sizeof(DataHeader) - sizeof(uint_t) * 1));
+ client->handleCompressedPicture(image);
+ return;
+ }
+ }
+ LOG2("MtkDataCallback(0x%x)", dataHeader.extMsgType);
+ }
+#endif
+
+ switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
+ case CAMERA_MSG_PREVIEW_FRAME:
+ client->handlePreviewData(msgType, dataPtr, metadata);
+ break;
+ case CAMERA_MSG_POSTVIEW_FRAME:
+ client->handlePostview(dataPtr);
+ break;
+ case CAMERA_MSG_RAW_IMAGE:
+ client->handleRawPicture(dataPtr);
+ break;
+ case CAMERA_MSG_COMPRESSED_IMAGE:
+ client->handleCompressedPicture(dataPtr);
+ break;
+ default:
+ client->handleGenericData(msgType, dataPtr, metadata);
+ break;
+ }
+}
+
+void CameraClient::dataCallbackTimestamp(nsecs_t timestamp,
+ int32_t msgType, const sp<IMemory>& dataPtr, void* user) {
+ LOG2("dataCallbackTimestamp(%d)", msgType);
+
+ sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
+ if (client.get() == nullptr) return;
+
+ if (!client->lockIfMessageWanted(msgType)) return;
+
+ if (dataPtr == 0) {
+ ALOGE("Null data returned in data with timestamp callback");
+ client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ return;
+ }
+
+ client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
+}
+
+// snapshot taken callback
+void CameraClient::handleShutter(void) {
+ if (mPlayShutterSound) {
+ mCameraService->playSound(CameraService::SOUND_SHUTTER);
+ }
+
+ sp<ICameraClient> c = mRemoteCallback;
+ if (c != 0) {
+ mLock.unlock();
+ c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0);
+ if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return;
+ }
+ if ( !mLongshotEnabled ) {
+ disableMsgType(CAMERA_MSG_SHUTTER);
+ }
+
+ // Shutters only happen in response to takePicture, so mark device as
+ // idle now, until preview is restarted
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
+
+ mLock.unlock();
+}
+
+// preview callback - frame buffer update
+void CameraClient::handlePreviewData(int32_t msgType,
+ const sp<IMemory>& mem,
+ camera_frame_metadata_t *metadata) {
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+
+ // local copy of the callback flags
+ int flags = mPreviewCallbackFlag;
+
+ // is callback enabled?
+ if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
+ // If the enable bit is off, the copy-out and one-shot bits are ignored
+ LOG2("frame callback is disabled");
+ mLock.unlock();
+ return;
+ }
+
+ // hold a strong pointer to the client
+ sp<ICameraClient> c = mRemoteCallback;
+
+ // clear callback flags if no client or one-shot mode
+ if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
+ LOG2("Disable preview callback");
+ mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
+ CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
+ CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+ }
+
+ if (c != 0) {
+ // Is the received frame copied out or not?
+ if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
+ LOG2("frame is copied");
+ copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
+ } else {
+ LOG2("frame is forwarded");
+ mLock.unlock();
+ c->dataCallback(msgType, mem, metadata);
+ }
+ } else {
+ mLock.unlock();
+ }
+}
+
+// picture callback - postview image ready
+void CameraClient::handlePostview(const sp<IMemory>& mem) {
+ disableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
+
+ sp<ICameraClient> c = mRemoteCallback;
+ mLock.unlock();
+ if (c != 0) {
+ c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL);
+ }
+}
+
+// picture callback - raw image ready
+void CameraClient::handleRawPicture(const sp<IMemory>& mem) {
+ disableMsgType(CAMERA_MSG_RAW_IMAGE);
+
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+
+ sp<ICameraClient> c = mRemoteCallback;
+ mLock.unlock();
+ if (c != 0) {
+ c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL);
+ }
+}
+
+// picture callback - compressed picture ready
+void CameraClient::handleCompressedPicture(const sp<IMemory>& mem) {
+ if (mBurstCnt)
+ mBurstCnt--;
+
+ if (!mBurstCnt && !mLongshotEnabled) {
+ LOG1("handleCompressedPicture mBurstCnt = %d", mBurstCnt);
+ disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
+ }
+
+ sp<ICameraClient> c = mRemoteCallback;
+ mLock.unlock();
+ if (c != 0) {
+ c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL);
+ }
+}
+
+
+void CameraClient::handleGenericNotify(int32_t msgType,
+ int32_t ext1, int32_t ext2) {
+ sp<ICameraClient> c = mRemoteCallback;
+ mLock.unlock();
+ if (c != 0) {
+ c->notifyCallback(msgType, ext1, ext2);
+ }
+}
+
+void CameraClient::handleGenericData(int32_t msgType,
+ const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata) {
+ sp<ICameraClient> c = mRemoteCallback;
+ mLock.unlock();
+ if (c != 0) {
+ c->dataCallback(msgType, dataPtr, metadata);
+ }
+}
+
+void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp,
+ int32_t msgType, const sp<IMemory>& dataPtr) {
+ sp<ICameraClient> c = mRemoteCallback;
+ mLock.unlock();
+ if (c != 0) {
+ c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
+ }
+}
+
+void CameraClient::copyFrameAndPostCopiedFrame(
+ int32_t msgType, const sp<ICameraClient>& client,
+ const sp<IMemoryHeap>& heap, size_t offset, size_t size,
+ camera_frame_metadata_t *metadata) {
+ LOG2("copyFrameAndPostCopiedFrame");
+ // It is necessary to copy out of pmem before sending this to
+ // the callback. For efficiency, reuse the same MemoryHeapBase
+ // provided it's big enough. Don't allocate the memory or
+ // perform the copy if there's no callback.
+ // hold the preview lock while we grab a reference to the preview buffer
+ sp<MemoryHeapBase> previewBuffer;
+
+ if (mPreviewBuffer == 0) {
+ mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
+ } else if (size > mPreviewBuffer->virtualSize()) {
+ mPreviewBuffer.clear();
+ mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
+ }
+ if (mPreviewBuffer == 0) {
+ ALOGE("failed to allocate space for preview buffer");
+ mLock.unlock();
+ return;
+ }
+ previewBuffer = mPreviewBuffer;
+
+ void* previewBufferBase = previewBuffer->base();
+ void* heapBase = heap->base();
+
+ if (heapBase == MAP_FAILED) {
+ ALOGE("%s: Failed to mmap heap for preview frame.", __FUNCTION__);
+ mLock.unlock();
+ return;
+ } else if (previewBufferBase == MAP_FAILED) {
+ ALOGE("%s: Failed to mmap preview buffer for preview frame.", __FUNCTION__);
+ mLock.unlock();
+ return;
+ }
+
+ memcpy(previewBufferBase, (uint8_t *) heapBase + offset, size);
+
+ sp<MemoryBase> frame = new MemoryBase(previewBuffer, 0, size);
+ if (frame == 0) {
+ ALOGE("failed to allocate space for frame callback");
+ mLock.unlock();
+ return;
+ }
+
+ mLock.unlock();
+ client->dataCallback(msgType, frame, metadata);
+}
+
+int CameraClient::getOrientation(int degrees, bool mirror) {
+ if (!mirror) {
+ if (degrees == 0) return 0;
+ else if (degrees == 90) return HAL_TRANSFORM_ROT_90;
+ else if (degrees == 180) return HAL_TRANSFORM_ROT_180;
+ else if (degrees == 270) return HAL_TRANSFORM_ROT_270;
+ } else { // Do mirror (horizontal flip)
+ if (degrees == 0) { // FLIP_H and ROT_0
+ return HAL_TRANSFORM_FLIP_H;
+ } else if (degrees == 90) { // FLIP_H and ROT_90
+ return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
+ } else if (degrees == 180) { // FLIP_H and ROT_180
+ return HAL_TRANSFORM_FLIP_V;
+ } else if (degrees == 270) { // FLIP_H and ROT_270
+ return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
+ }
+ }
+ ALOGE("Invalid setDisplayOrientation degrees=%d", degrees);
+ return -1;
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 8cd6800..d0f29de 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -268,7 +268,7 @@ status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
return OK;
}
-void RingBufferConsumer::onFrameAvailable(const BufferItem& item) {
+void RingBufferConsumer::onFrameAvailable(const android::BufferItem& item) {
status_t err;
{
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 83e7298..90fd734 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -17,7 +17,6 @@
#ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H
#define ANDROID_GUI_RINGBUFFERCONSUMER_H
-#include <gui/BufferItem.h>
#include <gui/ConsumerBase.h>
#include <ui/GraphicBuffer.h>
@@ -55,6 +54,8 @@ class RingBufferConsumer : public ConsumerBase,
public:
typedef ConsumerBase::FrameAvailableListener FrameAvailableListener;
+ typedef BufferQueue::BufferItem BufferItem;
+
enum { INVALID_BUFFER_SLOT = BufferQueue::INVALID_BUFFER_SLOT };
enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE };
@@ -164,7 +165,7 @@ class RingBufferConsumer : public ConsumerBase,
private:
// Override ConsumerBase::onFrameAvailable
- virtual void onFrameAvailable(const BufferItem& item);
+ virtual void onFrameAvailable(const android::BufferItem& item);
void pinBufferLocked(const BufferItem& item);
void unpinBuffer(const BufferItem& item);
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment