vg_lite.c 233 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255
  1. /****************************************************************************
  2. *
  3. * Copyright 2012 - 2023 Vivante Corporation, Santa Clara, California.
  4. * All Rights Reserved.
  5. *
  6. * Permission is hereby granted, free of charge, to any person obtaining
  7. * a copy of this software and associated documentation files (the
  8. * 'Software'), to deal in the Software without restriction, including
  9. * without limitation the rights to use, copy, modify, merge, publish,
  10. * distribute, sub license, and/or sell copies of the Software, and to
  11. * permit persons to whom the Software is furnished to do so, subject
  12. * to the following conditions:
  13. *
  14. * The above copyright notice and this permission notice (including the
  15. * next paragraph) shall be included in all copies or substantial
  16. * portions of the Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
  19. * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  20. * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
  21. * IN NO EVENT SHALL VIVANTE AND/OR ITS SUPPLIERS BE LIABLE FOR ANY
  22. * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
  23. * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
  24. * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  25. *
  26. *****************************************************************************/
  27. #include "vg_lite_context.h"
  28. static float offsetTable[7] = {0, 0.000575f, -0.000575f, 0.0001f, -0.0001f, 0.0000375f, -0.0000375f};
  29. #if VG_SW_BLIT_PRECISION_OPT
  30. uint8_t GetIndex(uint32_t RotationStep, uint32_t ScaleValue)
  31. {
  32. uint8_t index = 0;
  33. switch (RotationStep) {
  34. case 0: //rotate 0
  35. switch (ScaleValue) {
  36. case 10:
  37. case 15:
  38. case 25:
  39. case 30:
  40. case 70:
  41. case 75:
  42. index = 1;
  43. break;
  44. case 45:
  45. case 50:
  46. case 60:
  47. case 65:
  48. case 550:
  49. index = 3;
  50. break;
  51. case 55:
  52. case 250:
  53. case 350:
  54. index = 4;
  55. break;
  56. case 85:
  57. case 90:
  58. case 95:
  59. case 150:
  60. case 450:
  61. case 650:
  62. case 750:
  63. case 850:
  64. case 950:
  65. index = 5;
  66. break;
  67. case 125:
  68. index = 2;
  69. break;
  70. default:
  71. index = 0;
  72. break;
  73. }
  74. break;
  75. case 2: //rotate 90
  76. switch (ScaleValue) {
  77. case 10:
  78. index = 2;
  79. break;
  80. case 15:
  81. case 25:
  82. case 30:
  83. case 45:
  84. case 75:
  85. case 85:
  86. case 90:
  87. case 95:
  88. case 150:
  89. case 250:
  90. case 350:
  91. case 450:
  92. case 550:
  93. case 850:
  94. index = 5;
  95. break;
  96. case 35:
  97. case 750:
  98. index = 4;
  99. break;
  100. case 50:
  101. index = 1;
  102. break;
  103. case 55:
  104. case 60:
  105. case 65:
  106. case 70:
  107. index = 3;
  108. break;
  109. default:
  110. index = 0;
  111. break;
  112. }
  113. break;
  114. case 3: //rotate 135
  115. switch (ScaleValue) {
  116. case 10:
  117. case 15:
  118. case 20:
  119. case 35:
  120. case 45:
  121. case 50:
  122. case 60:
  123. case 75:
  124. index = 2;
  125. break;
  126. case 85:
  127. case 90:
  128. case 100:
  129. case 400:
  130. case 450:
  131. case 500:
  132. case 550:
  133. case 850:
  134. index = 4;
  135. break;
  136. default:
  137. index = 0;
  138. break;
  139. }
  140. break;
  141. case 4: //rotate 180
  142. switch (ScaleValue) {
  143. case 10:
  144. case 15:
  145. case 25:
  146. case 30:
  147. case 35:
  148. case 50:
  149. index = 1;
  150. break;
  151. case 45:
  152. case 55:
  153. case 65:
  154. case 70:
  155. case 75:
  156. case 85:
  157. case 90:
  158. case 95:
  159. case 150:
  160. case 250:
  161. case 350:
  162. case 450:
  163. case 550:
  164. case 650:
  165. case 750:
  166. case 850:
  167. case 950:
  168. index = 5;
  169. break;
  170. default:
  171. index = 0;
  172. break;
  173. }
  174. break;
  175. case 5: //rotate 225
  176. switch (ScaleValue) {
  177. case 10:
  178. case 15:
  179. case 20:
  180. case 30:
  181. case 35:
  182. case 40:
  183. case 45:
  184. case 55:
  185. case 60:
  186. case 90:
  187. index = 6;
  188. break;
  189. default:
  190. index = 0;
  191. break;
  192. }
  193. break;
  194. case 6: //rotate 270
  195. switch (ScaleValue) {
  196. case 10:
  197. case 25:
  198. case 30:
  199. case 35:
  200. case 45:
  201. case 55:
  202. case 60:
  203. case 65:
  204. case 70:
  205. case 75:
  206. case 80:
  207. case 85:
  208. case 90:
  209. case 95:
  210. case 150:
  211. case 350:
  212. case 450:
  213. case 550:
  214. case 650:
  215. case 750:
  216. case 850:
  217. case 950:
  218. index = 5;
  219. break;
  220. default:
  221. index = 0;
  222. break;
  223. }
  224. break;
  225. case 7: //rotate 315
  226. switch (ScaleValue) {
  227. case 20:
  228. case 25:
  229. case 30:
  230. case 35:
  231. case 40:
  232. case 45:
  233. case 50:
  234. case 55:
  235. case 60:
  236. case 65:
  237. case 70:
  238. case 80:
  239. case 85:
  240. case 90:
  241. case 95:
  242. case 350:
  243. case 550:
  244. case 900:
  245. index = 5;
  246. break;
  247. default:
  248. index = 0;
  249. break;
  250. }
  251. break;
  252. default :
  253. index = 0;
  254. break;
  255. }
  256. return index;
  257. }
  258. #endif /* VG_SW_BLIT_PRECISION_OPT */
/* Global context variables and feature table.
*/

/* Driver-wide VGLite context; zero-initialized until the API initializes it. */
vg_lite_context_t s_context = { 0 };

/* Size of one command buffer, selected at build time by the
 * single-command-buffer feature switch. */
#if gcFEATURE_VG_SINGLE_COMMAND_BUFFER
uint32_t command_buffer_size = VG_LITE_SINGLE_COMMAND_BUFFER_SIZE;
#else
uint32_t command_buffer_size = VG_LITE_COMMAND_BUFFER_SIZE;
#endif

/* Submission state flag shared across the file; starts cleared.
 * NOTE(review): exact flag semantics are defined by its users elsewhere
 * in this file — confirm there. */
uint32_t submit_flag = 0;

/* Identity transform: 3x3 identity matrix followed by three scalar fields
 * (1.0f, 1.0f, 0.0f) — presumably cached scale-x/scale-y/rotation for
 * vg_lite_matrix_t; confirm against the struct declaration. */
vg_lite_matrix_t identity_mtx = {
    {
        { 1.0f, 0.0f, 0.0f },
        { 0.0f, 1.0f, 0.0f },
        { 0.0f, 0.0f, 1.0f }
    },
    1.0f, 1.0f, 0.0f
};
/* Initialize the feature table of a chip. */
/* Each entry is the build-time value (0/1) of the corresponding
 * gcFEATURE_VG_* option; the entry order must match the feature-index
 * enumeration used by the feature-query API — do not reorder. */
vg_lite_ftable_t s_ftable = {
    {
        gcFEATURE_VG_IM_INDEX_FORMAT,
        gcFEATURE_VG_SCISSOR,
        gcFEATURE_VG_BORDER_CULLING,
        gcFEATURE_VG_RGBA2_FORMAT,
        gcFEATURE_VG_QUALITY_8X,
        gcFEATURE_VG_IM_FASTCLEAR,
        gcFEATURE_VG_RADIAL_GRADIENT,
        gcFEATURE_VG_GLOBAL_ALPHA,
        gcFEATURE_VG_RGBA8_ETC2_EAC,
        gcFEATURE_VG_COLOR_KEY,
        gcFEATURE_VG_DOUBLE_IMAGE,
        gcFEATURE_VG_YUV_OUTPUT,
        gcFEATURE_VG_FLEXA,
        gcFEATURE_VG_24BIT,
        gcFEATURE_VG_DITHER,
        gcFEATURE_VG_USE_DST,
        gcFEATURE_VG_PE_CLEAR,
        gcFEATURE_VG_IM_INPUT,
        gcFEATURE_VG_DEC_COMPRESS,
        gcFEATURE_VG_LINEAR_GRADIENT_EXT,
        gcFEATURE_VG_MASK,
        gcFEATURE_VG_MIRROR,
        gcFEATURE_VG_GAMMA,
        gcFEATURE_VG_NEW_BLEND_MODE,
        gcFEATURE_VG_STENCIL,
        gcFEATURE_VG_SRC_PREMULTIPLIED,
        gcFEATURE_VG_HW_PREMULTIPLY,
        gcFEATURE_VG_COLOR_TRANSFORMATION,
        gcFEATURE_VG_LVGL_SUPPORT,
        gcFEATURE_VG_INDEX_ENDIAN,
        gcFEATURE_VG_24BIT_PLANAR,
        gcFEATURE_VG_PIXEL_MATRIX,
        gcFEATURE_VG_NEW_IMAGE_INDEX,
        gcFEATURE_VG_PARALLEL_PATHS,
        gcFEATURE_VG_STRIPE_MODE,
        gcFEATURE_VG_IM_DEC_INPUT,
        gcFEATURE_VG_GAUSSIAN_BLUR,
        gcFEATURE_VG_RECTANGLE_TILED_OUT,
        gcFEATURE_VG_TESSELLATION_TILED_OUT,
        gcFEATURE_VG_IM_REPEAT_REFLECT,
        gcFEATURE_VG_YUY2_INPUT,
        gcFEATURE_VG_YUV_INPUT,
        gcFEATURE_VG_YUV_TILED_INPUT,
        gcFEATURE_VG_AYUV_INPUT,
        gcFEATURE_VG_16PIXELS_ALIGNED,
        gcFEATURE_VG_DEC_COMPRESS_2_0,
        gcFEATURE_VG_NV24_INPUT,
        gcFEATURE_VG_TILED_LIMIT,
        gcFEATURE_VG_TILED_MODE,
        gcFEATURE_VG_SRC_ADDRESS_16BYTES_ALIGNED,
        gcFEATURE_VG_SRC_ADDRESS_64BYTES_ALIGNED,
        gcFEATURE_VG_SRC_TILE_4PIXELS_ALIGNED,
        gcFEATURE_VG_SRC_BUF_ALINGED,
        gcFEATURE_VG_DST_ADDRESS_64BYTES_ALIGNED,
        gcFEATURE_VG_DST_TILE_4PIXELS_ALIGNED,
        gcFEATURE_VG_DST_BUF_ALIGNED,
        gcFEATURE_VG_DST_24BIT_PLANAR_ALIGNED,
        gcFEATURE_VG_DST_BUFLEN_ALIGNED,
        gcFEATURE_VG_FORMAT_SUPPORT_CHECK,
        gcFEATURE_VG_512_PARALLEL_PATHS,
    }
};
  341. static vg_lite_error_t check_hardware_chip_info(void)
  342. {
  343. vg_lite_uint32_t chip_id = 0, chip_rev = 0, cid = 0, eco_id = 0;
  344. vg_lite_get_product_info(NULL, &chip_id, &chip_rev);
  345. vg_lite_get_register(0x30, &cid);
  346. vg_lite_get_register(0xE8, &eco_id);
  347. if (CHIPID != chip_id || REVISION != chip_rev || CID != cid || ECOID != eco_id) {
  348. printf("VGLite API initialization Error!!! \nHardware ChipId: 0x%X ChipRevision: 0x%X Cid: 0x%X Ecoid: 0x%X \n", chip_id, chip_rev, cid, eco_id);
  349. printf("NOT match vg_lite_options.h CHIPID: 0x%X REVISION: 0x%X CID: 0x%X Ecoid: 0x%X \n", CHIPID, REVISION, CID, ECOID);
  350. return VG_LITE_NOT_SUPPORT;
  351. }
  352. return VG_LITE_SUCCESS;
  353. }
  354. vg_lite_error_t check_compress(
  355. vg_lite_buffer_format_t format,
  356. vg_lite_compress_mode_t compress_mode,
  357. vg_lite_buffer_layout_t tiled,
  358. uint32_t width,
  359. uint32_t height
  360. )
  361. {
  362. #if gcFEATURE_VG_DEC_COMPRESS
  363. vg_lite_error_t error = VG_LITE_SUCCESS;
  364. if (compress_mode) {
  365. if (compress_mode > VG_LITE_DEC_HV_SAMPLE || compress_mode < VG_LITE_DEC_DISABLE)
  366. return VG_LITE_INVALID_ARGUMENT;
  367. if (tiled) {
  368. if (width % 16 || height % 4)
  369. return VG_LITE_INVALID_ARGUMENT;
  370. }
  371. else {
  372. if (width % 16 || compress_mode == VG_LITE_DEC_HV_SAMPLE)
  373. return VG_LITE_INVALID_ARGUMENT;
  374. }
  375. #if gcFEATURE_VG_DEC_COMPRESS_2_0
  376. if (format != VG_LITE_BGRA8888 && format != VG_LITE_BGRX8888 && format != VG_LITE_BGR888) {
  377. printf("Invalid compression format!\n");
  378. return VG_LITE_INVALID_ARGUMENT;
  379. }
  380. #else
  381. if ( format != VG_LITE_BGRX8888 && format != VG_LITE_RGBX8888 && format != VG_LITE_BGRA8888
  382. && format != VG_LITE_RGBA8888 && format != VG_LITE_RGB888 && format != VG_LITE_BGR888) {
  383. printf("Invalid compression format!\n");
  384. return VG_LITE_INVALID_ARGUMENT;
  385. }
  386. #endif
  387. }
  388. return error;
  389. #else
  390. return VG_LITE_SUCCESS;
  391. #endif
  392. }
/* Return the worst-case size ratio (compressed bytes / uncompressed bytes)
 * for a DECNano-compressed buffer of the given format and sampling mode.
 * Returns 1.0f (no size reduction) for any unsupported format/mode pair,
 * and also when no DEC compression feature is compiled in. */
static vg_lite_float_t _calc_decnano_compress_ratio(
    vg_lite_buffer_format_t format,
    vg_lite_compress_mode_t compress_mode
    )
{
    vg_lite_float_t ratio = 1.0f;
#if gcFEATURE_VG_DEC_COMPRESS_2_0
    /* DEC 2.0: only the BGRA8888/BGRX8888/BGR888 family is compressible. */
    switch (compress_mode) {
    case VG_LITE_DEC_NON_SAMPLE:
        switch (format) {
        case VG_LITE_BGRA8888:
        case VG_LITE_BGR888:
            ratio = 0.5f;
            break;
        case VG_LITE_BGRX8888:
            ratio = 0.385f;
            break;
        default:
            return ratio;
        }
        break;
    /* Horizontal-only and horizontal+vertical sampling share one table. */
    case VG_LITE_DEC_HSAMPLE:
        switch (format) {
        case VG_LITE_BGRA8888:
            ratio = 0.385f;
            break;
        case VG_LITE_BGRX8888:
            ratio = 0.25f;
            break;
        case VG_LITE_BGR888:
            ratio = 0.334f;
            break;
        default:
            return ratio;
        }
        break;
    case VG_LITE_DEC_HV_SAMPLE:
        switch (format) {
        case VG_LITE_BGRA8888:
            ratio = 0.385f;
            break;
        case VG_LITE_BGRX8888:
            ratio = 0.25f;
            break;
        case VG_LITE_BGR888:
            ratio = 0.334f;
            break;
        default:
            return ratio;
        }
        break;
    default:
        return ratio;
    }
#else
#if gcFEATURE_VG_DEC_COMPRESS
    /* Legacy DEC: all 32-bit and 24-bit RGB orderings are compressible. */
    switch (compress_mode) {
    case VG_LITE_DEC_NON_SAMPLE:
        switch (format) {
        case VG_LITE_ABGR8888:
        case VG_LITE_ARGB8888:
        case VG_LITE_BGRA8888:
        case VG_LITE_RGBA8888:
            ratio = 0.625f;
            break;
        case VG_LITE_XBGR8888:
        case VG_LITE_XRGB8888:
        case VG_LITE_BGRX8888:
        case VG_LITE_RGBX8888:
            ratio = 0.5f;
            break;
        case VG_LITE_RGB888:
        case VG_LITE_BGR888:
            ratio = 0.667f;
            break;
        default:
            return ratio;
        }
        break;
    case VG_LITE_DEC_HSAMPLE:
        switch (format) {
        case VG_LITE_ABGR8888:
        case VG_LITE_ARGB8888:
        case VG_LITE_BGRA8888:
        case VG_LITE_RGBA8888:
        case VG_LITE_RGB888:
        case VG_LITE_BGR888:
            ratio = 0.5f;
            break;
        case VG_LITE_XBGR8888:
        case VG_LITE_XRGB8888:
        case VG_LITE_BGRX8888:
        case VG_LITE_RGBX8888:
            ratio = 0.375f;
            break;
        default:
            return ratio;
        }
        break;
    case VG_LITE_DEC_HV_SAMPLE:
        switch (format) {
        case VG_LITE_ABGR8888:
        case VG_LITE_ARGB8888:
        case VG_LITE_BGRA8888:
        case VG_LITE_RGBA8888:
            ratio = 0.375f;
            break;
        case VG_LITE_XBGR8888:
        case VG_LITE_XRGB8888:
        case VG_LITE_BGRX8888:
        case VG_LITE_RGBX8888:
            ratio = 0.25f;
            break;
        default:
            return ratio;
        }
        break;
    default:
        return ratio;
    }
#endif
#endif
    return ratio;
}
  517. static inline int32_t has_valid_command_buffer(vg_lite_context_t *context)
  518. {
  519. if (context == NULL)
  520. return 0;
  521. if (context->command_buffer_current >= CMDBUF_COUNT)
  522. return 0;
  523. if (context->command_buffer[context->command_buffer_current] == NULL)
  524. return 0;
  525. return 1;
  526. }
  527. typedef vg_lite_float_t FLOATVECTOR4[4];
  528. static void ClampColor(FLOATVECTOR4 Source, FLOATVECTOR4 Target, uint8_t Premultiplied)
  529. {
  530. vg_lite_float_t colorMax;
  531. /* Clamp the alpha channel. */
  532. Target[3] = CLAMP(Source[3], 0.0f, 1.0f);
  533. /* Determine the maximum value for the color channels. */
  534. colorMax = Premultiplied ? Target[3] : 1.0f;
  535. /* Clamp the color channels. */
  536. Target[0] = CLAMP(Source[0], 0.0f, colorMax);
  537. Target[1] = CLAMP(Source[1], 0.0f, colorMax);
  538. Target[2] = CLAMP(Source[2], 0.0f, colorMax);
  539. }
  540. static uint8_t PackColorComponent(vg_lite_float_t value)
  541. {
  542. /* Compute the rounded normalized value. */
  543. vg_lite_float_t rounded = value * 255.0f + 0.5f;
  544. /* Get the integer part. */
  545. int32_t roundedInt = (int32_t)rounded;
  546. /* Clamp to 0..1 range. */
  547. uint8_t clamped = (uint8_t)CLAMP(roundedInt, 0, 255);
  548. /* Return result. */
  549. return clamped;
  550. }
  551. #if DUMP_IMAGE
  552. static void dump_img(void * memory, int32_t width, int32_t height, vg_lite_buffer_format_t format)
  553. {
  554. FILE * fp;
  555. char imgname[255] = {'\0'};
  556. static int32_t num = 1;
  557. uint32_t* pt = (uint32_t*) memory;
  558. int32_t i;
  559. sprintf(imgname, "img_pid%d_%d.txt", getpid(), num++);
  560. fp = fopen(imgname, "w");
  561. if (fp == NULL)
  562. printf("error!\n");
  563. switch (format) {
  564. case VG_LITE_INDEX_1:
  565. for(i = 0; i < width * height / 32; ++i)
  566. {
  567. fprintf(fp, "0x%08x\n",pt[i]);
  568. }
  569. break;
  570. case VG_LITE_INDEX_2:
  571. for(i = 0; i < width * height / 16; ++i)
  572. {
  573. fprintf(fp, "0x%08x\n",pt[i]);
  574. }
  575. break;
  576. case VG_LITE_INDEX_4:
  577. for(i = 0; i < width * height / 8; ++i)
  578. {
  579. fprintf(fp, "0x%08x\n",pt[i]);
  580. }
  581. break;
  582. case VG_LITE_INDEX_8:
  583. for(i = 0; i < width * height / 4; ++i)
  584. {
  585. fprintf(fp, "0x%08x\n",pt[i]);
  586. }
  587. break;
  588. case VG_LITE_RGBA2222:
  589. for(i = 0; i < width * height / 4; ++i)
  590. {
  591. fprintf(fp, "0x%08x\n",pt[i]);
  592. }
  593. break;
  594. case VG_LITE_RGBA4444:
  595. case VG_LITE_BGRA4444:
  596. case VG_LITE_RGB565:
  597. case VG_LITE_BGR565:
  598. for(i = 0; i < width * height / 2; ++i)
  599. {
  600. fprintf(fp, "0x%08x\n",pt[i]);
  601. }
  602. break;
  603. case VG_LITE_RGBA8888:
  604. case VG_LITE_BGRA8888:
  605. case VG_LITE_RGBX8888:
  606. case VG_LITE_BGRX8888:
  607. for(i = 0; i < width * height; ++i)
  608. {
  609. fprintf(fp, "0x%08x\n",pt[i]);
  610. }
  611. break;
  612. default:
  613. break;
  614. }
  615. fclose(fp);
  616. fp = NULL;
  617. }
  618. #endif
  619. static uint32_t rgb_to_l(uint32_t color)
  620. {
  621. uint32_t l = (uint32_t)((0.2126f * (vg_lite_float_t)(color & 0xFF)) +
  622. (0.7152f * (vg_lite_float_t)((color >> 8) & 0xFF)) +
  623. (0.0722f * (vg_lite_float_t)((color >> 16) & 0xFF)));
  624. return l | (l << 24);
  625. }
/* Get the bpp information of a color format. */
/* On return, bytes-per-pixel = (*mul) / (*div), and *bytes_align is the
 * required byte alignment for a row. Defaults (*mul = *div = 1,
 * *bytes_align = 4) describe 8-bpp formats; each case overrides only what
 * differs. For multi-plane YUV formats the values describe the first (Y)
 * plane stride. */
void get_format_bytes(vg_lite_buffer_format_t format,
    uint32_t *mul,
    uint32_t *div,
    uint32_t *bytes_align)
{
    *mul = *div = 1;
    *bytes_align = 4;
    switch (format) {
    /* 8 bpp: defaults apply. */
    case VG_LITE_L8:
    case VG_LITE_A8:
    case VG_LITE_RGBA8888_ETC2_EAC:
        break;
    /* 4 bpp. */
    case VG_LITE_A4:
        *div = 2;
        break;
    /* 16 bpp. */
    case VG_LITE_ABGR1555:
    case VG_LITE_ARGB1555:
    case VG_LITE_BGRA5551:
    case VG_LITE_RGBA5551:
    case VG_LITE_RGBA4444:
    case VG_LITE_BGRA4444:
    case VG_LITE_ABGR4444:
    case VG_LITE_ARGB4444:
    case VG_LITE_RGB565:
    case VG_LITE_BGR565:
    case VG_LITE_YUYV:
    case VG_LITE_YUY2:
    case VG_LITE_YUY2_TILED:
    /* AYUY2 buffer memory = YUY2 + alpha. */
    case VG_LITE_AYUY2:
    case VG_LITE_AYUY2_TILED:
    /* ABGR8565_PLANAR buffer memory = RGB565 + alpha. */
    case VG_LITE_ABGR8565_PLANAR:
    case VG_LITE_ARGB8565_PLANAR:
    case VG_LITE_RGBA5658_PLANAR:
    case VG_LITE_BGRA5658_PLANAR:
        *mul = 2;
        break;
    /* 32 bpp. */
    case VG_LITE_RGBA8888:
    case VG_LITE_BGRA8888:
    case VG_LITE_ABGR8888:
    case VG_LITE_ARGB8888:
    case VG_LITE_RGBX8888:
    case VG_LITE_BGRX8888:
    case VG_LITE_XBGR8888:
    case VG_LITE_XRGB8888:
        *mul = 4;
        break;
    /* NV12 Y plane is 1 byte per pixel. */
    case VG_LITE_NV12:
    case VG_LITE_NV12_TILED:
        *mul = 1;
        break;
    case VG_LITE_ANV12:
    case VG_LITE_ANV12_TILED:
        *mul = 4;
        break;
    /* Index formats pack multiple pixels per byte and need 8-byte rows. */
    case VG_LITE_INDEX_1:
        *div = 8;
        *bytes_align = 8;
        break;
    case VG_LITE_INDEX_2:
        *div = 4;
        *bytes_align = 8;
        break;
    case VG_LITE_INDEX_4:
        *div = 2;
        *bytes_align = 8;
        break;
    case VG_LITE_INDEX_8:
        *bytes_align = 1;
        break;
    case VG_LITE_RGBA2222:
    case VG_LITE_BGRA2222:
    case VG_LITE_ABGR2222:
    case VG_LITE_ARGB2222:
        *mul = 1;
        break;
    /* 24 bpp. */
    case VG_LITE_RGB888:
    case VG_LITE_BGR888:
    case VG_LITE_ABGR8565:
    case VG_LITE_BGRA5658:
    case VG_LITE_ARGB8565:
    case VG_LITE_RGBA5658:
        *mul = 3;
        break;
    /* OpenVG format*/
    /* 32 bpp OpenVG formats. */
    case OPENVG_sRGBX_8888:
    case OPENVG_sRGBX_8888_PRE:
    case OPENVG_sRGBA_8888:
    case OPENVG_sRGBA_8888_PRE:
    case OPENVG_lRGBX_8888:
    case OPENVG_lRGBX_8888_PRE:
    case OPENVG_lRGBA_8888:
    case OPENVG_lRGBA_8888_PRE:
    case OPENVG_sXRGB_8888:
    case OPENVG_sARGB_8888:
    case OPENVG_sARGB_8888_PRE:
    case OPENVG_lXRGB_8888:
    case OPENVG_lARGB_8888:
    case OPENVG_lARGB_8888_PRE:
    case OPENVG_sBGRX_8888:
    case OPENVG_sBGRA_8888:
    case OPENVG_sBGRA_8888_PRE:
    case OPENVG_lBGRX_8888:
    case OPENVG_lBGRA_8888:
    case OPENVG_sXBGR_8888:
    case OPENVG_sABGR_8888:
    case OPENVG_lBGRA_8888_PRE:
    case OPENVG_sABGR_8888_PRE:
    case OPENVG_lXBGR_8888:
    case OPENVG_lABGR_8888:
    case OPENVG_lABGR_8888_PRE:
        *mul = 4;
        break;
    /* 16 bpp OpenVG formats. */
    case OPENVG_sRGBA_5551:
    case OPENVG_sRGBA_5551_PRE:
    case OPENVG_lRGBA_5551:
    case OPENVG_lRGBA_5551_PRE:
    case OPENVG_sRGBA_4444:
    case OPENVG_sRGBA_4444_PRE:
    case OPENVG_lRGBA_4444:
    case OPENVG_lRGBA_4444_PRE:
    case OPENVG_sARGB_1555:
    case OPENVG_sARGB_4444:
    case OPENVG_sBGRA_5551:
    case OPENVG_sBGRA_4444:
    case OPENVG_sABGR_1555:
    case OPENVG_sABGR_4444:
    case OPENVG_sRGB_565:
    case OPENVG_sRGB_565_PRE:
    case OPENVG_sBGR_565:
    case OPENVG_lRGB_565:
    case OPENVG_lRGB_565_PRE:
        * mul = 2;
        break;
    /* 8 bpp OpenVG formats: defaults apply. */
    case OPENVG_sL_8:
    case OPENVG_lL_8:
    case OPENVG_A_8:
        break;
    /* Sub-byte OpenVG formats. */
    case OPENVG_BW_1:
    case OPENVG_A_4:
    case OPENVG_A_1:
        * div = 2;
        break;
    default:
        break;
    }
}
  775. /* Convert VGLite target color format to HW value. */
  776. static uint32_t convert_target_format(vg_lite_buffer_format_t format, vg_lite_capabilities_t caps)
  777. {
  778. switch (format) {
  779. case VG_LITE_A8:
  780. return 0x0;
  781. case VG_LITE_L8:
  782. return 0x6;
  783. case VG_LITE_ABGR4444:
  784. return 0x14;
  785. case VG_LITE_ARGB4444:
  786. return 0x34;
  787. case VG_LITE_RGBA4444:
  788. return 0x24;
  789. case VG_LITE_BGRA4444:
  790. return 0x4;
  791. case VG_LITE_RGB565:
  792. return 0x21;
  793. case VG_LITE_BGR565:
  794. return 0x1;
  795. case VG_LITE_ABGR8888:
  796. return 0x13;
  797. case VG_LITE_ARGB8888:
  798. return 0x33;
  799. case VG_LITE_RGBA8888:
  800. return 0x23;
  801. case VG_LITE_BGRA8888:
  802. return 0x3;
  803. case VG_LITE_RGBX8888:
  804. return 0x22;
  805. case VG_LITE_BGRX8888:
  806. return 0x2;
  807. case VG_LITE_XBGR8888:
  808. return 0x12;
  809. case VG_LITE_XRGB8888:
  810. return 0x32;
  811. case VG_LITE_ABGR1555:
  812. return 0x15;
  813. case VG_LITE_RGBA5551:
  814. return 0x25;
  815. case VG_LITE_ARGB1555:
  816. return 0x35;
  817. case VG_LITE_BGRA5551:
  818. return 0x5;
  819. case VG_LITE_YUYV:
  820. case VG_LITE_YUY2:
  821. case VG_LITE_YUY2_TILED:
  822. return 0x8;
  823. case VG_LITE_NV12:
  824. case VG_LITE_NV12_TILED:
  825. return 0xB;
  826. case VG_LITE_ANV12:
  827. case VG_LITE_ANV12_TILED:
  828. return 0xE;
  829. case VG_LITE_BGRA2222:
  830. return 0x7;
  831. case VG_LITE_RGBA2222:
  832. return 0x27;
  833. case VG_LITE_ABGR2222:
  834. return 0x17;
  835. case VG_LITE_ARGB2222:
  836. return 0x37;
  837. case VG_LITE_ARGB8565:
  838. return 0x3A;
  839. case VG_LITE_RGBA5658:
  840. return 0x2A;
  841. case VG_LITE_ABGR8565:
  842. return 0x1A;
  843. case VG_LITE_BGRA5658:
  844. return 0x0A;
  845. case VG_LITE_ARGB8565_PLANAR:
  846. return 0x3C;
  847. case VG_LITE_RGBA5658_PLANAR:
  848. return 0x2C;
  849. case VG_LITE_ABGR8565_PLANAR:
  850. return 0x1C;
  851. case VG_LITE_BGRA5658_PLANAR:
  852. return 0x0C;
  853. case VG_LITE_RGB888:
  854. return 0x29;
  855. case VG_LITE_BGR888:
  856. return 0x09;
  857. case VG_LITE_AYUY2:
  858. case VG_LITE_AYUY2_TILED:
  859. return 0xF;
  860. /* OpenVG VGImageFormat */
  861. case OPENVG_sRGBX_8888:
  862. case OPENVG_sRGBX_8888_PRE:
  863. return 0x12;
  864. break;
  865. case OPENVG_sRGBA_8888:
  866. case OPENVG_sRGBA_8888_PRE:
  867. return 0x13;
  868. break;
  869. case OPENVG_sRGB_565:
  870. case OPENVG_sRGB_565_PRE:
  871. return 0x1;
  872. break;
  873. case OPENVG_sRGBA_5551:
  874. case OPENVG_sRGBA_5551_PRE:
  875. return 0x15;
  876. break;
  877. case OPENVG_sRGBA_4444:
  878. case OPENVG_sRGBA_4444_PRE:
  879. return 0x14;
  880. break;
  881. case OPENVG_sL_8:
  882. return 0x6;
  883. break;
  884. case OPENVG_lRGBX_8888:
  885. case OPENVG_lRGBX_8888_PRE:
  886. return 0x12;
  887. break;
  888. case OPENVG_lRGBA_8888:
  889. case OPENVG_lRGBA_8888_PRE:
  890. return 0x13;
  891. break;
  892. case OPENVG_lRGB_565:
  893. case OPENVG_lRGB_565_PRE:
  894. return 0x1;
  895. break;
  896. case OPENVG_lRGBA_5551:
  897. case OPENVG_lRGBA_5551_PRE:
  898. return 0x15;
  899. break;
  900. case OPENVG_lRGBA_4444:
  901. case OPENVG_lRGBA_4444_PRE:
  902. return 0x14;
  903. break;
  904. case OPENVG_lL_8:
  905. return 0x6;
  906. break;
  907. case OPENVG_A_8:
  908. return 0x0;
  909. break;
  910. case OPENVG_sXRGB_8888:
  911. return 0x2;
  912. break;
  913. case OPENVG_sARGB_8888:
  914. return 0x3;
  915. break;
  916. case OPENVG_sARGB_8888_PRE:
  917. return 0x3;
  918. break;
  919. case OPENVG_sARGB_1555:
  920. return 0x5;
  921. break;
  922. case OPENVG_sARGB_4444:
  923. return 0x4;
  924. break;
  925. case OPENVG_lXRGB_8888:
  926. return 0x2;
  927. break;
  928. case OPENVG_lARGB_8888:
  929. return 0x3;
  930. break;
  931. case OPENVG_lARGB_8888_PRE:
  932. return 0x3;
  933. break;
  934. case OPENVG_sBGRX_8888:
  935. return 0x32;
  936. break;
  937. case OPENVG_sBGRA_8888:
  938. return 0x33;
  939. break;
  940. case OPENVG_sBGRA_8888_PRE:
  941. return 0x33;
  942. break;
  943. case OPENVG_sBGR_565:
  944. return 0x21;
  945. break;
  946. case OPENVG_sBGRA_5551:
  947. return 0x35;
  948. break;
  949. case OPENVG_sBGRA_4444:
  950. return 0x34;
  951. break;
  952. case OPENVG_lBGRX_8888:
  953. return 0x32;
  954. break;
  955. case OPENVG_lBGRA_8888:
  956. return 0x33;
  957. break;
  958. case OPENVG_lBGRA_8888_PRE:
  959. return 0x33;
  960. break;
  961. case OPENVG_sXBGR_8888:
  962. return 0x22;
  963. break;
  964. case OPENVG_sABGR_8888:
  965. return 0x23;
  966. break;
  967. case OPENVG_sABGR_8888_PRE:
  968. return 0x23;
  969. break;
  970. case OPENVG_sABGR_1555:
  971. return 0x25;
  972. break;
  973. case OPENVG_sABGR_4444:
  974. return 0x24;
  975. break;
  976. case OPENVG_lXBGR_8888:
  977. return 0x22;
  978. break;
  979. case OPENVG_lABGR_8888:
  980. return 0x23;
  981. break;
  982. case OPENVG_lABGR_8888_PRE:
  983. return 0x23;
  984. break;
  985. default:
  986. return 0xFF;
  987. }
  988. }
/* Check that `stride` is a multiple of `align`.
 * NOTE: this macro RETURNS from the enclosing function — either
 * VG_LITE_INVALID_ARGUMENT or VG_LITE_SUCCESS — so any statement written
 * after an invocation (e.g. a `break`) is unreachable. */
#define FORMAT_ALIGNMENT(stride,align) \
{ \
    if ((stride) % (align) != 0) \
        return VG_LITE_INVALID_ARGUMENT; \
    return VG_LITE_SUCCESS; \
}
  995. #if gcFEATURE_VG_16PIXELS_ALIGNED
  996. /* Determine source IM is aligned by specified bytes */
  997. static vg_lite_error_t _check_source_aligned(vg_lite_buffer_format_t format,uint32_t stride)
  998. {
  999. switch (format) {
  1000. case VG_LITE_A4:
  1001. case VG_LITE_INDEX_1:
  1002. case VG_LITE_INDEX_2:
  1003. case VG_LITE_INDEX_4:
  1004. FORMAT_ALIGNMENT(stride,8);
  1005. break;
  1006. case VG_LITE_L8:
  1007. case VG_LITE_A8:
  1008. case VG_LITE_INDEX_8:
  1009. case VG_LITE_RGBA2222:
  1010. case VG_LITE_BGRA2222:
  1011. case VG_LITE_ABGR2222:
  1012. case VG_LITE_ARGB2222:
  1013. case VG_LITE_RGBA8888_ETC2_EAC:
  1014. FORMAT_ALIGNMENT(stride,16);
  1015. break;
  1016. case VG_LITE_RGBA4444:
  1017. case VG_LITE_BGRA4444:
  1018. case VG_LITE_ABGR4444:
  1019. case VG_LITE_ARGB4444:
  1020. case VG_LITE_RGB565:
  1021. case VG_LITE_BGR565:
  1022. case VG_LITE_BGRA5551:
  1023. case VG_LITE_RGBA5551:
  1024. case VG_LITE_ABGR1555:
  1025. case VG_LITE_ARGB1555:
  1026. case VG_LITE_YUYV:
  1027. case VG_LITE_YUY2:
  1028. case VG_LITE_NV12:
  1029. case VG_LITE_YV12:
  1030. case VG_LITE_YV24:
  1031. case VG_LITE_YV16:
  1032. case VG_LITE_NV16:
  1033. case VG_LITE_NV24:
  1034. case VG_LITE_ABGR8565_PLANAR:
  1035. case VG_LITE_BGRA5658_PLANAR:
  1036. case VG_LITE_ARGB8565_PLANAR:
  1037. case VG_LITE_RGBA5658_PLANAR:
  1038. FORMAT_ALIGNMENT(stride,32);
  1039. break;
  1040. case VG_LITE_RGB888:
  1041. case VG_LITE_BGR888:
  1042. case VG_LITE_ABGR8565:
  1043. case VG_LITE_BGRA5658:
  1044. case VG_LITE_ARGB8565:
  1045. case VG_LITE_RGBA5658:
  1046. FORMAT_ALIGNMENT(stride,48);
  1047. break;
  1048. case VG_LITE_RGBA8888:
  1049. case VG_LITE_BGRA8888:
  1050. case VG_LITE_ABGR8888:
  1051. case VG_LITE_ARGB8888:
  1052. case VG_LITE_RGBX8888:
  1053. case VG_LITE_BGRX8888:
  1054. case VG_LITE_XBGR8888:
  1055. case VG_LITE_XRGB8888:
  1056. FORMAT_ALIGNMENT(stride,64);
  1057. break;
  1058. default:
  1059. return VG_LITE_SUCCESS;
  1060. }
  1061. }
  1062. #endif
  1063. #if gcFEATURE_VG_SRC_BUF_ALINGED
  1064. static vg_lite_error_t _check_source_aligned_2(vg_lite_buffer_format_t format, uint32_t stride)
  1065. {
  1066. switch (format) {
  1067. case VG_LITE_A4:
  1068. case VG_LITE_A8:
  1069. case VG_LITE_L8:
  1070. case VG_LITE_INDEX_1:
  1071. case VG_LITE_INDEX_2:
  1072. case VG_LITE_INDEX_4:
  1073. case VG_LITE_INDEX_8:
  1074. case VG_LITE_RGBA2222:
  1075. case VG_LITE_BGRA2222:
  1076. case VG_LITE_ABGR2222:
  1077. case VG_LITE_ARGB2222:
  1078. case VG_LITE_YV24:
  1079. case VG_LITE_NV24:
  1080. case VG_LITE_RGBA8888_ETC2_EAC:
  1081. FORMAT_ALIGNMENT(stride, 1);
  1082. break;
  1083. case VG_LITE_RGBA4444:
  1084. case VG_LITE_BGRA4444:
  1085. case VG_LITE_ABGR4444:
  1086. case VG_LITE_ARGB4444:
  1087. case VG_LITE_RGB565:
  1088. case VG_LITE_BGR565:
  1089. case VG_LITE_BGRA5551:
  1090. case VG_LITE_RGBA5551:
  1091. case VG_LITE_ABGR1555:
  1092. case VG_LITE_ARGB1555:
  1093. case VG_LITE_YV16:
  1094. case VG_LITE_NV16:
  1095. case VG_LITE_YV12:
  1096. case VG_LITE_NV12:
  1097. case VG_LITE_ABGR8565_PLANAR:
  1098. case VG_LITE_BGRA5658_PLANAR:
  1099. case VG_LITE_ARGB8565_PLANAR:
  1100. case VG_LITE_RGBA5658_PLANAR:
  1101. FORMAT_ALIGNMENT(stride, 2);
  1102. break;
  1103. case VG_LITE_YUYV:
  1104. case VG_LITE_YUY2:
  1105. case VG_LITE_RGB888:
  1106. case VG_LITE_BGR888:
  1107. case VG_LITE_ABGR8565:
  1108. case VG_LITE_BGRA5658:
  1109. case VG_LITE_ARGB8565:
  1110. case VG_LITE_RGBA5658:
  1111. case VG_LITE_RGBA8888:
  1112. case VG_LITE_BGRA8888:
  1113. case VG_LITE_ABGR8888:
  1114. case VG_LITE_ARGB8888:
  1115. case VG_LITE_RGBX8888:
  1116. case VG_LITE_BGRX8888:
  1117. case VG_LITE_XBGR8888:
  1118. case VG_LITE_XRGB8888:
  1119. FORMAT_ALIGNMENT(stride, 4);
  1120. break;
  1121. default:
  1122. return VG_LITE_SUCCESS;
  1123. }
  1124. }
  1125. static vg_lite_error_t _check_source_aligned_3(vg_lite_buffer_format_t format, uint32_t stride)
  1126. {
  1127. switch (format) {
  1128. case VG_LITE_INDEX_1:
  1129. case VG_LITE_INDEX_2:
  1130. FORMAT_ALIGNMENT(stride, 1);
  1131. break;
  1132. case VG_LITE_A4:
  1133. case VG_LITE_INDEX_4:
  1134. FORMAT_ALIGNMENT(stride, 2);
  1135. break;
  1136. case VG_LITE_A8:
  1137. case VG_LITE_L8:
  1138. case VG_LITE_YV24:
  1139. case VG_LITE_NV24:
  1140. case VG_LITE_INDEX_8:
  1141. case VG_LITE_RGBA2222:
  1142. case VG_LITE_BGRA2222:
  1143. case VG_LITE_ABGR2222:
  1144. case VG_LITE_ARGB2222:
  1145. case VG_LITE_RGBA8888_ETC2_EAC:
  1146. FORMAT_ALIGNMENT(stride, 4);
  1147. break;
  1148. case VG_LITE_RGBA4444:
  1149. case VG_LITE_BGRA4444:
  1150. case VG_LITE_ABGR4444:
  1151. case VG_LITE_ARGB4444:
  1152. case VG_LITE_BGRA5551:
  1153. case VG_LITE_RGBA5551:
  1154. case VG_LITE_ABGR1555:
  1155. case VG_LITE_ARGB1555:
  1156. case VG_LITE_RGB565:
  1157. case VG_LITE_BGR565:
  1158. case VG_LITE_YUYV:
  1159. case VG_LITE_YUY2:
  1160. case VG_LITE_YV12:
  1161. case VG_LITE_NV12:
  1162. case VG_LITE_ABGR8565_PLANAR:
  1163. case VG_LITE_BGRA5658_PLANAR:
  1164. case VG_LITE_ARGB8565_PLANAR:
  1165. case VG_LITE_RGBA5658_PLANAR:
  1166. FORMAT_ALIGNMENT(stride, 8);
  1167. break;
  1168. case VG_LITE_RGB888:
  1169. case VG_LITE_BGR888:
  1170. case VG_LITE_ABGR8565:
  1171. case VG_LITE_BGRA5658:
  1172. case VG_LITE_ARGB8565:
  1173. case VG_LITE_RGBA5658:
  1174. case VG_LITE_RGBA8888:
  1175. case VG_LITE_BGRA8888:
  1176. case VG_LITE_ABGR8888:
  1177. case VG_LITE_ARGB8888:
  1178. case VG_LITE_RGBX8888:
  1179. case VG_LITE_BGRX8888:
  1180. case VG_LITE_XBGR8888:
  1181. case VG_LITE_XRGB8888:
  1182. FORMAT_ALIGNMENT(stride, 16);
  1183. break;
  1184. default:
  1185. return VG_LITE_SUCCESS;
  1186. }
  1187. }
  1188. #endif
  1189. #if gcFEATURE_VG_FORMAT_SUPPORT_CHECK
  1190. static vg_lite_error_t _check_format_support_1(vg_lite_buffer_format_t format)
  1191. {
  1192. switch (format) {
  1193. case VG_LITE_A8:
  1194. case VG_LITE_L8:
  1195. case VG_LITE_RGBA2222:
  1196. case VG_LITE_BGRA2222:
  1197. case VG_LITE_ABGR2222:
  1198. case VG_LITE_ARGB2222:
  1199. case VG_LITE_RGBA4444:
  1200. case VG_LITE_BGRA4444:
  1201. case VG_LITE_ABGR4444:
  1202. case VG_LITE_ARGB4444:
  1203. case VG_LITE_BGRA5551:
  1204. case VG_LITE_RGBA5551:
  1205. case VG_LITE_ABGR1555:
  1206. case VG_LITE_ARGB1555:
  1207. case VG_LITE_RGB565:
  1208. case VG_LITE_BGR565:
  1209. case VG_LITE_RGB888:
  1210. case VG_LITE_BGR888:
  1211. case VG_LITE_ABGR8565:
  1212. case VG_LITE_BGRA5658:
  1213. case VG_LITE_ARGB8565:
  1214. case VG_LITE_RGBA5658:
  1215. case VG_LITE_RGBA8888:
  1216. case VG_LITE_BGRA8888:
  1217. case VG_LITE_ABGR8888:
  1218. case VG_LITE_ARGB8888:
  1219. case VG_LITE_RGBX8888:
  1220. case VG_LITE_BGRX8888:
  1221. case VG_LITE_XBGR8888:
  1222. case VG_LITE_XRGB8888:
  1223. break;
  1224. default:
  1225. return VG_LITE_NOT_SUPPORT;
  1226. }
  1227. return VG_LITE_SUCCESS;
  1228. }
  1229. static vg_lite_error_t _check_format_support_2(vg_lite_buffer_format_t format)
  1230. {
  1231. switch (format) {
  1232. case VG_LITE_INDEX_1:
  1233. case VG_LITE_INDEX_2:
  1234. case VG_LITE_INDEX_4:
  1235. case VG_LITE_INDEX_8:
  1236. case VG_LITE_A4:
  1237. case VG_LITE_YUY2:
  1238. case VG_LITE_YUY2_TILED:
  1239. case VG_LITE_RGBA8888_ETC2_EAC:
  1240. break;
  1241. default:
  1242. return VG_LITE_NOT_SUPPORT;
  1243. }
  1244. return VG_LITE_SUCCESS;
  1245. }
  1246. #endif
  1247. vg_lite_error_t srcbuf_align_check(vg_lite_buffer_t* source)
  1248. {
  1249. vg_lite_error_t error = VG_LITE_SUCCESS;
  1250. #if gcFEATURE_VG_FORMAT_SUPPORT_CHECK
  1251. if (_check_format_support_1(source->format) && _check_format_support_2(source->format)) {
  1252. return VG_LITE_NOT_SUPPORT;
  1253. }
  1254. #endif
  1255. #if gcFEATURE_VG_SRC_ADDRESS_64BYTES_ALIGNED
  1256. if ((uint32_t)(source->address) % 64 != 0) {
  1257. printf("buffer address need to be aglined to 64 bytes.");
  1258. return VG_LITE_INVALID_ARGUMENT;
  1259. }
  1260. #endif
  1261. #if gcFEATURE_VG_SRC_BUF_ALINGED
  1262. #if gcFEATURE_VG_SRC_ADDRESS_16BYTES_ALIGNED
  1263. if (source->format == VG_LITE_ARGB8888 ||
  1264. source->format == VG_LITE_BGRA8888 ||
  1265. source->format == VG_LITE_ABGR8888 ||
  1266. source->format == VG_LITE_ARGB8888
  1267. )
  1268. {
  1269. if ((uint32_t)(source->address) % 16 != 0) {
  1270. printf("buffer address need to be aglined to 16 bytes.");
  1271. return VG_LITE_INVALID_ARGUMENT;
  1272. }
  1273. }
  1274. else
  1275. #endif
  1276. {
  1277. if ((uint32_t)(source->address) % 8 != 0) {
  1278. printf("buffer address need to be aglined to 8 bytes.");
  1279. return VG_LITE_INVALID_ARGUMENT;
  1280. }
  1281. }
  1282. #endif
  1283. if (source->tiled == VG_LITE_TILED) {
  1284. #if gcFEATURE_VG_SRC_TILE_4PIXELS_ALIGNED
  1285. uint32_t align, mul, div;
  1286. get_format_bytes(source->format, &mul, &div, &align);
  1287. if ((source->stride % (4 * mul / div) != 0) || (source->height % 4 != 0)) {
  1288. return VG_LITE_INVALID_ARGUMENT;
  1289. }
  1290. #endif
  1291. #if gcFEATURE_VG_SRC_BUF_ALINGED
  1292. vg_lite_error_t error;
  1293. error = _check_source_aligned_3(source->format, source->stride);
  1294. if (error != VG_LITE_SUCCESS) {
  1295. return VG_LITE_INVALID_ARGUMENT;
  1296. }
  1297. #endif
  1298. }
  1299. if (source->tiled == VG_LITE_LINEAR) {
  1300. #if gcFEATURE_VG_16PIXELS_ALIGNED
  1301. uint32_t align, mul, div;
  1302. get_format_bytes(source->format, &mul, &div, &align);
  1303. if (source->stride % (16 * mul / div) != 0) {
  1304. return VG_LITE_INVALID_ARGUMENT;
  1305. }
  1306. #endif
  1307. #if gcFEATURE_VG_SRC_BUF_ALINGED
  1308. vg_lite_error_t error;
  1309. error = _check_source_aligned_2(source->format, source->stride);
  1310. if (error != VG_LITE_SUCCESS) {
  1311. return VG_LITE_INVALID_ARGUMENT;
  1312. }
  1313. #endif
  1314. }
  1315. return error;
  1316. }
/* Validate the render target's alignment constraints for the active feature set.
 *
 * Checks stride, base-address and (for DEC-compressed targets) total-size
 * alignment rules imposed by the hardware.  Which rules are enforced is
 * selected at compile time by the gcFEATURE_* switches.
 *
 * Returns VG_LITE_SUCCESS when every enabled check passes,
 * VG_LITE_NOT_SUPPORT for an unsupported destination format, and
 * VG_LITE_INVALID_ARGUMENT on any alignment violation. */
vg_lite_error_t dstbuf_align_check(vg_lite_buffer_t* target)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    uint32_t align, mul, div, bpp;
    uint32_t tile_flag = 0;
    uint32_t tile_flag1 = 0;
    /* mul/div express bytes-per-pixel as a ratio so sub-byte formats work; bpp is bits per pixel. */
    get_format_bytes(target->format, &mul, &div, &align);
    bpp = 8 * mul / div;
#if gcFEATURE_VG_FORMAT_SUPPORT_CHECK
    if (_check_format_support_1(target->format)) {
        return VG_LITE_NOT_SUPPORT;
    }
#endif
#if gcFEATURE_VG_DST_TILE_4PIXELS_ALIGNED
    /* Tiled output requires stride aligned to 4 pixels and height to 4 rows. */
    if (target->tiled == VG_LITE_TILED) {
        if ((target->stride % (4 * mul / div) != 0) || (target->height % 4 != 0)) {
            return VG_LITE_INVALID_ARGUMENT;
        }
    }
#endif
    if (target->compress_mode == VG_LITE_DEC_DISABLE) {
#if gcFEATURE_VG_DST_BUF_ALIGNED
        /* Uncompressed target: stride granularity differs for tiled vs. linear layout. */
        if (target->tiled == VG_LITE_TILED) {
            if (bpp == 8 || bpp == 16 || bpp == 32) {
                if (target->stride % (4 * mul / div)) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
            }
            /* Packed 24-bit format range: stride must be a 16-byte multiple. */
            if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
                if (target->stride % 16 != 0) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
            }
            /* Planar 24-bit format range: stride must be an 8-byte multiple. */
            if (target->format >= VG_LITE_ABGR8565_PLANAR && target->format <= VG_LITE_RGBA5658_PLANAR) {
                if (target->stride % 8 != 0) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
            }
        }
        else {
            if (bpp == 8 || bpp == 16 || bpp == 32) {
                if (target->stride % (mul / div)) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
            }
            if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
                if (target->stride % 4 != 0) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
            }
            if (target->format >= VG_LITE_ABGR8565_PLANAR && target->format <= VG_LITE_RGBA5658_PLANAR) {
                if (target->stride % 2 != 0) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
            }
        }
        /* Base-address alignment: 4 bytes for 8/16/32 bpp formats... */
        if (bpp == 8 || bpp == 16 || bpp == 32)
        {
            if ((uint32_t)(target->address) % 4 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
        /* ...and 64 bytes for the packed 24-bit range. */
        if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
            if ((uint32_t)(target->address) % 64 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
#endif
#if gcFEATURE_VG_DST_ADDRESS_64BYTES_ALIGNED
        /* This HW requires every destination address 64-byte aligned. */
        if ((uint32_t)(target->address) % 64 != 0) {
            return VG_LITE_INVALID_ARGUMENT;
        }
#endif
#if gcFEATURE_VG_DST_24BIT_PLANAR_ALIGNED
        /* Planar formats additionally constrain the alpha plane address. */
        if (target->format >= VG_LITE_ABGR8565_PLANAR && target->format <= VG_LITE_RGBA5658_PLANAR) {
            if ((uint32_t)(target->address) % 32 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
            if ((uint32_t)(target->yuv.alpha_planar) % 16 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
#endif
    }
    else {
        /* DEC-compressed target: base address and total image size must match
         * the compressor's block granularity (64 bytes, 48 for 24-bit). */
#if (gcFEATURE_VG_DEC_COMPRESS || gcFEATURE_VG_DEC_COMPRESS_2_0)
        if ((uint32_t)(target->address) % 64 != 0) {
            printf("target address need to be aligned to 64 bytes.");
            return VG_LITE_INVALID_ARGUMENT;
        }
#if gcFEATURE_VG_DEC_COMPRESS_2_0
        if (target->format == VG_LITE_BGRA8888 || target->format == VG_LITE_BGRX8888) {
            if ((target->stride * target->height) % 64 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
        if (target->format == VG_LITE_BGR888) {
            if ((target->stride * target->height) % 48 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
#else
        if (target->format == VG_LITE_BGRX8888 || target->format == VG_LITE_RGBX8888
            || target->format == VG_LITE_BGRA8888 || target->format == VG_LITE_RGBA8888) {
            if ((target->stride * target->height) % 64 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
        if (target->format == VG_LITE_RGB888 || target->format == VG_LITE_BGR888) {
            if ((target->stride * target->height) % 48 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
#endif
#endif
    }
    if (target->tiled == VG_LITE_TILED) {
        /* tile_flag1 records whether rectangle-tiled output is available, so
         * the XOR below fires only for tiled targets on HW lacking it. */
#if gcFEATURE_VG_RECTANGLE_TILED_OUT
        tile_flag1 = 1;
#else
        tile_flag1 = 0;
#endif
        tile_flag = 1;
    }
#if (gcFEATURE_VG_TILED_LIMIT == 1)
    /* Limit mode 1: tiled output allowed but with coarse stride alignment. */
    if (tile_flag1 ^ tile_flag) {
        if (bpp != 24) {
            if (target->stride % 64 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
        else {
            if (target->stride % 48 != 0) {
                return VG_LITE_INVALID_ARGUMENT;
            }
        }
    }
#elif (gcFEATURE_VG_TILED_LIMIT == 2)
    /* Limit mode 2: tiled output not possible at all on this HW. */
    if (tile_flag1 ^ tile_flag) {
        return VG_LITE_INVALID_ARGUMENT;
    }
#endif
    return error;
}
  1461. /* Convert VGLite source color format to HW values. */
  1462. uint32_t convert_source_format(vg_lite_buffer_format_t format)
  1463. {
  1464. switch (format) {
  1465. case VG_LITE_L8:
  1466. return 0x0;
  1467. case VG_LITE_A4:
  1468. return 0x1;
  1469. case VG_LITE_A8:
  1470. return 0x2;
  1471. case VG_LITE_RGBA4444:
  1472. return 0x23;
  1473. case VG_LITE_BGRA4444:
  1474. return 0x3;
  1475. case VG_LITE_ABGR4444:
  1476. return 0x13;
  1477. case VG_LITE_ARGB4444:
  1478. return 0x33;
  1479. case VG_LITE_RGB565:
  1480. return 0x25;
  1481. case VG_LITE_BGR565:
  1482. return 0x5;
  1483. case VG_LITE_RGBA8888:
  1484. return 0x27;
  1485. case VG_LITE_BGRA8888:
  1486. return 0x7;
  1487. case VG_LITE_ABGR8888:
  1488. return 0x17;
  1489. case VG_LITE_ARGB8888:
  1490. return 0x37;
  1491. case VG_LITE_RGBX8888:
  1492. return 0x26;
  1493. case VG_LITE_BGRX8888:
  1494. return 0x6;
  1495. case VG_LITE_XBGR8888:
  1496. return 0x16;
  1497. case VG_LITE_XRGB8888:
  1498. return 0x36;
  1499. case VG_LITE_BGRA5551:
  1500. return 0x4;
  1501. case VG_LITE_RGBA5551:
  1502. return 0x24;
  1503. case VG_LITE_ABGR1555:
  1504. return 0x14;
  1505. case VG_LITE_ARGB1555:
  1506. return 0x34;
  1507. case VG_LITE_YUYV:
  1508. return 0x8;
  1509. case VG_LITE_YUY2:
  1510. case VG_LITE_YUY2_TILED:
  1511. return 0x8;
  1512. case VG_LITE_NV12:
  1513. case VG_LITE_NV12_TILED:
  1514. return 0xB;
  1515. case VG_LITE_ANV12:
  1516. case VG_LITE_ANV12_TILED:
  1517. return 0xE;
  1518. case VG_LITE_YV12:
  1519. return 0x9;
  1520. case VG_LITE_YV24:
  1521. return 0xD;
  1522. case VG_LITE_YV16:
  1523. return 0xC;
  1524. case VG_LITE_NV16:
  1525. return 0xA;
  1526. case VG_LITE_NV24:
  1527. case VG_LITE_NV24_TILED:
  1528. return 0xD | (1<<19);
  1529. case VG_LITE_AYUY2:
  1530. case VG_LITE_AYUY2_TILED:
  1531. return 0xF;
  1532. case VG_LITE_INDEX_1:
  1533. return 0x200;
  1534. case VG_LITE_INDEX_2:
  1535. return 0x400;
  1536. case VG_LITE_INDEX_4:
  1537. return 0x600;
  1538. case VG_LITE_INDEX_8:
  1539. return 0x800;
  1540. case VG_LITE_RGBA2222:
  1541. return 0xA20;
  1542. case VG_LITE_BGRA2222:
  1543. return 0xA00;
  1544. case VG_LITE_ABGR2222:
  1545. return 0xA10;
  1546. case VG_LITE_ARGB2222:
  1547. return 0xA30;
  1548. case VG_LITE_RGBA8888_ETC2_EAC:
  1549. return 0xE00;
  1550. case VG_LITE_ARGB8565:
  1551. return 0x40000030;
  1552. case VG_LITE_RGBA5658:
  1553. return 0x40000020;
  1554. case VG_LITE_ABGR8565:
  1555. return 0x40000010;
  1556. case VG_LITE_BGRA5658:
  1557. return 0x40000000;
  1558. case VG_LITE_RGB888:
  1559. return 0x20000020;
  1560. case VG_LITE_BGR888:
  1561. return 0x20000000;
  1562. case VG_LITE_ARGB8565_PLANAR:
  1563. return 0x60000030;
  1564. case VG_LITE_RGBA5658_PLANAR:
  1565. return 0x60000020;
  1566. case VG_LITE_ABGR8565_PLANAR:
  1567. return 0x60000010;
  1568. case VG_LITE_BGRA5658_PLANAR:
  1569. return 0x60000000;
  1570. /* OpenVG VGImageFormat */
  1571. case OPENVG_sRGBX_8888:
  1572. case OPENVG_sRGBX_8888_PRE:
  1573. return 0x16;
  1574. break;
  1575. case OPENVG_sRGBA_8888:
  1576. case OPENVG_sRGBA_8888_PRE:
  1577. return 0x17;
  1578. break;
  1579. case OPENVG_sRGB_565:
  1580. case OPENVG_sRGB_565_PRE:
  1581. return 0x5;
  1582. break;
  1583. case OPENVG_sRGBA_5551:
  1584. case OPENVG_sRGBA_5551_PRE:
  1585. return 0x14;
  1586. break;
  1587. case OPENVG_sRGBA_4444:
  1588. case OPENVG_sRGBA_4444_PRE:
  1589. return 0x13;
  1590. break;
  1591. case OPENVG_sL_8:
  1592. return 0x0;
  1593. break;
  1594. case OPENVG_lRGBX_8888:
  1595. case OPENVG_lRGBX_8888_PRE:
  1596. return 0x16;
  1597. break;
  1598. case OPENVG_lRGBA_8888:
  1599. case OPENVG_lRGBA_8888_PRE:
  1600. return 0x17;
  1601. break;
  1602. case OPENVG_lRGB_565:
  1603. case OPENVG_lRGB_565_PRE:
  1604. return 0x5;
  1605. break;
  1606. case OPENVG_lRGBA_5551:
  1607. case OPENVG_lRGBA_5551_PRE:
  1608. return 0x14;
  1609. break;
  1610. case OPENVG_lRGBA_4444:
  1611. case OPENVG_lRGBA_4444_PRE:
  1612. return 0x13;
  1613. break;
  1614. case OPENVG_lL_8:
  1615. return 0x0;
  1616. break;
  1617. case OPENVG_A_8:
  1618. return 0x2;
  1619. break;
  1620. case OPENVG_BW_1:
  1621. return 0x200;
  1622. break;
  1623. case OPENVG_A_1:
  1624. return 0x1;
  1625. break;
  1626. case OPENVG_A_4:
  1627. return 0x1;
  1628. break;
  1629. case OPENVG_sXRGB_8888:
  1630. return 0x6;
  1631. break;
  1632. case OPENVG_sARGB_8888:
  1633. return 0x7;
  1634. break;
  1635. case OPENVG_sARGB_8888_PRE:
  1636. return 0x7;
  1637. break;
  1638. case OPENVG_sARGB_1555:
  1639. return 0x4;
  1640. break;
  1641. case OPENVG_sARGB_4444:
  1642. return 0x3;
  1643. break;
  1644. case OPENVG_lXRGB_8888:
  1645. return 0x6;
  1646. break;
  1647. case OPENVG_lARGB_8888:
  1648. return 0x7;
  1649. break;
  1650. case OPENVG_lARGB_8888_PRE:
  1651. return 0x7;
  1652. break;
  1653. case OPENVG_sBGRX_8888:
  1654. return 0x36;
  1655. break;
  1656. case OPENVG_sBGRA_8888:
  1657. return 0x37;
  1658. break;
  1659. case OPENVG_sBGRA_8888_PRE:
  1660. return 0x37;
  1661. break;
  1662. case OPENVG_sBGR_565:
  1663. return 0x25;
  1664. break;
  1665. case OPENVG_sBGRA_5551:
  1666. return 0x34;
  1667. break;
  1668. case OPENVG_sBGRA_4444:
  1669. return 0x33;
  1670. break;
  1671. case OPENVG_lBGRX_8888:
  1672. return 0x36;
  1673. break;
  1674. case OPENVG_lBGRA_8888:
  1675. return 0x37;
  1676. break;
  1677. case OPENVG_lBGRA_8888_PRE:
  1678. return 0x37;
  1679. break;
  1680. case OPENVG_sXBGR_8888:
  1681. return 0x26;
  1682. break;
  1683. case OPENVG_sABGR_8888:
  1684. return 0x27;
  1685. break;
  1686. case OPENVG_sABGR_8888_PRE:
  1687. return 0x27;
  1688. break;
  1689. case OPENVG_sABGR_1555:
  1690. return 0x24;
  1691. break;
  1692. case OPENVG_sABGR_4444:
  1693. return 0x23;
  1694. break;
  1695. case OPENVG_lXBGR_8888:
  1696. return 0x26;
  1697. break;
  1698. case OPENVG_lABGR_8888:
  1699. return 0x27;
  1700. break;
  1701. case OPENVG_lABGR_8888_PRE:
  1702. return 0x27;
  1703. break;
  1704. default:
  1705. return 0;
  1706. break;
  1707. }
  1708. }
  1709. /* Convert VGLite blend modes to HW values. */
  1710. uint32_t convert_blend(vg_lite_blend_t blend)
  1711. {
  1712. switch (blend) {
  1713. case VG_LITE_BLEND_SRC_OVER:
  1714. case VG_LITE_BLEND_NORMAL_LVGL:
  1715. case OPENVG_BLEND_SRC_OVER:
  1716. return 0x00000100;
  1717. case VG_LITE_BLEND_DST_OVER:
  1718. case OPENVG_BLEND_DST_OVER:
  1719. return 0x00000200;
  1720. case VG_LITE_BLEND_SRC_IN:
  1721. case OPENVG_BLEND_SRC_IN:
  1722. return 0x00000300;
  1723. case VG_LITE_BLEND_DST_IN:
  1724. case OPENVG_BLEND_DST_IN:
  1725. return 0x00000400;
  1726. case VG_LITE_BLEND_MULTIPLY:
  1727. case VG_LITE_BLEND_MULTIPLY_LVGL:
  1728. case OPENVG_BLEND_MULTIPLY:
  1729. return 0x00000500;
  1730. case VG_LITE_BLEND_SCREEN:
  1731. case OPENVG_BLEND_SCREEN:
  1732. return 0x00000600;
  1733. case VG_LITE_BLEND_DARKEN:
  1734. case OPENVG_BLEND_DARKEN:
  1735. return 0x00000700;
  1736. case VG_LITE_BLEND_LIGHTEN:
  1737. case OPENVG_BLEND_LIGHTEN:
  1738. return 0x00000800;
  1739. case VG_LITE_BLEND_ADDITIVE:
  1740. case VG_LITE_BLEND_ADDITIVE_LVGL:
  1741. case OPENVG_BLEND_ADDITIVE:
  1742. return 0x00000900;
  1743. case VG_LITE_BLEND_SUBTRACT:
  1744. return 0x00000A00;
  1745. case VG_LITE_BLEND_SUBTRACT_LVGL:
  1746. #if gcFEATURE_VG_LVGL_SUPPORT
  1747. return 0x00000C00;
  1748. #else
  1749. return 0x00000A00;
  1750. #endif
  1751. default:
  1752. return 0;
  1753. }
  1754. }
  1755. /* Convert VGLite uv swizzle enums to HW values. */
  1756. uint32_t convert_uv_swizzle(vg_lite_swizzle_t swizzle)
  1757. {
  1758. switch (swizzle) {
  1759. case VG_LITE_SWIZZLE_UV:
  1760. return 0x00000040;
  1761. break;
  1762. case VG_LITE_SWIZZLE_VU:
  1763. return 0x00000050;
  1764. default:
  1765. return 0;
  1766. break;
  1767. }
  1768. }
  1769. /* Convert VGLite yuv standard enums to HW values. */
  1770. uint32_t convert_yuv2rgb(vg_lite_yuv2rgb_t yuv)
  1771. {
  1772. switch (yuv) {
  1773. case VG_LITE_YUV601:
  1774. return 0;
  1775. break;
  1776. case VG_LITE_YUV709:
  1777. return 0x00008000;
  1778. default:
  1779. return 0;
  1780. break;
  1781. }
  1782. }
  1783. static vg_lite_error_t submit(vg_lite_context_t * context);
  1784. static vg_lite_error_t stall(vg_lite_context_t * context, uint32_t time_ms, uint32_t mask);
/* Push a state array into current command buffer. */
/* Writes one STATES header that loads 'count' consecutive registers starting
 * at 'address', followed by the 'count' payload words from 'data', padded
 * with a NOP when needed so the packet ends on a 64-bit boundary.
 * If the packet would not fit, the buffer is submitted and stalled first.
 * Returns VG_LITE_NO_CONTEXT without a valid command buffer, otherwise
 * VG_LITE_SUCCESS (or an error propagated from submit/stall). */
vg_lite_error_t push_clut(vg_lite_context_t * context, uint32_t address, uint32_t count, uint32_t *data)
{
    uint32_t i;
    vg_lite_error_t error;
    if (!has_valid_command_buffer(context))
        return VG_LITE_NO_CONTEXT;
    /* Header + payload rounded to a 64-bit boundary, plus 8 bytes of slack
     * for the END command appended by submit(), must fit. */
    if (CMDBUF_OFFSET(*context) + 8 + VG_LITE_ALIGN(count + 1, 2) * 4 >= CMDBUF_SIZE(*context)) {
        VG_LITE_RETURN_ERROR(submit(context));
        VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
    }
    ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_STATES(count, address);
    for (i = 0; i < count; i++) {
        ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1 + i] = data[i];
    }
    /* Even count => header + payload is an odd number of 32-bit words; pad
     * the final slot with a NOP to keep 64-bit alignment. */
    if (i%2 == 0) {
        ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1 + i] = VG_LITE_NOP();
    }
#if DUMP_COMMAND
    /* Debug-only: append the raw packet words to the per-process dump file. */
    {
        uint32_t loops;
        if (strncmp(filename, "Commandbuffer", 13)) {
            sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
        }
        fp = fopen(filename, "a");
        if (fp == NULL)
            printf("error!\n");
        fprintf(fp, "Command buffer: 0x%08x, ",
            ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0]);
        for (loops = 0; loops < count / 2; loops++) {
            fprintf(fp, "0x%08x,\nCommand buffer: 0x%08x, ",
                ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[(loops + 1) * 2 - 1],
                ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[(loops + 1) * 2]);
        }
        fprintf(fp, "0x%08x,\n",
            ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[(loops + 1) * 2 - 1]);
        fclose(fp);
        fp = NULL;
    }
#endif
    /* Advance past header + payload + padding. */
    CMDBUF_OFFSET(*context) += VG_LITE_ALIGN(count + 1, 2) * 4;
    return VG_LITE_SUCCESS;
}
  1828. /* Push a single state command into the current command buffer. */
  1829. vg_lite_error_t push_state(vg_lite_context_t * context, uint32_t address, uint32_t data)
  1830. {
  1831. vg_lite_error_t error;
  1832. if (!has_valid_command_buffer(context))
  1833. return VG_LITE_NO_CONTEXT;
  1834. /* TODO wait for hw to complete development. */
  1835. /* if (address == 0x0A1B || context->hw.hw_states[address & 0xff].state != data || !context->hw.hw_states[address & 0xff].init) */
  1836. {
  1837. if (CMDBUF_OFFSET(*context) + 16 >= CMDBUF_SIZE(*context)) {
  1838. VG_LITE_RETURN_ERROR(submit(context));
  1839. VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
  1840. }
  1841. /* TODO context->hw.hw_states[address & 0xff].state = data;
  1842. context->hw.hw_states[address & 0xff].init = 1;*/
  1843. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_STATE(address);
  1844. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = data;
  1845. #if DUMP_COMMAND
  1846. if (strncmp(filename, "Commandbuffer", 13)) {
  1847. sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
  1848. }
  1849. fp = fopen(filename, "a");
  1850. if (fp == NULL)
  1851. printf("error!\n");
  1852. fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
  1853. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0],
  1854. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1]);
  1855. fclose(fp);
  1856. fp = NULL;
  1857. #endif
  1858. CMDBUF_OFFSET(*context) += 8;
  1859. }
  1860. return VG_LITE_SUCCESS;
  1861. }
  1862. /* Push a single state command with given address. */
  1863. vg_lite_error_t push_state_ptr(vg_lite_context_t * context, uint32_t address, void * data_ptr)
  1864. {
  1865. vg_lite_error_t error;
  1866. uint32_t data = *(uint32_t *) data_ptr;
  1867. if (!has_valid_command_buffer(context))
  1868. return VG_LITE_NO_CONTEXT;
  1869. /* TODO wait for hw to complete development. */
  1870. /* if (address == 0x0A1B || context->hw.hw_states[address & 0xff].state != data || !context->hw.hw_states[address & 0xff].init) */
  1871. {
  1872. if (CMDBUF_OFFSET(*context) + 16 >= CMDBUF_SIZE(*context)) {
  1873. VG_LITE_RETURN_ERROR(submit(context));
  1874. VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
  1875. }
  1876. /* TODO context->hw.hw_states[address & 0xff].state = data;
  1877. context->hw.hw_states[address & 0xff].init = 1;*/
  1878. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_STATE(address);
  1879. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = data;
  1880. #if DUMP_COMMAND
  1881. if (strncmp(filename, "Commandbuffer", 13)) {
  1882. sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
  1883. }
  1884. fp = fopen(filename, "a");
  1885. if (fp == NULL)
  1886. printf("error!\n");
  1887. fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
  1888. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0],
  1889. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1]);
  1890. fclose(fp);
  1891. fp = NULL;
  1892. #endif
  1893. CMDBUF_OFFSET(*context) += 8;
  1894. }
  1895. return VG_LITE_SUCCESS;
  1896. }
/* Push a "call" command into the current command buffer. */
/* Emits a CALL that makes the command front-end jump to 'address' and
 * execute (bytes + 7) / 8 64-bit command slots there before returning. */
vg_lite_error_t push_call(vg_lite_context_t * context, uint32_t address, uint32_t bytes)
{
    vg_lite_error_t error;
    if (!has_valid_command_buffer(context))
        return VG_LITE_NO_CONTEXT;
    if (CMDBUF_OFFSET(*context) + 16 >= CMDBUF_SIZE(*context)) {
        VG_LITE_RETURN_ERROR(submit(context));
        VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
    }
    /* Size operand is in 64-bit units, rounded up. */
    ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_CALL((bytes + 7) / 8);
    ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = address;
#if DUMP_COMMAND
    if (strncmp(filename, "Commandbuffer", 13)) {
        sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
    }
    fp = fopen(filename, "a");
    if (fp == NULL)
        printf("error!\n");
    fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
        ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0],
        ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1]);
    fclose(fp);
    fp = NULL;
#endif
    CMDBUF_OFFSET(*context) += 8;
#if !gcFEATURE_VG_CMD_CALL_FIX
    /* HW without the CALL fix needs an explicit semaphore/stall after CALL.
     * NOTE(review): this targets the global s_context rather than the
     * 'context' parameter — presumably they are the same object in
     * practice; confirm against the callers. */
    VG_LITE_RETURN_ERROR(push_stall(&s_context, 0x10));
#endif
    return VG_LITE_SUCCESS;
}
  1928. #if gcFEATURE_VG_PE_CLEAR
  1929. static vg_lite_error_t push_pe_clear(vg_lite_context_t * context, uint32_t size)
  1930. {
  1931. vg_lite_error_t error;
  1932. if (!has_valid_command_buffer(context))
  1933. return VG_LITE_NO_CONTEXT;
  1934. if (CMDBUF_OFFSET(*context) + 16 >= CMDBUF_SIZE(*context)) {
  1935. VG_LITE_RETURN_ERROR(submit(context));
  1936. VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
  1937. }
  1938. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_DATA(1);
  1939. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = 0;
  1940. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[2] = size;
  1941. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[3] = 0;
  1942. CMDBUF_OFFSET(*context) += 16;
  1943. return VG_LITE_SUCCESS;
  1944. }
  1945. #endif
  1946. /* Push a rectangle command into the current command buffer. */
  1947. static vg_lite_error_t push_rectangle(vg_lite_context_t * context, int32_t x, int32_t y, int32_t width, int32_t height)
  1948. {
  1949. vg_lite_error_t error;
  1950. if (!has_valid_command_buffer(context))
  1951. return VG_LITE_NO_CONTEXT;
  1952. if (CMDBUF_OFFSET(*context) + 16 >= CMDBUF_SIZE(*context)) {
  1953. VG_LITE_RETURN_ERROR(submit(context));
  1954. VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
  1955. }
  1956. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_DATA(1);
  1957. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = 0;
  1958. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[4] = (uint16_t)x;
  1959. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[5] = (uint16_t)y;
  1960. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[6] = (uint16_t)width;
  1961. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[7] = (uint16_t)height;
  1962. #if DUMP_COMMAND
  1963. if (strncmp(filename, "Commandbuffer", 13)) {
  1964. sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
  1965. }
  1966. fp = fopen(filename, "a");
  1967. if (fp == NULL)
  1968. printf("error!\n");
  1969. fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
  1970. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0], 0);
  1971. fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
  1972. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[5] << 16 |
  1973. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[4],
  1974. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[7] << 16 |
  1975. ((uint16_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[6]);
  1976. fclose(fp);
  1977. fp = NULL;
  1978. #endif
  1979. CMDBUF_OFFSET(*context) += 16;
  1980. return VG_LITE_SUCCESS;
  1981. }
/* Push a data array into the current command buffer. */
/* Appends a DATA packet: an 8-byte header followed by 'size' payload bytes
 * copied from 'data', with the payload length rounded up to a 64-bit
 * boundary.  Flushes first if the packet would not fit, and fails with
 * VG_LITE_OUT_OF_RESOURCES when the whole buffer is too small to ever
 * hold it. */
vg_lite_error_t push_data(vg_lite_context_t * context, uint32_t size, void * data)
{
    vg_lite_error_t error;
    /* Payload length rounded up to an 8-byte multiple. */
    uint32_t bytes = VG_LITE_ALIGN(size, 8);
    if (!has_valid_command_buffer(context))
        return VG_LITE_NO_CONTEXT;
    if (CMDBUF_OFFSET(*context) + 16 + bytes >= CMDBUF_SIZE(*context)) {
        VG_LITE_RETURN_ERROR(submit(context));
        VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
    }
    /* Command buffer size must be at least data size "bytes" plus header and END command */
    if ((bytes + 16) > CMDBUF_SIZE(*context)) {
        printf("Command buffer size needs increase for data sized %d bytes!\n", (int)(bytes + 16));
        return VG_LITE_OUT_OF_RESOURCES;
    }
    /* Pre-zero the final 64-bit slot so any padding bytes past 'size' are
     * deterministic before the memcpy below. */
    ((uint64_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[(bytes >> 3)] = 0;
    ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_DATA((bytes >> 3));
    ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = 0;
    memcpy(CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context) + 8, data, size);
#if DUMP_COMMAND
    /* Debug-only: append the raw packet words to the per-process dump file. */
    {
        int32_t loops;
        if (strncmp(filename, "Commandbuffer", 13)) {
            sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
        }
        fp = fopen(filename, "a");
        if (fp == NULL)
            printf("error!\n");
        fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
            ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0], 0);
        for (loops = 0; loops < (bytes >> 3); loops++) {
            fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
                ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[(loops + 1) * 2],
                ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[(loops + 1) * 2 + 1]);
        }
        fclose(fp);
        fp = NULL;
    }
#endif
    CMDBUF_OFFSET(*context) += 8 + bytes;
    return VG_LITE_SUCCESS;
}
  2025. /* Push a "stall" command into the current command buffer. */
  2026. vg_lite_error_t push_stall(vg_lite_context_t * context, uint32_t module)
  2027. {
  2028. vg_lite_error_t error;
  2029. if (!has_valid_command_buffer(context))
  2030. return VG_LITE_NO_CONTEXT;
  2031. if (CMDBUF_OFFSET(*context) + 16 >= CMDBUF_SIZE(*context)) {
  2032. VG_LITE_RETURN_ERROR(submit(context));
  2033. VG_LITE_RETURN_ERROR(stall(context, 0, (uint32_t)~0));
  2034. }
  2035. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_SEMAPHORE(module);
  2036. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = 0;
  2037. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[2] = VG_LITE_STALL(module);
  2038. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[3] = 0;
  2039. #if DUMP_COMMAND
  2040. if (strncmp(filename, "Commandbuffer", 13)) {
  2041. sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
  2042. }
  2043. fp = fopen(filename, "a");
  2044. if (fp == NULL)
  2045. printf("error!\n");
  2046. fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
  2047. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0], 0);
  2048. fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
  2049. ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[2], 0);
  2050. fclose(fp);
  2051. fp = NULL;
  2052. #endif
  2053. CMDBUF_OFFSET(*context) += 16;
  2054. return VG_LITE_SUCCESS;
  2055. }
/* Submit the current command buffer to HW and reset the current command buffer offset. */
/* Appends an END command (frame-end or instruction-end event depending on
 * s_context.frame_flag), hands the buffer to the kernel via VG_LITE_SUBMIT,
 * and resets the write offset.  Waits for the previous submission first when
 * one is still outstanding (submit_flag). */
static vg_lite_error_t submit(vg_lite_context_t *context)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    vg_lite_kernel_submit_t submit;
#if gcdVG_ENABLE_DELAY_RESUME
    /* If the kernel reports a pending delayed resume, reset the GPU before
     * submitting (with the clock enabled around the reset on Zephyr). */
    vg_lite_kernel_delay_resume_t delay_resume;
    delay_resume.query_delay_resume = 1;
    int resume_flag = vg_lite_kernel(VG_LITE_QUERY_DELAY_RESUME, &delay_resume);
    if (resume_flag == 1) {
#ifdef __ZEPHYR__
        /* Enable GPU clocking*/
        vg_lite_kernel_gpu_clock_state_t gpu_state;
        gpu_state.state = VG_LITE_GPU_RUN;
        vg_lite_kernel(VG_LITE_SET_GPU_CLOCK_STATE, &gpu_state);
#endif
        /* Reset GPU. */
        vg_lite_kernel_reset_t reset;
        vg_lite_kernel(VG_LITE_RESET, &reset);
        printf("Delay resume success! \n");
#ifdef __ZEPHYR__
        /* Disable GPU clocking*/
        gpu_state.state = VG_LITE_GPU_STOP;
        vg_lite_kernel(VG_LITE_SET_GPU_CLOCK_STATE, &gpu_state);
#endif
    }
#endif
    /* Check if there is a valid context and an allocated command buffer. */
    if (!has_valid_command_buffer(context))
        return VG_LITE_NO_CONTEXT;
    /* Check if there is anything to submit. */
    if (CMDBUF_OFFSET(*context) == 0)
        return VG_LITE_INVALID_ARGUMENT;
#if 0
    /* This case is safe as command buffer is allocated with (command_buffer_size + 8) bytes */
    if (CMDBUF_OFFSET(*context) + 8 >= CMDBUF_SIZE(*context)) {
        /* Reset command buffer offset. */
        CMDBUF_OFFSET(*context) = 0;
        return VG_LITE_OUT_OF_RESOURCES;
    }
#endif
    /* Append END command into the command buffer. */
    if (s_context.frame_flag == VG_LITE_FRAME_END_FLAG) {
        /* A interrupt will be received to indicate that the GPU is idle. */
        ((uint32_t*)(CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_END(EVENT_FRAME_END);
        ((uint32_t*)(CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = 0;
    }
    else {
        /* A interrupt will be received to indicate that the GPU has completed the current instruction. */
        ((uint32_t*)(CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0] = VG_LITE_END(EVENT_END);
        ((uint32_t*)(CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[1] = 0;
    }
    /* Frame flag is consumed by this submission. */
    s_context.frame_flag = 0;
#if DUMP_COMMAND
    if (strncmp(filename, "Commandbuffer", 13)) {
        sprintf(filename, "Commandbuffer_pid%d.txt", getpid());
    }
    fp = fopen(filename, "a");
    if (fp == NULL)
        printf("error!\n");
    fprintf(fp, "Command buffer: 0x%08x, 0x%08x,\n",
        ((uint32_t *) (CMDBUF_BUFFER(*context) + CMDBUF_OFFSET(*context)))[0], 0);
    fprintf(fp, "Command buffer addr is : %p,\n", CMDBUF_BUFFER(*context));
    fprintf(fp, "Command buffer offset is : %d,\n", CMDBUF_OFFSET(*context) + 8);
    fclose(fp);
    fp = NULL;
#endif
    /* Account for the 8-byte END command just written. */
    CMDBUF_OFFSET(*context) += 8;
    /* Submit the command buffer. */
    submit.context = &context->context;
    submit.commands = CMDBUF_BUFFER(*context);
    submit.command_size = CMDBUF_OFFSET(*context);
    submit.command_id = CMDBUF_INDEX(*context);
#if DUMP_LAST_CAPTURE
    //backup command
    context->Physical = (size_t)CMDBUF_BUFFER(*context);
    context->last_command_buffer_logical = submit.context->command_buffer_logical[CMDBUF_INDEX(*context)];
    context->last_command_size = submit.command_size;
#endif
    /* Wait if GPU has not completed previous CMD buffer */
    if (submit_flag)
    {
        VG_LITE_RETURN_ERROR(stall(&s_context, 0, (uint32_t)~0));
    }
    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_SUBMIT, &submit));
    /* Mark a submission as outstanding until the next stall() completes. */
    submit_flag = 1;
    vglitemDUMP_BUFFER("command", (size_t)CMDBUF_BUFFER(*context),
        submit.context->command_buffer_logical[CMDBUF_INDEX(*context)], 0, submit.command_size);
#if !DUMP_COMMAND_CAPTURE
    vglitemDUMP("@[commit]");
#endif
#if DUMP_INIT_COMMAND
    is_init++;
#endif
    /* Reset command buffer. */
    CMDBUF_OFFSET(*context) = 0;
    return error;
}
/* Wait for the HW to finish the current execution. */
/* Blocks until the GPU signals any event in 'mask' or 'time_ms' elapses
 * (0 means wait forever via VG_LITE_INFINITE).  Clears submit_flag on the
 * way out.  Under DUMP_LAST_CAPTURE a timeout dumps the last submitted
 * command buffer; note that in that configuration (and under _WINDLL) the
 * kernel wait error is swallowed and VG_LITE_SUCCESS is returned. */
static vg_lite_error_t stall(vg_lite_context_t * context, uint32_t time_ms, uint32_t mask)
{
#if !defined(_WINDLL)
    vg_lite_error_t error;
#endif
    vg_lite_kernel_wait_t wait;
#if !DUMP_COMMAND_BY_USER
#if !DUMP_COMMAND_CAPTURE
    vglitemDUMP("@[stall]");
#endif
#endif
    /* Wait until GPU is ready. */
    wait.context = &context->context;
    wait.timeout_ms = time_ms > 0 ? time_ms : VG_LITE_INFINITE;
    wait.event_mask = mask;
    wait.reset_type = RESTORE_ALL_COMMAND;
#if defined(_WINDLL)
    vg_lite_kernel(VG_LITE_WAIT, &wait);
#else
#if !DUMP_LAST_CAPTURE
    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_WAIT, &wait));
#else
    /* Keep the error so a timeout can trigger the capture dump below. */
    error = vg_lite_kernel(VG_LITE_WAIT, &wait);
#endif
#endif
#if DUMP_LAST_CAPTURE
#if !defined(_WINDLL)
    if (error == VG_LITE_TIMEOUT)
    {
        vglitemDUMP_BUFFER_single("command", context->Physical,
            context->last_command_buffer_logical, 0, context->last_command_size);
    }
#endif
#endif
    /* No submission is outstanding once the wait returns. */
    submit_flag = 0;
    return VG_LITE_SUCCESS;
}
  2192. /* Get the inversion of a matrix. */
  2193. uint32_t inverse(vg_lite_matrix_t * result, vg_lite_matrix_t * matrix)
  2194. {
  2195. vg_lite_float_t det00, det01, det02;
  2196. vg_lite_float_t d;
  2197. int32_t isAffine;
  2198. /* Test for identity matrix. */
  2199. if (matrix == NULL) {
  2200. result->m[0][0] = 1.0f;
  2201. result->m[0][1] = 0.0f;
  2202. result->m[0][2] = 0.0f;
  2203. result->m[1][0] = 0.0f;
  2204. result->m[1][1] = 1.0f;
  2205. result->m[1][2] = 0.0f;
  2206. result->m[2][0] = 0.0f;
  2207. result->m[2][1] = 0.0f;
  2208. result->m[2][2] = 1.0f;
  2209. /* Success. */
  2210. return 1;
  2211. }
  2212. det00 = (matrix->m[1][1] * matrix->m[2][2]) - (matrix->m[2][1] * matrix->m[1][2]);
  2213. det01 = (matrix->m[2][0] * matrix->m[1][2]) - (matrix->m[1][0] * matrix->m[2][2]);
  2214. det02 = (matrix->m[1][0] * matrix->m[2][1]) - (matrix->m[2][0] * matrix->m[1][1]);
  2215. /* Compute determinant. */
  2216. d = (matrix->m[0][0] * det00) + (matrix->m[0][1] * det01) + (matrix->m[0][2] * det02);
  2217. /* Return 0 if there is no inverse matrix. */
  2218. if (d == 0.0f)
  2219. return 0;
  2220. /* Compute reciprocal. */
  2221. d = 1.0f / d;
  2222. /* Determine if the matrix is affine. */
  2223. isAffine = (matrix->m[2][0] == 0.0f) && (matrix->m[2][1] == 0.0f) && (matrix->m[2][2] == 1.0f);
  2224. result->m[0][0] = d * det00;
  2225. result->m[0][1] = d * ((matrix->m[2][1] * matrix->m[0][2]) - (matrix->m[0][1] * matrix->m[2][2]));
  2226. result->m[0][2] = d * ((matrix->m[0][1] * matrix->m[1][2]) - (matrix->m[1][1] * matrix->m[0][2]));
  2227. result->m[1][0] = d * det01;
  2228. result->m[1][1] = d * ((matrix->m[0][0] * matrix->m[2][2]) - (matrix->m[2][0] * matrix->m[0][2]));
  2229. result->m[1][2] = d * ((matrix->m[1][0] * matrix->m[0][2]) - (matrix->m[0][0] * matrix->m[1][2]));
  2230. result->m[2][0] = isAffine ? 0.0f : d * det02;
  2231. result->m[2][1] = isAffine ? 0.0f : d * ((matrix->m[2][0] * matrix->m[0][1]) - (matrix->m[0][0] * matrix->m[2][1]));
  2232. result->m[2][2] = isAffine ? 1.0f : d * ((matrix->m[0][0] * matrix->m[1][1]) - (matrix->m[1][0] * matrix->m[0][1]));
  2233. /* Success. */
  2234. return 1;
  2235. }
  2236. /* Transform a 2D point by a given matrix. */
  2237. uint32_t transform(vg_lite_point_t * result, vg_lite_float_t x, vg_lite_float_t y, vg_lite_matrix_t * matrix)
  2238. {
  2239. vg_lite_float_t pt_x;
  2240. vg_lite_float_t pt_y;
  2241. vg_lite_float_t pt_w;
  2242. /* Test for identity matrix. */
  2243. if (matrix == NULL) {
  2244. result->x = (int)x;
  2245. result->y = (int)y;
  2246. /* Success. */
  2247. return 1;
  2248. }
  2249. if (((matrix->m[0][1] != 0.0f) || (matrix->m[1][0] != 0.0f) || (matrix->m[2][0] != 0.0f) || (matrix->m[2][1] != 0.0f) || (matrix->m[2][2] != 1.0f)) &&
  2250. (s_context.filter == VG_LITE_FILTER_LINEAR || s_context.filter == VG_LITE_FILTER_BI_LINEAR)) {
  2251. if (x != 0) {
  2252. x = x + 0.5f;
  2253. }
  2254. if (y != 0 && s_context.filter == VG_LITE_FILTER_BI_LINEAR) {
  2255. y = y + 0.5f;
  2256. }
  2257. }
  2258. /* Transform x, y, and w. */
  2259. pt_x = (x * matrix->m[0][0]) + (y * matrix->m[0][1]) + matrix->m[0][2];
  2260. pt_y = (x * matrix->m[1][0]) + (y * matrix->m[1][1]) + matrix->m[1][2];
  2261. pt_w = (x * matrix->m[2][0]) + (y * matrix->m[2][1]) + matrix->m[2][2];
  2262. if (pt_w <= 0.0f)
  2263. return 0;
  2264. /* Compute projected x and y. */
  2265. if (pt_x < 0)
  2266. {
  2267. result->x = (int)((pt_x / pt_w) - 0.5f);
  2268. }
  2269. else
  2270. {
  2271. result->x = (int)((pt_x / pt_w) + 0.5f);
  2272. }
  2273. if (pt_y < 0)
  2274. {
  2275. result->y = (int)((pt_y / pt_w) - 0.5f);
  2276. }
  2277. else
  2278. {
  2279. result->y = (int)((pt_y / pt_w) + 0.5f);
  2280. }
  2281. /* Success. */
  2282. return 1;
  2283. }
  2284. /* Flush specific VG module. */
  2285. static vg_lite_error_t flush_target(void)
  2286. {
  2287. vg_lite_error_t error = VG_LITE_SUCCESS;
  2288. vg_lite_context_t *context = GET_CONTEXT();
  2289. do {
  2290. VG_LITE_BREAK_ERROR(push_state(context, 0x0A1B, 0x00000011));
  2291. VG_LITE_BREAK_ERROR(push_stall(context, 7));
  2292. } while (0);
  2293. return error;
  2294. }
/* Set the current render target.
 *
 * Programs every destination-side hardware state for <target> — format,
 * surface address, stride/tiling, scissor window, plus the mirror, gamma,
 * FLEXA and compression modifiers — into the command buffer, then caches
 * the buffer in s_context.rtbuffer so identical repeat calls are skipped.
 *
 * Returns VG_LITE_INVALID_ARGUMENT for a NULL target, VG_LITE_NOT_SUPPORT
 * for formats the hardware cannot render to, or the first push_state()
 * error; VG_LITE_SUCCESS otherwise (including the early no-change exit).
 */
vg_lite_error_t set_render_target(vg_lite_buffer_t *target)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    uint32_t yuv2rgb = 0;
    uint32_t uv_swiz = 0;
    uint32_t tile_setting;
    uint32_t flexa_mode = 0;
    uint32_t compress_mode = 0;
    uint32_t mirror_mode = 0;
    uint32_t premultiply_dst = 0;
    uint32_t rgb_alphadiv = 0;
    uint32_t read_dest = 0;
    uint32_t dst_format = 0;
    uint32_t rt_changed = 0;
    if (target == NULL) {
        return VG_LITE_INVALID_ARGUMENT;
    }
    /* Check if render target parameters are really changed. */
    /* NOTE(review): whole-struct memcmp also compares padding bytes; a
     * spurious mismatch only costs a redundant reprogram, never a skip. */
    if (memcmp(s_context.rtbuffer, target, sizeof(vg_lite_buffer_t))) {
        rt_changed = 1;
    }
    /* Simply return if render target, scissor, mirror, gamma, flexa states are not changed. */
    if (!rt_changed && !s_context.scissor_dirty && !s_context.mirror_dirty && !s_context.gamma_dirty && !s_context.flexa_dirty) {
        return VG_LITE_SUCCESS;
    }
#if gcFEATURE_VG_ERROR_CHECK
#if !gcFEATURE_VG_YUV_OUTPUT
    /* Hardware without YUV output cannot render into packed-YUV targets. */
    if ((target != NULL) &&
    (target->format == VG_LITE_YUY2 ||
    target->format == VG_LITE_AYUY2 ||
    target->format == VG_LITE_YUY2_TILED ||
    target->format == VG_LITE_AYUY2_TILED)) {
        return VG_LITE_NOT_SUPPORT;
    }
#endif
#if !gcFEATURE_VG_24BIT_PLANAR
    /* Planar 24-bit formats need a dedicated feature bit. */
    if (target->format >= VG_LITE_ABGR8565_PLANAR && target->format <= VG_LITE_RGBA5658_PLANAR) {
        return VG_LITE_NOT_SUPPORT;
    }
#endif
    VG_LITE_RETURN_ERROR(dstbuf_align_check(target));
    VG_LITE_RETURN_ERROR(check_compress(target->format, target->compress_mode, target->tiled, target->width, target->height));
#endif /* gcFEATURE_VG_ERROR_CHECK */
#if gcFEATURE_VG_IM_FASTCLEAR
    update_fc_buffer(target);
#endif
    /* Flush previous render target before setting the new render target. */
    vg_lite_flush();
    /* Program render target states */
    {
        /* Packed-YUV destinations need YUV->RGB conversion and UV swizzle
         * configuration derived from the buffer's yuv descriptor. */
        if (((target->format >= VG_LITE_YUY2) && (target->format <= VG_LITE_AYUY2)) ||
            ((target->format >= VG_LITE_YUY2_TILED) && (target->format <= VG_LITE_AYUY2_TILED)))
        {
            yuv2rgb = convert_yuv2rgb(target->yuv.yuv2rgb);
            uv_swiz = convert_uv_swizzle(target->yuv.swizzle);
        }
        if (s_context.flexa_mode) {
            flexa_mode = 1 << 7;    /* FLEXA streaming output enable bit. */
        }
        /* The mirror orientation that needs the flip bit differs per chip. */
#if (CHIPID==0x355 || CHIPID==0x255)
        if (s_context.mirror_orient == VG_LITE_ORIENTATION_TOP_BOTTOM) {
#else
        if (s_context.mirror_orient == VG_LITE_ORIENTATION_BOTTOM_TOP) {
#endif
            mirror_mode = 1 << 16;
        }
        compress_mode = ((uint32_t)target->compress_mode) << 25;
        if (target->premultiplied || target->apply_premult) {
            premultiply_dst = 0x00000100;
        }
#if gcFEATURE_VG_HW_PREMULTIPLY
        rgb_alphadiv = 0x00000200;
#endif
#if gcFEATURE_VG_USE_DST
        read_dest = 0x00100000;
#endif
        dst_format = convert_target_format(target->format, s_context.capabilities);
        if (dst_format == 0xFF) {   /* 0xFF: converter's "unsupported" sentinel. */
            printf("Target format: 0x%x is not supported.\n", target->format);
            return VG_LITE_NOT_SUPPORT;
        }
        /* 0x0A10: destination configuration (format + all modifier bits). */
        VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A10,
            dst_format | read_dest | uv_swiz | yuv2rgb | flexa_mode | compress_mode | mirror_mode | s_context.gamma_value | premultiply_dst | rgb_alphadiv));
        s_context.mirror_dirty = 0;
        s_context.gamma_dirty = 0;
        /* Restore the tessellation buffer size when FLEXA was just turned off. */
        if (s_context.flexa_dirty && !s_context.flexa_mode && s_context.tessbuf.tessbuf_size) {
            /* NOTE(review): 0x0AC8 is written twice (size-64 then full size);
             * looks like a deliberate HW programming sequence — confirm
             * against the register spec before changing. */
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AC8, s_context.tessbuf.tessbuf_size -64));
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AC8, s_context.tessbuf.tessbuf_size));
            s_context.flexa_dirty = 0;
        }
        if (target->yuv.uv_planar)
        { /* Program uv plane address if necessary. */
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A5C, target->yuv.uv_planar));
        }
        if (target->yuv.alpha_planar) {
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A5D, target->yuv.alpha_planar));
        }
        /* 0x0A11: destination surface base address. */
        VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A11, target->address));
        tile_setting = (target->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0;
        /* 24bit format stride configured to 4bpp. */
        if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
            uint32_t stride = target->stride / 3 * 4;
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A12, stride | tile_setting));
        }
        else {
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A12, target->stride | tile_setting));
        }
        /* Set scissor rectangle on the render target */
        if (s_context.scissor_set) {
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A13, s_context.scissor[2] | (s_context.scissor[3] << 16)));
        }
        else {
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A13, target->width | (target->height << 16)));
        }
        s_context.scissor_dirty = 0;
    }
#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("  set_render_target %p (%d, %d)\n", target, target->width, target->height);
#endif
    /* Copy the current render target parameters into s_context.rtbuffer */
    if (rt_changed) {
        memcpy(s_context.rtbuffer, target, sizeof(vg_lite_buffer_t));
    }
    return error;
}
  2421. /*************** VGLite API Functions ***********************************************/
/* Clear a rectangle of the target buffer to a solid color.
 *
 * <rect> == NULL clears the whole target. The clear region is clipped to
 * the active scissor (or the target bounds). Depending on hardware
 * features the clear is performed by the fast-clear unit, the PE clear
 * path, or a plain rectangle draw.
 *
 * Returns VG_LITE_SUCCESS (also when the clipped region is empty),
 * VG_LITE_NOT_SUPPORT for formats the chip cannot clear,
 * VG_LITE_INVALID_ARGUMENT when a buffer-length alignment rule is
 * violated on the PE-clear path, or a push_state() error.
 */
vg_lite_error_t vg_lite_clear(vg_lite_buffer_t * target,
    vg_lite_rectangle_t * rect,
    vg_lite_color_t color)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_clear)(target, rect, color);
#endif
    vg_lite_error_t error;
    vg_lite_point_t point_min, point_max;
    int32_t left, top, right, bottom;
    uint32_t color32;
    uint32_t tile_setting = 0;
    uint32_t stripe_mode = 0;
    uint32_t in_premult = 0;
#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_clear %p %p 0x%08X\n", target, rect, color);
    if (rect) VGLITE_LOG("  Rect(%d, %d, %d, %d)\n", rect->x, rect->y, rect->width, rect->height);
#endif
#if gcFEATURE_VG_ERROR_CHECK
#if (CHIPID == 0x355)
    /* GC355 cannot render to these destination formats. */
    if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
        target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
        target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
        printf("Target format: 0x%x is not supported.\n", target->format);
        return VG_LITE_NOT_SUPPORT;
    }
#endif
#endif
#if gcFEATURE_VG_GAMMA
    set_gamma_dest_only(target, VGL_FALSE);
#endif
    /* Select premultiply handling for the destination: already
     * premultiplied targets skip the on-the-fly premultiply (bit 28). */
    if (target->premultiplied) {
        in_premult = 0x00000000;
        target->apply_premult = 0;
    }
    else {
        in_premult = 0x10000000;
        target->apply_premult = 1;
    }
    error = set_render_target(target);
    if (error != VG_LITE_SUCCESS) {
        return error;
    }
    /* Get rectangle. */
    if (rect) {
        point_min.x = rect->x;
        point_min.y = rect->y;
        point_max.x = rect->x + rect->width;
        point_max.y = rect->y + rect->height;
    }
    else {
        /* NULL rect: clear the full current render target. */
        point_min.x = 0;
        point_min.y = 0;
        point_max.x = s_context.rtbuffer->width;
        point_max.y = s_context.rtbuffer->height;
    }
    /* Clip to target. */
    if (s_context.scissor_set && !target->scissor_buffer) {
        left = s_context.scissor[0];
        top = s_context.scissor[1];
        right = s_context.scissor[2];
        bottom = s_context.scissor[3];
    }
    else {
        left = 0;
        top = 0;
        right = target->width;
        bottom = target->height;
    }
    point_min.x = MAX(point_min.x, left);
    point_min.y = MAX(point_min.y, top);
    point_max.x = MIN(point_max.x, right);
    point_max.y = MIN(point_max.y, bottom);
    /* No need to draw. */
    if ((point_max.x <= point_min.x) || (point_max.y <= point_min.y)) {
        return VG_LITE_SUCCESS;
    }
    /* Get converted color when target is in L8 format. */
    color32 = (target->format == VG_LITE_L8) ? rgb_to_l(color) : color;
#if gcFEATURE_VG_RECTANGLE_TILED_OUT
    if (target->tiled == VG_LITE_TILED) {
        tile_setting = 0x40;
        stripe_mode = 0x20000000;
    }
#endif
#if gcFEATURE_VG_IM_FASTCLEAR
    /* Full-surface clears go through the fast-clear buffer instead of
     * touching the target pixels directly. */
    if ((rect == NULL) ||
        (point_min.x == 0 && point_min.y == 0 &&
        ((point_max.x - point_min.x) == s_context.rtbuffer->width) &&
        ((point_max.y - point_min.y) == s_context.rtbuffer->height))) {
        convert_color(s_context.rtbuffer->format, color32, &color32, NULL);
        clear_fc(&target->fc_buffer[0],(uint32_t)color32);
    }
    else
#endif
    {
        /* Setup the command buffer. */
        VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
        VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color32));
        /* Clear operation is not affected by color transformation and pixel matrix.
         * So PE clear and push_rectangle() clear have the same clear result color.
         */
#if gcFEATURE_VG_PE_CLEAR
        VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, 0));
        /* PE clear requires a full-width, unscissored, unmasked region;
         * otherwise fall through to the rectangle-draw clear below. */
        if ((!rect && (point_min.x == 0 && point_min.y == 0 && (point_max.x - point_min.x) == target->width)) &&
            !s_context.scissor_enable && !s_context.scissor_set && !s_context.enable_mask)
        {
            if (target->compress_mode == VG_LITE_DEC_DISABLE) {
#if gcFEATURE_VG_DST_BUFLEN_ALIGNED
                /* Non-24bpp targets: cleared byte length must be 64-aligned. */
                uint32_t align, mul, div;
                get_format_bytes(target->format, &mul, &div, &align);
                if ((mul / div != 3) && ((target->stride * (point_max.y - point_min.y)) % 64 != 0)) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
#endif
#if gcFEATURE_VG_24BIT
                /* 24bpp targets: cleared byte length must be 48-aligned. */
                uint32_t align1, mul1, div1;
                get_format_bytes(target->format, &mul1, &div1, &align1);
                if ((mul1 / div1 == 3) && ((target->stride * (point_max.y - point_min.y)) % 48 != 0)) {
                    return VG_LITE_INVALID_ARGUMENT;
                }
#endif
            }
            else {
                /* Compressed targets have their own length alignment rules. */
#if gcFEATURE_VG_DEC_COMPRESS_2_0
                if (target->format == VG_LITE_BGRA8888 || target->format == VG_LITE_BGRX8888) {
                    if ((target->stride * (point_max.y - point_min.y)) % 64 != 0) {
                        return VG_LITE_INVALID_ARGUMENT;
                    }
                }
                if (target->format == VG_LITE_BGR888) {
                    if ((target->stride * (point_max.y - point_min.y)) % 48 != 0) {
                        return VG_LITE_INVALID_ARGUMENT;
                    }
                }
#endif
#if gcFEATURE_VG_DEC_COMPRESS
                if (target->format == VG_LITE_BGRX8888 || target->format == VG_LITE_RGBX8888
                    || target->format == VG_LITE_BGRA8888 || target->format == VG_LITE_RGBA8888) {
                    if ((target->stride * (point_max.y - point_min.y)) % 64 != 0) {
                        return VG_LITE_INVALID_ARGUMENT;
                    }
                }
                if (target->format == VG_LITE_RGB888 || target->format == VG_LITE_BGR888) {
                    if ((target->stride * (point_max.y - point_min.y)) % 48 != 0) {
                        return VG_LITE_INVALID_ARGUMENT;
                    }
                }
#endif
            }
            /* 0x04: PE-clear command mode. */
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | 0x00000004 | tile_setting | s_context.scissor_enable | stripe_mode));
            VG_LITE_RETURN_ERROR(push_pe_clear(&s_context, target->stride * (point_max.y - point_min.y)));
        }
        else
#endif
        {
            /* 0x01: rectangle-draw mode clear of the clipped region. */
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | 0x00000001 | tile_setting | s_context.scissor_enable | stripe_mode));
            VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
        }
        /* flush VGPE after clear */
        VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00000011));
    }
    /* Success. */
    return VG_LITE_SUCCESS;
}
  2587. vg_lite_error_t vg_lite_blit2(vg_lite_buffer_t* target,
  2588. vg_lite_buffer_t* source0,
  2589. vg_lite_buffer_t* source1,
  2590. vg_lite_matrix_t* matrix0,
  2591. vg_lite_matrix_t* matrix1,
  2592. vg_lite_blend_t blend,
  2593. vg_lite_filter_t filter)
  2594. {
  2595. #if DUMP_API
  2596. FUNC_DUMP(vg_lite_blit2)(target, source0, source1, matrix0, matrix1, blend, filter);
  2597. #endif
  2598. #if gcFEATURE_VG_DOUBLE_IMAGE && gcFEATURE_VG_IM_INPUT
  2599. vg_lite_error_t error;
  2600. vg_lite_point_t point_min, point_max, temp;
  2601. vg_lite_matrix_t inverse_matrix;
  2602. vg_lite_float_t x_step[2][3];
  2603. vg_lite_float_t y_step[2][3];
  2604. vg_lite_float_t c_step[2][3];
  2605. uint32_t imageMode;
  2606. uint32_t blend_mode;
  2607. uint32_t filter_mode = 0;
  2608. int32_t stride0;
  2609. int32_t stride1;
  2610. uint32_t rotation = 0;
  2611. uint32_t conversion = 0;
  2612. uint32_t tiled0, tiled1;
  2613. int32_t left, right, bottom, top;
  2614. #if gcFEATURE_VG_TRACE_API
  2615. VGLITE_LOG("vg_lite_blit2 %p %p %p %p %p %d %d\n", target, source0, source1, matrix0, matrix1, blend, filter);
  2616. #endif
  2617. #if gcFEATURE_VG_ERROR_CHECK
  2618. #if !gcFEATURE_VG_24BIT
  2619. if ((target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) ||
  2620. (source0->format >= VG_LITE_RGB888 && source0->format <= VG_LITE_RGBA5658) ||
  2621. (source1->format >= VG_LITE_RGB888 && source1->format <= VG_LITE_RGBA5658)) {
  2622. return VG_LITE_NOT_SUPPORT;
  2623. }
  2624. #endif
  2625. #if !gcFEATURE_VG_YUY2_INPUT
  2626. if (source0->format == VG_LITE_YUYV || source0->format == VG_LITE_YUY2 || source1->format == VG_LITE_YUYV || source1->format == VG_LITE_YUY2) {
  2627. return VG_LITE_NOT_SUPPORT;
  2628. }
  2629. #endif
  2630. #if !gcFEATURE_VG_YUV_INPUT
  2631. if ((source0->format >= VG_LITE_NV12 && source0->format <= VG_LITE_NV16) || (source1->format >= VG_LITE_NV12 && source1->format <= VG_LITE_NV16) || source0->format == VG_LITE_NV24 || source1->format >= VG_LITE_NV24) {
  2632. return VG_LITE_NOT_SUPPORT;
  2633. }
  2634. #elif !gcFEATURE_VG_NV24_INPUT
  2635. if (source0->format == VG_LITE_NV24 || source1->format >= VG_LITE_NV24) {
  2636. return VG_LITE_NOT_SUPPORT;
  2637. }
  2638. #endif
  2639. #if !gcFEATURE_VG_AYUV_INPUT
  2640. if (source0->format == VG_LITE_ANV12 || source0->format == VG_LITE_AYUY2 || source1->format == VG_LITE_ANV12 || source1->format == VG_LITE_AYUY2) {
  2641. return VG_LITE_NOT_SUPPORT;
  2642. }
  2643. #endif
  2644. #if !gcFEATURE_VG_YUV_TILED_INPUT
  2645. if ((source0->format >= VG_LITE_YUY2_TILED && source0->format <= VG_LITE_AYUY2_TILED) || (source1->format >= VG_LITE_YUY2_TILED && source1->format <= VG_LITE_AYUY2_TILED) ||
  2646. (source0->format == VG_LITE_NV24_TILED) || (source1->format == VG_LITE_NV24_TILED)) {
  2647. return VG_LITE_NOT_SUPPORT;
  2648. }
  2649. #endif
  2650. #if !gcFEATURE_VG_NEW_BLEND_MODE
  2651. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  2652. return VG_LITE_NOT_SUPPORT;
  2653. }
  2654. #endif
  2655. #endif /* gcFEATURE_VG_ERROR_CHECK */
  2656. if (!matrix0) {
  2657. matrix0 = &identity_mtx;
  2658. }
  2659. if (!matrix1) {
  2660. matrix1 = &identity_mtx;
  2661. }
  2662. error = set_render_target(target);
  2663. if (error != VG_LITE_SUCCESS) {
  2664. return error;
  2665. }
  2666. /* Check if the specified matrix has rotation or perspective. */
  2667. if ( (matrix0->m[0][1] != 0.0f)
  2668. || (matrix0->m[1][0] != 0.0f)
  2669. || (matrix0->m[2][0] != 0.0f)
  2670. || (matrix0->m[2][1] != 0.0f)
  2671. || (matrix0->m[2][2] != 1.0f)
  2672. ) {
  2673. /* Mark that we have rotation. */
  2674. rotation = 0x8000;
  2675. }
  2676. /* Check whether L8 is supported or not. */
  2677. if ((target->format == VG_LITE_L8) && ((source0->format != VG_LITE_L8) && (source0->format != VG_LITE_A8))) {
  2678. conversion = 0x80000000;
  2679. }
  2680. /* Calculate transformation for Image0 (Paint) & Image1 (Image). */
  2681. /* Image1. */
  2682. /* Transform image (0,0) to screen. */
  2683. if (!transform(&temp, 0.0f, 0.0f, matrix0))
  2684. return VG_LITE_INVALID_ARGUMENT;
  2685. /* Set initial point. */
  2686. point_min = temp;
  2687. point_max = temp;
  2688. /* Transform image (0,height) to screen. */
  2689. if (!transform(&temp, 0.0f, (vg_lite_float_t)source0->height, matrix0))
  2690. return VG_LITE_INVALID_ARGUMENT;
  2691. /* Determine min/max. */
  2692. if (temp.x < point_min.x) point_min.x = temp.x;
  2693. if (temp.y < point_min.y) point_min.y = temp.y;
  2694. if (temp.x > point_max.x) point_max.x = temp.x;
  2695. if (temp.y > point_max.y) point_max.y = temp.y;
  2696. /* Transform image (width,height) to screen. */
  2697. if (!transform(&temp, (vg_lite_float_t)source0->width, (vg_lite_float_t)source0->height, matrix0))
  2698. return VG_LITE_INVALID_ARGUMENT;
  2699. /* Determine min/max. */
  2700. if (temp.x < point_min.x) point_min.x = temp.x;
  2701. if (temp.y < point_min.y) point_min.y = temp.y;
  2702. if (temp.x > point_max.x) point_max.x = temp.x;
  2703. if (temp.y > point_max.y) point_max.y = temp.y;
  2704. /* Transform image (width,0) to screen. */
  2705. if (!transform(&temp, (vg_lite_float_t)source0->width, 0.0f, matrix0))
  2706. return VG_LITE_INVALID_ARGUMENT;
  2707. /* Determine min/max. */
  2708. if (temp.x < point_min.x) point_min.x = temp.x;
  2709. if (temp.y < point_min.y) point_min.y = temp.y;
  2710. if (temp.x > point_max.x) point_max.x = temp.x;
  2711. if (temp.y > point_max.y) point_max.y = temp.y;
  2712. /* Clip to target. */
  2713. if (s_context.scissor_set) {
  2714. left = s_context.scissor[0];
  2715. top = s_context.scissor[1];
  2716. right = s_context.scissor[2];
  2717. bottom = s_context.scissor[3];
  2718. }
  2719. else {
  2720. left = top = 0;
  2721. right = target->width;
  2722. bottom = target->height;
  2723. }
  2724. point_min.x = MAX(point_min.x, left);
  2725. point_min.y = MAX(point_min.y, top);
  2726. point_max.x = MIN(point_max.x, right);
  2727. point_max.y = MIN(point_max.y, bottom);
  2728. if ((point_max.x - point_min.x) <= 0 || (point_max.y - point_min.y) <= 0)
  2729. return VG_LITE_SUCCESS;
  2730. /* Compute inverse matrix. */
  2731. if (!inverse(&inverse_matrix, matrix0))
  2732. return VG_LITE_INVALID_ARGUMENT;
  2733. /* Compute interpolation steps for image1 (Image). */
  2734. x_step[1][0] = inverse_matrix.m[0][0] / source0->width;
  2735. x_step[1][1] = inverse_matrix.m[1][0] / source0->height;
  2736. x_step[1][2] = inverse_matrix.m[2][0];
  2737. y_step[1][0] = inverse_matrix.m[0][1] / source0->width;
  2738. y_step[1][1] = inverse_matrix.m[1][1] / source0->height;
  2739. y_step[1][2] = inverse_matrix.m[2][1];
  2740. c_step[1][0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / source0->width;
  2741. c_step[1][1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source0->height;
  2742. c_step[1][2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  2743. /* Image0 (Paint, as background ). */
  2744. /* Transform image (0,0) to screen. */
  2745. if (!transform(&temp, 0.0f, 0.0f, matrix1))
  2746. return VG_LITE_INVALID_ARGUMENT;
  2747. /* Set initial point. */
  2748. point_min = temp;
  2749. point_max = temp;
  2750. /* Transform image (0,height) to screen. */
  2751. if (!transform(&temp, 0.0f, (vg_lite_float_t)source1->height, matrix1))
  2752. return VG_LITE_INVALID_ARGUMENT;
  2753. /* Determine min/max. */
  2754. if (temp.x < point_min.x) point_min.x = temp.x;
  2755. if (temp.y < point_min.y) point_min.y = temp.y;
  2756. if (temp.x > point_max.x) point_max.x = temp.x;
  2757. if (temp.y > point_max.y) point_max.y = temp.y;
  2758. /* Transform image (width,height) to screen. */
  2759. if (!transform(&temp, (vg_lite_float_t)source1->width, (vg_lite_float_t)source1->height, matrix1))
  2760. return VG_LITE_INVALID_ARGUMENT;
  2761. /* Determine min/max. */
  2762. if (temp.x < point_min.x) point_min.x = temp.x;
  2763. if (temp.y < point_min.y) point_min.y = temp.y;
  2764. if (temp.x > point_max.x) point_max.x = temp.x;
  2765. if (temp.y > point_max.y) point_max.y = temp.y;
  2766. /* Transform image (width,0) to screen. */
  2767. if (!transform(&temp, (vg_lite_float_t)source1->width, 0.0f, matrix1))
  2768. return VG_LITE_INVALID_ARGUMENT;
  2769. /* Determine min/max. */
  2770. if (temp.x < point_min.x) point_min.x = temp.x;
  2771. if (temp.y < point_min.y) point_min.y = temp.y;
  2772. if (temp.x > point_max.x) point_max.x = temp.x;
  2773. if (temp.y > point_max.y) point_max.y = temp.y;
  2774. /* Clip to target. */
  2775. if (s_context.scissor_set) {
  2776. left = s_context.scissor[0];
  2777. top = s_context.scissor[1];
  2778. right = s_context.scissor[2];
  2779. bottom = s_context.scissor[3];
  2780. }
  2781. else {
  2782. left = top = 0;
  2783. right = target->width;
  2784. bottom = target->height;
  2785. }
  2786. point_min.x = MAX(point_min.x, left);
  2787. point_min.y = MAX(point_min.y, top);
  2788. point_max.x = MIN(point_max.x, right);
  2789. point_max.y = MIN(point_max.y, bottom);
  2790. /* Compute inverse matrix. */
  2791. if (!inverse(&inverse_matrix, matrix1))
  2792. return VG_LITE_INVALID_ARGUMENT;
  2793. /* Compute interpolation steps for image1 (Image). */
  2794. x_step[0][0] = inverse_matrix.m[0][0] / source1->width;
  2795. x_step[0][1] = inverse_matrix.m[1][0] / source1->height;
  2796. x_step[0][2] = inverse_matrix.m[2][0];
  2797. y_step[0][0] = inverse_matrix.m[0][1] / source1->width;
  2798. y_step[0][1] = inverse_matrix.m[1][1] / source1->height;
  2799. y_step[0][2] = inverse_matrix.m[2][1];
  2800. c_step[0][0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / source1->width;
  2801. c_step[0][1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source1->height;
  2802. c_step[0][2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  2803. /* DOUBLE_IMAGE mode. */
  2804. imageMode = 0x5000;
  2805. blend_mode = convert_blend(blend);
  2806. tiled0 = (source0->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0;
  2807. tiled1 = (source1->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0;
  2808. switch (filter) {
  2809. case VG_LITE_FILTER_POINT:
  2810. filter_mode = 0;
  2811. break;
  2812. case VG_LITE_FILTER_LINEAR:
  2813. filter_mode = 0x10000;
  2814. break;
  2815. case VG_LITE_FILTER_BI_LINEAR:
  2816. filter_mode = 0x20000;
  2817. break;
  2818. case VG_LITE_FILTER_GAUSSIAN:
  2819. filter_mode = 0x30000;
  2820. break;
  2821. }
  2822. /* Setup the command buffer. */
  2823. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x10000001 | imageMode | blend_mode | rotation | s_context.enable_mask | s_context.color_transform | s_context.matrix_enable));
  2824. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  2825. /* Program image1. */
  2826. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *) &c_step[1][0]));
  2827. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *) &c_step[1][1]));
  2828. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *) &c_step[1][2]));
  2829. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *) &x_step[1][0]));
  2830. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *) &x_step[1][1]));
  2831. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *) &x_step[1][2]));
  2832. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  2833. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *) &y_step[1][0]));
  2834. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *) &y_step[1][1]));
  2835. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *) &y_step[1][2]));
  2836. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source0->format) | filter_mode | conversion));
  2837. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A27, 0));
  2838. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source0->address));
  2839. if (source0->yuv.uv_planar != 0) {
  2840. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source0->yuv.uv_planar));
  2841. }
  2842. if (source0->yuv.v_planar != 0) {
  2843. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source0->yuv.v_planar));
  2844. }
  2845. if (source0->yuv.alpha_planar != 0) {
  2846. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source0->yuv.alpha_planar));
  2847. }
  2848. /* 24bit format stride configured to 4bpp. */
  2849. if (source0->format >= VG_LITE_RGB888 && source0->format <= VG_LITE_RGBA5658) {
  2850. stride0 = source0->stride / 3 * 4;
  2851. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, stride0 | tiled0));
  2852. }
  2853. else {
  2854. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, source0->stride | tiled0));
  2855. }
  2856. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2D, 0));
  2857. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2F, source0->width | (source0->height << 16)));
  2858. /* Program image0 (Paint, as background). */
  2859. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A84, (void *) &c_step[0][0]));
  2860. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A85, (void *) &c_step[0][1]));
  2861. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A86, (void *) &c_step[0][2]));
  2862. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A7C, (void *) &x_step[0][0]));
  2863. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A7D, (void *) &x_step[0][1]));
  2864. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A7E, (void *) &x_step[0][2]));
  2865. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  2866. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A80, (void *) &y_step[0][0]));
  2867. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A81, (void *) &y_step[0][1]));
  2868. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A82, (void *) &y_step[0][2]));
  2869. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source1->format) | filter_mode | conversion));
  2870. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, 0));
  2871. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source1->address));
  2872. if (source1->yuv.uv_planar != 0) {
  2873. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A50, source1->yuv.uv_planar));
  2874. }
  2875. if (source1->yuv.v_planar != 0) {
  2876. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A52, source1->yuv.v_planar));
  2877. }
  2878. if (source1->yuv.alpha_planar != 0) {
  2879. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A52, source1->yuv.alpha_planar));
  2880. }
  2881. /* 24bit format stride configured to 4bpp. */
  2882. if (source1->format >= VG_LITE_RGB888 && source1->format <= VG_LITE_RGBA5658) {
  2883. stride1 = source1->stride / 3 * 4;
  2884. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, stride1 | tiled1));
  2885. }
  2886. else {
  2887. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, source1->stride | tiled1));
  2888. }
  2889. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  2890. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source1->width | (source1->height << 16)));
  2891. VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
  2892. VG_LITE_RETURN_ERROR(flush_target());
  2893. vglitemDUMP_BUFFER("image", (size_t)source0->address, source0->memory, 0, (source0->stride)*(source0->height));
  2894. vglitemDUMP_BUFFER("image", (size_t)source1->address, source1->memory, 0, (source1->stride)*(source1->height));
  2895. #if DUMP_IMAGE
  2896. dump_img(source0->memory, source0->width, source0->height, source0->format);
  2897. dump_img(source1->memory, source1->width, source1->height, source1->format);
  2898. #endif
  2899. return error;
  2900. #else
  2901. return VG_LITE_NOT_SUPPORT;
  2902. #endif
  2903. }
  2904. vg_lite_error_t vg_lite_blit(vg_lite_buffer_t* target,
  2905. vg_lite_buffer_t* source,
  2906. vg_lite_matrix_t* matrix,
  2907. vg_lite_blend_t blend,
  2908. vg_lite_color_t color,
  2909. vg_lite_filter_t filter)
  2910. {
  2911. #if DUMP_API
  2912. FUNC_DUMP(vg_lite_blit)(target, source, matrix, blend, color, filter);
  2913. #endif
  2914. #if gcFEATURE_VG_IM_INPUT
  2915. vg_lite_error_t error;
  2916. vg_lite_point_t point_min, point_max, temp;
  2917. vg_lite_matrix_t inverse_matrix;
  2918. vg_lite_float_t x_step[3];
  2919. vg_lite_float_t y_step[3];
  2920. vg_lite_float_t c_step[3];
  2921. uint32_t imageMode = 0;
  2922. uint32_t paintType = 0;
  2923. uint32_t in_premult = 0;
  2924. uint32_t blend_mode;
  2925. uint32_t filter_mode = 0;
  2926. uint32_t transparency_mode = 0;
  2927. uint32_t conversion = 0;
  2928. uint32_t tiled_source;
  2929. uint32_t yuv2rgb = 0;
  2930. uint32_t uv_swiz = 0;
  2931. uint32_t compress_mode = 0;
  2932. uint32_t src_premultiply_enable = 0;
  2933. uint32_t index_endian = 0;
  2934. uint32_t eco_fifo = 0;
  2935. uint32_t tile_setting = 0;
  2936. uint32_t stripe_mode = 0;
  2937. uint32_t premul_flag = 0;
  2938. uint32_t prediv_flag = 0;
  2939. int32_t left, top, right, bottom;
  2940. int32_t stride;
  2941. uint8_t lvgl_sw_blend = 0;
  2942. #if VG_SW_BLIT_PRECISION_OPT
  2943. uint8_t* bufferPointer;
  2944. uint32_t bufferAddress = 0, bufferAlignAddress = 0, addressOffset = 0, mul = 0, div = 0, required_align = 0;
  2945. vg_lite_buffer_t new_target;
  2946. vg_lite_point_t point0_0_afterTransform = { 0 };
  2947. uint8_t enableSwPreOpt = 0;
  2948. int32_t matrixOffsetX = 0;
  2949. /* Only accept interger move */
  2950. if (matrix != NULL && filter == VG_LITE_FILTER_POINT) {
  2951. matrix->m[0][2] = (vg_lite_float_t)(matrix->m[0][2] >= 0 ? (int32_t)(matrix->m[0][2] + 0.5) : (int32_t)(matrix->m[0][2] - 0.5));
  2952. matrix->m[1][2] = (vg_lite_float_t)(matrix->m[1][2] >= 0 ? (int32_t)(matrix->m[1][2] + 0.5) : (int32_t)(matrix->m[1][2] - 0.5));
  2953. /* Only nonperspective transform with scale or rotation could enable optimization */
  2954. if ((matrix->m[2][0] == 0.0f && matrix->m[2][1] == 0.0f && matrix->m[2][2] == 1.0f) &&
  2955. (matrix->m[0][0] != 1.0f || matrix->m[1][1] != 1.0f || matrix->m[0][1] != 0.0f)) {
  2956. if (target->tiled != VG_LITE_TILED && (target->format < VG_LITE_RGB888 || target->format > VG_LITE_RGBA5658_PLANAR)) {
  2957. enableSwPreOpt = 1;
  2958. }
  2959. }
  2960. }
  2961. #endif /* VG_SW_BLIT_PRECISION_OPT */
  2962. #if gcFEATURE_VG_TRACE_API
  2963. VGLITE_LOG("vg_lite_blit %p %p %p %d 0x%08X %d\n", target, source, matrix, blend, color, filter);
  2964. #endif
  2965. #if gcFEATURE_VG_ERROR_CHECK
  2966. #if !gcFEATURE_VG_INDEX_ENDIAN
  2967. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  2968. return VG_LITE_NOT_SUPPORT;
  2969. }
  2970. #endif
  2971. #if !gcFEATURE_VG_RGBA8_ETC2_EAC
  2972. if (source->format == VG_LITE_RGBA8888_ETC2_EAC) {
  2973. return VG_LITE_NOT_SUPPORT;
  2974. }
  2975. #else
  2976. if ((source->format == VG_LITE_RGBA8888_ETC2_EAC) && (source->width % 16 || source->height % 4)) {
  2977. return VG_LITE_INVALID_ARGUMENT;
  2978. }
  2979. #endif
  2980. #if !gcFEATURE_VG_YUY2_INPUT
  2981. if (source->format == VG_LITE_YUYV || source->format == VG_LITE_YUY2) {
  2982. return VG_LITE_NOT_SUPPORT;
  2983. }
  2984. #endif
  2985. #if !gcFEATURE_VG_YUV_INPUT
  2986. if ((source->format >= VG_LITE_NV12 && source->format <= VG_LITE_NV16) || source->format == VG_LITE_NV24) {
  2987. return VG_LITE_NOT_SUPPORT;
  2988. }
  2989. #elif !gcFEATURE_VG_NV24_INPUT
  2990. if (source->format == VG_LITE_NV24) {
  2991. return VG_LITE_NOT_SUPPORT;
  2992. }
  2993. #endif
  2994. #if !gcFEATURE_VG_AYUV_INPUT
  2995. if (source->format == VG_LITE_ANV12 || source->format == VG_LITE_AYUY2) {
  2996. return VG_LITE_NOT_SUPPORT;
  2997. }
  2998. #endif
  2999. #if !gcFEATURE_VG_YUV_TILED_INPUT
  3000. if ((source->format >= VG_LITE_YUY2_TILED && source->format <= VG_LITE_AYUY2_TILED) || (source->format == VG_LITE_NV24_TILED)) {
  3001. return VG_LITE_NOT_SUPPORT;
  3002. }
  3003. #endif
  3004. #if !gcFEATURE_VG_24BIT
  3005. if ((target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) ||
  3006. (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658)) {
  3007. return VG_LITE_NOT_SUPPORT;
  3008. }
  3009. #endif
  3010. #if !gcFEATURE_VG_24BIT_PLANAR
  3011. if (source->format >= VG_LITE_ABGR8565_PLANAR && source->format <= VG_LITE_RGBA5658_PLANAR) {
  3012. return VG_LITE_NOT_SUPPORT;
  3013. }
  3014. #endif
  3015. #if !gcFEATURE_VG_IM_DEC_INPUT
  3016. if (source->compress_mode != VG_LITE_DEC_DISABLE) {
  3017. return VG_LITE_NOT_SUPPORT;
  3018. }
  3019. #endif
  3020. #if !gcFEATURE_VG_STENCIL
  3021. if (source->image_mode == VG_LITE_STENCIL_MODE) {
  3022. return VG_LITE_NOT_SUPPORT;
  3023. }
  3024. #endif
  3025. #if !gcFEATURE_VG_NEW_BLEND_MODE
  3026. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  3027. return VG_LITE_NOT_SUPPORT;
  3028. }
  3029. #endif
  3030. if (blend && (target->format == VG_LITE_YUYV || target->format == VG_LITE_YUY2 || target->format == VG_LITE_YUY2_TILED
  3031. || target->format == VG_LITE_AYUY2 || target->format == VG_LITE_AYUY2_TILED)) {
  3032. return VG_LITE_NOT_SUPPORT;
  3033. }
  3034. #if (CHIPID == 0x355)
  3035. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  3036. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  3037. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  3038. printf("Target format: 0x%x is not supported.\n", target->format);
  3039. return VG_LITE_NOT_SUPPORT;
  3040. }
  3041. if (source->format == VG_LITE_L8 || source->format == VG_LITE_YUYV ||
  3042. source->format == VG_LITE_BGRA2222 || source->format == VG_LITE_RGBA2222 ||
  3043. source->format == VG_LITE_ABGR2222 || source->format == VG_LITE_ARGB2222) {
  3044. printf("Source format: 0x%x is not supported.\n", source->format);
  3045. return VG_LITE_NOT_SUPPORT;
  3046. }
  3047. #endif
  3048. VG_LITE_RETURN_ERROR(srcbuf_align_check(source));
  3049. VG_LITE_RETURN_ERROR(check_compress(source->format, source->compress_mode, source->tiled, source->width, source->height));
  3050. #endif /* gcFEATURE_VG_ERROR_CHECK */
  3051. #if !gcFEATURE_VG_LVGL_SUPPORT
  3052. if ((blend >= VG_LITE_BLEND_ADDITIVE_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) || (blend == VG_LITE_BLEND_NORMAL_LVGL && gcFEATURE_VG_SRC_PREMULTIPLIED)) {
  3053. if (!source->lvgl_buffer) {
  3054. source->lvgl_buffer = (vg_lite_buffer_t *)vg_lite_os_malloc(sizeof(vg_lite_buffer_t));
  3055. *source->lvgl_buffer = *source;
  3056. source->lvgl_buffer->lvgl_buffer = NULL;
  3057. vg_lite_allocate(source->lvgl_buffer);
  3058. }
  3059. /* Make sure render target is up to date before reading RT. */
  3060. vg_lite_finish();
  3061. setup_lvgl_image(target, source, source->lvgl_buffer, blend);
  3062. blend = VG_LITE_BLEND_SRC_OVER;
  3063. lvgl_sw_blend = 1;
  3064. }
  3065. #endif
  3066. if (!matrix) {
  3067. matrix = &identity_mtx;
  3068. }
  3069. #if gcFEATURE_VG_INDEX_ENDIAN
  3070. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  3071. index_endian = 1 << 14;
  3072. }
  3073. #endif
  3074. #if !gcFEATURE_VG_STRIPE_MODE
  3075. /* Enable fifo feature to share buffer between vg and ts to improve the rotation performance */
  3076. eco_fifo = 1 << 7;
  3077. #endif
  3078. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  3079. s_context.filter = filter;
  3080. /* Check if the specified matrix has rotation or perspective. */
  3081. if ( ( (matrix->m[0][1] != 0.0f)
  3082. || (matrix->m[1][0] != 0.0f)
  3083. || (matrix->m[2][0] != 0.0f)
  3084. || (matrix->m[2][1] != 0.0f)
  3085. || (matrix->m[2][2] != 1.0f)
  3086. )
  3087. && ( blend == VG_LITE_BLEND_NONE
  3088. || blend == VG_LITE_BLEND_SRC_IN
  3089. || blend == VG_LITE_BLEND_DST_IN
  3090. )
  3091. ) {
  3092. #if gcFEATURE_VG_BORDER_CULLING
  3093. /* Mark that we have rotation. */
  3094. transparency_mode = 0x8000;
  3095. #else
  3096. blend = VG_LITE_BLEND_SRC_OVER;
  3097. #endif
  3098. #if !gcFEATURE_VG_STRIPE_MODE
  3099. stripe_mode = 1 << 29;
  3100. #endif
  3101. }
  3102. /* Check whether L8 is supported or not. */
  3103. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  3104. conversion = 0x80000000;
  3105. }
  3106. #if gcFEATURE_VG_16PIXELS_ALIGNED
  3107. /* Check if source specify bytes are aligned */
  3108. error = _check_source_aligned(source->format, source->stride);
  3109. if (error != VG_LITE_SUCCESS) {
  3110. return error;
  3111. }
  3112. #endif
  3113. /* Transform image (0,0) to screen. */
  3114. if (!transform(&temp, 0.0f, 0.0f, matrix))
  3115. return VG_LITE_INVALID_ARGUMENT;
  3116. /* Set initial point. */
  3117. point_min = temp;
  3118. point_max = temp;
  3119. #if VG_SW_BLIT_PRECISION_OPT
  3120. point0_0_afterTransform = temp;
  3121. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3122. /* Transform image (0,height) to screen. */
  3123. if (!transform(&temp, 0.0f, (vg_lite_float_t)source->height, matrix))
  3124. return VG_LITE_INVALID_ARGUMENT;
  3125. /* Determine min/max. */
  3126. if (temp.x < point_min.x) point_min.x = temp.x;
  3127. if (temp.y < point_min.y) point_min.y = temp.y;
  3128. if (temp.x > point_max.x) point_max.x = temp.x;
  3129. if (temp.y > point_max.y) point_max.y = temp.y;
  3130. /* Transform image (width,height) to screen. */
  3131. if (!transform(&temp, (vg_lite_float_t)source->width, (vg_lite_float_t)source->height, matrix))
  3132. return VG_LITE_INVALID_ARGUMENT;
  3133. /* Determine min/max. */
  3134. if (temp.x < point_min.x) point_min.x = temp.x;
  3135. if (temp.y < point_min.y) point_min.y = temp.y;
  3136. if (temp.x > point_max.x) point_max.x = temp.x;
  3137. if (temp.y > point_max.y) point_max.y = temp.y;
  3138. /* Transform image (width,0) to screen. */
  3139. if (!transform(&temp, (vg_lite_float_t)source->width, 0.0f, matrix))
  3140. return VG_LITE_INVALID_ARGUMENT;
  3141. /* Determine min/max. */
  3142. if (temp.x < point_min.x) point_min.x = temp.x;
  3143. if (temp.y < point_min.y) point_min.y = temp.y;
  3144. if (temp.x > point_max.x) point_max.x = temp.x;
  3145. if (temp.y > point_max.y) point_max.y = temp.y;
  3146. /* Clip to target. */
  3147. if (s_context.scissor_set && !target->scissor_buffer) {
  3148. left = s_context.scissor[0];
  3149. top = s_context.scissor[1];
  3150. right = s_context.scissor[2];
  3151. bottom = s_context.scissor[3];
  3152. }
  3153. else {
  3154. left = 0;
  3155. top = 0;
  3156. right = target->width;
  3157. bottom = target->height;
  3158. }
  3159. point_min.x = MAX(point_min.x, left);
  3160. point_min.y = MAX(point_min.y, top);
  3161. point_max.x = MIN(point_max.x, right);
  3162. point_max.y = MIN(point_max.y, bottom);
  3163. /* No need to draw. */
  3164. if ((point_max.x <= point_min.x) || (point_max.y <= point_min.y)) {
  3165. return VG_LITE_SUCCESS;
  3166. }
  3167. #if gcFEATURE_VG_GAMMA
  3168. get_st_gamma_src_dest(source, target);
  3169. #endif
  3170. #if gcFEATURE_VG_GLOBAL_ALPHA
  3171. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  3172. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_GLOBAL, 0xff));
  3173. }
  3174. #endif
  3175. /*blend input into context*/
  3176. s_context.blend_mode = blend;
  3177. in_premult = 0x00000000;
  3178. /* Adjust premultiply setting according to openvg condition */
  3179. src_premultiply_enable = 0x01000100;
  3180. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  3181. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  3182. prediv_flag = 0;
  3183. }
  3184. else {
  3185. prediv_flag = 1;
  3186. }
  3187. if ((s_context.blend_mode >= OPENVG_BLEND_SRC && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE
  3188. || (s_context.blend_mode >= VG_LITE_BLEND_NORMAL_LVGL && s_context.blend_mode <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  3189. premul_flag = 1;
  3190. }
  3191. else {
  3192. premul_flag = 0;
  3193. }
  3194. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  3195. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  3196. src_premultiply_enable = 0x01000100;
  3197. in_premult = 0x10000000;
  3198. }
  3199. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  3200. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  3201. src_premultiply_enable = 0x00000100;
  3202. in_premult = 0x00000000;
  3203. }
  3204. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  3205. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  3206. src_premultiply_enable = 0x01000100;
  3207. in_premult = 0x00000000;
  3208. }
  3209. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  3210. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  3211. src_premultiply_enable = 0x00000100;
  3212. in_premult = 0x00000000;
  3213. }
  3214. if((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  3215. #if (CHIPID==0x255)
  3216. src_premultiply_enable = 0x00000000;
  3217. #endif
  3218. #if gcFEATURE_VG_SRC_PREMULTIPLIED
  3219. src_premultiply_enable = src_premultiply_enable & ~(1 << 8);
  3220. #endif
  3221. in_premult = 0x00000000;
  3222. }
  3223. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  3224. target->apply_premult = 1;
  3225. }
  3226. else {
  3227. target->apply_premult = 0;
  3228. }
  3229. #if (gcFEATURE_VG_SRC_PREMULTIPLIED == 0)
  3230. if (blend == VG_LITE_BLEND_NORMAL_LVGL)
  3231. in_premult = 0x00000000;
  3232. #endif
  3233. #if VG_SW_BLIT_PRECISION_OPT
  3234. if (enableSwPreOpt) {
  3235. get_format_bytes(target->format, &mul, &div, &required_align);
  3236. //update target memory address
  3237. bufferAddress = target->address;
  3238. bufferAddress = bufferAddress + point_min.y * target->stride + point_min.x * (mul / div);
  3239. //base address need align
  3240. bufferAlignAddress = bufferAddress & ~(required_align - 1);
  3241. //update buffer pointer address
  3242. bufferPointer = (uint8_t*)target->memory;
  3243. bufferPointer = bufferPointer + (bufferAlignAddress - target->address);
  3244. //update offset
  3245. addressOffset = bufferAddress - bufferAlignAddress;
  3246. //we need give some offset to match actual translate
  3247. matrixOffsetX = addressOffset * div / mul;
  3248. //update new_target and set it as target
  3249. memcpy(&new_target, target, sizeof(vg_lite_buffer_t));
  3250. new_target.address = bufferAddress;
  3251. new_target.memory = bufferPointer;
  3252. new_target.width = point_max.x - point_min.x + matrixOffsetX;
  3253. new_target.height = point_max.y - point_min.y;
  3254. target = &new_target;
  3255. //update matrix
  3256. matrix->m[0][2] = (vg_lite_float_t)(point0_0_afterTransform.x - point_min.x + matrixOffsetX);
  3257. matrix->m[1][2] = (vg_lite_float_t)(point0_0_afterTransform.y - point_min.y);
  3258. //modify point_min and point_max to let them start from (0, 0)
  3259. point_max.x = point_max.x - point_min.x;
  3260. point_max.y = point_max.y - point_min.y;
  3261. point_min.x = 0;
  3262. point_min.y = 0;
  3263. }
  3264. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3265. error = set_render_target(target);
  3266. if (error != VG_LITE_SUCCESS) {
  3267. return error;
  3268. }
  3269. /* Compute inverse matrix. */
  3270. if (!inverse(&inverse_matrix, matrix))
  3271. return VG_LITE_INVALID_ARGUMENT;
  3272. #if gcFEATURE_VG_MATH_PRECISION_FIX
  3273. if (filter == VG_LITE_FILTER_LINEAR)
  3274. {
  3275. /* Compute interpolation steps. */
  3276. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]);
  3277. x_step[1] = inverse_matrix.m[1][0];
  3278. x_step[2] = inverse_matrix.m[2][0];
  3279. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]);
  3280. y_step[1] = inverse_matrix.m[1][1];
  3281. y_step[2] = inverse_matrix.m[2][1];
  3282. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]);
  3283. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  3284. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3285. }
  3286. else if (filter == VG_LITE_FILTER_BI_LINEAR)
  3287. {
  3288. /* Shift the linear sampling points to center of pixels to avoid pixel offset issue */
  3289. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]);
  3290. x_step[1] = (inverse_matrix.m[1][0] - 0.5f * inverse_matrix.m[2][0]);
  3291. x_step[2] = inverse_matrix.m[2][0];
  3292. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]);
  3293. y_step[1] = (inverse_matrix.m[1][1] - 0.5f * inverse_matrix.m[2][1]);
  3294. y_step[2] = inverse_matrix.m[2][1];
  3295. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]);
  3296. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[1][2] - 0.5f * inverse_matrix.m[2][2]);
  3297. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3298. }
  3299. else
  3300. {
  3301. /* Compute interpolation steps. */
  3302. x_step[0] = inverse_matrix.m[0][0];
  3303. x_step[1] = inverse_matrix.m[1][0];
  3304. x_step[2] = inverse_matrix.m[2][0];
  3305. y_step[0] = inverse_matrix.m[0][1];
  3306. y_step[1] = inverse_matrix.m[1][1];
  3307. y_step[2] = inverse_matrix.m[2][1];
  3308. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]);
  3309. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  3310. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3311. // For FL32 rounding trick
  3312. uint32_t datax[2], datay[2], datac[2];
  3313. for (int idx = 0; idx < 2; idx++)
  3314. {
  3315. datax[idx] = *(uint32_t*)((void*)&x_step[idx]);
  3316. datay[idx] = *(uint32_t*)((void*)&y_step[idx]);
  3317. datac[idx] = *(uint32_t*)((void*)&c_step[idx]);
  3318. }
  3319. for (int i = 0; i < 2; i++)
  3320. {
  3321. int aSign = (datax[i] & 0x80000000) >> 31;
  3322. int bSign = (datay[i] & 0x80000000) >> 31;
  3323. int cSign = (datac[i] & 0x80000000) >> 31;
  3324. int aIn = (datax[i] & 0x20) >> 5;
  3325. int bIn = (datay[i] & 0x20) >> 5;
  3326. if ((aSign ==0 ) && (bSign == 0) && (aIn == bIn))
  3327. {
  3328. int cIn = (aSign ^ cSign) ^ ((~aIn) & 0x1);
  3329. if (cIn == 0)
  3330. {
  3331. datac[i] &= 0xFFFFFFDF;
  3332. }
  3333. else
  3334. {
  3335. datac[i] |= 0x00000020;
  3336. }
  3337. c_step[i] = *(vg_lite_float_t*)((void*)&datac[i]);
  3338. }
  3339. }
  3340. }
  3341. #else
  3342. if (filter == VG_LITE_FILTER_LINEAR)
  3343. {
  3344. /* Compute interpolation steps. */
  3345. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]) / source->width;
  3346. x_step[1] = inverse_matrix.m[1][0] / source->height;
  3347. x_step[2] = inverse_matrix.m[2][0];
  3348. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]) / source->width;
  3349. y_step[1] = inverse_matrix.m[1][1] / source->height;
  3350. y_step[2] = inverse_matrix.m[2][1];
  3351. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]) / source->width;
  3352. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source->height;
  3353. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3354. }
  3355. else if (filter == VG_LITE_FILTER_BI_LINEAR)
  3356. {
  3357. /* Shift the linear sampling points to center of pixels to avoid pixel offset issue */
  3358. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]) / source->width;
  3359. x_step[1] = (inverse_matrix.m[1][0] - 0.5f * inverse_matrix.m[2][0]) / source->height;
  3360. x_step[2] = inverse_matrix.m[2][0];
  3361. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]) / source->width;
  3362. y_step[1] = (inverse_matrix.m[1][1] - 0.5f * inverse_matrix.m[2][1]) / source->height;
  3363. y_step[2] = inverse_matrix.m[2][1];
  3364. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]) / source->width;
  3365. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[1][2] - 0.5f * inverse_matrix.m[2][2]) / source->height;
  3366. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3367. }
  3368. else
  3369. {
  3370. /* Compute interpolation steps. */
  3371. x_step[0] = inverse_matrix.m[0][0] / source->width;
  3372. x_step[1] = inverse_matrix.m[1][0] / source->height;
  3373. x_step[2] = inverse_matrix.m[2][0];
  3374. y_step[0] = inverse_matrix.m[0][1] / source->width;
  3375. y_step[1] = inverse_matrix.m[1][1] / source->height;
  3376. y_step[2] = inverse_matrix.m[2][1];
  3377. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / source->width;
  3378. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source->height;
  3379. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3380. }
  3381. #endif
  3382. #if VG_SW_BLIT_PRECISION_OPT
  3383. /* Update C offset */
  3384. if (enableSwPreOpt) {
  3385. uint8_t indexC0 = 0;
  3386. uint8_t indexC1 = 0;
  3387. uint32_t temp0 = (uint32_t)(matrix->angle / 45);
  3388. uint32_t temp1 = (uint32_t)(matrix->scaleX * 100);
  3389. uint32_t temp2 = (uint32_t)(matrix->scaleY * 100);
  3390. indexC0 = GetIndex(temp0, temp1);
  3391. indexC1 = GetIndex(temp0, temp2);
  3392. c_step[0] = c_step[0] + offsetTable[indexC0];
  3393. c_step[1] = c_step[1] + offsetTable[indexC1];
  3394. }
  3395. #else
  3396. c_step[0] = c_step[0] + offsetTable[0];
  3397. c_step[1] = c_step[1] + offsetTable[0];
  3398. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3399. /* Determine image mode (NORMAL, NONE , MULTIPLY or STENCIL) depending on the color. */
  3400. switch (source->image_mode) {
  3401. case VG_LITE_NONE_IMAGE_MODE:
  3402. imageMode = 0x00000000;
  3403. break;
  3404. case VG_LITE_MULTIPLY_IMAGE_MODE:
  3405. imageMode = 0x00002000;
  3406. break;
  3407. case VG_LITE_NORMAL_IMAGE_MODE:
  3408. case VG_LITE_ZERO:
  3409. imageMode = 0x00001000;
  3410. break;
  3411. case VG_LITE_STENCIL_MODE:
  3412. imageMode = 0x00003000;
  3413. break;
  3414. case VG_LITE_RECOLOR_MODE:
  3415. imageMode = 0x00006000;
  3416. break;
  3417. }
  3418. switch (filter) {
  3419. case VG_LITE_FILTER_POINT:
  3420. filter_mode = 0;
  3421. break;
  3422. case VG_LITE_FILTER_LINEAR:
  3423. filter_mode = 0x10000;
  3424. break;
  3425. case VG_LITE_FILTER_BI_LINEAR:
  3426. filter_mode = 0x20000;
  3427. break;
  3428. case VG_LITE_FILTER_GAUSSIAN:
  3429. filter_mode = 0x30000;
  3430. break;
  3431. }
  3432. switch (source->paintType)
  3433. {
  3434. case VG_LITE_PAINT_COLOR:
  3435. paintType = 0;
  3436. break;
  3437. case VG_LITE_PAINT_LINEAR_GRADIENT:
  3438. paintType = 1 << 24;
  3439. break;
  3440. case VG_LITE_PAINT_RADIAL_GRADIENT:
  3441. paintType = 1 << 25;
  3442. break;
  3443. case VG_LITE_PAINT_PATTERN:
  3444. paintType = 1 << 24 | 1 << 25;
  3445. break;
  3446. default:
  3447. break;
  3448. }
  3449. blend_mode = convert_blend(blend);
  3450. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  3451. #if gcFEATURE_VG_RECTANGLE_TILED_OUT
  3452. if (target->tiled == VG_LITE_TILED) {
  3453. tile_setting = 0x40;
  3454. stripe_mode = 0x20000000;
  3455. }
  3456. #endif
  3457. #if (gcFEATURE_VG_DEC_COMPRESS | gcFEATURE_VG_DEC_COMPRESS_2_0)
  3458. if (source->compress_mode != VG_LITE_DEC_DISABLE && target->compress_mode == VG_LITE_DEC_DISABLE) {
  3459. if (source->format != target->format) {
  3460. printf("The format of source and target buffers is inconsistent in decompressing!\n");
  3461. return VG_LITE_INVALID_ARGUMENT;
  3462. }
  3463. }
  3464. #endif
  3465. compress_mode = (uint32_t)source->compress_mode << 25;
  3466. /* Setup the command buffer. */
  3467. #if gcFEATURE_VG_GLOBAL_ALPHA
  3468. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD1, s_context.dst_alpha_mode | s_context.dst_alpha_value | s_context.src_alpha_mode | s_context.src_alpha_value));
  3469. #endif
  3470. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x00000001 | paintType | in_premult | imageMode | blend_mode | transparency_mode | tile_setting | s_context.enable_mask | s_context.color_transform | s_context.matrix_enable | eco_fifo | s_context.scissor_enable | stripe_mode));
  3471. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  3472. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *) &c_step[0]));
  3473. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *) &c_step[1]));
  3474. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *) &c_step[2]));
  3475. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *) &x_step[0]));
  3476. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *) &x_step[1]));
  3477. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *) &x_step[2]));
  3478. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  3479. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *) &y_step[0]));
  3480. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *) &y_step[1]));
  3481. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *) &y_step[2]));
  3482. if (((source->format >= VG_LITE_YUY2) &&
  3483. (source->format <= VG_LITE_AYUY2)) ||
  3484. ((source->format >= VG_LITE_YUY2_TILED) &&
  3485. (source->format <= VG_LITE_AYUY2_TILED))) {
  3486. yuv2rgb = convert_yuv2rgb(source->yuv.yuv2rgb);
  3487. uv_swiz = convert_uv_swizzle(source->yuv.swizzle);
  3488. }
  3489. #if gcFEATURE_VG_IM_FASTCLEAR
  3490. if (source->fc_enable) {
  3491. uint32_t im_fc_enable = (source->fc_enable == 0) ? 0 : 0x800000;
  3492. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | uv_swiz | yuv2rgb | conversion | im_fc_enable | ahb_read_split | compress_mode | src_premultiply_enable | index_endian));
  3493. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0ACF, source->fc_buffer[0].address)); /* FC buffer address. */
  3494. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD0, source->fc_buffer[0].color)); /* FC clear value. */
  3495. }
  3496. #endif
  3497. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | uv_swiz | yuv2rgb | conversion | compress_mode | src_premultiply_enable | index_endian));
  3498. if (source->yuv.uv_planar) {
  3499. /* Program u plane address if necessary. */
  3500. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source->yuv.uv_planar));
  3501. }
  3502. if (source->yuv.v_planar) {
  3503. /* Program v plane address if necessary. */
  3504. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.v_planar));
  3505. }
  3506. if (source->yuv.alpha_planar != 0) {
  3507. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.alpha_planar));
  3508. }
  3509. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A27, target->bg_color));
  3510. #if !gcFEATURE_VG_LVGL_SUPPORT
  3511. if (lvgl_sw_blend) {
  3512. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->lvgl_buffer->address));
  3513. }
  3514. else
  3515. #endif
  3516. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->address));
  3517. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  3518. /* 24bit format stride configured to 4bpp. */
  3519. if (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658) {
  3520. stride = source->stride / 3 * 4;
  3521. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, stride | tiled_source));
  3522. }
  3523. else {
  3524. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, source->stride | tiled_source));
  3525. }
  3526. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2D, 0));
  3527. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2F, source->width | (source->height << 16)));
  3528. #if VG_SW_BLIT_PRECISION_OPT
  3529. if (enableSwPreOpt) {
  3530. VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x + matrixOffsetX, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
  3531. } else
  3532. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3533. {
  3534. VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
  3535. }
  3536. #if !gcFEATURE_VG_STRIPE_MODE
  3537. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  3538. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  3539. #endif
  3540. if (!s_context.flexa_mode) {
  3541. error = flush_target();
  3542. }
  3543. #if gcFEATURE_VG_GLOBAL_ALPHA
  3544. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  3545. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_NORMAL, 0xFF));
  3546. }
  3547. #endif
  3548. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride)*(source->height));
  3549. #if DUMP_IMAGE
  3550. dump_img(source->memory, source->width, source->height, source->format);
  3551. #endif
  3552. return error;
  3553. #else
  3554. return VG_LITE_NOT_SUPPORT;
  3555. #endif
  3556. }
  3557. vg_lite_error_t vg_lite_blit_rect(vg_lite_buffer_t* target,
  3558. vg_lite_buffer_t* source,
  3559. vg_lite_rectangle_t* rect,
  3560. vg_lite_matrix_t* matrix,
  3561. vg_lite_blend_t blend,
  3562. vg_lite_color_t color,
  3563. vg_lite_filter_t filter)
  3564. {
  3565. #if DUMP_API
  3566. FUNC_DUMP(vg_lite_blit_rect)(target, source, rect, matrix, blend, color, filter);
  3567. #endif
  3568. #if gcFEATURE_VG_IM_INPUT
  3569. vg_lite_error_t error;
  3570. vg_lite_point_t point_min, point_max, temp;
  3571. vg_lite_matrix_t inverse_matrix;
  3572. vg_lite_float_t x_step[3];
  3573. vg_lite_float_t y_step[3];
  3574. vg_lite_float_t c_step[3];
  3575. uint32_t imageMode = 0;
  3576. uint32_t paintType = 0;
  3577. uint32_t in_premult = 0;
  3578. uint32_t blend_mode;
  3579. uint32_t filter_mode = 0;
  3580. uint32_t transparency_mode = 0;
  3581. uint32_t conversion = 0;
  3582. uint32_t rect_x = 0, rect_y = 0, rect_w = 0, rect_h = 0;
  3583. uint32_t tiled_source;
  3584. uint32_t yuv2rgb = 0;
  3585. uint32_t uv_swiz = 0;
  3586. uint32_t compress_mode = 0;
  3587. uint32_t src_premultiply_enable = 0;
  3588. uint32_t index_endian = 0;
  3589. uint32_t eco_fifo = 0;
  3590. uint32_t tile_setting = 0;
  3591. uint32_t stripe_mode = 0;
  3592. uint32_t premul_flag = 0;
  3593. uint32_t prediv_flag = 0;
  3594. int32_t left, top, right, bottom;
  3595. int32_t stride;
  3596. uint8_t lvgl_sw_blend = 0;
  3597. #if VG_SW_BLIT_PRECISION_OPT
  3598. uint8_t* bufferPointer;
  3599. uint32_t bufferAddress = 0, bufferAlignAddress = 0, addressOffset = 0, mul = 0, div = 0, required_align = 0;
  3600. vg_lite_buffer_t new_target;
  3601. vg_lite_point_t point0_0_afterTransform = { 0 };
  3602. uint8_t enableSwPreOpt = 0;
  3603. int32_t matrixOffsetX = 0;
  3604. /* Only accept interger move */
  3605. if (matrix != NULL && filter == VG_LITE_FILTER_POINT) {
  3606. matrix->m[0][2] = (vg_lite_float_t)(matrix->m[0][2] >= 0 ? (int32_t)(matrix->m[0][2] + 0.5) : (int32_t)(matrix->m[0][2] - 0.5));
  3607. matrix->m[1][2] = (vg_lite_float_t)(matrix->m[1][2] >= 0 ? (int32_t)(matrix->m[1][2] + 0.5) : (int32_t)(matrix->m[1][2] - 0.5));
  3608. /* Only nonperspective transform with scale or rotation could enable optimization */
  3609. if ((matrix->m[2][0] == 0.0f && matrix->m[2][1] == 0.0f && matrix->m[2][2] == 1.0f) &&
  3610. (matrix->m[0][0] != 1.0f || matrix->m[1][1] != 1.0f || matrix->m[0][1] != 0.0f)) {
  3611. if (target->tiled != VG_LITE_TILED && (target->format < VG_LITE_RGB888 || target->format > VG_LITE_RGBA5658_PLANAR)) {
  3612. enableSwPreOpt = 1;
  3613. }
  3614. }
  3615. }
  3616. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3617. #if gcFEATURE_VG_TRACE_API
  3618. VGLITE_LOG("vg_lite_blit_rect %p %p %p %p %d 0x%08X %d\n", target, source, rect, matrix, blend, color, filter);
  3619. #endif
  3620. #if gcFEATURE_VG_ERROR_CHECK
  3621. #if !gcFEATURE_VG_INDEX_ENDIAN
  3622. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  3623. return VG_LITE_NOT_SUPPORT;
  3624. }
  3625. #endif
  3626. #if !gcFEATURE_VG_RGBA8_ETC2_EAC
  3627. if (source->format == VG_LITE_RGBA8888_ETC2_EAC) {
  3628. return VG_LITE_NOT_SUPPORT;
  3629. }
  3630. #else
  3631. if ((source->format == VG_LITE_RGBA8888_ETC2_EAC) && (source->width % 16 || source->height % 4)) {
  3632. return VG_LITE_INVALID_ARGUMENT;
  3633. }
  3634. #endif
  3635. #if !gcFEATURE_VG_YUY2_INPUT
  3636. if (source->format == VG_LITE_YUYV || source->format == VG_LITE_YUY2) {
  3637. return VG_LITE_NOT_SUPPORT;
  3638. }
  3639. #endif
  3640. #if !gcFEATURE_VG_YUV_INPUT
  3641. if ((source->format >= VG_LITE_NV12 && source->format <= VG_LITE_NV16) || source->format == VG_LITE_NV24) {
  3642. return VG_LITE_NOT_SUPPORT;
  3643. }
  3644. #elif !gcFEATURE_VG_NV24_INPUT
  3645. if (source->format == VG_LITE_NV24) {
  3646. return VG_LITE_NOT_SUPPORT;
  3647. }
  3648. #endif
  3649. #if !gcFEATURE_VG_AYUV_INPUT
  3650. if (source->format == VG_LITE_ANV12 || source->format == VG_LITE_AYUY2) {
  3651. return VG_LITE_NOT_SUPPORT;
  3652. }
  3653. #endif
  3654. #if !gcFEATURE_VG_YUV_TILED_INPUT
  3655. if ((source->format >= VG_LITE_YUY2_TILED && source->format <= VG_LITE_AYUY2_TILED) || (source->format == VG_LITE_NV24_TILED)) {
  3656. return VG_LITE_NOT_SUPPORT;
  3657. }
  3658. #endif
  3659. #if !gcFEATURE_VG_24BIT
  3660. if ((target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) ||
  3661. (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658)) {
  3662. return VG_LITE_NOT_SUPPORT;
  3663. }
  3664. #endif
  3665. #if !gcFEATURE_VG_24BIT_PLANAR
  3666. if (source->format >= VG_LITE_ABGR8565_PLANAR && source->format <= VG_LITE_RGBA5658_PLANAR) {
  3667. return VG_LITE_NOT_SUPPORT;
  3668. }
  3669. #endif
  3670. #if !gcFEATURE_VG_IM_DEC_INPUT
  3671. if (source->compress_mode != VG_LITE_DEC_DISABLE) {
  3672. return VG_LITE_NOT_SUPPORT;
  3673. }
  3674. #endif
  3675. #if !gcFEATURE_VG_STENCIL
  3676. if (source->image_mode == VG_LITE_STENCIL_MODE) {
  3677. return VG_LITE_NOT_SUPPORT;
  3678. }
  3679. #endif
  3680. #if !gcFEATURE_VG_NEW_BLEND_MODE
  3681. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  3682. return VG_LITE_NOT_SUPPORT;
  3683. }
  3684. #endif
  3685. if (blend && (target->format == VG_LITE_YUYV || target->format == VG_LITE_YUY2 || target->format == VG_LITE_YUY2_TILED
  3686. || target->format == VG_LITE_AYUY2 || target->format == VG_LITE_AYUY2_TILED)) {
  3687. return VG_LITE_NOT_SUPPORT;
  3688. }
  3689. #if (CHIPID == 0x355)
  3690. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  3691. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  3692. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  3693. printf("Target format: 0x%x is not supported.\n", target->format);
  3694. return VG_LITE_NOT_SUPPORT;
  3695. }
  3696. if (source->format == VG_LITE_L8 || source->format == VG_LITE_YUYV ||
  3697. source->format == VG_LITE_BGRA2222 || source->format == VG_LITE_RGBA2222 ||
  3698. source->format == VG_LITE_ABGR2222 || source->format == VG_LITE_ARGB2222) {
  3699. printf("Source format: 0x%x is not supported.\n", source->format);
  3700. return VG_LITE_NOT_SUPPORT;
  3701. }
  3702. #endif
  3703. VG_LITE_RETURN_ERROR(srcbuf_align_check(source));
  3704. VG_LITE_RETURN_ERROR(check_compress(source->format, source->compress_mode, source->tiled, source->width, source->height));
  3705. #endif /* gcFEATURE_VG_ERROR_CHECK */
  3706. #if !gcFEATURE_VG_LVGL_SUPPORT
  3707. if ((blend >= VG_LITE_BLEND_ADDITIVE_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) || (blend == VG_LITE_BLEND_NORMAL_LVGL && gcFEATURE_VG_SRC_PREMULTIPLIED)) {
  3708. if (!source->lvgl_buffer) {
  3709. source->lvgl_buffer = (vg_lite_buffer_t *)vg_lite_os_malloc(sizeof(vg_lite_buffer_t));
  3710. *source->lvgl_buffer = *source;
  3711. source->lvgl_buffer->lvgl_buffer = NULL;
  3712. vg_lite_allocate(source->lvgl_buffer);
  3713. }
  3714. /* Make sure render target is up to date before reading RT. */
  3715. vg_lite_finish();
  3716. setup_lvgl_image(target, source, source->lvgl_buffer, blend);
  3717. blend = VG_LITE_BLEND_SRC_OVER;
  3718. lvgl_sw_blend = 1;
  3719. }
  3720. #endif
  3721. if (!matrix) {
  3722. matrix = &identity_mtx;
  3723. }
  3724. #if gcFEATURE_VG_INDEX_ENDIAN
  3725. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  3726. index_endian = 1 << 14;
  3727. }
  3728. #endif
  3729. #if !gcFEATURE_VG_STRIPE_MODE
  3730. /* Enable fifo feature to share buffer between vg and ts to improve the rotation performance */
  3731. eco_fifo = 1 << 7;
  3732. #endif
  3733. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  3734. s_context.filter = filter;
  3735. /* Check if the specified matrix has rotation or perspective. */
  3736. if ( ( (matrix->m[0][1] != 0.0f)
  3737. || (matrix->m[1][0] != 0.0f)
  3738. || (matrix->m[2][0] != 0.0f)
  3739. || (matrix->m[2][1] != 0.0f)
  3740. || (matrix->m[2][2] != 1.0f)
  3741. )
  3742. && ( blend == VG_LITE_BLEND_NONE
  3743. || blend == VG_LITE_BLEND_SRC_IN
  3744. || blend == VG_LITE_BLEND_DST_IN
  3745. )
  3746. ) {
  3747. #if gcFEATURE_VG_BORDER_CULLING
  3748. /* Mark that we have rotation. */
  3749. transparency_mode = 0x8000;
  3750. #else
  3751. blend = VG_LITE_BLEND_SRC_OVER;
  3752. #endif
  3753. #if !gcFEATURE_VG_STRIPE_MODE
  3754. stripe_mode = 1 << 29;
  3755. #endif
  3756. }
  3757. /* Check whether L8 is supported or not. */
  3758. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  3759. conversion = 0x80000000;
  3760. }
  3761. #if gcFEATURE_VG_16PIXELS_ALIGNED
  3762. /* Check if source specify bytes are aligned */
  3763. error = _check_source_aligned(source->format, source->stride);
  3764. if (error != VG_LITE_SUCCESS) {
  3765. return error;
  3766. }
  3767. #endif
  3768. /* Set source region. */
  3769. if (rect != NULL) {
  3770. rect_x = (rect->x < 0) ? 0 : rect->x;
  3771. rect_y = (rect->y < 0) ? 0 : rect->y;
  3772. rect_w = rect->width;
  3773. rect_h = rect->height;
  3774. if ((rect_x > (uint32_t)source->width) || (rect_y > (uint32_t)source->height) ||
  3775. (rect_w == 0) || (rect_h == 0))
  3776. {
  3777. /*No intersection*/
  3778. return VG_LITE_INVALID_ARGUMENT;
  3779. }
  3780. if (rect_x + rect_w > (uint32_t)source->width)
  3781. {
  3782. rect_w = source->width - rect_x;
  3783. }
  3784. if (rect_y + rect_h > (uint32_t)source->height)
  3785. {
  3786. rect_h = source->height - rect_y;
  3787. }
  3788. }
  3789. else {
  3790. rect_x = rect_y = 0;
  3791. rect_w = source->width;
  3792. rect_h = source->height;
  3793. }
  3794. /* Transform image (0,0) to screen. */
  3795. if (!transform(&temp, 0.0f, 0.0f, matrix))
  3796. return VG_LITE_INVALID_ARGUMENT;
  3797. /* Set initial point. */
  3798. point_min = temp;
  3799. point_max = temp;
  3800. #if VG_SW_BLIT_PRECISION_OPT
  3801. point0_0_afterTransform = temp;
  3802. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3803. /* Transform image (0,height) to screen. */
  3804. if (!transform(&temp, 0.0f, (vg_lite_float_t)rect_h, matrix))
  3805. return VG_LITE_INVALID_ARGUMENT;
  3806. /* Determine min/max. */
  3807. if (temp.x < point_min.x) point_min.x = temp.x;
  3808. if (temp.y < point_min.y) point_min.y = temp.y;
  3809. if (temp.x > point_max.x) point_max.x = temp.x;
  3810. if (temp.y > point_max.y) point_max.y = temp.y;
  3811. /* Transform image (width,height) to screen. */
  3812. if (!transform(&temp, (vg_lite_float_t)rect_w, (vg_lite_float_t)rect_h, matrix))
  3813. return VG_LITE_INVALID_ARGUMENT;
  3814. /* Determine min/max. */
  3815. if (temp.x < point_min.x) point_min.x = temp.x;
  3816. if (temp.y < point_min.y) point_min.y = temp.y;
  3817. if (temp.x > point_max.x) point_max.x = temp.x;
  3818. if (temp.y > point_max.y) point_max.y = temp.y;
  3819. /* Transform image (width,0) to screen. */
  3820. if (!transform(&temp, (vg_lite_float_t)rect_w, 0.0f, matrix))
  3821. return VG_LITE_INVALID_ARGUMENT;
  3822. /* Determine min/max. */
  3823. if (temp.x < point_min.x) point_min.x = temp.x;
  3824. if (temp.y < point_min.y) point_min.y = temp.y;
  3825. if (temp.x > point_max.x) point_max.x = temp.x;
  3826. if (temp.y > point_max.y) point_max.y = temp.y;
  3827. /* Clip to target. */
  3828. if (s_context.scissor_set && !target->scissor_buffer) {
  3829. left = s_context.scissor[0];
  3830. top = s_context.scissor[1];
  3831. right = s_context.scissor[2];
  3832. bottom = s_context.scissor[3];
  3833. }
  3834. else {
  3835. left = 0;
  3836. top = 0;
  3837. right = target->width;
  3838. bottom = target->height;
  3839. }
  3840. point_min.x = MAX(point_min.x, left);
  3841. point_min.y = MAX(point_min.y, top);
  3842. point_max.x = MIN(point_max.x, right);
  3843. point_max.y = MIN(point_max.y, bottom);
  3844. /* No need to draw. */
  3845. if ((point_max.x <= point_min.x) || (point_max.y <= point_min.y)) {
  3846. return VG_LITE_SUCCESS;
  3847. }
  3848. #if gcFEATURE_VG_GAMMA
  3849. get_st_gamma_src_dest(source, target);
  3850. #endif
  3851. #if gcFEATURE_VG_GLOBAL_ALPHA
  3852. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  3853. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_GLOBAL, 0xff));
  3854. }
  3855. #endif
  3856. /*blend input into context*/
  3857. s_context.blend_mode = blend;
  3858. in_premult = 0x00000000;
  3859. /* Adjust premultiply setting according to openvg condition */
  3860. src_premultiply_enable = 0x01000100;
  3861. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  3862. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  3863. prediv_flag = 0;
  3864. }
  3865. else {
  3866. prediv_flag = 1;
  3867. }
  3868. if ((s_context.blend_mode >= OPENVG_BLEND_SRC && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE
  3869. || (s_context.blend_mode >= VG_LITE_BLEND_NORMAL_LVGL && s_context.blend_mode <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  3870. premul_flag = 1;
  3871. }
  3872. else {
  3873. premul_flag = 0;
  3874. }
  3875. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  3876. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  3877. src_premultiply_enable = 0x01000100;
  3878. in_premult = 0x10000000;
  3879. }
  3880. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  3881. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  3882. src_premultiply_enable = 0x00000100;
  3883. in_premult = 0x00000000;
  3884. }
  3885. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  3886. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  3887. src_premultiply_enable = 0x01000100;
  3888. in_premult = 0x00000000;
  3889. }
  3890. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  3891. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  3892. src_premultiply_enable = 0x00000100;
  3893. in_premult = 0x00000000;
  3894. }
  3895. if((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  3896. #if (CHIPID==0x255)
  3897. src_premultiply_enable = 0x00000000;
  3898. #endif
  3899. #if gcFEATURE_VG_SRC_PREMULTIPLIED
  3900. src_premultiply_enable = src_premultiply_enable & ~(1 << 8);
  3901. #endif
  3902. in_premult = 0x00000000;
  3903. }
  3904. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  3905. target->apply_premult = 1;
  3906. }
  3907. else {
  3908. target->apply_premult = 0;
  3909. }
  3910. #if (gcFEATURE_VG_SRC_PREMULTIPLIED == 0)
  3911. if (blend == VG_LITE_BLEND_NORMAL_LVGL)
  3912. in_premult = 0x00000000;
  3913. #endif
  3914. #if VG_SW_BLIT_PRECISION_OPT
  3915. if (enableSwPreOpt) {
  3916. get_format_bytes(target->format, &mul, &div, &required_align);
  3917. //update target memory address
  3918. bufferAddress = target->address;
  3919. bufferAddress = bufferAddress + point_min.y * target->stride + point_min.x * (mul / div);
  3920. //base address need align
  3921. bufferAlignAddress = bufferAddress & ~(required_align - 1);
  3922. //update buffer pointer address
  3923. bufferPointer = (uint8_t*)target->memory;
  3924. bufferPointer = bufferPointer + (bufferAlignAddress - target->address);
  3925. //update offset
  3926. addressOffset = bufferAddress - bufferAlignAddress;
  3927. //we need give some offset to match actual translate
  3928. matrixOffsetX = addressOffset * div / mul;
  3929. //update new_target and set it as target
  3930. memcpy(&new_target, target, sizeof(vg_lite_buffer_t));
  3931. new_target.address = bufferAddress;
  3932. new_target.memory = bufferPointer;
  3933. new_target.width = point_max.x - point_min.x + matrixOffsetX;
  3934. new_target.height = point_max.y - point_min.y;
  3935. target = &new_target;
  3936. //update matrix
  3937. matrix->m[0][2] = (vg_lite_float_t)(point0_0_afterTransform.x - point_min.x + matrixOffsetX);
  3938. matrix->m[1][2] = (vg_lite_float_t)(point0_0_afterTransform.y - point_min.y);
  3939. //modify point_min and point_max to let them start from (0, 0)
  3940. point_max.x = point_max.x - point_min.x;
  3941. point_max.y = point_max.y - point_min.y;
  3942. point_min.x = 0;
  3943. point_min.y = 0;
  3944. }
  3945. #endif /* VG_SW_BLIT_PRECISION_OPT */
  3946. error = set_render_target(target);
  3947. if (error != VG_LITE_SUCCESS) {
  3948. return error;
  3949. }
  3950. /* Compute inverse matrix. */
  3951. if (!inverse(&inverse_matrix, matrix))
  3952. return VG_LITE_INVALID_ARGUMENT;
  3953. #if gcFEATURE_VG_MATH_PRECISION_FIX
  3954. if (filter == VG_LITE_FILTER_LINEAR)
  3955. {
  3956. /* Compute interpolation steps. */
  3957. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]);
  3958. x_step[1] = inverse_matrix.m[1][0];
  3959. x_step[2] = inverse_matrix.m[2][0];
  3960. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]);
  3961. y_step[1] = inverse_matrix.m[1][1];
  3962. y_step[2] = inverse_matrix.m[2][1];
  3963. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]);
  3964. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  3965. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3966. }
  3967. else if (filter == VG_LITE_FILTER_BI_LINEAR)
  3968. {
  3969. /* Shift the linear sampling points to center of pixels to avoid pixel offset issue */
  3970. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]);
  3971. x_step[1] = (inverse_matrix.m[1][0] - 0.5f * inverse_matrix.m[2][0]);
  3972. x_step[2] = inverse_matrix.m[2][0];
  3973. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]);
  3974. y_step[1] = (inverse_matrix.m[1][1] - 0.5f * inverse_matrix.m[2][1]);
  3975. y_step[2] = inverse_matrix.m[2][1];
  3976. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]);
  3977. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[1][2] - 0.5f * inverse_matrix.m[2][2]);
  3978. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3979. }
  3980. else
  3981. {
  3982. /* Compute interpolation steps. */
  3983. x_step[0] = inverse_matrix.m[0][0];
  3984. x_step[1] = inverse_matrix.m[1][0];
  3985. x_step[2] = inverse_matrix.m[2][0];
  3986. y_step[0] = inverse_matrix.m[0][1];
  3987. y_step[1] = inverse_matrix.m[1][1];
  3988. y_step[2] = inverse_matrix.m[2][1];
  3989. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]);
  3990. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  3991. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3992. // For FL32 rounding trick
  3993. uint32_t datax[2], datay[2], datac[2];
  3994. for (int idx = 0; idx < 2; idx++)
  3995. {
  3996. datax[idx] = *(uint32_t*)((void*)&x_step[idx]);
  3997. datay[idx] = *(uint32_t*)((void*)&y_step[idx]);
  3998. datac[idx] = *(uint32_t*)((void*)&c_step[idx]);
  3999. }
  4000. for (int i = 0; i < 2; i++)
  4001. {
  4002. int aSign = (datax[i] & 0x80000000) >> 31;
  4003. int bSign = (datay[i] & 0x80000000) >> 31;
  4004. int cSign = (datac[i] & 0x80000000) >> 31;
  4005. int aIn = (datax[i] & 0x20) >> 5;
  4006. int bIn = (datay[i] & 0x20) >> 5;
  4007. if ((aSign ==0 ) && (bSign == 0) && (aIn == bIn))
  4008. {
  4009. int cIn = (aSign ^ cSign) ^ ((~aIn) & 0x1);
  4010. if (cIn == 0)
  4011. {
  4012. datac[i] &= 0xFFFFFFDF;
  4013. }
  4014. else
  4015. {
  4016. datac[i] |= 0x00000020;
  4017. }
  4018. c_step[i] = *(vg_lite_float_t*)((void*)&datac[i]);
  4019. }
  4020. }
  4021. }
  4022. #else
  4023. if (filter == VG_LITE_FILTER_LINEAR)
  4024. {
  4025. /* Compute interpolation steps. */
  4026. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]) / rect_w;
  4027. x_step[1] = inverse_matrix.m[1][0] / rect_h;
  4028. x_step[2] = inverse_matrix.m[2][0];
  4029. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]) / rect_w;
  4030. y_step[1] = inverse_matrix.m[1][1] / rect_h;
  4031. y_step[2] = inverse_matrix.m[2][1];
  4032. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]) / rect_w;
  4033. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / rect_h;
  4034. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  4035. }
  4036. else if (filter == VG_LITE_FILTER_BI_LINEAR)
  4037. {
  4038. /* Shift the linear sampling points to center of pixels to avoid pixel offset issue */
  4039. x_step[0] = (inverse_matrix.m[0][0] - 0.5f * inverse_matrix.m[2][0]) / rect_w;
  4040. x_step[1] = (inverse_matrix.m[1][0] - 0.5f * inverse_matrix.m[2][0]) / rect_h;
  4041. x_step[2] = inverse_matrix.m[2][0];
  4042. y_step[0] = (inverse_matrix.m[0][1] - 0.5f * inverse_matrix.m[2][1]) / rect_w;
  4043. y_step[1] = (inverse_matrix.m[1][1] - 0.5f * inverse_matrix.m[2][1]) / rect_h;
  4044. y_step[2] = inverse_matrix.m[2][1];
  4045. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[0][2] - 0.5f * inverse_matrix.m[2][2]) / rect_w;
  4046. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) - 0.25f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[1][2] - 0.5f * inverse_matrix.m[2][2]) / rect_h;
  4047. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  4048. }
  4049. else
  4050. {
  4051. /* Compute interpolation steps. */
  4052. x_step[0] = inverse_matrix.m[0][0] / rect_w;
  4053. x_step[1] = inverse_matrix.m[1][0] / rect_h;
  4054. x_step[2] = inverse_matrix.m[2][0];
  4055. y_step[0] = inverse_matrix.m[0][1] / rect_w;
  4056. y_step[1] = inverse_matrix.m[1][1] / rect_h;
  4057. y_step[2] = inverse_matrix.m[2][1];
  4058. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / rect_w;
  4059. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / rect_h;
  4060. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  4061. }
  4062. #endif
  4063. #if VG_SW_BLIT_PRECISION_OPT
  4064. /* Update C offset */
  4065. if (enableSwPreOpt) {
  4066. uint8_t indexC0 = 0;
  4067. uint8_t indexC1 = 0;
  4068. uint32_t temp0 = (uint32_t)(matrix->angle / 45);
  4069. uint32_t temp1 = (uint32_t)(matrix->scaleX * 100);
  4070. uint32_t temp2 = (uint32_t)(matrix->scaleY * 100);
  4071. indexC0 = GetIndex(temp0, temp1);
  4072. indexC1 = GetIndex(temp0, temp2);
  4073. c_step[0] = c_step[0] + offsetTable[indexC0];
  4074. c_step[1] = c_step[1] + offsetTable[indexC1];
  4075. }
  4076. #else
  4077. c_step[0] = c_step[0] + offsetTable[0];
  4078. c_step[1] = c_step[1] + offsetTable[0];
  4079. #endif /* VG_SW_BLIT_PRECISION_OPT */
  4080. /* Determine image mode (NORMAL, NONE , MULTIPLY or STENCIL) depending on the color. */
  4081. switch (source->image_mode) {
  4082. case VG_LITE_NONE_IMAGE_MODE:
  4083. imageMode = 0x00000000;
  4084. break;
  4085. case VG_LITE_MULTIPLY_IMAGE_MODE:
  4086. imageMode = 0x00002000;
  4087. break;
  4088. case VG_LITE_NORMAL_IMAGE_MODE:
  4089. case VG_LITE_ZERO:
  4090. imageMode = 0x00001000;
  4091. break;
  4092. case VG_LITE_STENCIL_MODE:
  4093. imageMode = 0x00003000;
  4094. break;
  4095. case VG_LITE_RECOLOR_MODE:
  4096. imageMode = 0x00006000;
  4097. break;
  4098. }
  4099. switch (filter) {
  4100. case VG_LITE_FILTER_POINT:
  4101. filter_mode = 0;
  4102. break;
  4103. case VG_LITE_FILTER_LINEAR:
  4104. filter_mode = 0x10000;
  4105. break;
  4106. case VG_LITE_FILTER_BI_LINEAR:
  4107. filter_mode = 0x20000;
  4108. break;
  4109. case VG_LITE_FILTER_GAUSSIAN:
  4110. filter_mode = 0x30000;
  4111. break;
  4112. }
  4113. switch (source->paintType)
  4114. {
  4115. case VG_LITE_PAINT_COLOR:
  4116. paintType = 0;
  4117. break;
  4118. case VG_LITE_PAINT_LINEAR_GRADIENT:
  4119. paintType = 1 << 24;
  4120. break;
  4121. case VG_LITE_PAINT_RADIAL_GRADIENT:
  4122. paintType = 1 << 25;
  4123. break;
  4124. case VG_LITE_PAINT_PATTERN:
  4125. paintType = 1 << 24 | 1 << 25;
  4126. break;
  4127. default:
  4128. break;
  4129. }
  4130. blend_mode = convert_blend(blend);
  4131. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  4132. #if gcFEATURE_VG_RECTANGLE_TILED_OUT
  4133. if (target->tiled == VG_LITE_TILED) {
  4134. tile_setting = 0x40;
  4135. stripe_mode = 0x20000000;
  4136. }
  4137. #endif
  4138. #if (gcFEATURE_VG_DEC_COMPRESS | gcFEATURE_VG_DEC_COMPRESS_2_0)
  4139. if (source->compress_mode != VG_LITE_DEC_DISABLE && target->compress_mode == VG_LITE_DEC_DISABLE) {
  4140. if (source->format != target->format) {
  4141. printf("The format of source and target buffers is inconsistent in decompressing!\n");
  4142. return VG_LITE_INVALID_ARGUMENT;
  4143. }
  4144. }
  4145. #endif
  4146. compress_mode = (uint32_t)source->compress_mode << 25;
  4147. /* Setup the command buffer. */
  4148. #if gcFEATURE_VG_GLOBAL_ALPHA
  4149. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD1, s_context.dst_alpha_mode | s_context.dst_alpha_value | s_context.src_alpha_mode | s_context.src_alpha_value));
  4150. #endif
  4151. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x00000001 | paintType | in_premult | imageMode | blend_mode | transparency_mode | tile_setting | s_context.enable_mask | s_context.color_transform | s_context.matrix_enable | eco_fifo | s_context.scissor_enable | stripe_mode));
  4152. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  4153. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *) &c_step[0]));
  4154. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *) &c_step[1]));
  4155. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *) &c_step[2]));
  4156. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *) &x_step[0]));
  4157. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *) &x_step[1]));
  4158. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *) &x_step[2]));
  4159. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  4160. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *) &y_step[0]));
  4161. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *) &y_step[1]));
  4162. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *) &y_step[2]));
  4163. if (((source->format >= VG_LITE_YUY2) &&
  4164. (source->format <= VG_LITE_AYUY2)) ||
  4165. ((source->format >= VG_LITE_YUY2_TILED) &&
  4166. (source->format <= VG_LITE_AYUY2_TILED))) {
  4167. yuv2rgb = convert_yuv2rgb(source->yuv.yuv2rgb);
  4168. uv_swiz = convert_uv_swizzle(source->yuv.swizzle);
  4169. }
  4170. #if gcFEATURE_VG_IM_FASTCLEAR
  4171. if (source->fc_enable) {
  4172. uint32_t im_fc_enable = (source->fc_enable == 0) ? 0 : 0x800000;
  4173. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | uv_swiz | yuv2rgb | conversion | im_fc_enable | ahb_read_split | compress_mode | src_premultiply_enable | index_endian));
  4174. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0ACF, source->fc_buffer[0].address)); /* FC buffer address. */
  4175. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD0, source->fc_buffer[0].color)); /* FC clear value. */
  4176. }
  4177. #endif
  4178. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | uv_swiz | yuv2rgb | conversion | compress_mode | src_premultiply_enable | index_endian));
  4179. if (source->yuv.uv_planar) {
  4180. /* Program u plane address if necessary. */
  4181. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source->yuv.uv_planar));
  4182. }
  4183. if (source->yuv.v_planar) {
  4184. /* Program v plane address if necessary. */
  4185. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.v_planar));
  4186. }
  4187. if (source->yuv.alpha_planar != 0) {
  4188. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.alpha_planar));
  4189. }
  4190. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A27, target->bg_color));
  4191. #if !gcFEATURE_VG_LVGL_SUPPORT
  4192. if (lvgl_sw_blend) {
  4193. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->lvgl_buffer->address));
  4194. }
  4195. else
  4196. #endif
  4197. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->address));
  4198. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  4199. /* 24bit format stride configured to 4bpp. */
  4200. if (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658) {
  4201. stride = source->stride / 3 * 4;
  4202. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, stride | tiled_source));
  4203. }
  4204. else {
  4205. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, source->stride | tiled_source));
  4206. }
  4207. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2D, rect_x | (rect_y << 16)));
  4208. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2F, rect_w | (rect_h << 16)));
  4209. #if VG_SW_BLIT_PRECISION_OPT
  4210. if (enableSwPreOpt) {
  4211. VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x + matrixOffsetX, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
  4212. } else
  4213. #endif /* VG_SW_BLIT_PRECISION_OPT */
  4214. {
  4215. VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
  4216. }
  4217. #if !gcFEATURE_VG_STRIPE_MODE
  4218. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  4219. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  4220. #endif
  4221. if (!s_context.flexa_mode) {
  4222. error = flush_target();
  4223. }
  4224. #if gcFEATURE_VG_GLOBAL_ALPHA
  4225. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  4226. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_NORMAL, 0xFF));
  4227. }
  4228. #endif
  4229. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride)*(source->height));
  4230. #if DUMP_IMAGE
  4231. dump_img(source->memory, source->width, source->height, source->format);
  4232. #endif
  4233. return error;
  4234. #else
  4235. return VG_LITE_NOT_SUPPORT;
  4236. #endif
  4237. }
/* Program initial states for tessellation buffer.
 *
 * Computes the derived tessellation buffer layout (stride, L1/L2 summary
 * buffer sizes and addresses for legacy chips) from the values the kernel
 * filled into context->tessbuf, then pushes the corresponding HW states
 * into the command buffer. Returns the first push_state() error, or
 * VG_LITE_SUCCESS. */
static vg_lite_error_t program_tessellation(vg_lite_context_t *context)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    uint32_t tessellation_size = 0;

#if (CHIPID==0x355 || CHIPID==0x255)
    /* Compute tessellation buffer size. */
    uint32_t width = (context->tessbuf.tess_w_h & 0xFFFF);
    /* uint32_t height = (context->tessbuf.tess_w_h >> 16); */

    /* 8 bytes per tessellation column; stride rounded up to 64 bytes. */
    context->tessbuf.tess_stride = VG_LITE_ALIGN(width * 8, 64);

    /* Each bit in the L1 cache represents 64 bytes of tessellation data. */
    context->tessbuf.L1_size = VG_LITE_ALIGN(VG_LITE_ALIGN(context->tessbuf.tessbuf_size / 64, 64) / 8, 64);
#if (CHIPID==0x355)
    /* Each bit in the L2 cache represents 32 bytes of L1 data. */
    context->tessbuf.L2_size = VG_LITE_ALIGN(VG_LITE_ALIGN(context->tessbuf.L1_size / 32, 64) / 8, 64);
    tessellation_size = context->tessbuf.L2_size;
#else /* CHIPID: 0x255 */
    tessellation_size = context->tessbuf.L1_size;
#endif
    /* L1 and L2 summary buffers are laid out directly after the main
       tessellation data within the same allocation. */
    context->tessbuf.L1_phyaddr = context->tessbuf.physical_addr + context->tessbuf.tessbuf_size;
    context->tessbuf.L2_phyaddr = context->tessbuf.L1_phyaddr + context->tessbuf.L1_size;
    context->tessbuf.L1_logical = context->tessbuf.logical_addr + context->tessbuf.tessbuf_size;
    context->tessbuf.L2_logical = context->tessbuf.L1_logical + context->tessbuf.L1_size;

    /* Program tessellation buffer: input for VG module. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A30, context->tessbuf.physical_addr)); /* Tessellation buffer address. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A31, context->tessbuf.L1_phyaddr));    /* L1 address of tessellation buffer. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A32, context->tessbuf.L2_phyaddr));    /* L2 address of tessellation buffer. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A33, context->tessbuf.tess_stride));

    /* Program tessellation control: for TS module. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A35, context->tessbuf.physical_addr));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A36, context->tessbuf.L1_phyaddr));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A37, context->tessbuf.L2_phyaddr));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A38, context->tessbuf.tess_stride));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A3A, context->tessbuf.tess_w_h));
#if (REVISION==0x1217 && CID==0x407)
    /* Extra stride registers required by this specific revision/CID. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0AB1, context->tessbuf.tess_stride));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0AB2, context->tessbuf.tess_stride));
#endif
    /* Size is programmed in units of 64 bytes. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A3D, tessellation_size / 64));
#else /* (CHIPID==0x355 || CHIPID==0x255) */
    tessellation_size = context->tessbuf.tessbuf_size;
    /* Program tessellation control: for TS module. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0A35, context->tessbuf.physical_addr));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0AC8, tessellation_size));
    /* Count buffer immediately follows the tessellation buffer. */
    VG_LITE_RETURN_ERROR(push_state(context, 0x0ACB, context->tessbuf.physical_addr + tessellation_size));
    VG_LITE_RETURN_ERROR(push_state(context, 0x0ACC, context->tessbuf.countbuf_size));
#endif /* (CHIPID==0x355 || CHIPID==0x255) */

    return error;
}
/* Initialize the VGLite driver context.
 *
 * Allocates the render-target bookkeeping buffer, asks the kernel for the
 * double command buffers and (if tess_width/tess_height are non-zero) a
 * tessellation buffer, then programs initial tessellation states.
 * Re-entrant: if already initialized with a tessellation buffer at least
 * as large as requested, returns immediately; otherwise the context is
 * closed and rebuilt.
 *
 * @param tess_width  Requested tessellation window width (0 disables
 *                    tessellation; aligned up to 16).
 * @param tess_height Requested tessellation window height (0 disables).
 * @return VG_LITE_SUCCESS, VG_LITE_OUT_OF_RESOURCES, or a kernel error.
 *
 * NOTE(review): if vg_lite_kernel(VG_LITE_INITIALIZE) or a later step
 * fails, s_context.rtbuffer is not freed before returning — verify whether
 * the caller is expected to call vg_lite_close() on failure. */
vg_lite_error_t vg_lite_init(vg_lite_uint32_t tess_width, vg_lite_uint32_t tess_height)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_init)(tess_width, tess_height);
#endif
    vg_lite_error_t error;
    vg_lite_kernel_initialize_t initialize;
    uint8_t i;

#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_init %d %d\n", tess_width, tess_height);
#endif

    /* Already initialized? Reuse the context when the existing tessellation
       window covers the request; otherwise tear down and start over. */
    if (s_context.rtbuffer) {
        if (s_context.tess_width >= tess_width && s_context.tess_height >= tess_height) {
            /* VGLite is already initialized properly. Return */
            return VG_LITE_SUCCESS;
        }
        else {
            vg_lite_close();
        }
    }

    s_context.rtbuffer = (vg_lite_buffer_t *)vg_lite_os_malloc(sizeof(vg_lite_buffer_t));
    if (!s_context.rtbuffer)
        return VG_LITE_OUT_OF_RESOURCES;
    memset(s_context.rtbuffer, 0, sizeof(vg_lite_buffer_t));

    /* A zero dimension disables tessellation entirely: force both to 0.
       (Parameters are unsigned, so "<= 0" is effectively "== 0".) */
    if (tess_width <= 0) {
        tess_width = 0;
        tess_height = 0;
    }
    if (tess_height <= 0) {
        tess_height = 0;
        tess_width = 0;
    }
    tess_width = VG_LITE_ALIGN(tess_width, 16);

    /* Allocate a command buffer and a tessellation buffer.
       Add extra 8 bytes in the allocated command buffer so there is space for a END command. */
    initialize.command_buffer_size = command_buffer_size + 8;
    initialize.tess_width = tess_width;
    initialize.tess_height = tess_height;
    initialize.command_buffer_pool = (vg_lite_vidmem_pool_t)s_context.command_buffer_pool;
    initialize.tess_buffer_pool = (vg_lite_vidmem_pool_t)s_context.tess_buffer_pool;
    initialize.context = &s_context.context;
    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_INITIALIZE, &initialize));

    /* Verify driver ChipId/ChipRevision/Cid match hardware chip information */
    VG_LITE_RETURN_ERROR(check_hardware_chip_info());

    /* Save draw context. */
    s_context.capabilities = initialize.capabilities;
    s_context.command_buffer[0] = (uint8_t *)initialize.command_buffer[0];
    s_context.command_buffer[1] = (uint8_t *)initialize.command_buffer[1];
    s_context.command_buffer_size = command_buffer_size;
    s_context.command_offset[0] = 0;
    s_context.command_offset[1] = 0;

    if ((tess_width > 0) && (tess_height > 0))
    {
        /* Set and Program Tessellation Buffer states. */
        s_context.tessbuf.physical_addr = initialize.physical_addr;
        s_context.tessbuf.logical_addr = initialize.logical_addr;
        s_context.tessbuf.tess_w_h = initialize.tess_w_h;
        s_context.tessbuf.tessbuf_size = initialize.tessbuf_size;
        s_context.tessbuf.countbuf_size = initialize.countbuf_size;
        VG_LITE_RETURN_ERROR(program_tessellation(&s_context));

        /* Init register gcregVGPEColorKey: clear all 8 color-key slots. */
        for (i = 0; i < 8; i++) {
            VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A90 + i, 0));
        }
    }

    /* Buffers above were driver-allocated, not user-supplied. */
    s_context.custom_tessbuf = 0;
    s_context.custom_cmdbuf = 0;
    s_context.tess_width = tess_width;
    s_context.tess_height = tess_height;

    /* Init scissor rect. */
    s_context.scissor[0] =
    s_context.scissor[1] =
    s_context.scissor[2] =
    s_context.scissor[3] = 0;
    s_context.path_counter = 0;
    s_context.mirror_orient = VG_LITE_ORIENTATION_TOP_BOTTOM;

#if DUMP_INIT_COMMAND
    /* Capture the first 12 state words of the init command stream so they
       can be replayed by the dump tooling. */
    physical_address = (size_t)CMDBUF_BUFFER(s_context);
    uint32_t * ptr = (uint32_t*) s_context.context.command_buffer_logical[CMDBUF_INDEX(s_context)];
    ptr += 1;
    for (int i = 0; i < 12; i++)
    {
        init_buffer[i] = *ptr;
        ptr+=2;
    }
    is_init = 0;
#endif

#if DUMP_CAPTURE || DUMP_LAST_CAPTURE
    _SetDumpFileInfo();
#endif
    return VG_LITE_SUCCESS;
}
/* Terminate the VGLite driver context.
 *
 * Releases the scissor layer, unmaps any user-supplied command/tessellation
 * buffers, asks the kernel to terminate the context, frees the render-target
 * bookkeeping buffer, and finally zeroes s_context so a later vg_lite_init()
 * starts from a clean state.
 *
 * @return VG_LITE_SUCCESS or the first kernel error encountered. */
vg_lite_error_t vg_lite_close(void)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_close)();
#endif
    vg_lite_error_t error;
    vg_lite_kernel_terminate_t terminate;

#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_close\n");
#endif

    /* Release the scissor layer: vg_lite_free() releases its GPU surface,
       vg_lite_os_free() releases the CPU-side descriptor struct. */
    if (s_context.scissor_layer)
    {
        vg_lite_free(s_context.scissor_layer);
        vg_lite_os_free(s_context.scissor_layer);
    }

    /* User-supplied command buffer (vg_lite_set_command_buffer) was mapped,
       not allocated: unmap both halves in one call.
       NOTE(review): the size here is command_buffer_size * 2, while
       vg_lite_set_command_buffer() unmaps command_buffer_size + 8 per half —
       confirm which the kernel expects. */
    if (s_context.custom_cmdbuf)
    {
        vg_lite_kernel_unmap_memory_t unmap = {0};
        unmap.bytes = s_context.command_buffer_size * 2;
        unmap.logical = s_context.command_buffer[0];
        VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_UNMAP_MEMORY, &unmap));
    }

    /* Likewise unmap a user-supplied tessellation buffer. */
    if (s_context.custom_tessbuf)
    {
        vg_lite_kernel_unmap_memory_t unmap = {0};
        unmap.bytes = s_context.tessbuf.tessbuf_size + s_context.tessbuf.countbuf_size;
        unmap.logical = s_context.tessbuf.logical_addr;
        VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_UNMAP_MEMORY, &unmap));
    }

    /* Terminate the draw context. */
    terminate.context = &s_context.context;
    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_TERMINATE, &terminate));

    if (s_context.rtbuffer)
        vg_lite_os_free(s_context.rtbuffer);
    submit_flag = 0;

    /* Reset the draw context. */
    memset(&s_context, 0, sizeof(s_context));

#if DUMP_CAPTURE
    _SetDumpFileInfo();
#endif
    return VG_LITE_SUCCESS;
}
  4421. vg_lite_error_t vg_lite_set_command_buffer_size(vg_lite_uint32_t size)
  4422. {
  4423. #if DUMP_API
  4424. FUNC_DUMP(vg_lite_set_command_buffer_size)(size);
  4425. #endif
  4426. #if gcFEATURE_VG_TRACE_API
  4427. VGLITE_LOG("vg_lite_set_command_buffer_size %d\n", size);
  4428. #endif
  4429. if (command_buffer_size == 0)
  4430. return VG_LITE_INVALID_ARGUMENT;
  4431. command_buffer_size = size;
  4432. return VG_LITE_SUCCESS;
  4433. }
/* Install a user-supplied, physically contiguous memory region as the
 * double command buffer. Any previously installed command buffer (driver-
 * allocated or user-supplied) is released first. The region is split into
 * two halves; 8 bytes per half are reserved for an END command.
 *
 * @param physical  Physical base address; must be 64-byte aligned.
 * @param size      Total size in bytes; must be a non-zero multiple of 128.
 * @return VG_LITE_SUCCESS, VG_LITE_INVALID_ARGUMENT, or a kernel error. */
vg_lite_error_t vg_lite_set_command_buffer(vg_lite_uint32_t physical, vg_lite_uint32_t size)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    vg_lite_kernel_map_memory_t map = { 0 };

#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_set_command_buffer 0x%08X %d\n", physical, size);
#endif

    if ((physical == 0) || (size == 0) || (physical % 64) || (size % 128))
        return VG_LITE_INVALID_ARGUMENT;
    map.bytes = size;
    map.physical = physical;

    /* Release whatever command buffer is currently installed. */
    if (s_context.command_buffer[0])
    {
        /* Wait for any submitted GPU work to finish before pulling the
           command buffer out from under it. */
        if (submit_flag)
            VG_LITE_RETURN_ERROR(stall(&s_context, 0, (uint32_t)~0));
        if (!s_context.custom_cmdbuf)
        {
            /* Driver-allocated: free both halves through the kernel. */
            vg_lite_kernel_free_t free;
            free.memory_handle = s_context.context.command_buffer[0];
            VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &free));
            s_context.context.command_buffer[0] = 0;
            free.memory_handle = s_context.context.command_buffer[1];
            VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &free));
            s_context.context.command_buffer[1] = 0;
        }
        else
        {
            /* Previously user-supplied: unmap both halves. Each half is
               command_buffer_size + 8 bytes (8 reserved for END). */
            vg_lite_kernel_unmap_memory_t unmap = { 0 };
            unmap.bytes = s_context.command_buffer_size + 8;
            unmap.logical = s_context.command_buffer[0];
            VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_UNMAP_MEMORY, &unmap));
            unmap.bytes = s_context.command_buffer_size + 8;
            unmap.logical = s_context.command_buffer[1];
            VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_UNMAP_MEMORY, &unmap));
        }
    }

    /* Map the user memory and split it into the double-buffered halves. */
    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_MAP_MEMORY, &map));
    s_context.context.command_buffer_logical[0] = map.logical;
    s_context.context.command_buffer_physical[0] = map.physical;
    s_context.context.command_buffer_logical[1] = (void*)((uint8_t*)map.logical + map.bytes / 2);
    s_context.context.command_buffer_physical[1] = map.physical + map.bytes / 2;
    s_context.command_buffer[0] = s_context.context.command_buffer_logical[0];
    s_context.command_buffer[1] = s_context.context.command_buffer_logical[1];
    s_context.command_offset[0] = 0;
    s_context.command_offset[1] = 0;
    s_context.command_buffer_current = 0;
    /* Reserve 8 bytes in mapped command buffer so there is space for a END command. */
    s_context.command_buffer_size = (map.bytes / 2) - 8;
    s_context.custom_cmdbuf = 1;
    return error;
}
/* Install a user-supplied, physically contiguous memory region as the
 * tessellation buffer (replacing any existing one). A trailing slice of
 * the region (~3/128 of the size, 64-byte aligned) is reserved as the
 * count buffer; the corresponding HW states are pushed immediately.
 *
 * @param physical  Physical base address; must be 64-byte aligned.
 * @param size      Total size in bytes; non-zero multiple of 64, and at
 *                  least MIN_TS_SIZE.
 * @return VG_LITE_SUCCESS, VG_LITE_INVALID_ARGUMENT, or a kernel error. */
vg_lite_error_t vg_lite_set_tess_buffer(vg_lite_uint32_t physical, vg_lite_uint32_t size)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    vg_lite_kernel_map_memory_t map = { 0 };

#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_set_tess_buffer 0x%08X %d\n", physical, size);
#endif

    if ((physical == 0) || (size == 0) || (physical % 64) || (size % 64) || (size < MIN_TS_SIZE))
        return VG_LITE_INVALID_ARGUMENT;
    map.bytes = size;
    map.physical = physical;

    /* Release the currently installed tessellation buffer, waiting for the
       GPU first if work has been submitted. */
    if (s_context.tessbuf.logical_addr)
    {
        if (submit_flag)
            VG_LITE_RETURN_ERROR(stall(&s_context, 0, (uint32_t)~0));
        if (!s_context.custom_tessbuf)
        {
            /* Driver-allocated: free through the kernel. */
            vg_lite_kernel_free_t free;
            free.memory_handle = s_context.context.tess_buffer;
            VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &free));
            s_context.context.tess_buffer = 0;
        }
        else
        {
            /* Previously user-supplied: unmap it. */
            vg_lite_kernel_unmap_memory_t unmap = { 0 };
            unmap.bytes = s_context.tessbuf.tessbuf_size + s_context.tessbuf.countbuf_size;
            unmap.logical = s_context.tessbuf.logical_addr;
            VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_UNMAP_MEMORY, &unmap));
        }
    }

    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_MAP_MEMORY, &map));
    s_context.tessbuf.logical_addr = map.logical;
    s_context.tessbuf.physical_addr = map.physical;
    /* Carve the count buffer out of the tail: 3/128 of the total size,
       rounded up to a 64-byte boundary. */
    s_context.tessbuf.countbuf_size = size * 3 / 128;
    s_context.tessbuf.countbuf_size = VG_LITE_ALIGN(s_context.tessbuf.countbuf_size, 64);
    s_context.tessbuf.tessbuf_size = map.bytes - s_context.tessbuf.countbuf_size;

    /* Program the TS module with the new buffer layout. */
    VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A35, s_context.tessbuf.physical_addr));
    VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AC8, s_context.tessbuf.tessbuf_size));
    VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0ACB, s_context.tessbuf.physical_addr + s_context.tessbuf.tessbuf_size));
    VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0ACC, s_context.tessbuf.countbuf_size));
    s_context.custom_tessbuf = 1;
    return error;
}
  4528. vg_lite_error_t vg_lite_get_mem_size(vg_lite_uint32_t* size)
  4529. {
  4530. vg_lite_error_t error = VG_LITE_SUCCESS;
  4531. vg_lite_kernel_mem_t mem;
  4532. #if gcFEATURE_VG_TRACE_API
  4533. VGLITE_LOG("vg_lite_get_mem_size %p\n", size);
  4534. #endif
  4535. mem.pool = VG_LITE_POOL_RESERVED_MEMORY1;
  4536. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_QUERY_MEM, &mem));
  4537. *size = mem.bytes;
  4538. return error;
  4539. }
  4540. /* Handle tiled & yuv allocation. Currently including NV12, ANV12, YV12, YV16, NV16, YV24, NV24. */
  4541. static vg_lite_error_t _allocate_tiled_yuv_planar(vg_lite_buffer_t *buffer)
  4542. {
  4543. vg_lite_error_t error = VG_LITE_SUCCESS;
  4544. uint32_t yplane_size = 0;
  4545. vg_lite_kernel_allocate_t allocate, uv_allocate, v_allocate;
  4546. if (((buffer->format < VG_LITE_NV12) || (buffer->format > VG_LITE_ANV12_TILED)
  4547. || (buffer->format == VG_LITE_AYUY2) || (buffer->format == VG_LITE_YUY2_TILED))
  4548. && ((buffer->format != VG_LITE_NV24) && (buffer->format != VG_LITE_NV24_TILED)))
  4549. {
  4550. return error;
  4551. }
  4552. /* For NV12, there are 2 planes (Y, UV);
  4553. For ANV12, there are 3 planes (Y, UV, Alpha).
  4554. Each plane must be aligned by (4, 8).
  4555. Then Y plane must be aligned by (8, 8).
  4556. For YVxx, there are 3 planes (Y, U, V).
  4557. YV12 is similar to NV12, both YUV420 format.
  4558. YV16 and NV16 are YUV422 format.
  4559. YV24 is YUV444 format.
  4560. */
  4561. buffer->width = VG_LITE_ALIGN(buffer->width, 8);
  4562. buffer->height = VG_LITE_ALIGN(buffer->height, 8);
  4563. buffer->stride = VG_LITE_ALIGN(buffer->width, 64);
  4564. switch (buffer->format) {
  4565. case VG_LITE_NV12:
  4566. case VG_LITE_ANV12:
  4567. case VG_LITE_NV12_TILED:
  4568. case VG_LITE_ANV12_TILED:
  4569. buffer->yuv.uv_stride = buffer->stride;
  4570. buffer->yuv.alpha_stride = buffer->stride;
  4571. buffer->yuv.uv_height = buffer->height / 2;
  4572. break;
  4573. case VG_LITE_NV16:
  4574. buffer->yuv.uv_stride = buffer->stride;
  4575. buffer->yuv.uv_height = buffer->height;
  4576. break;
  4577. case VG_LITE_NV24:
  4578. case VG_LITE_NV24_TILED:
  4579. buffer->yuv.uv_stride = buffer->stride * 2;
  4580. buffer->yuv.uv_height = buffer->height;
  4581. break;
  4582. case VG_LITE_YV12:
  4583. buffer->yuv.uv_stride =
  4584. buffer->yuv.v_stride = buffer->stride / 2;
  4585. buffer->yuv.uv_height =
  4586. buffer->yuv.v_height = buffer->height / 2;
  4587. break;
  4588. case VG_LITE_YV16:
  4589. buffer->yuv.uv_stride =
  4590. buffer->yuv.v_stride = buffer->stride;
  4591. buffer->yuv.uv_height =
  4592. buffer->yuv.v_height = buffer->height / 2;
  4593. break;
  4594. case VG_LITE_YV24:
  4595. buffer->yuv.uv_stride =
  4596. buffer->yuv.v_stride = buffer->stride;
  4597. buffer->yuv.uv_height =
  4598. buffer->yuv.v_height = buffer->height;
  4599. break;
  4600. default:
  4601. return error;
  4602. }
  4603. yplane_size = buffer->stride * buffer->height;
  4604. /* Allocate buffer memory: Y. */
  4605. allocate.bytes = yplane_size;
  4606. allocate.contiguous = 1;
  4607. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &allocate));
  4608. /* Save the allocation. */
  4609. buffer->handle = allocate.memory_handle;
  4610. buffer->memory = allocate.memory;
  4611. buffer->address = allocate.memory_gpu;
  4612. if ((buffer->format == VG_LITE_NV12) || (buffer->format == VG_LITE_ANV12)
  4613. || (buffer->format == VG_LITE_NV16) || (buffer->format == VG_LITE_NV24)
  4614. || (buffer->format == VG_LITE_NV12_TILED) || (buffer->format == VG_LITE_ANV12_TILED)) {
  4615. /* Allocate buffer memory: UV. */
  4616. uv_allocate.bytes = buffer->yuv.uv_stride * buffer->yuv.uv_height;
  4617. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &uv_allocate));
  4618. buffer->yuv.uv_handle = uv_allocate.memory_handle;
  4619. buffer->yuv.uv_memory = uv_allocate.memory;
  4620. buffer->yuv.uv_planar = uv_allocate.memory_gpu;
  4621. if ((buffer->format == VG_LITE_ANV12) || (buffer->format == VG_LITE_ANV12_TILED)) {
  4622. uv_allocate.bytes = yplane_size;
  4623. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &uv_allocate));
  4624. buffer->yuv.alpha_planar = uv_allocate.memory_gpu;
  4625. }
  4626. } else {
  4627. /* Allocate buffer memory: U, V. */
  4628. uv_allocate.bytes = buffer->yuv.uv_stride * buffer->yuv.uv_height;
  4629. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &uv_allocate));
  4630. buffer->yuv.uv_handle = uv_allocate.memory_handle;
  4631. buffer->yuv.uv_memory = uv_allocate.memory;
  4632. buffer->yuv.uv_planar = uv_allocate.memory_gpu;
  4633. v_allocate.bytes = buffer->yuv.v_stride * buffer->yuv.v_height;
  4634. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &v_allocate));
  4635. buffer->yuv.v_handle = v_allocate.memory_handle;
  4636. buffer->yuv.v_memory = v_allocate.memory;
  4637. buffer->yuv.v_planar = v_allocate.memory_gpu;
  4638. }
  4639. return error;
  4640. }
  4641. vg_lite_error_t vg_lite_allocate(vg_lite_buffer_t * buffer)
  4642. {
  4643. #if DUMP_API
  4644. FUNC_DUMP(vg_lite_allocate)(buffer);
  4645. #endif
  4646. vg_lite_error_t error = VG_LITE_SUCCESS;
  4647. vg_lite_kernel_allocate_t allocate;
  4648. #if gcFEATURE_VG_TRACE_API
  4649. VGLITE_LOG("vg_lite_allocate %p (w: %d, h: %d, fmt: %d)\n", buffer, buffer->width, buffer->height, buffer->format);
  4650. #endif
  4651. if (buffer->format == VG_LITE_RGBA8888_ETC2_EAC &&
  4652. #if (CHIPID == 0x555)
  4653. (buffer->width % 16 || buffer->height % 4)
  4654. #else
  4655. (buffer->width % 4 || buffer->height % 4)
  4656. #endif
  4657. )
  4658. {
  4659. return VG_LITE_INVALID_ARGUMENT;
  4660. }
  4661. /* Set buffer->premultiplied properly according to buffer->format */
  4662. if (buffer->format < VG_LITE_RGBA8888)
  4663. { /* For all OpenVG VG_* formats */
  4664. #if gcFEATURE_VG_HW_PREMULTIPLY
  4665. switch (buffer->format) {
  4666. case OPENVG_sRGBA_8888_PRE:
  4667. case OPENVG_lRGBA_8888_PRE:
  4668. case OPENVG_sARGB_8888_PRE:
  4669. case OPENVG_lARGB_8888_PRE:
  4670. case OPENVG_sBGRA_8888_PRE:
  4671. case OPENVG_lBGRA_8888_PRE:
  4672. case OPENVG_sABGR_8888_PRE:
  4673. case OPENVG_lABGR_8888_PRE:
  4674. case OPENVG_sRGBX_8888_PRE:
  4675. case OPENVG_lRGBX_8888_PRE:
  4676. case OPENVG_sRGB_565_PRE:
  4677. case OPENVG_lRGB_565_PRE:
  4678. case OPENVG_sRGBA_5551_PRE:
  4679. case OPENVG_lRGBA_5551_PRE:
  4680. case OPENVG_sRGBA_4444_PRE:
  4681. case OPENVG_lRGBA_4444_PRE:
  4682. buffer->premultiplied = 1;
  4683. break;
  4684. default:
  4685. buffer->premultiplied = 0;
  4686. break;
  4687. };
  4688. #else
  4689. /* Cannot support OpenVG VG_* format if HW does not support premultiply */
  4690. return VG_LITE_INVALID_ARGUMENT;
  4691. #endif
  4692. }
  4693. else {
  4694. /* All VG_LITE_* formats are not premultiplied */
  4695. buffer->premultiplied = 0;
  4696. }
  4697. /* Reset planar. */
  4698. buffer->yuv.uv_planar =
  4699. buffer->yuv.v_planar =
  4700. buffer->yuv.alpha_planar = 0;
  4701. /* Align height in case format is tiled. */
  4702. if ((buffer->format >= VG_LITE_YUY2 && buffer->format <= VG_LITE_NV16) || buffer->format == VG_LITE_NV24) {
  4703. buffer->height = VG_LITE_ALIGN(buffer->height, 4);
  4704. buffer->yuv.swizzle = VG_LITE_SWIZZLE_UV;
  4705. }
  4706. if ((buffer->format >= VG_LITE_YUY2_TILED && buffer->format <= VG_LITE_AYUY2_TILED) || buffer->format == VG_LITE_NV24_TILED) {
  4707. buffer->height = VG_LITE_ALIGN(buffer->height, 4);
  4708. buffer->tiled = VG_LITE_TILED;
  4709. buffer->yuv.swizzle = VG_LITE_SWIZZLE_UV;
  4710. }
  4711. if ((buffer->format >= VG_LITE_NV12 && buffer->format <= VG_LITE_ANV12_TILED
  4712. && buffer->format != VG_LITE_AYUY2 && buffer->format != VG_LITE_YUY2_TILED)
  4713. || (buffer->format >= VG_LITE_NV24 && buffer->format <= VG_LITE_NV24_TILED)) {
  4714. _allocate_tiled_yuv_planar(buffer);
  4715. }
  4716. else {
  4717. /* Driver need compute the stride always with RT500 project. */
  4718. vg_lite_float_t ratio = 1.0f;
  4719. uint32_t mul, div, align;
  4720. get_format_bytes(buffer->format, &mul, &div, &align);
  4721. buffer->stride = buffer->width * mul / div;
  4722. #if gcFEATURE_VG_16PIXELS_ALIGNED
  4723. int tmp_align = 16 * mul / div;
  4724. if ((mul / div) % 2 != 0) {
  4725. if (buffer->stride % tmp_align != 0) {
  4726. buffer->stride = (buffer->stride + tmp_align) / tmp_align * tmp_align;
  4727. }
  4728. }
  4729. else {
  4730. buffer->stride = VG_LITE_ALIGN(buffer->stride, tmp_align);
  4731. }
  4732. #endif
  4733. /* Allocate the buffer. */
  4734. if (buffer->compress_mode)
  4735. ratio = _calc_decnano_compress_ratio(buffer->format, buffer->compress_mode);
  4736. allocate.bytes = (uint32_t)(buffer->stride * buffer->height * ratio);
  4737. #if gcFEATURE_VG_IM_FASTCLEAR
  4738. allocate.bytes = VG_LITE_ALIGN(allocate.bytes, 64);
  4739. #endif
  4740. allocate.contiguous = 1;
  4741. allocate.pool = (vg_lite_vidmem_pool_t)s_context.render_buffer_pool;
  4742. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &allocate));
  4743. /* Save the buffer allocation. */
  4744. buffer->handle = allocate.memory_handle;
  4745. buffer->memory = allocate.memory;
  4746. buffer->address = allocate.memory_gpu;
  4747. buffer->pool = (vg_lite_memory_pool_t)allocate.pool;
  4748. if ((buffer->format == VG_LITE_AYUY2) || (buffer->format == VG_LITE_AYUY2_TILED) || ((buffer->format >= VG_LITE_ABGR8565_PLANAR)
  4749. && (buffer->format <= VG_LITE_RGBA5658_PLANAR))) {
  4750. allocate.bytes = buffer->stride * buffer->height;
  4751. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &allocate));
  4752. buffer->yuv.alpha_planar = allocate.memory_gpu;
  4753. }
  4754. }
  4755. #if gcFEATURE_VG_TRACE_API
  4756. VGLITE_LOG("=>buffer: width=%d, height=%d, stride=%d, bytes=%d, format=%d\n",
  4757. buffer->width, buffer->height, buffer->stride, allocate.bytes, buffer->format);
  4758. #endif
  4759. return VG_LITE_SUCCESS;
  4760. }
  4761. vg_lite_error_t vg_lite_free(vg_lite_buffer_t * buffer)
  4762. {
  4763. #if DUMP_API
  4764. FUNC_DUMP(vg_lite_free)(buffer);
  4765. #endif
  4766. vg_lite_error_t error;
  4767. vg_lite_kernel_free_t free, uv_free, v_free;
  4768. #if gcFEATURE_VG_TRACE_API
  4769. VGLITE_LOG("vg_lite_free %p\n", buffer);
  4770. #endif
  4771. if (buffer == NULL)
  4772. return VG_LITE_INVALID_ARGUMENT;
  4773. if (!(memcmp(s_context.rtbuffer,buffer,sizeof(vg_lite_buffer_t))) ) {
  4774. if (VG_LITE_SUCCESS == submit(&s_context)) {
  4775. VG_LITE_RETURN_ERROR(stall(&s_context, 0, ~0));
  4776. }
  4777. #if !DUMP_COMMAND_CAPTURE
  4778. vglitemDUMP("@[swap 0x%08X %dx%d +%u]",
  4779. s_context.rtbuffer->address,
  4780. s_context.rtbuffer->width, s_context.rtbuffer->height,
  4781. s_context.rtbuffer->stride);
  4782. vglitemDUMP_BUFFER(
  4783. "framebuffer",
  4784. (size_t)s_context.rtbuffer->address,s_context.rtbuffer->memory,
  4785. 0,
  4786. s_context.rtbuffer->stride*(s_context.rtbuffer->height));
  4787. #endif
  4788. memset(s_context.rtbuffer, 0, sizeof(vg_lite_buffer_t));
  4789. }
  4790. #if !gcFEATURE_VG_LVGL_SUPPORT
  4791. if (buffer->lvgl_buffer != NULL) {
  4792. free.memory_handle = buffer->lvgl_buffer->handle;
  4793. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &free));
  4794. vg_lite_os_free(buffer->lvgl_buffer);
  4795. buffer->lvgl_buffer = NULL;
  4796. }
  4797. #endif
  4798. if (buffer->yuv.uv_planar) {
  4799. /* Free UV(U) planar buffer. */
  4800. vglitemDUMP_BUFFER(
  4801. "uv_plane",
  4802. (size_t)buffer->yuv.uv_planar,buffer->yuv.uv_memory,
  4803. 0,
  4804. buffer->yuv.uv_stride*buffer->yuv.uv_height);
  4805. uv_free.memory_handle = buffer->yuv.uv_handle;
  4806. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &uv_free));
  4807. /* Mark the buffer as freed. */
  4808. buffer->yuv.uv_handle = NULL;
  4809. buffer->yuv.uv_memory = NULL;
  4810. }
  4811. if (buffer->yuv.v_planar) {
  4812. /* Free V planar buffer. */
  4813. vglitemDUMP_BUFFER(
  4814. "v_plane",
  4815. (size_t)buffer->yuv.v_planar,buffer->yuv.v_memory,
  4816. 0,
  4817. buffer->yuv.v_stride*buffer->yuv.v_height);
  4818. /* Free V planar buffer. */
  4819. v_free.memory_handle = buffer->yuv.v_handle;
  4820. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &v_free));
  4821. /* Mark the buffer as freed. */
  4822. buffer->yuv.v_handle = NULL;
  4823. buffer->yuv.v_memory = NULL;
  4824. }
  4825. #if gcFEATURE_VG_IM_FASTCLEAR
  4826. if (buffer->fc_buffer[0].handle != 0)
  4827. {
  4828. #if VG_TARGET_FC_DUMP
  4829. vglitemDUMP_BUFFER(
  4830. "fcbuffer",
  4831. (uint64_t)buffer->fc_buffer[0].address,buffer->fc_buffer[0].memory,
  4832. 0,
  4833. buffer->fc_buffer[0].stride*(buffer->fc_buffer[0].height));
  4834. #endif
  4835. _free_fc_buffer(&buffer->fc_buffer[0]);
  4836. }
  4837. #endif
  4838. /* Make sure we have a valid memory handle. */
  4839. if (buffer->handle == NULL) {
  4840. return VG_LITE_INVALID_ARGUMENT;
  4841. }
  4842. /* Free the buffer. */
  4843. free.memory_handle = buffer->handle;
  4844. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &free));
  4845. /* Mark the buffer as freed. */
  4846. buffer->handle = NULL;
  4847. buffer->memory = NULL;
  4848. return VG_LITE_SUCCESS;
  4849. }
/* Map externally owned memory (user memory or a dma-buf) so the GPU can
 * access it, filling in buffer->handle and buffer->address.
 *
 * @param buffer  Descriptor: needs memory and/or address, plus
 *                width/height/format if stride is 0.
 * @param flag    VG_LITE_MAP_USER_MEMORY or VG_LITE_MAP_DMABUF.
 * @param fd      dma-buf file descriptor (used for VG_LITE_MAP_DMABUF).
 * @return VG_LITE_SUCCESS, VG_LITE_INVALID_ARGUMENT, or a kernel error. */
vg_lite_error_t vg_lite_map(vg_lite_buffer_t* buffer, vg_lite_map_flag_t flag, int32_t fd)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_map)(buffer, flag, fd);
#endif
    vg_lite_error_t error;
    vg_lite_kernel_map_t map;

#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_map %p\n", buffer);
#endif

    /* We either need a logical or physical address. */
    if (buffer->memory == NULL && buffer->address == 0) {
        return VG_LITE_INVALID_ARGUMENT;
    }

    /* Compute the stride. Align if necessary.
       NOTE(review): get_format_bytes() also reports an "align" value that
       is not applied here (unlike vg_lite_allocate) — confirm whether
       mapped buffers are expected to be pre-aligned by the caller. */
    if (buffer->stride == 0){
        uint32_t mul, div, align;
        get_format_bytes(buffer->format, &mul, &div, &align);
        buffer->stride = buffer->width * mul / div;
    }

    /* Map the buffer. */
    map.bytes = buffer->stride * buffer->height;
    map.logical = buffer->memory;
    map.physical = buffer->address;
    if (flag == VG_LITE_MAP_USER_MEMORY) {
        map.flags = VG_LITE_HAL_MAP_USER_MEMORY;
    }
    else if (flag == VG_LITE_MAP_DMABUF) {
        map.flags = VG_LITE_HAL_MAP_DMABUF;
    }
    else {
        return VG_LITE_INVALID_ARGUMENT;
    }
    map.dma_buf_fd = fd;
    VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_MAP, &map));

    /* Save the buffer allocation. */
    buffer->handle = map.memory_handle;
    buffer->address = map.memory_gpu;
    return VG_LITE_SUCCESS;
}
  4890. vg_lite_error_t vg_lite_unmap(vg_lite_buffer_t * buffer)
  4891. {
  4892. #if DUMP_API
  4893. FUNC_DUMP(vg_lite_unmap)(buffer);
  4894. #endif
  4895. vg_lite_error_t error;
  4896. vg_lite_kernel_unmap_t unmap;
  4897. #if gcFEATURE_VG_TRACE_API
  4898. VGLITE_LOG("vg_lite_unmap %p\n", buffer);
  4899. #endif
  4900. /* Make sure we have a valid memory handle. */
  4901. if (buffer->handle == NULL) {
  4902. return VG_LITE_INVALID_ARGUMENT;
  4903. }
  4904. /* Unmap the buffer. */
  4905. unmap.memory_handle = buffer->handle;
  4906. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_UNMAP, &unmap));
  4907. /* Mark the buffer as freed. */
  4908. buffer->handle = NULL;
  4909. return VG_LITE_SUCCESS;
  4910. }
  4911. vg_lite_error_t vg_lite_flush_mapped_buffer(vg_lite_buffer_t * buffer)
  4912. {
  4913. #if DUMP_API
  4914. FUNC_DUMP(vg_lite_flush_mapped_buffer)(buffer);
  4915. #endif
  4916. vg_lite_error_t error;
  4917. vg_lite_kernel_cache_t cache;
  4918. #if gcFEATURE_VG_TRACE_API
  4919. VGLITE_LOG("vg_lite_flush_mapped_buffer %p\n", buffer);
  4920. #endif
  4921. /* Make sure we have a valid memory handle. */
  4922. if (buffer->handle == NULL) {
  4923. return VG_LITE_INVALID_ARGUMENT;
  4924. }
  4925. cache.memory_handle = buffer->handle;
  4926. cache.cache_op = VG_LITE_CACHE_FLUSH;
  4927. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_CACHE, &cache));
  4928. return VG_LITE_SUCCESS;
  4929. }
  4930. vg_lite_error_t vg_lite_get_register(vg_lite_uint32_t address, vg_lite_uint32_t* result)
  4931. {
  4932. vg_lite_error_t error;
  4933. vg_lite_kernel_info_t data;
  4934. #if gcFEATURE_VG_TRACE_API
  4935. VGLITE_LOG("vg_lite_get_register 0x%08X %p\n", address, result);
  4936. #endif
  4937. /* Get input register address. */
  4938. data.addr = address;
  4939. /* Get register info. */
  4940. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_CHECK, &data));
  4941. /* Return register info. */
  4942. *result = data.reg;
  4943. return VG_LITE_SUCCESS;
  4944. }
  4945. vg_lite_error_t vg_lite_get_info(vg_lite_info_t *info)
  4946. {
  4947. #if gcFEATURE_VG_TRACE_API
  4948. VGLITE_LOG("vg_lite_get_info %p\n", info);
  4949. #endif
  4950. if (info != NULL)
  4951. {
  4952. info->api_version = VGLITE_API_VERSION_3_0;
  4953. info->header_version = VGLITE_HEADER_VERSION;
  4954. info->release_version = VGLITE_RELEASE_VERSION;
  4955. info->reserved = 0;
  4956. }
  4957. return VG_LITE_SUCCESS;
  4958. }
  4959. vg_lite_uint32_t vg_lite_get_product_info(vg_lite_char* name, vg_lite_uint32_t* chip_id, vg_lite_uint32_t* chip_rev)
  4960. {
  4961. const char *product_name;
  4962. uint32_t name_len;
  4963. vg_lite_uint32_t rev = 0, id = 0;
  4964. #if gcFEATURE_VG_TRACE_API
  4965. VGLITE_LOG("vg_lite_get_product_info %p %p %p\n", name, chip_id, chip_rev);
  4966. #endif
  4967. vg_lite_get_register(0x24, &rev);
  4968. vg_lite_get_register(0x20, &id);
  4969. if (id == 0x265 || id == 0x555)
  4970. product_name = "GCNanoUltraV";
  4971. else if (id == 0x255)
  4972. product_name = "GCNanoLiteV";
  4973. else if (id == 0x355)
  4974. product_name = "GC355";
  4975. else
  4976. product_name = "Unknown";
  4977. name_len = strlen(product_name) + 1;
  4978. if (name != NULL)
  4979. {
  4980. memcpy(name, product_name, name_len);
  4981. }
  4982. if (chip_id != NULL)
  4983. {
  4984. *chip_id = id;
  4985. }
  4986. if (chip_rev != NULL)
  4987. {
  4988. *chip_rev = rev;
  4989. }
  4990. return name_len;
  4991. }
  4992. vg_lite_uint32_t vg_lite_query_feature(vg_lite_feature_t feature)
  4993. {
  4994. uint32_t result;
  4995. #if gcFEATURE_VG_TRACE_API
  4996. VGLITE_LOG("vg_lite_query_feature %d\n", feature);
  4997. #endif
  4998. if (feature < gcFEATURE_COUNT)
  4999. result = s_ftable.ftable[feature];
  5000. else
  5001. result = 0;
  5002. return result;
  5003. }
/* Submit all buffered commands to the GPU and block until they complete. */
vg_lite_error_t vg_lite_finish()
{
#if DUMP_API
    FUNC_DUMP(vg_lite_finish)();
#endif
    vg_lite_error_t error;
#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_finish\n");
#endif
    /* Return if there is nothing to submit. */
    if (CMDBUF_OFFSET(s_context) == 0)
    {
        /* A previous submit may still be in flight; wait for it so a
           return from finish() really means the GPU is idle. */
        if (submit_flag)
            VG_LITE_RETURN_ERROR(stall(&s_context, 0, (uint32_t)~0));
        return VG_LITE_SUCCESS;
    }
    /* Flush is moved from each draw to here. */
    VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x00000001));
    VG_LITE_RETURN_ERROR(flush_target());
    VG_LITE_RETURN_ERROR(submit(&s_context));
#if gcFEATURE_VG_POWER_MANAGEMENT
    /* Mark end-of-frame so power management can gate the GPU. */
    s_context.context.end_of_frame = 1;
#endif
    /* Block until the just-submitted commands have executed. */
    VG_LITE_RETURN_ERROR(stall(&s_context, 0, (uint32_t)~0));
#if gcFEATURE_VG_IM_FASTCLEAR
#if VG_TARGET_FC_DUMP
    fc_buf_dump(s_context.rtbuffer, &s_context.fcBuffer);
#endif
#endif
#if gcFEATURE_VG_SINGLE_COMMAND_BUFFER
    /* Single-buffer build: just rewind the one command buffer. */
    CMDBUF_OFFSET(s_context) = 0;
#else
    /* Double-buffered: switch to the other buffer, then rewind it. */
    CMDBUF_SWAP(s_context);
    /* Reset command buffer. */
    CMDBUF_OFFSET(s_context) = 0;
#endif
    return VG_LITE_SUCCESS;
}
/* Submit all buffered commands to the GPU without waiting for completion
   (only waits for the *previous* submit, to free the buffer being swapped in). */
vg_lite_error_t vg_lite_flush(void)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_flush)();
#endif
#if !gcFEATURE_VG_SINGLE_COMMAND_BUFFER
    vg_lite_error_t error;
#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_flush\n");
#endif
    /* Return if there is nothing to submit. */
    if (CMDBUF_OFFSET(s_context) == 0)
        return VG_LITE_SUCCESS;
    /* Wait if GPU has not completed previous CMD buffer */
    if (submit_flag)
    {
        VG_LITE_RETURN_ERROR(stall(&s_context, 0, (uint32_t)~0));
    }
    /* Submit the current command buffer. */
    VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x00000001));
    VG_LITE_RETURN_ERROR(flush_target());
    VG_LITE_RETURN_ERROR(submit(&s_context));
#if gcFEATURE_VG_POWER_MANAGEMENT
    /* Mark end-of-frame so power management can gate the GPU. */
    s_context.context.end_of_frame = 1;
#endif
    /* Switch to the other command buffer; the GPU keeps consuming the
       one just submitted. */
    CMDBUF_SWAP(s_context);
    /* Reset command buffer. */
    CMDBUF_OFFSET(s_context) = 0;
    return VG_LITE_SUCCESS;
#else
    /* Asynchronous flush needs a second buffer; not available in
       single-command-buffer builds. */
    printf("vg_lite_flush is not support when enable single command buffer!\n");
    return VG_LITE_NOT_SUPPORT;
#endif
}
  5076. vg_lite_error_t vg_lite_init_grad(vg_lite_linear_gradient_t *grad)
  5077. {
  5078. vg_lite_error_t error = VG_LITE_SUCCESS;
  5079. #if DUMP_API
  5080. FUNC_DUMP(vg_lite_init_grad)(grad);
  5081. #endif
  5082. #if gcFEATURE_VG_TRACE_API
  5083. VGLITE_LOG("vg_lite_init_grad %p\n", grad);
  5084. #endif
  5085. grad->count = 0;
  5086. /* Set the member values according to driver defaults. */
  5087. grad->image.width = VLC_GRADIENT_BUFFER_WIDTH;
  5088. grad->image.height = 1;
  5089. grad->image.stride = 0;
  5090. grad->image.format = VG_LITE_BGRA8888;
  5091. /* Allocate the image for gradient. */
  5092. VG_LITE_RETURN_ERROR(vg_lite_allocate(&grad->image));
  5093. return error;
  5094. }
  5095. vg_lite_error_t vg_lite_set_linear_grad(vg_lite_ext_linear_gradient_t *grad,
  5096. vg_lite_uint32_t count,
  5097. vg_lite_color_ramp_t *color_ramp,
  5098. vg_lite_linear_gradient_parameter_t linear_gradient,
  5099. vg_lite_gradient_spreadmode_t spread_mode,
  5100. vg_lite_uint8_t pre_multiplied)
  5101. {
  5102. static vg_lite_color_ramp_t default_ramp[] =
  5103. {
  5104. {
  5105. 0.0f,
  5106. 0.0f, 0.0f, 0.0f, 1.0f
  5107. },
  5108. {
  5109. 1.0f,
  5110. 1.0f, 1.0f, 1.0f, 1.0f
  5111. }
  5112. };
  5113. uint32_t i, trg_count;
  5114. vg_lite_float_t prev_stop;
  5115. vg_lite_color_ramp_t *src_ramp;
  5116. vg_lite_color_ramp_t *src_ramp_last;
  5117. vg_lite_color_ramp_t *trg_ramp;
  5118. #if gcFEATURE_VG_TRACE_API
  5119. VGLITE_LOG("vg_lite_set_linear_grad %p %d %p (%f %f %f %f) %d %d\n", grad, count, color_ramp,
  5120. linear_gradient.X0, linear_gradient.X1, linear_gradient.Y0, linear_gradient.Y1, spread_mode, pre_multiplied);
  5121. #endif
  5122. /* Reset the count. */
  5123. trg_count = 0;
  5124. if ((linear_gradient.X0 == linear_gradient.X1) && (linear_gradient.Y0 == linear_gradient.Y1))
  5125. return VG_LITE_INVALID_ARGUMENT;
  5126. grad->linear_grad = linear_gradient;
  5127. grad->pre_multiplied = pre_multiplied;
  5128. grad->spread_mode = spread_mode;
  5129. if (!count || count > VLC_MAX_COLOR_RAMP_STOPS || color_ramp == NULL)
  5130. goto Empty_sequence_handler;
  5131. for(i = 0; i < count;i++)
  5132. grad->color_ramp[i] = color_ramp[i];
  5133. grad->ramp_length = count;
  5134. /* Determine the last source ramp. */
  5135. src_ramp_last
  5136. = grad->color_ramp
  5137. + grad->ramp_length;
  5138. /* Set the initial previous stop. */
  5139. prev_stop = -1;
  5140. /* Reset the count. */
  5141. trg_count = 0;
  5142. /* Walk through the source ramp. */
  5143. for (
  5144. src_ramp = grad->color_ramp, trg_ramp = grad->converted_ramp;
  5145. (src_ramp < src_ramp_last) && (trg_count < VLC_MAX_COLOR_RAMP_STOPS + 2);
  5146. src_ramp += 1
  5147. )
  5148. {
  5149. /* Must be in increasing order. */
  5150. if (src_ramp->stop < prev_stop)
  5151. {
  5152. /* Ignore the entire sequence. */
  5153. trg_count = 0;
  5154. break;
  5155. }
  5156. /* Update the previous stop value. */
  5157. prev_stop = src_ramp->stop;
  5158. /* Must be within [0..1] range. */
  5159. if ((src_ramp->stop < 0.0f) || (src_ramp->stop > 1.0f))
  5160. {
  5161. /* Ignore. */
  5162. continue;
  5163. }
  5164. /* Clamp color. */
  5165. ClampColor(COLOR_FROM_RAMP(src_ramp),COLOR_FROM_RAMP(trg_ramp),0);
  5166. /* First stop greater then zero? */
  5167. if ((trg_count == 0) && (src_ramp->stop > 0.0f))
  5168. {
  5169. /* Force the first stop to 0.0f. */
  5170. trg_ramp->stop = 0.0f;
  5171. /* Replicate the entry. */
  5172. trg_ramp[1] = *trg_ramp;
  5173. trg_ramp[1].stop = src_ramp->stop;
  5174. /* Advance. */
  5175. trg_ramp += 2;
  5176. trg_count += 2;
  5177. }
  5178. else
  5179. {
  5180. /* Set the stop value. */
  5181. trg_ramp->stop = src_ramp->stop;
  5182. /* Advance. */
  5183. trg_ramp += 1;
  5184. trg_count += 1;
  5185. }
  5186. }
  5187. /* Empty sequence? */
  5188. if (trg_count == 0)
  5189. {
  5190. memcpy(grad->converted_ramp, default_ramp, sizeof(default_ramp));
  5191. grad->converted_length = sizeof(default_ramp) / 5;
  5192. }
  5193. else
  5194. {
  5195. /* The last stop must be at 1.0. */
  5196. if (trg_ramp[-1].stop != 1.0f)
  5197. {
  5198. /* Replicate the last entry. */
  5199. *trg_ramp = trg_ramp[-1];
  5200. /* Force the last stop to 1.0f. */
  5201. trg_ramp->stop = 1.0f;
  5202. /* Update the final entry count. */
  5203. trg_count += 1;
  5204. }
  5205. /* Set new length. */
  5206. grad->converted_length = trg_count;
  5207. }
  5208. return VG_LITE_SUCCESS;
  5209. Empty_sequence_handler:
  5210. memcpy(grad->converted_ramp, default_ramp, sizeof(default_ramp));
  5211. grad->converted_length = sizeof(default_ramp) / 5;
  5212. return VG_LITE_SUCCESS;
  5213. }
  5214. vg_lite_error_t vg_lite_update_linear_grad(vg_lite_ext_linear_gradient_t *grad)
  5215. {
  5216. #if DUMP_API
  5217. FUNC_DUMP(vg_lite_update_linear_grad)(grad);
  5218. #endif
  5219. uint32_t ramp_length;
  5220. vg_lite_color_ramp_t *color_ramp;
  5221. uint32_t stop;
  5222. uint32_t i, width;
  5223. uint8_t* bits;
  5224. vg_lite_float_t x0,y0,x1,y1,length,dx,dy;
  5225. vg_lite_error_t error = VG_LITE_SUCCESS;
  5226. #if gcFEATURE_VG_TRACE_API
  5227. VGLITE_LOG("vg_lite_update_linear_grad %p\n", grad);
  5228. #endif
  5229. /* Get shortcuts to the color ramp. */
  5230. ramp_length = grad->converted_length;
  5231. color_ramp = grad->converted_ramp;
  5232. x0 = grad->matrix.m[0][0] * grad->linear_grad.X0 + grad->matrix.m[0][1] * grad->linear_grad.Y0 + grad->matrix.m[0][2];
  5233. y0 = grad->matrix.m[1][0] * grad->linear_grad.X0 + grad->matrix.m[1][1] * grad->linear_grad.Y0 + grad->matrix.m[1][2];
  5234. x1 = grad->matrix.m[0][0] * grad->linear_grad.X1 + grad->matrix.m[0][1] * grad->linear_grad.Y1 + grad->matrix.m[0][2];
  5235. y1 = grad->matrix.m[1][0] * grad->linear_grad.X1 + grad->matrix.m[1][1] * grad->linear_grad.Y1 + grad->matrix.m[1][2];
  5236. dx = x1 - x0;
  5237. dy = y1 - y0;
  5238. length = (vg_lite_float_t)sqrt(dx * dx + dy * dy);
  5239. width = ramp_length * 128;
  5240. if (length <= 0)
  5241. return VG_LITE_INVALID_ARGUMENT;
  5242. /* Find the common denominator of the color ramp stops. */
  5243. /* Compute transform matrix from ramp surface to grad.*/
  5244. vg_lite_identity(&(grad->matrix));
  5245. vg_lite_translate(x0, y0, &(grad->matrix));
  5246. vg_lite_rotate(
  5247. ((dy >= 0) ? acosf(dx / length) : (2 * PI - acosf(dx / length))) * 180.f / PI,
  5248. &(grad->matrix)
  5249. );
  5250. vg_lite_scale(length / width, 1.f, &(grad->matrix));
  5251. /* Set grad to ramp surface. */
  5252. grad->linear_grad.X0 = 0.f;
  5253. grad->linear_grad.Y0 = 0.f;
  5254. grad->linear_grad.X1 = (float)width;
  5255. grad->linear_grad.Y1 = 0.f;
  5256. /* Allocate the color ramp surface. */
  5257. memset(&grad->image, 0, sizeof(grad->image));
  5258. grad->image.width = width;
  5259. grad->image.height = 1;
  5260. grad->image.stride = 0;
  5261. grad->image.image_mode = VG_LITE_NONE_IMAGE_MODE;
  5262. grad->image.format = VG_LITE_ABGR8888;
  5263. /* Allocate the image for gradient. */
  5264. VG_LITE_RETURN_ERROR(vg_lite_allocate(&grad->image));
  5265. memset(grad->image.memory, 0, grad->image.stride * grad->image.height);
  5266. /* Set pointer to color array. */
  5267. bits = (uint8_t *)grad->image.memory;
  5268. /* Start filling the color array. */
  5269. stop = 0;
  5270. for (i = 0; i < width; ++i)
  5271. {
  5272. vg_lite_float_t gradient;
  5273. vg_lite_float_t color[4];
  5274. vg_lite_float_t color1[4];
  5275. vg_lite_float_t color2[4];
  5276. vg_lite_float_t weight;
  5277. if (i == 241)
  5278. i = 241;
  5279. /* Compute gradient for current color array entry. */
  5280. gradient = (vg_lite_float_t) i / (vg_lite_float_t) (width - 1);
  5281. /* Find the entry in the color ramp that matches or exceeds this
  5282. ** gradient. */
  5283. while (gradient > color_ramp[stop].stop)
  5284. {
  5285. ++stop;
  5286. }
  5287. if (gradient == color_ramp[stop].stop)
  5288. {
  5289. /* Perfect match weight 1.0. */
  5290. weight = 1.0f;
  5291. /* Use color ramp color. */
  5292. color1[3] = color_ramp[stop].alpha;
  5293. color1[2] = color_ramp[stop].blue;
  5294. color1[1] = color_ramp[stop].green;
  5295. color1[0] = color_ramp[stop].red;
  5296. color2[3] =
  5297. color2[2] =
  5298. color2[1] =
  5299. color2[0] = 0.0f;
  5300. }
  5301. else
  5302. {
  5303. if(stop == 0){
  5304. return VG_LITE_INVALID_ARGUMENT;
  5305. }
  5306. /* Compute weight. */
  5307. weight = (color_ramp[stop].stop - gradient)
  5308. / (color_ramp[stop].stop - color_ramp[stop - 1].stop);
  5309. /* Grab color ramp color of previous stop. */
  5310. color1[3] = color_ramp[stop - 1].alpha;
  5311. color1[2] = color_ramp[stop - 1].blue;
  5312. color1[1] = color_ramp[stop - 1].green;
  5313. color1[0] = color_ramp[stop - 1].red;
  5314. /* Grab color ramp color of current stop. */
  5315. color2[3] = color_ramp[stop].alpha;
  5316. color2[2] = color_ramp[stop].blue;
  5317. color2[1] = color_ramp[stop].green;
  5318. color2[0] = color_ramp[stop].red;
  5319. }
  5320. if (grad->pre_multiplied)
  5321. {
  5322. /* Pre-multiply the first color. */
  5323. color1[2] *= color1[3];
  5324. color1[1] *= color1[3];
  5325. color1[0] *= color1[3];
  5326. /* Pre-multiply the second color. */
  5327. color2[2] *= color2[3];
  5328. color2[1] *= color2[3];
  5329. color2[0] *= color2[3];
  5330. }
  5331. /* Filter the colors per channel. */
  5332. color[3] = LERP(color1[3], color2[3], weight);
  5333. color[2] = LERP(color1[2], color2[2], weight);
  5334. color[1] = LERP(color1[1], color2[1], weight);
  5335. color[0] = LERP(color1[0], color2[0], weight);
  5336. /* Pack the final color. */
  5337. *bits++ = PackColorComponent(color[3]);
  5338. *bits++ = PackColorComponent(color[2]);
  5339. *bits++ = PackColorComponent(color[1]);
  5340. *bits++ = PackColorComponent(color[0]);
  5341. }
  5342. return VG_LITE_SUCCESS;
  5343. }
  5344. vg_lite_error_t vg_lite_set_radial_grad(vg_lite_radial_gradient_t *grad,
  5345. vg_lite_uint32_t count,
  5346. vg_lite_color_ramp_t *color_ramp,
  5347. vg_lite_radial_gradient_parameter_t radial_grad,
  5348. vg_lite_gradient_spreadmode_t spread_mode,
  5349. vg_lite_uint8_t pre_multiplied)
  5350. {
  5351. static vg_lite_color_ramp_t defaultRamp[] =
  5352. {
  5353. {
  5354. 0.0f,
  5355. 0.0f, 0.0f, 0.0f, 1.0f
  5356. },
  5357. {
  5358. 1.0f,
  5359. 1.0f, 1.0f, 1.0f, 1.0f
  5360. }
  5361. };
  5362. uint32_t i, trgCount;
  5363. vg_lite_float_t prevStop;
  5364. vg_lite_color_ramp_t *srcRamp;
  5365. vg_lite_color_ramp_t *srcRampLast;
  5366. vg_lite_color_ramp_t *trgRamp;
  5367. #if gcFEATURE_VG_TRACE_API
  5368. VGLITE_LOG("vg_lite_set_radial_grad %p %d %p (%f %f %f %f %f) %d %d\n", grad, count, color_ramp,
  5369. radial_grad.cx, radial_grad.cy, radial_grad.fx, radial_grad.fy, radial_grad.r, spread_mode, pre_multiplied);
  5370. #endif
  5371. /* Reset the count. */
  5372. trgCount = 0;
  5373. if (radial_grad.r <= 0)
  5374. return VG_LITE_INVALID_ARGUMENT;
  5375. grad->radial_grad = radial_grad;
  5376. grad->pre_multiplied = pre_multiplied;
  5377. grad->spread_mode = spread_mode;
  5378. if (!count || count > VLC_MAX_COLOR_RAMP_STOPS || color_ramp == NULL)
  5379. goto Empty_sequence_handler;
  5380. for(i = 0; i < count;i++)
  5381. grad->color_ramp[i] = color_ramp[i];
  5382. grad->ramp_length = count;
  5383. /* Determine the last source ramp. */
  5384. srcRampLast
  5385. = grad->color_ramp
  5386. + grad->ramp_length;
  5387. /* Set the initial previous stop. */
  5388. prevStop = -1;
  5389. /* Reset the count. */
  5390. trgCount = 0;
  5391. /* Walk through the source ramp. */
  5392. for (
  5393. srcRamp = grad->color_ramp, trgRamp = grad->converted_ramp;
  5394. (srcRamp < srcRampLast) && (trgCount < VLC_MAX_COLOR_RAMP_STOPS + 2);
  5395. srcRamp += 1
  5396. )
  5397. {
  5398. /* Must be in increasing order. */
  5399. if (srcRamp->stop < prevStop)
  5400. {
  5401. /* Ignore the entire sequence. */
  5402. trgCount = 0;
  5403. break;
  5404. }
  5405. /* Update the previous stop value. */
  5406. prevStop = srcRamp->stop;
  5407. /* Must be within [0..1] range. */
  5408. if ((srcRamp->stop < 0.0f) || (srcRamp->stop > 1.0f))
  5409. {
  5410. /* Ignore. */
  5411. continue;
  5412. }
  5413. /* Clamp color. */
  5414. ClampColor(COLOR_FROM_RAMP(srcRamp),COLOR_FROM_RAMP(trgRamp),0);
  5415. /* First stop greater then zero? */
  5416. if ((trgCount == 0) && (srcRamp->stop > 0.0f))
  5417. {
  5418. /* Force the first stop to 0.0f. */
  5419. trgRamp->stop = 0.0f;
  5420. /* Replicate the entry. */
  5421. trgRamp[1] = *trgRamp;
  5422. trgRamp[1].stop = srcRamp->stop;
  5423. /* Advance. */
  5424. trgRamp += 2;
  5425. trgCount += 2;
  5426. }
  5427. else
  5428. {
  5429. /* Set the stop value. */
  5430. trgRamp->stop = srcRamp->stop;
  5431. /* Advance. */
  5432. trgRamp += 1;
  5433. trgCount += 1;
  5434. }
  5435. }
  5436. /* Empty sequence? */
  5437. if (trgCount == 0)
  5438. {
  5439. memcpy(grad->converted_ramp,defaultRamp,sizeof(defaultRamp));
  5440. grad->converted_length = sizeof(defaultRamp) / 5;
  5441. }
  5442. else
  5443. {
  5444. /* The last stop must be at 1.0. */
  5445. if (trgRamp[-1].stop != 1.0f)
  5446. {
  5447. /* Replicate the last entry. */
  5448. *trgRamp = trgRamp[-1];
  5449. /* Force the last stop to 1.0f. */
  5450. trgRamp->stop = 1.0f;
  5451. /* Update the final entry count. */
  5452. trgCount += 1;
  5453. }
  5454. /* Set new length. */
  5455. grad->converted_length = trgCount;
  5456. }
  5457. return VG_LITE_SUCCESS;
  5458. Empty_sequence_handler:
  5459. memcpy(grad->converted_ramp,defaultRamp,sizeof(defaultRamp));
  5460. grad->converted_length = sizeof(defaultRamp) / 5;
  5461. return VG_LITE_SUCCESS;
  5462. }
/* Rasterize the converted color ramp of a radial gradient into a 1-row
 * ABGR8888 image. The image width is chosen so that every ramp stop falls
 * (approximately) on a texel boundary. Returns VG_LITE_INVALID_ARGUMENT
 * for a non-positive radius, an allocation error from vg_lite_allocate,
 * or VG_LITE_SUCCESS.
 */
vg_lite_error_t vg_lite_update_radial_grad(vg_lite_radial_gradient_t *grad)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_update_radial_grad)(grad);
#endif
    uint32_t ramp_length;
    vg_lite_color_ramp_t *colorRamp;
    uint32_t common, stop;
    uint32_t i, width;
    uint8_t* bits;
    vg_lite_error_t error = VG_LITE_SUCCESS;
    uint32_t align, mul, div;
#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_update_radial_grad %p\n", grad);
#endif
    /* Get shortcuts to the color ramp. */
    ramp_length = grad->converted_length;
    colorRamp = grad->converted_ramp;
    if (grad->radial_grad.r <= 0)
        return VG_LITE_INVALID_ARGUMENT;
    if (grad->radial_grad.r < 1)
    {
        /* Small radius: size the ramp image from an (approximate) common
           denominator of the stop fractions, so each stop lands close to a
           texel boundary. 0.00013 is the tolerance treated as "already an
           integer multiple". */
        common = 1;
        for (i = 0; i < ramp_length; ++i)
        {
            if (colorRamp[i].stop != 0.0f)
            {
                vg_lite_float_t mul2 = common * colorRamp[i].stop;
                vg_lite_float_t frac = mul2 - (vg_lite_float_t)floor(mul2);
                if (frac > 0.00013f) /* Suppose error for zero is 0.00013 */
                {
                    common = MAX(common, (uint32_t)(1.0f / frac + 0.5f));
                }
            }
        }
        /* Compute the width of the required color array. */
        width = common + 1;
        /* Round up to a multiple of 16 texels. */
        width = (width + 15) & (~0xf);
    }
    else
    {
        /* Normal radius: 128 texels per converted ramp entry. */
        width = ramp_length * 128;
    }
    /* Allocate the color ramp surface (1 row, ABGR8888). */
    memset(&grad->image, 0, sizeof(grad->image));
    grad->image.width = width;
    grad->image.height = 1;
    grad->image.stride = 0;
    grad->image.image_mode = VG_LITE_NONE_IMAGE_MODE;
    grad->image.format = VG_LITE_ABGR8888;
    /* Allocate the image for gradient. */
    VG_LITE_RETURN_ERROR(vg_lite_allocate(&grad->image));
    /* Recompute the fill width from the allocated stride (stride may have
       been rounded up by the allocator); mul/div are the bytes-per-pixel
       ratio for ABGR8888. */
    get_format_bytes(VG_LITE_ABGR8888, &mul, &div, &align);
    width = grad->image.stride * div / mul;
    /* Set pointer to color array. */
    bits = (uint8_t *)grad->image.memory;
    /* Start filling the color array. */
    stop = 0;
    for (i = 0; i < width; ++i)
    {
        vg_lite_float_t gradient;
        vg_lite_float_t color[4];
        vg_lite_float_t color1[4];
        vg_lite_float_t color2[4];
        vg_lite_float_t weight;
        /* Compute gradient for current color array entry. */
        gradient = (vg_lite_float_t) i / (vg_lite_float_t) (width - 1);
        /* Find the entry in the color ramp that matches or exceeds this
        ** gradient. */
        while (gradient > colorRamp[stop].stop)
        {
            ++stop;
        }
        if (gradient == colorRamp[stop].stop)
        {
            /* Perfect match weight 1.0. */
            weight = 1.0f;
            /* Use color ramp color. */
            color1[3] = colorRamp[stop].alpha;
            color1[2] = colorRamp[stop].blue;
            color1[1] = colorRamp[stop].green;
            color1[0] = colorRamp[stop].red;
            color2[3] =
            color2[2] =
            color2[1] =
            color2[0] = 0.0f;
        }
        else
        {
            /* NOTE(review): unlike the linear variant there is no stop==0
               guard here; this relies on the converted ramp always starting
               at stop 0.0 (enforced by vg_lite_set_radial_grad). */
            /* Compute weight. */
            weight = (colorRamp[stop].stop - gradient)
                   / (colorRamp[stop].stop - colorRamp[stop - 1].stop);
            /* Grab color ramp color of previous stop. */
            color1[3] = colorRamp[stop - 1].alpha;
            color1[2] = colorRamp[stop - 1].blue;
            color1[1] = colorRamp[stop - 1].green;
            color1[0] = colorRamp[stop - 1].red;
            /* Grab color ramp color of current stop. */
            color2[3] = colorRamp[stop].alpha;
            color2[2] = colorRamp[stop].blue;
            color2[1] = colorRamp[stop].green;
            color2[0] = colorRamp[stop].red;
        }
        if (grad->pre_multiplied)
        {
            /* Pre-multiply the first color. */
            color1[2] *= color1[3];
            color1[1] *= color1[3];
            color1[0] *= color1[3];
            /* Pre-multiply the second color. */
            color2[2] *= color2[3];
            color2[1] *= color2[3];
            color2[0] *= color2[3];
        }
        /* Filter the colors per channel. */
        color[3] = LERP(color1[3], color2[3], weight);
        color[2] = LERP(color1[2], color2[2], weight);
        color[1] = LERP(color1[1], color2[1], weight);
        color[0] = LERP(color1[0], color2[0], weight);
        /* Pack the final color (A, B, G, R byte order). */
        *bits++ = PackColorComponent(color[3]);
        *bits++ = PackColorComponent(color[2]);
        *bits++ = PackColorComponent(color[1]);
        *bits++ = PackColorComponent(color[0]);
    }
    return VG_LITE_SUCCESS;
}
  5590. vg_lite_error_t vg_lite_set_grad(vg_lite_linear_gradient_t *grad,
  5591. vg_lite_uint32_t count,
  5592. vg_lite_uint32_t *colors,
  5593. vg_lite_uint32_t *stops)
  5594. {
  5595. uint32_t i;
  5596. #if gcFEATURE_VG_TRACE_API
  5597. VGLITE_LOG("vg_lite_set_grad %p %d %p %p\n", grad, count, colors, stops);
  5598. #endif
  5599. grad->count = 0; /* Opaque B&W gradient */
  5600. if (!count || count > VLC_MAX_GRADIENT_STOPS || colors == NULL || stops == NULL)
  5601. return VG_LITE_SUCCESS;
  5602. /* Check stops validity */
  5603. for (i = 0; i < count; i++)
  5604. if (stops[i] < VLC_GRADIENT_BUFFER_WIDTH) {
  5605. if (!grad->count || stops[i] > grad->stops[grad->count - 1]) {
  5606. grad->stops[grad->count] = stops[i];
  5607. grad->colors[grad->count] = colors[i];
  5608. grad->count++;
  5609. } else if (stops[i] == grad->stops[grad->count - 1]) {
  5610. /* Equal stops : use the color corresponding to the last stop
  5611. in the sequence */
  5612. grad->colors[grad->count - 1] = colors[i];
  5613. }
  5614. }
  5615. return VG_LITE_SUCCESS;
  5616. }
  5617. vg_lite_error_t vg_lite_update_grad(vg_lite_linear_gradient_t *grad)
  5618. {
  5619. #if DUMP_API
  5620. FUNC_DUMP(vg_lite_update_grad)(grad);
  5621. #endif
  5622. vg_lite_error_t error = VG_LITE_SUCCESS;
  5623. int32_t r0, g0, b0, a0;
  5624. int32_t r1, g1, b1, a1;
  5625. int32_t lr, lg, lb, la;
  5626. uint32_t i;
  5627. int32_t j;
  5628. int32_t ds, dr, dg, db, da;
  5629. uint32_t *buffer = (uint32_t *)grad->image.memory;
  5630. #if gcFEATURE_VG_TRACE_API
  5631. VGLITE_LOG("vg_lite_update_grad %p\n", grad);
  5632. #endif
  5633. if (grad->count == 0) {
  5634. /* If no valid stops have been specified (e.g., due to an empty input
  5635. * array, out-of-range, or out-of-order stops), a stop at 0 with color
  5636. * 0xFF000000 (opaque black) and a stop at 255 with color 0xFFFFFFFF
  5637. * (opaque white) are implicitly defined. */
  5638. grad->stops[0] = 0;
  5639. grad->colors[0] = 0xFF000000; /* Opaque black */
  5640. grad->stops[1] = 255;
  5641. grad->colors[1] = 0xFFFFFFFF; /* Opaque white */
  5642. grad->count = 2;
  5643. } else if (grad->count && grad->stops[0] != 0) {
  5644. /* If at least one valid stop has been specified, but none has been
  5645. * defined with an offset of 0, an implicit stop is added with an
  5646. * offset of 0 and the same color as the first user-defined stop. */
  5647. for (i = 0; i < grad->stops[0]; i++)
  5648. buffer[i] = grad->colors[0];
  5649. }
  5650. a0 = A(grad->colors[0]);
  5651. r0 = R(grad->colors[0]);
  5652. g0 = G(grad->colors[0]);
  5653. b0 = B(grad->colors[0]);
  5654. /* Calculate the colors for each pixel of the image. */
  5655. for (i = 0; i < grad->count - 1; i++) {
  5656. buffer[grad->stops[i]] = grad->colors[i];
  5657. ds = grad->stops[i + 1] - grad->stops[i];
  5658. a1 = A(grad->colors[i + 1]);
  5659. r1 = R(grad->colors[i + 1]);
  5660. g1 = G(grad->colors[i + 1]);
  5661. b1 = B(grad->colors[i + 1]);
  5662. da = a1 - a0;
  5663. dr = r1 - r0;
  5664. dg = g1 - g0;
  5665. db = b1 - b0;
  5666. for (j = 1; j < ds; j++) {
  5667. la = a0 + da * j / ds;
  5668. lr = r0 + dr * j / ds;
  5669. lg = g0 + dg * j / ds;
  5670. lb = b0 + db * j / ds;
  5671. buffer[grad->stops[i] + j] = ARGB(la, lr, lg, lb);
  5672. }
  5673. a0 = a1;
  5674. r0 = r1;
  5675. g0 = g1;
  5676. b0 = b1;
  5677. }
  5678. /* If at least one valid stop has been specified, but none has been defined
  5679. * with an offset of 255, an implicit stop is added with an offset of 255
  5680. * and the same color as the last user-defined stop. */
  5681. for (i = grad->stops[grad->count - 1]; i < VLC_GRADIENT_BUFFER_WIDTH; i++)
  5682. buffer[i] = grad->colors[grad->count - 1];
  5683. return error;
  5684. }
  5685. vg_lite_error_t vg_lite_clear_linear_grad(vg_lite_ext_linear_gradient_t *grad)
  5686. {
  5687. #if DUMP_API
  5688. FUNC_DUMP(vg_lite_clear_linear_grad)(grad);
  5689. #endif
  5690. vg_lite_error_t error = VG_LITE_SUCCESS;
  5691. #if gcFEATURE_VG_TRACE_API
  5692. VGLITE_LOG("vg_lite_clear_linear_grad %p\n", grad);
  5693. #endif
  5694. grad->count = 0;
  5695. /* Release the image resource. */
  5696. if (grad->image.handle != NULL)
  5697. {
  5698. error = vg_lite_free(&grad->image);
  5699. }
  5700. return error;
  5701. }
  5702. vg_lite_error_t vg_lite_clear_grad(vg_lite_linear_gradient_t *grad)
  5703. {
  5704. #if DUMP_API
  5705. FUNC_DUMP(vg_lite_clear_grad)(grad);
  5706. #endif
  5707. vg_lite_error_t error = VG_LITE_SUCCESS;
  5708. #if gcFEATURE_VG_TRACE_API
  5709. VGLITE_LOG("vg_lite_clear_grad %p\n", grad);
  5710. #endif
  5711. grad->count = 0;
  5712. /* Release the image resource. */
  5713. if (grad->image.handle != NULL)
  5714. {
  5715. error = vg_lite_free(&grad->image);
  5716. }
  5717. return error;
  5718. }
  5719. vg_lite_error_t vg_lite_clear_radial_grad(vg_lite_radial_gradient_t *grad)
  5720. {
  5721. #if DUMP_API
  5722. FUNC_DUMP(vg_lite_clear_radial_grad)(grad);
  5723. #endif
  5724. vg_lite_error_t error = VG_LITE_SUCCESS;
  5725. #if gcFEATURE_VG_TRACE_API
  5726. VGLITE_LOG("vg_lite_clear_radial_grad %p\n", grad);
  5727. #endif
  5728. grad->count = 0;
  5729. /* Release the image resource. */
  5730. if (grad->image.handle != NULL)
  5731. {
  5732. error = vg_lite_free(&grad->image);
  5733. }
  5734. return error;
  5735. }
  5736. vg_lite_matrix_t * vg_lite_get_linear_grad_matrix(vg_lite_ext_linear_gradient_t *grad)
  5737. {
  5738. #if gcFEATURE_VG_TRACE_API
  5739. VGLITE_LOG("vg_lite_get_linear_grad_matrix %p\n", grad);
  5740. #endif
  5741. return &grad->matrix;
  5742. }
  5743. vg_lite_matrix_t * vg_lite_get_grad_matrix(vg_lite_linear_gradient_t *grad)
  5744. {
  5745. #if gcFEATURE_VG_TRACE_API
  5746. VGLITE_LOG("vg_lite_get_grad_matrix %p\n", grad);
  5747. #endif
  5748. return &grad->matrix;
  5749. }
  5750. vg_lite_matrix_t * vg_lite_get_radial_grad_matrix(vg_lite_radial_gradient_t *grad)
  5751. {
  5752. #if gcFEATURE_VG_TRACE_API
  5753. VGLITE_LOG("vg_lite_get_radial_grad_matrix %p\n", grad);
  5754. #endif
  5755. return &grad->matrix;
  5756. }
/* Write the contents of the current command buffer to the dump/capture
 * facility. Nothing is submitted to the hardware here; the submit record
 * is filled in only so its fields can be used for the dump. */
vg_lite_error_t vg_lite_dump_command_buffer()
{
#if DUMP_API
    FUNC_DUMP(vg_lite_dump_command_buffer)();
#endif
    vg_lite_error_t error = VG_LITE_SUCCESS;
    vg_lite_kernel_submit_t submit;
    vg_lite_context_t* context = &s_context;
    /* Describe the current command buffer: backing memory, number of
       bytes written so far, and which buffer of the pair is active. */
    submit.context = &context->context;
    submit.commands = CMDBUF_BUFFER(*context);
    submit.command_size = CMDBUF_OFFSET(*context);
    submit.command_id = CMDBUF_INDEX(*context);
    vglitemDUMP_BUFFER("command", (size_t)CMDBUF_BUFFER(*context),
        submit.context->command_buffer_logical[CMDBUF_INDEX(*context)], 0, submit.command_size);
#if !DUMP_COMMAND_CAPTURE
    /* Emit the commit marker unless raw capture mode is enabled. */
    vglitemDUMP("@[commit]");
#endif
    return error;
}
/* Query a driver/hardware parameter.
 * type   - which parameter to read.
 * count  - number of values the caller's buffer holds; its meaning and
 *          validation depend on `type` (see each case below).
 * params - output buffer, interpreted per `type`.
 * Returns VG_LITE_INVALID_ARGUMENT for an unknown type or a bad count.
 */
vg_lite_error_t vg_lite_get_parameter(vg_lite_param_type_t type,
    vg_lite_int32_t count,
    vg_lite_pointer params)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    vg_lite_uint32_t gpu_idle = 0;
    vg_lite_float_t *fparams;
    vg_lite_uint32_t *uiparams;
    vg_lite_kernel_hardware_running_time_t time;
#if gcFEATURE_VG_TRACE_API
    VGLITE_LOG("vg_lite_get_parameter %d %p\n", count, params);
#endif
    switch (type)
    {
    case VG_LITE_GPU_IDLE_STATE:
        /* Single uint32 output: 1 when all idle bits in mask 0x0B05 of
           register 0x04 are set, else 0. */
        if (count != 1) {
            return VG_LITE_INVALID_ARGUMENT;
        }
        vg_lite_get_register(0x04, &gpu_idle);
        uiparams = (vg_lite_uint32_t*)params;
        *uiparams = ((gpu_idle & 0x0B05) == 0x0B05);
        break;
    case VG_LITE_SCISSOR_RECT:
        /* Scissor values come in groups of 4; copies `count` entries of
           s_context.scissor[] out as floats. */
        if ((count % 4) != 0) {
            return VG_LITE_INVALID_ARGUMENT;
        }
        fparams = (vg_lite_float_t*)params;
        for (vg_lite_int32_t i = 0; i < count; i++)
        {
            *(fparams + i) = (vg_lite_float_t)s_context.scissor[i];
        }
        break;
    case VG_LITE_HARDWARE_RUNNING_TIME:
        /* Hardware running time in seconds (ticks / tick frequency).
           NOTE(review): the kernel call's return value and `count` are not
           checked, and `params` is assumed to point at one float — confirm
           against callers; hertz==0 would produce a float division by
           zero (inf). */
        vg_lite_kernel(VG_LITE_RECORD_RUNNING_TIME, &time);
        *((float*)params) = (float)time.run_time / (float)time.hertz;
        break;
    default:
        error = VG_LITE_INVALID_ARGUMENT;
        break;
    }
    return error;
}
  5819. vg_lite_error_t vg_lite_copy_image(vg_lite_buffer_t *target, vg_lite_buffer_t *source,
  5820. vg_lite_int32_t sx, vg_lite_int32_t sy,
  5821. vg_lite_int32_t dx, vg_lite_int32_t dy,
  5822. vg_lite_uint32_t width, vg_lite_uint32_t height)
  5823. {
  5824. #if gcFEATURE_VG_IM_INPUT
  5825. vg_lite_error_t error;
  5826. vg_lite_point_t point_min, point_max, temp;
  5827. vg_lite_matrix_t inverse_matrix;
  5828. vg_lite_matrix_t n;
  5829. vg_lite_float_t x_step[3];
  5830. vg_lite_float_t y_step[3];
  5831. vg_lite_float_t c_step[3];
  5832. uint32_t imageMode = 0;
  5833. uint32_t in_premult = 0;
  5834. int32_t stride;
  5835. uint32_t transparency_mode = 0;
  5836. uint32_t filter_mode = 0;
  5837. uint32_t conversion = 0;
  5838. uint32_t tiled_source;
  5839. int32_t left, top, right, bottom;
  5840. uint32_t rect_x = 0, rect_y = 0, rect_w = 0, rect_h = 0;
  5841. vg_lite_rectangle_t rectangle = { dx, dy, width, height };
  5842. uint32_t yuv2rgb = 0;
  5843. uint32_t uv_swiz = 0;
  5844. uint32_t compress_mode;
  5845. uint32_t src_premultiply_enable = 0;
  5846. uint32_t index_endian = 0;
  5847. uint32_t eco_fifo = 0;
  5848. uint32_t tile_setting = 0;
  5849. uint32_t stripe_mode = 0;
  5850. uint32_t premul_flag = 0;
  5851. uint32_t prediv_flag = 0;
  5852. vg_lite_color_t color = 0;
  5853. #if gcFEATURE_VG_TRACE_API
  5854. VGLITE_LOG("vg_lite_copy_image %p %p %d %d %d %d %d %d\n", target, source, sx, sy, dx, dy, width, height);
  5855. #endif
  5856. #if gcFEATURE_VG_ERROR_CHECK
  5857. #if !gcFEATURE_VG_INDEX_ENDIAN
  5858. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  5859. return VG_LITE_NOT_SUPPORT;
  5860. }
  5861. #endif
  5862. #if !gcFEATURE_VG_RECTANGLE_TILED_OUT
  5863. if (target->tiled != VG_LITE_LINEAR) {
  5864. return VG_LITE_NOT_SUPPORT;
  5865. }
  5866. #endif
  5867. #if !gcFEATURE_VG_RGBA8_ETC2_EAC
  5868. if (source->format == VG_LITE_RGBA8888_ETC2_EAC) {
  5869. return VG_LITE_NOT_SUPPORT;
  5870. }
  5871. #else
  5872. if ((source->format == VG_LITE_RGBA8888_ETC2_EAC) && (source->width % 16 || source->height % 4)) {
  5873. return VG_LITE_INVALID_ARGUMENT;
  5874. }
  5875. #endif
  5876. #if !gcFEATURE_VG_YUY2_INPUT
  5877. if (source->format == VG_LITE_YUYV || source->format == VG_LITE_YUY2) {
  5878. return VG_LITE_NOT_SUPPORT;
  5879. }
  5880. #endif
  5881. #if !gcFEATURE_VG_YUV_INPUT
  5882. if ((source->format >= VG_LITE_NV12 && source->format <= VG_LITE_NV16) || source->format == VG_LITE_NV24) {
  5883. return VG_LITE_NOT_SUPPORT;
  5884. }
  5885. #elif !gcFEATURE_VG_NV24_INPUT
  5886. if (source->format == VG_LITE_NV24) {
  5887. return VG_LITE_NOT_SUPPORT;
  5888. }
  5889. #endif
  5890. #if !gcFEATURE_VG_AYUV_INPUT
  5891. if (source->format == VG_LITE_ANV12 || source->format == VG_LITE_AYUY2) {
  5892. return VG_LITE_NOT_SUPPORT;
  5893. }
  5894. #endif
  5895. #if !gcFEATURE_VG_YUV_TILED_INPUT
  5896. if ((source->format >= VG_LITE_YUY2_TILED && source->format <= VG_LITE_AYUY2_TILED) || (source->format == VG_LITE_NV24_TILED)) {
  5897. return VG_LITE_NOT_SUPPORT;
  5898. }
  5899. #endif
  5900. #if !gcFEATURE_VG_24BIT
  5901. if ((target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) ||
  5902. (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658)) {
  5903. return VG_LITE_NOT_SUPPORT;
  5904. }
  5905. #endif
  5906. #if !gcFEATURE_VG_24BIT_PLANAR
  5907. if (source->format >= VG_LITE_ABGR8565_PLANAR && source->format <= VG_LITE_RGBA5658_PLANAR) {
  5908. return VG_LITE_NOT_SUPPORT;
  5909. }
  5910. #endif
  5911. #if !gcFEATURE_VG_IM_DEC_INPUT
  5912. if (source->compress_mode != VG_LITE_DEC_DISABLE) {
  5913. return VG_LITE_NOT_SUPPORT;
  5914. }
  5915. #endif
  5916. #if !gcFEATURE_VG_STENCIL
  5917. if (source->image_mode == VG_LITE_STENCIL_MODE) {
  5918. return VG_LITE_NOT_SUPPORT;
  5919. }
  5920. #endif
  5921. #if (CHIPID == 0x355)
  5922. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  5923. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  5924. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  5925. printf("Target format: 0x%x is not supported.\n", target->format);
  5926. return VG_LITE_NOT_SUPPORT;
  5927. }
  5928. if (source->format == VG_LITE_L8 || source->format == VG_LITE_YUYV ||
  5929. source->format == VG_LITE_BGRA2222 || source->format == VG_LITE_RGBA2222 ||
  5930. source->format == VG_LITE_ABGR2222 || source->format == VG_LITE_ARGB2222) {
  5931. printf("Source format: 0x%x is not supported.\n", source->format);
  5932. return VG_LITE_NOT_SUPPORT;
  5933. }
  5934. #endif
  5935. VG_LITE_RETURN_ERROR(srcbuf_align_check(source));
  5936. VG_LITE_RETURN_ERROR(check_compress(source->format, source->compress_mode, source->tiled, source->width, source->height));
  5937. #endif /* gcFEATURE_VG_ERROR_CHECK */
  5938. #if gcFEATURE_VG_INDEX_ENDIAN
  5939. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  5940. index_endian = 1 << 14;
  5941. }
  5942. #endif
  5943. #if !gcFEATURE_VG_STRIPE_MODE
  5944. /* Enable fifo feature to share buffer between vg and ts to improve the rotation performance */
  5945. eco_fifo = 1 << 7;
  5946. #endif
  5947. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000 : 0);
  5948. vg_lite_matrix_t* matrix = &n;
  5949. vg_lite_identity(matrix);
  5950. vg_lite_translate((vg_lite_float_t)sx, (vg_lite_float_t)sy, matrix);
  5951. /* Check whether L8 is supported or not. */
  5952. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  5953. conversion = 0x80000000;
  5954. }
  5955. #if gcFEATURE_VG_16PIXELS_ALIGNED
  5956. /* Check if source specify bytes are aligned */
  5957. error = _check_source_aligned(source->format, source->stride);
  5958. if (error != VG_LITE_SUCCESS) {
  5959. return error;
  5960. }
  5961. #endif
  5962. /* Set source region. */
  5963. vg_lite_rectangle_t* rect = &rectangle;
  5964. rect_x = (rect->x < 0) ? 0 : rect->x;
  5965. rect_y = (rect->y < 0) ? 0 : rect->y;
  5966. rect_w = rect->width;
  5967. rect_h = rect->height;
  5968. if ((rect_x > (uint32_t)source->width) || (rect_y > (uint32_t)source->height) ||
  5969. (rect_w == 0) || (rect_h == 0))
  5970. {
  5971. /*No intersection*/
  5972. return VG_LITE_INVALID_ARGUMENT;
  5973. }
  5974. if (rect_x + rect_w > (uint32_t)source->width)
  5975. {
  5976. rect_w = source->width - rect_x;
  5977. }
  5978. if (rect_y + rect_h > (uint32_t)source->height)
  5979. {
  5980. rect_h = source->height - rect_y;
  5981. }
  5982. /* Transform image (0,0) to screen. */
  5983. if (!transform(&temp, 0.0f, 0.0f, matrix))
  5984. return VG_LITE_INVALID_ARGUMENT;
  5985. /* Set initial point. */
  5986. point_min = temp;
  5987. point_max = temp;
  5988. /* Transform image (0,height) to screen. */
  5989. if (!transform(&temp, 0.0f, (vg_lite_float_t)rect_h, matrix))
  5990. return VG_LITE_INVALID_ARGUMENT;
  5991. /* Determine min/max. */
  5992. if (temp.x < point_min.x) point_min.x = temp.x;
  5993. if (temp.y < point_min.y) point_min.y = temp.y;
  5994. if (temp.x > point_max.x) point_max.x = temp.x;
  5995. if (temp.y > point_max.y) point_max.y = temp.y;
  5996. /* Transform image (width,height) to screen. */
  5997. if (!transform(&temp, (vg_lite_float_t)rect_w, (vg_lite_float_t)rect_h, matrix))
  5998. return VG_LITE_INVALID_ARGUMENT;
  5999. /* Determine min/max. */
  6000. if (temp.x < point_min.x) point_min.x = temp.x;
  6001. if (temp.y < point_min.y) point_min.y = temp.y;
  6002. if (temp.x > point_max.x) point_max.x = temp.x;
  6003. if (temp.y > point_max.y) point_max.y = temp.y;
  6004. /* Transform image (width,0) to screen. */
  6005. if (!transform(&temp, (vg_lite_float_t)rect_w, 0.0f, matrix))
  6006. return VG_LITE_INVALID_ARGUMENT;
  6007. /* Determine min/max. */
  6008. if (temp.x < point_min.x) point_min.x = temp.x;
  6009. if (temp.y < point_min.y) point_min.y = temp.y;
  6010. if (temp.x > point_max.x) point_max.x = temp.x;
  6011. if (temp.y > point_max.y) point_max.y = temp.y;
  6012. /* Clip to target. */
  6013. if (s_context.scissor_set && !target->scissor_buffer) {
  6014. left = s_context.scissor[0];
  6015. top = s_context.scissor[1];
  6016. right = s_context.scissor[2];
  6017. bottom = s_context.scissor[3];
  6018. }
  6019. else {
  6020. left = 0;
  6021. top = 0;
  6022. right = target->width;
  6023. bottom = target->height;
  6024. }
  6025. point_min.x = MAX(point_min.x, left);
  6026. point_min.y = MAX(point_min.y, top);
  6027. point_max.x = MIN(point_max.x, right);
  6028. point_max.y = MIN(point_max.y, bottom);
  6029. /* No need to draw. */
  6030. if ((point_max.x <= point_min.x) || (point_max.y <= point_min.y)) {
  6031. return VG_LITE_SUCCESS;
  6032. }
  6033. #if gcFEATURE_VG_GAMMA
  6034. get_st_gamma_src_dest(source, target);
  6035. #endif
  6036. /*blend input into context*/
  6037. in_premult = 0x00000000;
  6038. /* Adjust premultiply setting according to openvg condition */
  6039. src_premultiply_enable = 0x01000100;
  6040. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  6041. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  6042. prediv_flag = 0;
  6043. }
  6044. else {
  6045. prediv_flag = 1;
  6046. }
  6047. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  6048. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  6049. src_premultiply_enable = 0x01000100;
  6050. in_premult = 0x10000000;
  6051. }
  6052. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  6053. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  6054. src_premultiply_enable = 0x01000100;
  6055. in_premult = 0x10000000;
  6056. }
  6057. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  6058. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  6059. src_premultiply_enable = 0x01000100;
  6060. in_premult = 0x00000000;
  6061. }
  6062. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  6063. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  6064. src_premultiply_enable = 0x00000100;
  6065. in_premult = 0x00000000;
  6066. }
  6067. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  6068. target->apply_premult = 1;
  6069. }
  6070. else {
  6071. target->apply_premult = 0;
  6072. }
  6073. error = set_render_target(target);
  6074. if (error != VG_LITE_SUCCESS) {
  6075. return error;
  6076. }
  6077. /* Compute inverse matrix. */
  6078. if (!inverse(&inverse_matrix, matrix))
  6079. return VG_LITE_INVALID_ARGUMENT;
  6080. #if gcFEATURE_VG_MATH_PRECISION_FIX
  6081. /* Compute interpolation steps. */
  6082. x_step[0] = inverse_matrix.m[0][0];
  6083. x_step[1] = inverse_matrix.m[1][0];
  6084. x_step[2] = inverse_matrix.m[2][0];
  6085. y_step[0] = inverse_matrix.m[0][1];
  6086. y_step[1] = inverse_matrix.m[1][1];
  6087. y_step[2] = inverse_matrix.m[2][1];
  6088. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]);
  6089. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  6090. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  6091. #else
  6092. /* Compute interpolation steps. */
  6093. x_step[0] = inverse_matrix.m[0][0] / rect_w;
  6094. x_step[1] = inverse_matrix.m[1][0] / rect_h;
  6095. x_step[2] = inverse_matrix.m[2][0];
  6096. y_step[0] = inverse_matrix.m[0][1] / rect_w;
  6097. y_step[1] = inverse_matrix.m[1][1] / rect_h;
  6098. y_step[2] = inverse_matrix.m[2][1];
  6099. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / rect_w;
  6100. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / rect_h;
  6101. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  6102. #endif
  6103. /* Determine image mode (NORMAL) depending on the color. */
  6104. imageMode = 0x00001000;
  6105. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0;
  6106. #if gcFEATURE_VG_RECTANGLE_TILED_OUT
  6107. if (target->tiled == VG_LITE_TILED) {
  6108. tile_setting = 0x40;
  6109. stripe_mode = 0x20000000;
  6110. }
  6111. #endif
  6112. compress_mode = (uint32_t)source->compress_mode << 25;
  6113. /* Setup the command buffer. */
  6114. #if gcFEATURE_VG_GLOBAL_ALPHA
  6115. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD1, s_context.dst_alpha_mode | s_context.dst_alpha_value | s_context.src_alpha_mode | s_context.src_alpha_value));
  6116. #endif
  6117. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x00000001 | in_premult | imageMode | transparency_mode | tile_setting | eco_fifo | s_context.scissor_enable | stripe_mode));
  6118. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  6119. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void*)&c_step[0]));
  6120. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void*)&c_step[1]));
  6121. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void*)&c_step[2]));
  6122. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void*)&x_step[0]));
  6123. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void*)&x_step[1]));
  6124. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void*)&x_step[2]));
  6125. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  6126. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void*)&y_step[0]));
  6127. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void*)&y_step[1]));
  6128. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void*)&y_step[2]));
  6129. if (((source->format >= VG_LITE_YUY2) &&
  6130. (source->format <= VG_LITE_AYUY2)) ||
  6131. ((source->format >= VG_LITE_YUY2_TILED) &&
  6132. (source->format <= VG_LITE_AYUY2_TILED))) {
  6133. yuv2rgb = convert_yuv2rgb(source->yuv.yuv2rgb);
  6134. uv_swiz = convert_uv_swizzle(source->yuv.swizzle);
  6135. }
  6136. #if gcFEATURE_VG_IM_FASTCLEAR
  6137. if (source->fc_enable) {
  6138. uint32_t im_fc_enable = (source->fc_enable == 0) ? 0 : 0x800000;
  6139. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | uv_swiz | yuv2rgb | conversion | im_fc_enable | ahb_read_split | compress_mode | src_premultiply_enable | index_endian));
  6140. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0ACF, source->fc_buffer[0].address)); /* FC buffer address. */
  6141. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD0, source->fc_buffer[0].color)); /* FC clear value. */
  6142. }
  6143. #endif
  6144. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | uv_swiz | yuv2rgb | conversion | compress_mode | src_premultiply_enable | index_endian));
  6145. if (source->yuv.uv_planar) {
  6146. /* Program u plane address if necessary. */
  6147. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source->yuv.uv_planar));
  6148. }
  6149. if (source->yuv.v_planar) {
  6150. /* Program v plane address if necessary. */
  6151. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.v_planar));
  6152. }
  6153. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A27, 0));
  6154. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->address));
  6155. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  6156. /* 24bit format stride configured to 4bpp. */
  6157. if (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658) {
  6158. stride = source->stride / 3 * 4;
  6159. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, stride | tiled_source));
  6160. }
  6161. else {
  6162. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, source->stride | tiled_source));
  6163. }
  6164. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2D, rect_x | (rect_y << 16)));
  6165. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2F, rect_w | (rect_h << 16)));
  6166. VG_LITE_RETURN_ERROR(push_rectangle(&s_context, point_min.x, point_min.y, point_max.x - point_min.x, point_max.y - point_min.y));
  6167. #if !gcFEATURE_VG_STRIPE_MODE
  6168. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  6169. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  6170. #endif
  6171. if (!s_context.flexa_mode) {
  6172. error = flush_target();
  6173. }
  6174. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride) * (source->height));
  6175. #if DUMP_IMAGE
  6176. dump_img(source->memory, source->width, source->height, source->format);
  6177. #endif
  6178. return error;
  6179. #else
  6180. return VG_LITE_NOT_SUPPORT;
  6181. #endif
  6182. }
  6183. vg_lite_error_t vg_lite_set_memory_pool(vg_lite_buffer_type_t type, vg_lite_memory_pool_t pool)
  6184. {
  6185. if (!(pool >= VG_LITE_MEMORY_POOL_1 && pool <= VG_LITE_MEMORY_POOL_2))
  6186. return VG_LITE_INVALID_ARGUMENT;
  6187. switch (type) {
  6188. case VG_LITE_COMMAND_BUFFER:
  6189. s_context.command_buffer_pool = pool;
  6190. break;
  6191. case VG_LITE_TESSELLATION_BUFFER:
  6192. s_context.tess_buffer_pool = pool;
  6193. break;
  6194. case VG_LITE_RENDER_BUFFER:
  6195. s_context.render_buffer_pool = pool;
  6196. break;
  6197. default:
  6198. return VG_LITE_INVALID_ARGUMENT;
  6199. }
  6200. return VG_LITE_SUCCESS;
  6201. }
  6202. vg_lite_error_t vg_lite_frame_delimiter(vg_lite_frame_flag_t flag)
  6203. {
  6204. s_context.frame_flag = flag;
  6205. vg_lite_error_t error = vg_lite_finish();
  6206. return error;
  6207. }