SDL_gpu_vulkan.c 519 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
342213423134241342513426134271342813429134301343113432134331343413435134361343713438134391344013441134421344313444134451344613447134481344913450134511345213453134541345513456134571345813459134601346113462134631346413465134661346713468134691347013471134721347313474134751347613477134781347913480134811348213483134841348513486134871348813489134901349113492134931349413495134961349713498134991350013501135021350313504135051350613507135081350913510135111351213513135141351513516135171351813519135201352113522135231352413525135261352713528135291353013531135321353313534135351353613537
  1. /*
  2. Simple DirectMedia Layer
  3. Copyright (C) 1997-2026 Sam Lantinga <slouken@libsdl.org>
  4. This software is provided 'as-is', without any express or implied
  5. warranty. In no event will the authors be held liable for any damages
  6. arising from the use of this software.
  7. Permission is granted to anyone to use this software for any purpose,
  8. including commercial applications, and to alter it and redistribute it
  9. freely, subject to the following restrictions:
  10. 1. The origin of this software must not be misrepresented; you must not
  11. claim that you wrote the original software. If you use this software
  12. in a product, an acknowledgment in the product documentation would be
  13. appreciated but is not required.
  14. 2. Altered source versions must be plainly marked as such, and must not be
  15. misrepresented as being the original software.
  16. 3. This notice may not be removed or altered from any source distribution.
  17. */
  18. #include "SDL_internal.h"
  19. #ifdef SDL_GPU_VULKAN
  20. // Needed for VK_KHR_portability_subset
  21. #define VK_ENABLE_BETA_EXTENSIONS
  22. #define VK_NO_PROTOTYPES
  23. #include "../../video/khronos/vulkan/vulkan.h"
  24. #ifdef HAVE_GPU_OPENXR
  25. #define XR_USE_GRAPHICS_API_VULKAN 1
  26. #include "../xr/SDL_openxr_internal.h"
  27. #include "../xr/SDL_openxrdyn.h"
  28. #include "../xr/SDL_gpu_openxr.h"
  29. #endif
  30. #include <SDL3/SDL_vulkan.h>
  31. #include "../SDL_sysgpu.h"
  32. #include "../../events/SDL_windowevents_c.h"
  33. // Global Vulkan Loader Entry Points
  34. static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL;
  35. #define VULKAN_GLOBAL_FUNCTION(name) \
  36. static PFN_##name name = NULL;
  37. #include "SDL_gpu_vulkan_vkfuncs.h"
  38. typedef struct VulkanExtensions
  39. {
  40. // These extensions are required!
  41. // Globally supported
  42. Uint8 KHR_swapchain;
  43. // Core since 1.1, needed for negative VkViewport::height
  44. Uint8 KHR_maintenance1;
  45. // These extensions are optional!
  46. // Core since 1.2, but requires annoying paperwork to implement
  47. Uint8 KHR_driver_properties;
  48. // Only required for special implementations (i.e. MoltenVK)
  49. Uint8 KHR_portability_subset;
  50. // Only required to detect devices using Dozen D3D12 driver
  51. Uint8 MSFT_layered_driver;
  52. // Only required for decoding HDR ASTC textures
  53. Uint8 EXT_texture_compression_astc_hdr;
  54. } VulkanExtensions;
  55. // Defines
  56. #define SMALL_ALLOCATION_THRESHOLD 2097152 // 2 MiB
  57. #define SMALL_ALLOCATION_SIZE 16777216 // 16 MiB
  58. #define LARGE_ALLOCATION_INCREMENT 67108864 // 64 MiB
  59. #define MAX_UBO_SECTION_SIZE 4096 // 4 KiB
  60. #define DESCRIPTOR_POOL_SIZE 128
  61. #define WINDOW_PROPERTY_DATA "SDL.internal.gpu.vulkan.data"
  62. #define IDENTITY_SWIZZLE \
  63. { \
  64. VK_COMPONENT_SWIZZLE_IDENTITY, \
  65. VK_COMPONENT_SWIZZLE_IDENTITY, \
  66. VK_COMPONENT_SWIZZLE_IDENTITY, \
  67. VK_COMPONENT_SWIZZLE_IDENTITY \
  68. }
  69. // Conversions
  70. static VkPresentModeKHR SDLToVK_PresentMode[] = {
  71. VK_PRESENT_MODE_FIFO_KHR,
  72. VK_PRESENT_MODE_IMMEDIATE_KHR,
  73. VK_PRESENT_MODE_MAILBOX_KHR
  74. };
  75. // NOTE: this is behind an ifdef guard because otherwise it would trigger an "unused variable" error when OpenXR support is disabled
  76. #ifdef HAVE_GPU_OPENXR
  77. typedef struct TextureFormatPair {
  78. VkFormat vk;
  79. SDL_GPUTextureFormat sdl;
  80. } TextureFormatPair;
  81. static TextureFormatPair SDLToVK_TextureFormat_SrgbOnly[] = {
  82. {VK_FORMAT_R8G8B8A8_SRGB, SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM_SRGB},
  83. {VK_FORMAT_B8G8R8A8_SRGB, SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM_SRGB},
  84. {VK_FORMAT_BC1_RGBA_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_BC1_RGBA_UNORM_SRGB},
  85. {VK_FORMAT_BC2_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_BC2_RGBA_UNORM_SRGB},
  86. {VK_FORMAT_BC3_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_BC3_RGBA_UNORM_SRGB},
  87. {VK_FORMAT_BC7_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_BC7_RGBA_UNORM_SRGB},
  88. {VK_FORMAT_ASTC_4x4_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_4x4_UNORM_SRGB},
  89. {VK_FORMAT_ASTC_5x4_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_5x4_UNORM_SRGB},
  90. {VK_FORMAT_ASTC_5x5_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_5x5_UNORM_SRGB},
  91. {VK_FORMAT_ASTC_6x5_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_6x5_UNORM_SRGB},
  92. {VK_FORMAT_ASTC_6x6_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_6x6_UNORM_SRGB},
  93. {VK_FORMAT_ASTC_8x5_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_8x5_UNORM_SRGB},
  94. {VK_FORMAT_ASTC_8x6_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_8x6_UNORM_SRGB},
  95. {VK_FORMAT_ASTC_8x8_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_8x8_UNORM_SRGB},
  96. {VK_FORMAT_ASTC_10x5_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_10x5_UNORM_SRGB},
  97. {VK_FORMAT_ASTC_10x6_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_10x6_UNORM_SRGB},
  98. {VK_FORMAT_ASTC_10x8_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_10x8_UNORM_SRGB},
  99. {VK_FORMAT_ASTC_10x10_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_10x10_UNORM_SRGB},
  100. {VK_FORMAT_ASTC_12x10_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_12x10_UNORM_SRGB},
  101. {VK_FORMAT_ASTC_12x12_SRGB_BLOCK, SDL_GPU_TEXTUREFORMAT_ASTC_12x12_UNORM_SRGB},
  102. };
  103. #endif // HAVE_GPU_OPENXR
  104. static VkFormat SDLToVK_TextureFormat[] = {
  105. VK_FORMAT_UNDEFINED, // INVALID
  106. VK_FORMAT_R8_UNORM, // A8_UNORM
  107. VK_FORMAT_R8_UNORM, // R8_UNORM
  108. VK_FORMAT_R8G8_UNORM, // R8G8_UNORM
  109. VK_FORMAT_R8G8B8A8_UNORM, // R8G8B8A8_UNORM
  110. VK_FORMAT_R16_UNORM, // R16_UNORM
  111. VK_FORMAT_R16G16_UNORM, // R16G16_UNORM
  112. VK_FORMAT_R16G16B16A16_UNORM, // R16G16B16A16_UNORM
  113. VK_FORMAT_A2B10G10R10_UNORM_PACK32, // R10G10B10A2_UNORM
  114. VK_FORMAT_R5G6B5_UNORM_PACK16, // B5G6R5_UNORM
  115. VK_FORMAT_A1R5G5B5_UNORM_PACK16, // B5G5R5A1_UNORM
  116. VK_FORMAT_B4G4R4A4_UNORM_PACK16, // B4G4R4A4_UNORM
  117. VK_FORMAT_B8G8R8A8_UNORM, // B8G8R8A8_UNORM
  118. VK_FORMAT_BC1_RGBA_UNORM_BLOCK, // BC1_UNORM
  119. VK_FORMAT_BC2_UNORM_BLOCK, // BC2_UNORM
  120. VK_FORMAT_BC3_UNORM_BLOCK, // BC3_UNORM
  121. VK_FORMAT_BC4_UNORM_BLOCK, // BC4_UNORM
  122. VK_FORMAT_BC5_UNORM_BLOCK, // BC5_UNORM
  123. VK_FORMAT_BC7_UNORM_BLOCK, // BC7_UNORM
  124. VK_FORMAT_BC6H_SFLOAT_BLOCK, // BC6H_FLOAT
  125. VK_FORMAT_BC6H_UFLOAT_BLOCK, // BC6H_UFLOAT
  126. VK_FORMAT_R8_SNORM, // R8_SNORM
  127. VK_FORMAT_R8G8_SNORM, // R8G8_SNORM
  128. VK_FORMAT_R8G8B8A8_SNORM, // R8G8B8A8_SNORM
  129. VK_FORMAT_R16_SNORM, // R16_SNORM
  130. VK_FORMAT_R16G16_SNORM, // R16G16_SNORM
  131. VK_FORMAT_R16G16B16A16_SNORM, // R16G16B16A16_SNORM
  132. VK_FORMAT_R16_SFLOAT, // R16_FLOAT
  133. VK_FORMAT_R16G16_SFLOAT, // R16G16_FLOAT
  134. VK_FORMAT_R16G16B16A16_SFLOAT, // R16G16B16A16_FLOAT
  135. VK_FORMAT_R32_SFLOAT, // R32_FLOAT
  136. VK_FORMAT_R32G32_SFLOAT, // R32G32_FLOAT
  137. VK_FORMAT_R32G32B32A32_SFLOAT, // R32G32B32A32_FLOAT
  138. VK_FORMAT_B10G11R11_UFLOAT_PACK32, // R11G11B10_UFLOAT
  139. VK_FORMAT_R8_UINT, // R8_UINT
  140. VK_FORMAT_R8G8_UINT, // R8G8_UINT
  141. VK_FORMAT_R8G8B8A8_UINT, // R8G8B8A8_UINT
  142. VK_FORMAT_R16_UINT, // R16_UINT
  143. VK_FORMAT_R16G16_UINT, // R16G16_UINT
  144. VK_FORMAT_R16G16B16A16_UINT, // R16G16B16A16_UINT
  145. VK_FORMAT_R32_UINT, // R32_UINT
  146. VK_FORMAT_R32G32_UINT, // R32G32_UINT
  147. VK_FORMAT_R32G32B32A32_UINT, // R32G32B32A32_UINT
  148. VK_FORMAT_R8_SINT, // R8_INT
  149. VK_FORMAT_R8G8_SINT, // R8G8_INT
  150. VK_FORMAT_R8G8B8A8_SINT, // R8G8B8A8_INT
  151. VK_FORMAT_R16_SINT, // R16_INT
  152. VK_FORMAT_R16G16_SINT, // R16G16_INT
  153. VK_FORMAT_R16G16B16A16_SINT, // R16G16B16A16_INT
  154. VK_FORMAT_R32_SINT, // R32_INT
  155. VK_FORMAT_R32G32_SINT, // R32G32_INT
  156. VK_FORMAT_R32G32B32A32_SINT, // R32G32B32A32_INT
  157. VK_FORMAT_R8G8B8A8_SRGB, // R8G8B8A8_UNORM_SRGB
  158. VK_FORMAT_B8G8R8A8_SRGB, // B8G8R8A8_UNORM_SRGB
  159. VK_FORMAT_BC1_RGBA_SRGB_BLOCK, // BC1_UNORM_SRGB
VK_FORMAT_BC2_SRGB_BLOCK, // BC2_UNORM_SRGB
  161. VK_FORMAT_BC3_SRGB_BLOCK, // BC3_UNORM_SRGB
  162. VK_FORMAT_BC7_SRGB_BLOCK, // BC7_UNORM_SRGB
  163. VK_FORMAT_D16_UNORM, // D16_UNORM
  164. VK_FORMAT_X8_D24_UNORM_PACK32, // D24_UNORM
  165. VK_FORMAT_D32_SFLOAT, // D32_FLOAT
  166. VK_FORMAT_D24_UNORM_S8_UINT, // D24_UNORM_S8_UINT
  167. VK_FORMAT_D32_SFLOAT_S8_UINT, // D32_FLOAT_S8_UINT
  168. VK_FORMAT_ASTC_4x4_UNORM_BLOCK, // ASTC_4x4_UNORM
  169. VK_FORMAT_ASTC_5x4_UNORM_BLOCK, // ASTC_5x4_UNORM
  170. VK_FORMAT_ASTC_5x5_UNORM_BLOCK, // ASTC_5x5_UNORM
  171. VK_FORMAT_ASTC_6x5_UNORM_BLOCK, // ASTC_6x5_UNORM
  172. VK_FORMAT_ASTC_6x6_UNORM_BLOCK, // ASTC_6x6_UNORM
  173. VK_FORMAT_ASTC_8x5_UNORM_BLOCK, // ASTC_8x5_UNORM
  174. VK_FORMAT_ASTC_8x6_UNORM_BLOCK, // ASTC_8x6_UNORM
  175. VK_FORMAT_ASTC_8x8_UNORM_BLOCK, // ASTC_8x8_UNORM
  176. VK_FORMAT_ASTC_10x5_UNORM_BLOCK, // ASTC_10x5_UNORM
  177. VK_FORMAT_ASTC_10x6_UNORM_BLOCK, // ASTC_10x6_UNORM
  178. VK_FORMAT_ASTC_10x8_UNORM_BLOCK, // ASTC_10x8_UNORM
  179. VK_FORMAT_ASTC_10x10_UNORM_BLOCK, // ASTC_10x10_UNORM
  180. VK_FORMAT_ASTC_12x10_UNORM_BLOCK, // ASTC_12x10_UNORM
  181. VK_FORMAT_ASTC_12x12_UNORM_BLOCK, // ASTC_12x12_UNORM
  182. VK_FORMAT_ASTC_4x4_SRGB_BLOCK, // ASTC_4x4_UNORM_SRGB
  183. VK_FORMAT_ASTC_5x4_SRGB_BLOCK, // ASTC_5x4_UNORM_SRGB
  184. VK_FORMAT_ASTC_5x5_SRGB_BLOCK, // ASTC_5x5_UNORM_SRGB
  185. VK_FORMAT_ASTC_6x5_SRGB_BLOCK, // ASTC_6x5_UNORM_SRGB
  186. VK_FORMAT_ASTC_6x6_SRGB_BLOCK, // ASTC_6x6_UNORM_SRGB
  187. VK_FORMAT_ASTC_8x5_SRGB_BLOCK, // ASTC_8x5_UNORM_SRGB
  188. VK_FORMAT_ASTC_8x6_SRGB_BLOCK, // ASTC_8x6_UNORM_SRGB
  189. VK_FORMAT_ASTC_8x8_SRGB_BLOCK, // ASTC_8x8_UNORM_SRGB
  190. VK_FORMAT_ASTC_10x5_SRGB_BLOCK, // ASTC_10x5_UNORM_SRGB
  191. VK_FORMAT_ASTC_10x6_SRGB_BLOCK, // ASTC_10x6_UNORM_SRGB
  192. VK_FORMAT_ASTC_10x8_SRGB_BLOCK, // ASTC_10x8_UNORM_SRGB
  193. VK_FORMAT_ASTC_10x10_SRGB_BLOCK, // ASTC_10x10_UNORM_SRGB
  194. VK_FORMAT_ASTC_12x10_SRGB_BLOCK, // ASTC_12x10_UNORM_SRGB
  195. VK_FORMAT_ASTC_12x12_SRGB_BLOCK, // ASTC_12x12_UNORM_SRGB
  196. VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, // ASTC_4x4_FLOAT
  197. VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, // ASTC_5x4_FLOAT
  198. VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, // ASTC_5x5_FLOAT
  199. VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, // ASTC_6x5_FLOAT
  200. VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, // ASTC_6x6_FLOAT
  201. VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, // ASTC_8x5_FLOAT
  202. VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, // ASTC_8x6_FLOAT
  203. VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, // ASTC_8x8_FLOAT
  204. VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, // ASTC_10x5_FLOAT
  205. VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, // ASTC_10x6_FLOAT
  206. VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, // ASTC_10x8_FLOAT
  207. VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, // ASTC_10x10_FLOAT
  208. VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, // ASTC_12x10_FLOAT
  209. VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK // ASTC_12x12_FLOAT
  210. };
// Keep the format translation table in lock-step with the SDL_GPUTextureFormat enum.
SDL_COMPILE_TIME_ASSERT(SDLToVK_TextureFormat, SDL_arraysize(SDLToVK_TextureFormat) == SDL_GPU_TEXTUREFORMAT_MAX_ENUM_VALUE);
  212. static VkComponentMapping SwizzleForSDLFormat(SDL_GPUTextureFormat format)
  213. {
  214. if (format == SDL_GPU_TEXTUREFORMAT_A8_UNORM) {
  215. // TODO: use VK_FORMAT_A8_UNORM_KHR from VK_KHR_maintenance5 when available
  216. return (VkComponentMapping){
  217. VK_COMPONENT_SWIZZLE_ZERO,
  218. VK_COMPONENT_SWIZZLE_ZERO,
  219. VK_COMPONENT_SWIZZLE_ZERO,
  220. VK_COMPONENT_SWIZZLE_R,
  221. };
  222. }
  223. if (format == SDL_GPU_TEXTUREFORMAT_B4G4R4A4_UNORM) {
  224. // ARGB -> BGRA
  225. // TODO: use VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT from VK_EXT_4444_formats when available
  226. return (VkComponentMapping){
  227. VK_COMPONENT_SWIZZLE_G,
  228. VK_COMPONENT_SWIZZLE_R,
  229. VK_COMPONENT_SWIZZLE_A,
  230. VK_COMPONENT_SWIZZLE_B,
  231. };
  232. }
  233. return (VkComponentMapping)IDENTITY_SWIZZLE;
  234. }
// Preferred VkFormat for each SDL_GPUSwapchainComposition (indexed by enum value).
static VkFormat SwapchainCompositionToFormat[] = {
    VK_FORMAT_B8G8R8A8_UNORM,          // SDR
    VK_FORMAT_B8G8R8A8_SRGB,           // SDR_LINEAR
    VK_FORMAT_R16G16B16A16_SFLOAT,     // HDR_EXTENDED_LINEAR
    VK_FORMAT_A2B10G10R10_UNORM_PACK32 // HDR10_ST2084
};
// Alternate VkFormat tried when the preferred one is unavailable
// (see the usingFallback parameter of SwapchainCompositionToSDLFormat).
// VK_FORMAT_UNDEFINED means the composition has no fallback.
static VkFormat SwapchainCompositionToFallbackFormat[] = {
    VK_FORMAT_R8G8B8A8_UNORM, // SDR
    VK_FORMAT_R8G8B8A8_SRGB,  // SDR_LINEAR
    VK_FORMAT_UNDEFINED,      // HDR_EXTENDED_LINEAR (no fallback)
    VK_FORMAT_UNDEFINED       // HDR10_ST2084 (no fallback)
};
  247. static SDL_GPUTextureFormat SwapchainCompositionToSDLFormat(
  248. SDL_GPUSwapchainComposition composition,
  249. bool usingFallback)
  250. {
  251. switch (composition) {
  252. case SDL_GPU_SWAPCHAINCOMPOSITION_SDR:
  253. return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM;
  254. case SDL_GPU_SWAPCHAINCOMPOSITION_SDR_LINEAR:
  255. return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM_SRGB : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM_SRGB;
  256. case SDL_GPU_SWAPCHAINCOMPOSITION_HDR_EXTENDED_LINEAR:
  257. return SDL_GPU_TEXTUREFORMAT_R16G16B16A16_FLOAT;
  258. case SDL_GPU_SWAPCHAINCOMPOSITION_HDR10_ST2084:
  259. return SDL_GPU_TEXTUREFORMAT_R10G10B10A2_UNORM;
  260. default:
  261. return SDL_GPU_TEXTUREFORMAT_INVALID;
  262. }
  263. }
// VkColorSpaceKHR for each SDL_GPUSwapchainComposition (indexed by enum value).
static VkColorSpaceKHR SwapchainCompositionToColorSpace[] = {
    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,       // SDR
    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,       // SDR_LINEAR
    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, // HDR_EXTENDED_LINEAR
    VK_COLOR_SPACE_HDR10_ST2084_EXT          // HDR10_ST2084
};
// Component swizzle applied to swapchain image views, per composition.
static VkComponentMapping SwapchainCompositionSwizzle[] = {
    IDENTITY_SWIZZLE, // SDR
    IDENTITY_SWIZZLE, // SDR_LINEAR
    IDENTITY_SWIZZLE, // HDR_EXTENDED_LINEAR
    {
        // HDR10_ST2084
        VK_COMPONENT_SWIZZLE_R,
        VK_COMPONENT_SWIZZLE_G,
        VK_COMPONENT_SWIZZLE_B,
        VK_COMPONENT_SWIZZLE_A,
    }
};
// SDL_GPUVertexElementFormat -> VkFormat (indexed by enum value).
static VkFormat SDLToVK_VertexFormat[] = {
    VK_FORMAT_UNDEFINED,           // INVALID
    VK_FORMAT_R32_SINT,            // INT
    VK_FORMAT_R32G32_SINT,         // INT2
    VK_FORMAT_R32G32B32_SINT,      // INT3
    VK_FORMAT_R32G32B32A32_SINT,   // INT4
    VK_FORMAT_R32_UINT,            // UINT
    VK_FORMAT_R32G32_UINT,         // UINT2
    VK_FORMAT_R32G32B32_UINT,      // UINT3
    VK_FORMAT_R32G32B32A32_UINT,   // UINT4
    VK_FORMAT_R32_SFLOAT,          // FLOAT
    VK_FORMAT_R32G32_SFLOAT,       // FLOAT2
    VK_FORMAT_R32G32B32_SFLOAT,    // FLOAT3
    VK_FORMAT_R32G32B32A32_SFLOAT, // FLOAT4
    VK_FORMAT_R8G8_SINT,           // BYTE2
    VK_FORMAT_R8G8B8A8_SINT,       // BYTE4
    VK_FORMAT_R8G8_UINT,           // UBYTE2
    VK_FORMAT_R8G8B8A8_UINT,       // UBYTE4
    VK_FORMAT_R8G8_SNORM,          // BYTE2_NORM
    VK_FORMAT_R8G8B8A8_SNORM,      // BYTE4_NORM
    VK_FORMAT_R8G8_UNORM,          // UBYTE2_NORM
    VK_FORMAT_R8G8B8A8_UNORM,      // UBYTE4_NORM
    VK_FORMAT_R16G16_SINT,         // SHORT2
    VK_FORMAT_R16G16B16A16_SINT,   // SHORT4
    VK_FORMAT_R16G16_UINT,         // USHORT2
    VK_FORMAT_R16G16B16A16_UINT,   // USHORT4
    VK_FORMAT_R16G16_SNORM,        // SHORT2_NORM
    VK_FORMAT_R16G16B16A16_SNORM,  // SHORT4_NORM
    VK_FORMAT_R16G16_UNORM,        // USHORT2_NORM
    VK_FORMAT_R16G16B16A16_UNORM,  // USHORT4_NORM
    VK_FORMAT_R16G16_SFLOAT,       // HALF2
    VK_FORMAT_R16G16B16A16_SFLOAT  // HALF4
};
// Keep the vertex format table in lock-step with the SDL enum.
SDL_COMPILE_TIME_ASSERT(SDLToVK_VertexFormat, SDL_arraysize(SDLToVK_VertexFormat) == SDL_GPU_VERTEXELEMENTFORMAT_MAX_ENUM_VALUE);
// SDL index element size -> VkIndexType (16-bit, 32-bit).
static VkIndexType SDLToVK_IndexType[] = {
    VK_INDEX_TYPE_UINT16,
    VK_INDEX_TYPE_UINT32
};
// SDL_GPUPrimitiveType -> VkPrimitiveTopology (indexed by enum value).
static VkPrimitiveTopology SDLToVK_PrimitiveType[] = {
    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
    VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
    VK_PRIMITIVE_TOPOLOGY_POINT_LIST
};
// SDL_GPUCullMode -> VkCullModeFlags.
static VkCullModeFlags SDLToVK_CullMode[] = {
    VK_CULL_MODE_NONE,
    VK_CULL_MODE_FRONT_BIT,
    VK_CULL_MODE_BACK_BIT,
    VK_CULL_MODE_FRONT_AND_BACK
};
// SDL_GPUFrontFace -> VkFrontFace.
static VkFrontFace SDLToVK_FrontFace[] = {
    VK_FRONT_FACE_COUNTER_CLOCKWISE,
    VK_FRONT_FACE_CLOCKWISE
};
// SDL_GPUBlendFactor -> VkBlendFactor. Slot 0 covers the INVALID enum value.
static VkBlendFactor SDLToVK_BlendFactor[] = {
    VK_BLEND_FACTOR_ZERO, // INVALID
    VK_BLEND_FACTOR_ZERO,
    VK_BLEND_FACTOR_ONE,
    VK_BLEND_FACTOR_SRC_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
    VK_BLEND_FACTOR_DST_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
    VK_BLEND_FACTOR_SRC_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
    VK_BLEND_FACTOR_DST_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
    VK_BLEND_FACTOR_CONSTANT_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_BlendFactor, SDL_arraysize(SDLToVK_BlendFactor) == SDL_GPU_BLENDFACTOR_MAX_ENUM_VALUE);
// SDL_GPUBlendOp -> VkBlendOp. Slot 0 covers the INVALID enum value.
static VkBlendOp SDLToVK_BlendOp[] = {
    VK_BLEND_OP_ADD, // INVALID
    VK_BLEND_OP_ADD,
    VK_BLEND_OP_SUBTRACT,
    VK_BLEND_OP_REVERSE_SUBTRACT,
    VK_BLEND_OP_MIN,
    VK_BLEND_OP_MAX
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_BlendOp, SDL_arraysize(SDLToVK_BlendOp) == SDL_GPU_BLENDOP_MAX_ENUM_VALUE);
// SDL_GPUCompareOp -> VkCompareOp. Slot 0 covers the INVALID enum value.
static VkCompareOp SDLToVK_CompareOp[] = {
    VK_COMPARE_OP_NEVER, // INVALID
    VK_COMPARE_OP_NEVER,
    VK_COMPARE_OP_LESS,
    VK_COMPARE_OP_EQUAL,
    VK_COMPARE_OP_LESS_OR_EQUAL,
    VK_COMPARE_OP_GREATER,
    VK_COMPARE_OP_NOT_EQUAL,
    VK_COMPARE_OP_GREATER_OR_EQUAL,
    VK_COMPARE_OP_ALWAYS
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_CompareOp, SDL_arraysize(SDLToVK_CompareOp) == SDL_GPU_COMPAREOP_MAX_ENUM_VALUE);
// SDL_GPUStencilOp -> VkStencilOp. Slot 0 covers the INVALID enum value.
static VkStencilOp SDLToVK_StencilOp[] = {
    VK_STENCIL_OP_KEEP, // INVALID
    VK_STENCIL_OP_KEEP,
    VK_STENCIL_OP_ZERO,
    VK_STENCIL_OP_REPLACE,
    VK_STENCIL_OP_INCREMENT_AND_CLAMP,
    VK_STENCIL_OP_DECREMENT_AND_CLAMP,
    VK_STENCIL_OP_INVERT,
    VK_STENCIL_OP_INCREMENT_AND_WRAP,
    VK_STENCIL_OP_DECREMENT_AND_WRAP
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_StencilOp, SDL_arraysize(SDLToVK_StencilOp) == SDL_GPU_STENCILOP_MAX_ENUM_VALUE);
// SDL_GPULoadOp -> VkAttachmentLoadOp.
static VkAttachmentLoadOp SDLToVK_LoadOp[] = {
    VK_ATTACHMENT_LOAD_OP_LOAD,
    VK_ATTACHMENT_LOAD_OP_CLEAR,
    VK_ATTACHMENT_LOAD_OP_DONT_CARE
};
// SDL_GPUStoreOp -> VkAttachmentStoreOp.
// NOTE(review): slots 3/4 presumably map RESOLVE / RESOLVE_AND_STORE — the
// attachment contents are discarded for a pure resolve, kept for
// resolve-and-store. Confirm against the SDL_GPUStoreOp enum order.
static VkAttachmentStoreOp SDLToVK_StoreOp[] = {
    VK_ATTACHMENT_STORE_OP_STORE,
    VK_ATTACHMENT_STORE_OP_DONT_CARE,
    VK_ATTACHMENT_STORE_OP_DONT_CARE,
    VK_ATTACHMENT_STORE_OP_STORE
};
// SDL_GPUSampleCount -> VkSampleCountFlagBits (1, 2, 4, 8 samples).
static VkSampleCountFlagBits SDLToVK_SampleCount[] = {
    VK_SAMPLE_COUNT_1_BIT,
    VK_SAMPLE_COUNT_2_BIT,
    VK_SAMPLE_COUNT_4_BIT,
    VK_SAMPLE_COUNT_8_BIT
};
// SDL vertex input rate -> VkVertexInputRate (per-vertex, per-instance).
static VkVertexInputRate SDLToVK_VertexInputRate[] = {
    VK_VERTEX_INPUT_RATE_VERTEX,
    VK_VERTEX_INPUT_RATE_INSTANCE
};
// SDL_GPUFilter -> VkFilter.
static VkFilter SDLToVK_Filter[] = {
    VK_FILTER_NEAREST,
    VK_FILTER_LINEAR
};
// SDL mipmap mode -> VkSamplerMipmapMode.
static VkSamplerMipmapMode SDLToVK_SamplerMipmapMode[] = {
    VK_SAMPLER_MIPMAP_MODE_NEAREST,
    VK_SAMPLER_MIPMAP_MODE_LINEAR
};
// SDL_GPUSamplerAddressMode -> VkSamplerAddressMode.
static VkSamplerAddressMode SDLToVK_SamplerAddressMode[] = {
    VK_SAMPLER_ADDRESS_MODE_REPEAT,
    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
};
// Structures

// Forward declarations for mutually-referencing types.
typedef struct VulkanRenderer VulkanRenderer;
typedef struct VulkanCommandPool VulkanCommandPool;
typedef struct VulkanMemoryAllocation VulkanMemoryAllocation;
typedef struct VulkanBuffer VulkanBuffer;
typedef struct VulkanBufferContainer VulkanBufferContainer;
typedef struct VulkanUniformBuffer VulkanUniformBuffer;
typedef struct VulkanTexture VulkanTexture;
typedef struct VulkanTextureContainer VulkanTextureContainer;

// Refcounted wrapper around a VkFence so its lifetime can be shared
// between command buffers and external holders.
typedef struct VulkanFenceHandle
{
    VkFence fence;
    SDL_AtomicInt referenceCount;
} VulkanFenceHandle;
// Memory Allocation

// A contiguous unused range inside a VulkanMemoryAllocation.
// allocationIndex and sortedIndex are back-references into the owning
// allocation's freeRegions array and the sub-allocator's sorted list.
typedef struct VulkanMemoryFreeRegion
{
    VulkanMemoryAllocation *allocation;
    VkDeviceSize offset;
    VkDeviceSize size;
    Uint32 allocationIndex;
    Uint32 sortedIndex;
} VulkanMemoryFreeRegion;

// A sub-allocated range currently bound to a buffer or texture.
typedef struct VulkanMemoryUsedRegion
{
    VulkanMemoryAllocation *allocation;
    VkDeviceSize offset;
    VkDeviceSize size;
    VkDeviceSize resourceOffset; // differs from offset based on alignment
    VkDeviceSize resourceSize;   // differs from size based on alignment
    VkDeviceSize alignment;
    Uint8 isBuffer; // discriminates the union below
    union
    {
        VulkanBuffer *vulkanBuffer;
        VulkanTexture *vulkanTexture;
    };
} VulkanMemoryUsedRegion;

// Manages all VkDeviceMemory allocations of one memory type index.
typedef struct VulkanMemorySubAllocator
{
    Uint32 memoryTypeIndex;
    VulkanMemoryAllocation **allocations;
    Uint32 allocationCount;
    VulkanMemoryFreeRegion **sortedFreeRegions; // free regions across all allocations, kept sorted
    Uint32 sortedFreeRegionCount;
    Uint32 sortedFreeRegionCapacity;
} VulkanMemorySubAllocator;

// One VkDeviceMemory block, carved into used and free regions.
struct VulkanMemoryAllocation
{
    VulkanMemorySubAllocator *allocator;
    VkDeviceMemory memory;
    VkDeviceSize size;
    VulkanMemoryUsedRegion **usedRegions;
    Uint32 usedRegionCount;
    Uint32 usedRegionCapacity;
    VulkanMemoryFreeRegion **freeRegions;
    Uint32 freeRegionCount;
    Uint32 freeRegionCapacity;
    Uint8 availableForAllocation; // when false, new sub-allocations are not placed here
    VkDeviceSize freeSpace;
    VkDeviceSize usedSpace;
    Uint8 *mapPointer; // presumably non-NULL while the memory is mapped — confirm at map/unmap sites
    SDL_Mutex *memoryLock;
};

// Top-level allocator: one sub-allocator per Vulkan memory type.
typedef struct VulkanMemoryAllocator
{
    VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES];
} VulkanMemoryAllocator;
// Memory structures

// High-level role of a buffer; drives usage flags and memory placement.
typedef enum VulkanBufferType
{
    VULKAN_BUFFER_TYPE_GPU,
    VULKAN_BUFFER_TYPE_UNIFORM,
    VULKAN_BUFFER_TYPE_TRANSFER
} VulkanBufferType;

// A single VkBuffer plus its backing sub-allocation and bookkeeping state.
struct VulkanBuffer
{
    VulkanBufferContainer *container;
    Uint32 containerIndex; // index of this buffer within container->buffers
    VkBuffer buffer;
    VulkanMemoryUsedRegion *usedRegion;
    // Needed for uniforms and defrag
    VulkanBufferType type;
    SDL_GPUBufferUsageFlags usage;
    VkDeviceSize size;
    SDL_AtomicInt referenceCount;
    bool transitioned; // whether the buffer has left its initial (undefined) state
    bool markedForDestroy; // so that defrag doesn't double-free
    VulkanUniformBuffer *uniformBufferForDefrag;
};

// Owns one or more VulkanBuffers so the active buffer can be cycled
// without stalling on in-flight work.
struct VulkanBufferContainer
{
    VulkanBuffer *activeBuffer;
    VulkanBuffer **buffers;
    Uint32 bufferCapacity;
    Uint32 bufferCount;
    bool dedicated; // uses its own device memory allocation rather than the sub-allocator pools — TODO confirm
    char *debugName;
};
// Renderer Structure

// Queue family indices selected at device creation time.
typedef struct QueueFamilyIndices
{
    Uint32 graphicsFamily;
    Uint32 presentFamily;
    Uint32 computeFamily;
    Uint32 transferFamily;
} QueueFamilyIndices;

// Refcounted VkSampler wrapper.
typedef struct VulkanSampler
{
    VkSampler sampler;
    SDL_AtomicInt referenceCount;
} VulkanSampler;

// A shader module plus the resource counts needed to build pipeline layouts.
typedef struct VulkanShader
{
    VkShaderModule shaderModule;
    char *entrypointName;
    SDL_GPUShaderStage stage;
    Uint32 numSamplers;
    Uint32 numStorageTextures;
    Uint32 numStorageBuffers;
    Uint32 numUniformBuffers;
    SDL_AtomicInt referenceCount;
} VulkanShader;
/* Textures are made up of individual subresources.
 * This helps us barrier the resource efficiently.
 */
// One (layer, mip level) slice of a VulkanTexture, with the image views
// needed for the ways it can be bound.
typedef struct VulkanTextureSubresource
{
    VulkanTexture *parent;
    Uint32 layer;
    Uint32 level;
    VkImageView *renderTargetViews; // One render target view per depth slice
    VkImageView computeWriteView;
    VkImageView depthStencilView;
} VulkanTextureSubresource;

// A VkImage plus its memory, views, and per-subresource state.
struct VulkanTexture
{
    VulkanTextureContainer *container;
    Uint32 containerIndex; // index of this texture within container->textures
    VulkanMemoryUsedRegion *usedRegion;
    VkImage image;
    VkImageView fullView; // used for samplers and storage reads
    VkComponentMapping swizzle;
    VkImageAspectFlags aspectFlags;
    Uint32 depth; // used for cleanup only
    // FIXME: It'd be nice if we didn't have to have this on the texture...
    SDL_GPUTextureUsageFlags usage; // used for defrag transitions only.
    Uint32 subresourceCount;
    VulkanTextureSubresource *subresources;
    bool markedForDestroy; // so that defrag doesn't double-free
    bool externallyManaged; // true for XR swapchain images
    SDL_AtomicInt referenceCount;
};

// Owns one or more VulkanTextures so the active texture can be cycled
// without stalling on in-flight work.
struct VulkanTextureContainer
{
    TextureCommonHeader header;
    VulkanTexture *activeTexture;
    Uint32 textureCapacity;
    Uint32 textureCount;
    VulkanTexture **textures;
    char *debugName;
    bool canBeCycled;
    bool externallyManaged; // true for XR swapchain images
};
// How a buffer is about to be used; determines barrier access masks and stages.
typedef enum VulkanBufferUsageMode
{
    VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
    VULKAN_BUFFER_USAGE_MODE_VERTEX_READ,
    VULKAN_BUFFER_USAGE_MODE_INDEX_READ,
    VULKAN_BUFFER_USAGE_MODE_INDIRECT,
    VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ,
    VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
    VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
} VulkanBufferUsageMode;

// How a texture is about to be used; determines image layout and barriers.
typedef enum VulkanTextureUsageMode
{
    VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
    VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
    VULKAN_TEXTURE_USAGE_MODE_SAMPLER,
    VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ,
    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
    VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
    VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
    VULKAN_TEXTURE_USAGE_MODE_PRESENT
} VulkanTextureUsageMode;

// Which shader stage a uniform buffer is bound to.
typedef enum VulkanUniformBufferStage
{
    VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
    VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
    VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE
} VulkanUniformBufferStage;

// Refcounted VkFramebuffer wrapper.
typedef struct VulkanFramebuffer
{
    VkFramebuffer framebuffer;
    SDL_AtomicInt referenceCount;
} VulkanFramebuffer;
// Per-claimed-window state: surface, swapchain, swapchain images, and
// the per-frame synchronization primitives.
typedef struct WindowData
{
    SDL_Window *window;
    VulkanRenderer *renderer;
    int refcount;
    SDL_GPUSwapchainComposition swapchainComposition;
    SDL_GPUPresentMode presentMode;
    bool needsSwapchainRecreate; // deferred recreation flags, handled on next acquire — TODO confirm
    bool needsSurfaceRecreate;
    Uint32 swapchainCreateWidth;
    Uint32 swapchainCreateHeight;
    // Window surface
    VkSurfaceKHR surface;
    // Swapchain for window surface
    VkSwapchainKHR swapchain;
    VkFormat format;
    VkColorSpaceKHR colorSpace;
    VkComponentMapping swapchainSwizzle;
    bool usingFallbackFormat; // true when the preferred composition format was unavailable
    // Swapchain images
    VulkanTextureContainer *textureContainers; // use containers so that swapchain textures can use the same API as other textures
    Uint32 imageCount;
    Uint32 width;
    Uint32 height;
    // Synchronization primitives
    VkSemaphore imageAvailableSemaphore[MAX_FRAMES_IN_FLIGHT];
    VkSemaphore *renderFinishedSemaphore; // one per swapchain image
    SDL_GPUFence *inFlightFences[MAX_FRAMES_IN_FLIGHT];
    Uint32 frameCounter;
} WindowData;

// Results of querying a surface's swapchain capabilities.
typedef struct SwapchainSupportDetails
{
    VkSurfaceCapabilitiesKHR capabilities;
    VkSurfaceFormatKHR *formats;
    Uint32 formatsLength;
    VkPresentModeKHR *presentModes;
    Uint32 presentModesLength;
} SwapchainSupportDetails;

// A pending present recorded on a command buffer.
typedef struct VulkanPresentData
{
    WindowData *windowData;
    Uint32 swapchainImageIndex;
} VulkanPresentData;

// A uniform buffer with bump-allocated write/draw offsets.
struct VulkanUniformBuffer
{
    VulkanBuffer *buffer;
    Uint32 drawOffset; // offset bound for the current draw
    Uint32 writeOffset; // next free byte for uniform data pushes
};
// Descriptor type + stage pair used when building set layouts.
typedef struct VulkanDescriptorInfo
{
    VkDescriptorType descriptorType;
    VkShaderStageFlagBits stageFlag;
} VulkanDescriptorInfo;

// Grow-only pool of descriptor sets for one layout.
typedef struct DescriptorSetPool
{
    // It's a pool... of pools!!!
    Uint32 poolCount;
    VkDescriptorPool *descriptorPools;
    // We'll just manage the descriptor sets ourselves instead of freeing the sets
    VkDescriptorSet *descriptorSets;
    Uint32 descriptorSetCount;
    Uint32 descriptorSetIndex; // next set to hand out
} DescriptorSetPool;

// A command buffer acquires a cache at command buffer acquisition time
typedef struct DescriptorSetCache
{
    // Pools are indexed by DescriptorSetLayoutID which increases monotonically
    // There's only a certain number of maximum layouts possible since we de-duplicate them.
    DescriptorSetPool *pools;
    Uint32 poolCount;
} DescriptorSetCache;

// Hash key for de-duplicating descriptor set layouts.
typedef struct DescriptorSetLayoutHashTableKey
{
    VkShaderStageFlagBits shaderStage;
    // Category 1: read resources
    Uint32 samplerCount;
    Uint32 storageBufferCount;
    Uint32 storageTextureCount;
    // Category 2: write resources
    Uint32 writeStorageBufferCount;
    Uint32 writeStorageTextureCount;
    // Category 3: uniform buffers
    Uint32 uniformBufferCount;
} DescriptorSetLayoutHashTableKey;

typedef uint32_t DescriptorSetLayoutID;

// A de-duplicated descriptor set layout plus the resource counts it encodes.
typedef struct DescriptorSetLayout
{
    DescriptorSetLayoutID ID;
    VkDescriptorSetLayout descriptorSetLayout;
    // Category 1: read resources
    Uint32 samplerCount;
    Uint32 storageBufferCount;
    Uint32 storageTextureCount;
    // Category 2: write resources
    Uint32 writeStorageBufferCount;
    Uint32 writeStorageTextureCount;
    // Category 3: uniform buffers
    Uint32 uniformBufferCount;
} DescriptorSetLayout;
// Hash key for de-duplicating graphics pipeline resource layouts.
typedef struct GraphicsPipelineResourceLayoutHashTableKey
{
    Uint32 vertexSamplerCount;
    Uint32 vertexStorageTextureCount;
    Uint32 vertexStorageBufferCount;
    Uint32 vertexUniformBufferCount;
    Uint32 fragmentSamplerCount;
    Uint32 fragmentStorageTextureCount;
    Uint32 fragmentStorageBufferCount;
    Uint32 fragmentUniformBufferCount;
} GraphicsPipelineResourceLayoutHashTableKey;

// Pipeline layout and descriptor set layouts for a graphics pipeline.
typedef struct VulkanGraphicsPipelineResourceLayout
{
    VkPipelineLayout pipelineLayout;
    /*
     * Descriptor set layout is as follows:
     * 0: vertex resources
     * 1: vertex uniform buffers
     * 2: fragment resources
     * 3: fragment uniform buffers
     */
    DescriptorSetLayout *descriptorSetLayouts[4];
    Uint32 vertexSamplerCount;
    Uint32 vertexStorageTextureCount;
    Uint32 vertexStorageBufferCount;
    Uint32 vertexUniformBufferCount;
    Uint32 fragmentSamplerCount;
    Uint32 fragmentStorageTextureCount;
    Uint32 fragmentStorageBufferCount;
    Uint32 fragmentUniformBufferCount;
} VulkanGraphicsPipelineResourceLayout;

// A compiled graphics pipeline plus the shaders and layout it references.
typedef struct VulkanGraphicsPipeline
{
    GraphicsPipelineCommonHeader header;
    VkPipeline pipeline;
    SDL_GPUPrimitiveType primitiveType;
    VulkanGraphicsPipelineResourceLayout *resourceLayout;
    VulkanShader *vertexShader;
    VulkanShader *fragmentShader;
    SDL_AtomicInt referenceCount;
} VulkanGraphicsPipeline;

// Hash key for de-duplicating compute pipeline resource layouts.
typedef struct ComputePipelineResourceLayoutHashTableKey
{
    Uint32 samplerCount;
    Uint32 readonlyStorageTextureCount;
    Uint32 readonlyStorageBufferCount;
    Uint32 readWriteStorageTextureCount;
    Uint32 readWriteStorageBufferCount;
    Uint32 uniformBufferCount;
} ComputePipelineResourceLayoutHashTableKey;

// Pipeline layout and descriptor set layouts for a compute pipeline.
typedef struct VulkanComputePipelineResourceLayout
{
    VkPipelineLayout pipelineLayout;
    /*
     * Descriptor set layout is as follows:
     * 0: samplers, then read-only textures, then read-only buffers
     * 1: write-only textures, then write-only buffers
     * 2: uniform buffers
     */
    DescriptorSetLayout *descriptorSetLayouts[3];
    Uint32 numSamplers;
    Uint32 numReadonlyStorageTextures;
    Uint32 numReadonlyStorageBuffers;
    Uint32 numReadWriteStorageTextures;
    Uint32 numReadWriteStorageBuffers;
    Uint32 numUniformBuffers;
} VulkanComputePipelineResourceLayout;

// A compiled compute pipeline plus its shader module and layout.
typedef struct VulkanComputePipeline
{
    ComputePipelineCommonHeader header;
    VkShaderModule shaderModule;
    VkPipeline pipeline;
    VulkanComputePipelineResourceLayout *resourceLayout;
    SDL_AtomicInt referenceCount;
} VulkanComputePipeline;
// Format + load/store ops for one color target, as used in render pass hashing.
typedef struct RenderPassColorTargetDescription
{
    VkFormat format;
    SDL_GPULoadOp loadOp;
    SDL_GPUStoreOp storeOp;
} RenderPassColorTargetDescription;

// Format + load/store ops (depth and stencil) for the depth-stencil target.
typedef struct RenderPassDepthStencilTargetDescription
{
    VkFormat format;
    SDL_GPULoadOp loadOp;
    SDL_GPUStoreOp storeOp;
    SDL_GPULoadOp stencilLoadOp;
    SDL_GPUStoreOp stencilStoreOp;
} RenderPassDepthStencilTargetDescription;

// Command pools are cached per-thread; the thread ID is the whole key.
typedef struct CommandPoolHashTableKey
{
    SDL_ThreadID threadID;
} CommandPoolHashTableKey;

// Hash key describing everything that makes a VkRenderPass unique.
typedef struct RenderPassHashTableKey
{
    RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numColorTargets;
    VkFormat resolveTargetFormats[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numResolveTargets;
    RenderPassDepthStencilTargetDescription depthStencilTargetDescription;
    VkSampleCountFlagBits sampleCount;
} RenderPassHashTableKey;

typedef struct VulkanRenderPassHashTableValue
{
    VkRenderPass handle;
} VulkanRenderPassHashTableValue;

// Hash key describing everything that makes a VkFramebuffer unique.
typedef struct FramebufferHashTableKey
{
    VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numColorTargets;
    VkImageView resolveAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numResolveAttachments;
    VkImageView depthStencilAttachmentView;
    Uint32 width;
    Uint32 height;
} FramebufferHashTableKey;
// Command structures

// Pool of reusable fence handles, protected by its own lock.
typedef struct VulkanFencePool
{
    SDL_Mutex *lock;
    VulkanFenceHandle **availableFences;
    Uint32 availableFenceCount;
    Uint32 availableFenceCapacity;
} VulkanFencePool;
// All per-recording state for one command buffer: pending presents,
// semaphores, bound pipelines/resources, deferred descriptor updates,
// and the lists of resources referenced (for lifetime tracking).
typedef struct VulkanCommandBuffer
{
    CommandBufferCommonHeader common;
    VulkanRenderer *renderer;
    VkCommandBuffer commandBuffer;
    VulkanCommandPool *commandPool;
    // Presents queued on this command buffer
    VulkanPresentData *presentDatas;
    Uint32 presentDataCount;
    Uint32 presentDataCapacity;
    // Semaphores to wait on / signal at submit time
    VkSemaphore *waitSemaphores;
    Uint32 waitSemaphoreCount;
    Uint32 waitSemaphoreCapacity;
    VkSemaphore *signalSemaphores;
    Uint32 signalSemaphoreCount;
    Uint32 signalSemaphoreCapacity;
    VulkanComputePipeline *currentComputePipeline;
    VulkanGraphicsPipeline *currentGraphicsPipeline;
    // Keep track of resources transitioned away from their default state to barrier them on pass end
    VulkanTextureSubresource *colorAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS];
    Uint32 colorAttachmentSubresourceCount;
    VulkanTextureSubresource *resolveAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS];
    Uint32 resolveAttachmentSubresourceCount;
    VulkanTextureSubresource *depthStencilAttachmentSubresource; // may be NULL
    // Dynamic state
    VkViewport currentViewport;
    VkRect2D currentScissor;
    float blendConstants[4];
    Uint8 stencilRef;
    // Resource bind state
    DescriptorSetCache *descriptorSetCache; // acquired when command buffer is acquired
    // Dirty flags: set when a bind changes, consumed before the next draw/dispatch
    bool needNewVertexResourceDescriptorSet;
    bool needNewVertexUniformDescriptorSet;
    bool needNewVertexUniformOffsets;
    bool needNewFragmentResourceDescriptorSet;
    bool needNewFragmentUniformDescriptorSet;
    bool needNewFragmentUniformOffsets;
    bool needNewComputeReadOnlyDescriptorSet;
    bool needNewComputeReadWriteDescriptorSet;
    bool needNewComputeUniformDescriptorSet;
    bool needNewComputeUniformOffsets;
    VkDescriptorSet vertexResourceDescriptorSet;
    VkDescriptorSet vertexUniformDescriptorSet;
    VkDescriptorSet fragmentResourceDescriptorSet;
    VkDescriptorSet fragmentUniformDescriptorSet;
    VkDescriptorSet computeReadOnlyDescriptorSet;
    VkDescriptorSet computeReadWriteDescriptorSet;
    VkDescriptorSet computeUniformDescriptorSet;
    VkBuffer vertexBuffers[MAX_VERTEX_BUFFERS];
    VkDeviceSize vertexBufferOffsets[MAX_VERTEX_BUFFERS];
    Uint32 vertexBufferCount;
    bool needVertexBufferBind;
    // Current shader resource bindings, per stage
    VkImageView vertexSamplerTextureViewBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VkSampler vertexSamplerBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VkImageView vertexStorageTextureViewBindings[MAX_STORAGE_TEXTURES_PER_STAGE];
    VkBuffer vertexStorageBufferBindings[MAX_STORAGE_BUFFERS_PER_STAGE];
    VkImageView fragmentSamplerTextureViewBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VkSampler fragmentSamplerBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VkImageView fragmentStorageTextureViewBindings[MAX_STORAGE_TEXTURES_PER_STAGE];
    VkBuffer fragmentStorageBufferBindings[MAX_STORAGE_BUFFERS_PER_STAGE];
    VkImageView computeSamplerTextureViewBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VkSampler computeSamplerBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VkImageView readOnlyComputeStorageTextureViewBindings[MAX_STORAGE_TEXTURES_PER_STAGE];
    VkBuffer readOnlyComputeStorageBufferBindings[MAX_STORAGE_BUFFERS_PER_STAGE];
    // Track these separately because barriers can happen mid compute pass
    VulkanTexture *readOnlyComputeStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *readOnlyComputeStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];
    VkImageView readWriteComputeStorageTextureViewBindings[MAX_COMPUTE_WRITE_TEXTURES];
    VkBuffer readWriteComputeStorageBufferBindings[MAX_COMPUTE_WRITE_BUFFERS];
    // Track these separately because they are barriered when the compute pass begins
    VulkanTextureSubresource *readWriteComputeStorageTextureSubresources[MAX_COMPUTE_WRITE_TEXTURES];
    Uint32 readWriteComputeStorageTextureSubresourceCount;
    VulkanBuffer *readWriteComputeStorageBuffers[MAX_COMPUTE_WRITE_BUFFERS];
    // Uniform buffers
    VulkanUniformBuffer *vertexUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    VulkanUniformBuffer *fragmentUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    VulkanUniformBuffer *computeUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    // Track used resources
    VulkanBuffer **usedBuffers;
    Sint32 usedBufferCount;
    Sint32 usedBufferCapacity;
    VulkanTexture **usedTextures;
    Sint32 usedTextureCount;
    Sint32 usedTextureCapacity;
    VulkanSampler **usedSamplers;
    Sint32 usedSamplerCount;
    Sint32 usedSamplerCapacity;
    VulkanGraphicsPipeline **usedGraphicsPipelines;
    Sint32 usedGraphicsPipelineCount;
    Sint32 usedGraphicsPipelineCapacity;
    VulkanComputePipeline **usedComputePipelines;
    Sint32 usedComputePipelineCount;
    Sint32 usedComputePipelineCapacity;
    VulkanFramebuffer **usedFramebuffers;
    Sint32 usedFramebufferCount;
    Sint32 usedFramebufferCapacity;
    VulkanUniformBuffer **usedUniformBuffers;
    Sint32 usedUniformBufferCount;
    Sint32 usedUniformBufferCapacity;
    VulkanFenceHandle *inFlightFence;
    bool autoReleaseFence;
    bool swapchainRequested;
    bool isDefrag; // Whether this CB was created for defragging
} VulkanCommandBuffer;
// A VkCommandPool plus a recycling list of command buffers allocated from it.
// NOTE(review): presumably one pool per thread (see threadID and the
// renderer's commandPoolHashTable) — confirm at the lookup site.
struct VulkanCommandPool
{
    SDL_ThreadID threadID;     // thread this pool belongs to
    VkCommandPool commandPool;

    // Command buffers that are not currently acquired or in flight
    VulkanCommandBuffer **inactiveCommandBuffers;
    Uint32 inactiveCommandBufferCapacity;
    Uint32 inactiveCommandBufferCount;
};
  955. // Feature Checks
// Describes the Vulkan API version, core feature structs, and extra extension
// lists requested when creating the instance/device.
typedef struct VulkanFeatures
{
    Uint32 desiredApiVersion;

    // Core feature chains, one per Vulkan feature-struct generation
    VkPhysicalDeviceFeatures desiredVulkan10DeviceFeatures;
    VkPhysicalDeviceVulkan11Features desiredVulkan11DeviceFeatures;
    VkPhysicalDeviceVulkan12Features desiredVulkan12DeviceFeatures;
    VkPhysicalDeviceVulkan13Features desiredVulkan13DeviceFeatures;

    bool usesCustomVulkanOptions; // true when the app overrode the defaults

    // Extra extensions to enable beyond what the backend itself requires
    Uint32 additionalDeviceExtensionCount;
    const char **additionalDeviceExtensionNames;
    Uint32 additionalInstanceExtensionCount;
    const char **additionalInstanceExtensionNames;
} VulkanFeatures;
  969. // Context
// Top-level backend state: one VulkanRenderer per SDL_GPUDevice.
struct VulkanRenderer
{
    VkInstance instance;
    VkPhysicalDevice physicalDevice;
    VkPhysicalDeviceProperties2KHR physicalDeviceProperties;
    VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties;
    VkDevice logicalDevice;

    // One-shot flags so each warning is only logged once
    Uint8 integratedMemoryNotification;
    Uint8 outOfDeviceLocalMemoryWarning;
    Uint8 outofBARMemoryWarning;
    Uint8 fillModeOnlyWarning;

    // OpenXR
    Uint32 minimumVkVersion;
#ifdef HAVE_GPU_OPENXR
    XrInstance xrInstance; // a non-null instance also states this vk device was created by OpenXR
    XrSystemId xrSystemId;
    XrInstancePfns *xr;
#endif

    bool debugMode;
    bool preferLowPower;
    bool requireHardwareAcceleration;
    SDL_PropertiesID props;
    Uint32 allowedFramesInFlight;

    // Optional capabilities detected during init
    VulkanExtensions supports;
    bool supportsDebugUtils;
    bool supportsColorspace;
    bool supportsPhysicalDeviceProperties2;
    bool supportsFillModeNonSolid;
    bool supportsMultiDrawIndirect;

    VulkanMemoryAllocator *memoryAllocator;
    VkPhysicalDeviceMemoryProperties memoryProperties;
    bool checkEmptyAllocations; // set when an allocation drops to zero used regions

    WindowData **claimedWindows;
    Uint32 claimedWindowCount;
    Uint32 claimedWindowCapacity;

    Uint32 queueFamilyIndex;
    VkQueue unifiedQueue; // single queue used for all submissions

    VulkanCommandBuffer **submittedCommandBuffers;
    Uint32 submittedCommandBufferCount;
    Uint32 submittedCommandBufferCapacity;

    VulkanFencePool fencePool;

    // Lookup caches; each has a matching *FetchLock mutex below
    SDL_HashTable *commandPoolHashTable;
    SDL_HashTable *renderPassHashTable;
    SDL_HashTable *framebufferHashTable;
    SDL_HashTable *graphicsPipelineResourceLayoutHashTable;
    SDL_HashTable *computePipelineResourceLayoutHashTable;
    SDL_HashTable *descriptorSetLayoutHashTable;

    // Recycled uniform buffers and descriptor-set caches
    VulkanUniformBuffer **uniformBufferPool;
    Uint32 uniformBufferPoolCount;
    Uint32 uniformBufferPoolCapacity;

    DescriptorSetCache **descriptorSetCachePool;
    Uint32 descriptorSetCachePoolCount;
    Uint32 descriptorSetCachePoolCapacity;

    SDL_AtomicInt layoutResourceID; // monotonically increasing layout ID

    Uint32 minUBOAlignment;

    // Deferred resource destruction
    VulkanTexture **texturesToDestroy;
    Uint32 texturesToDestroyCount;
    Uint32 texturesToDestroyCapacity;

    VulkanBuffer **buffersToDestroy;
    Uint32 buffersToDestroyCount;
    Uint32 buffersToDestroyCapacity;

    VulkanSampler **samplersToDestroy;
    Uint32 samplersToDestroyCount;
    Uint32 samplersToDestroyCapacity;

    VulkanGraphicsPipeline **graphicsPipelinesToDestroy;
    Uint32 graphicsPipelinesToDestroyCount;
    Uint32 graphicsPipelinesToDestroyCapacity;

    VulkanComputePipeline **computePipelinesToDestroy;
    Uint32 computePipelinesToDestroyCount;
    Uint32 computePipelinesToDestroyCapacity;

    VulkanShader **shadersToDestroy;
    Uint32 shadersToDestroyCount;
    Uint32 shadersToDestroyCapacity;

    VulkanFramebuffer **framebuffersToDestroy;
    Uint32 framebuffersToDestroyCount;
    Uint32 framebuffersToDestroyCapacity;

    // Locks protecting the corresponding shared state above
    SDL_Mutex *allocatorLock;
    SDL_Mutex *disposeLock;
    SDL_Mutex *submitLock;
    SDL_Mutex *acquireCommandBufferLock;
    SDL_Mutex *acquireUniformBufferLock;
    SDL_Mutex *renderPassFetchLock;
    SDL_Mutex *framebufferFetchLock;
    SDL_Mutex *graphicsPipelineLayoutFetchLock;
    SDL_Mutex *computePipelineLayoutFetchLock;
    SDL_Mutex *descriptorSetLayoutFetchLock;
    SDL_Mutex *windowLock;

    Uint8 defragInProgress;

    VulkanMemoryAllocation **allocationsToDefrag;
    Uint32 allocationsToDefragCount;
    Uint32 allocationsToDefragCapacity;

    // X-macro trick: declares one PFN_* member per entry point listed in
    // SDL_gpu_vulkan_vkfuncs.h.
#define VULKAN_INSTANCE_FUNCTION(func) \
    PFN_##func func;
#define VULKAN_DEVICE_FUNCTION(func) \
    PFN_##func func;
#include "SDL_gpu_vulkan_vkfuncs.h"
};
  1068. // Forward declarations
  1069. static bool VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
  1070. static bool VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
  1071. static void VULKAN_ReleaseWindow(SDL_GPURenderer *driverData, SDL_Window *window);
  1072. static bool VULKAN_Wait(SDL_GPURenderer *driverData);
  1073. static bool VULKAN_WaitForFences(SDL_GPURenderer *driverData, bool waitAll, SDL_GPUFence *const *fences, Uint32 numFences);
  1074. static bool VULKAN_Submit(SDL_GPUCommandBuffer *commandBuffer);
  1075. static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(SDL_GPURenderer *driverData);
  1076. // Error Handling
  1077. static inline const char *VkErrorMessages(VkResult code)
  1078. {
  1079. #define ERR_TO_STR(e) \
  1080. case e: \
  1081. return #e;
  1082. switch (code) {
  1083. ERR_TO_STR(VK_ERROR_OUT_OF_HOST_MEMORY)
  1084. ERR_TO_STR(VK_ERROR_OUT_OF_DEVICE_MEMORY)
  1085. ERR_TO_STR(VK_ERROR_FRAGMENTED_POOL)
  1086. ERR_TO_STR(VK_ERROR_OUT_OF_POOL_MEMORY)
  1087. ERR_TO_STR(VK_ERROR_INITIALIZATION_FAILED)
  1088. ERR_TO_STR(VK_ERROR_LAYER_NOT_PRESENT)
  1089. ERR_TO_STR(VK_ERROR_EXTENSION_NOT_PRESENT)
  1090. ERR_TO_STR(VK_ERROR_FEATURE_NOT_PRESENT)
  1091. ERR_TO_STR(VK_ERROR_TOO_MANY_OBJECTS)
  1092. ERR_TO_STR(VK_ERROR_DEVICE_LOST)
  1093. ERR_TO_STR(VK_ERROR_INCOMPATIBLE_DRIVER)
  1094. ERR_TO_STR(VK_ERROR_OUT_OF_DATE_KHR)
  1095. ERR_TO_STR(VK_ERROR_SURFACE_LOST_KHR)
  1096. ERR_TO_STR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT)
  1097. ERR_TO_STR(VK_SUBOPTIMAL_KHR)
  1098. ERR_TO_STR(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR)
  1099. ERR_TO_STR(VK_ERROR_INVALID_SHADER_NV)
  1100. default:
  1101. return "Unhandled VkResult!";
  1102. }
  1103. #undef ERR_TO_STR
  1104. }
// Log (only in debug mode) and always set the SDL error string.
// NOTE: all of these macros assume a `renderer` variable with a debugMode
// field is in scope at the use site.
#define SET_ERROR(fmt, msg) \
    do { \
        if (renderer->debugMode) { \
            SDL_LogError(SDL_LOG_CATEGORY_GPU, fmt, msg); \
        } \
        SDL_SetError((fmt), (msg)); \
    } while (0)

// Convenience form for a plain string message.
#define SET_STRING_ERROR(msg) SET_ERROR("%s", msg)

// Report the error, then return `ret` from the enclosing function.
#define SET_ERROR_AND_RETURN(fmt, msg, ret) \
    do { \
        SET_ERROR(fmt, msg); \
        return ret; \
    } while (0)

#define SET_STRING_ERROR_AND_RETURN(msg, ret) SET_ERROR_AND_RETURN("%s", msg, ret)

// If `res` is not VK_SUCCESS, report "<fn> <VkResult name>" and return `ret`.
#define CHECK_VULKAN_ERROR_AND_RETURN(res, fn, ret) \
    do { \
        if ((res) != VK_SUCCESS) { \
            if (renderer->debugMode) { \
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s %s", #fn, VkErrorMessages(res)); \
            } \
            SDL_SetError("%s %s", #fn, VkErrorMessages(res)); \
            return (ret); \
        } \
    } while (0)
  1129. // Utility
  1130. static inline VkPolygonMode SDLToVK_PolygonMode(
  1131. VulkanRenderer *renderer,
  1132. SDL_GPUFillMode mode)
  1133. {
  1134. if (mode == SDL_GPU_FILLMODE_FILL) {
  1135. return VK_POLYGON_MODE_FILL; // always available!
  1136. }
  1137. if (renderer->supportsFillModeNonSolid && mode == SDL_GPU_FILLMODE_LINE) {
  1138. return VK_POLYGON_MODE_LINE;
  1139. }
  1140. if (!renderer->fillModeOnlyWarning) {
  1141. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Unsupported fill mode requested, using FILL!");
  1142. renderer->fillModeOnlyWarning = 1;
  1143. }
  1144. return VK_POLYGON_MODE_FILL;
  1145. }
  1146. // Memory Management
  1147. // Vulkan: Memory Allocation
  1148. static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
  1149. VkDeviceSize n,
  1150. VkDeviceSize align)
  1151. {
  1152. return align * ((n + align - 1) / align);
  1153. }
  1154. static inline Uint32 VULKAN_INTERNAL_NextHighestAlignment32(
  1155. Uint32 n,
  1156. Uint32 align)
  1157. {
  1158. return align * ((n + align - 1) / align);
  1159. }
/* Marks an allocation so the sub-allocator will no longer place new resources
 * in it, and removes every one of its free regions from the allocator-wide
 * sorted free-region list (the regions themselves stay on the allocation).
 * NOTE(review): assumes each free region currently has a valid sortedIndex in
 * the sorted list and that the caller holds the allocator lock — confirm at
 * call sites.
 */
static void VULKAN_INTERNAL_MakeMemoryUnavailable(
    VulkanMemoryAllocation *allocation)
{
    Uint32 i, j;
    VulkanMemoryFreeRegion *freeRegion;

    allocation->availableForAllocation = 0;

    for (i = 0; i < allocation->freeRegionCount; i += 1) {
        freeRegion = allocation->freeRegions[i];

        // close the gap in the sorted list
        if (allocation->allocator->sortedFreeRegionCount > 1) {
            // shift everything after this region down one slot, fixing indices
            for (j = freeRegion->sortedIndex; j < allocation->allocator->sortedFreeRegionCount - 1; j += 1) {
                allocation->allocator->sortedFreeRegions[j] =
                    allocation->allocator->sortedFreeRegions[j + 1];

                allocation->allocator->sortedFreeRegions[j]->sortedIndex = j;
            }
        }

        allocation->allocator->sortedFreeRegionCount -= 1;
    }
}
/* Scans every sub-allocator and queues fragmented allocations (more than one
 * free region) for defragmentation. Marked allocations are made unavailable
 * so no new resources land in them while the defrag copy is pending.
 */
static void VULKAN_INTERNAL_MarkAllocationsForDefrag(
    VulkanRenderer *renderer)
{
    Uint32 memoryType, allocationIndex;
    VulkanMemorySubAllocator *currentAllocator;

    for (memoryType = 0; memoryType < VK_MAX_MEMORY_TYPES; memoryType += 1) {
        currentAllocator = &renderer->memoryAllocator->subAllocators[memoryType];

        for (allocationIndex = 0; allocationIndex < currentAllocator->allocationCount; allocationIndex += 1) {
            // only consider allocations not already marked (availableForAllocation == 1)
            if (currentAllocator->allocations[allocationIndex]->availableForAllocation == 1) {
                // more than one free region means the allocation is fragmented
                if (currentAllocator->allocations[allocationIndex]->freeRegionCount > 1) {
                    EXPAND_ARRAY_IF_NEEDED(
                        renderer->allocationsToDefrag,
                        VulkanMemoryAllocation *,
                        renderer->allocationsToDefragCount + 1,
                        renderer->allocationsToDefragCapacity,
                        renderer->allocationsToDefragCapacity * 2);

                    renderer->allocationsToDefrag[renderer->allocationsToDefragCount] =
                        currentAllocator->allocations[allocationIndex];
                    renderer->allocationsToDefragCount += 1;

                    VULKAN_INTERNAL_MakeMemoryUnavailable(
                        currentAllocator->allocations[allocationIndex]);
                }
            }
        }
    }
}
/* Unlinks a free region from both the allocator-wide sorted list (only if its
 * allocation is still available for allocation) and the allocation's own
 * free-region array, updates the allocation's free-space accounting, and
 * frees the region struct. The allocation array uses swap-remove, so another
 * region's allocationIndex may change.
 */
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryFreeRegion *freeRegion)
{
    Uint32 i;

    SDL_LockMutex(renderer->allocatorLock);

    // Unavailable allocations were already pulled out of the sorted list
    if (freeRegion->allocation->availableForAllocation) {
        // close the gap in the sorted list
        if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1) {
            for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1) {
                freeRegion->allocation->allocator->sortedFreeRegions[i] =
                    freeRegion->allocation->allocator->sortedFreeRegions[i + 1];

                freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }

        freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
    }

    // close the gap in the buffer list (swap the last region into this slot)
    if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1) {
        freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
            freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];

        freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
            freeRegion->allocationIndex;
    }

    freeRegion->allocation->freeRegionCount -= 1;

    freeRegion->allocation->freeSpace -= freeRegion->size;

    SDL_free(freeRegion);

    SDL_UnlockMutex(renderer->allocatorLock);
}
/* Registers [offset, offset + size) as free space in `allocation`, coalescing
 * with an adjacent free region when one exists, and keeps the allocator-wide
 * free-region list (sorted by descending size) in sync.
 * NOTE(review): the merge path recurses with the allocator lock held — this
 * relies on SDL mutexes being recursive (per SDL_LockMutex documentation).
 */
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryAllocation *allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VulkanMemoryFreeRegion *newFreeRegion;
    VkDeviceSize newOffset, newSize;
    Sint32 insertionIndex = 0;

    SDL_LockMutex(renderer->allocatorLock);

    // look for an adjacent region to merge
    for (Sint32 i = allocation->freeRegionCount - 1; i >= 0; i -= 1) {
        // check left side (existing region ends exactly where the new one starts)
        if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset) {
            newOffset = allocation->freeRegions[i]->offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }

        // check right side (existing region starts exactly where the new one ends)
        if (allocation->freeRegions[i]->offset == offset + size) {
            newOffset = offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }
    }

    // region is not contiguous with another free region, make a new one
    allocation->freeRegionCount += 1;
    if (allocation->freeRegionCount > allocation->freeRegionCapacity) {
        allocation->freeRegionCapacity *= 2;
        allocation->freeRegions = SDL_realloc(
            allocation->freeRegions,
            sizeof(VulkanMemoryFreeRegion *) * allocation->freeRegionCapacity);
    }

    newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion));
    newFreeRegion->offset = offset;
    newFreeRegion->size = size;
    newFreeRegion->allocation = allocation;

    allocation->freeSpace += size;

    allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion;
    newFreeRegion->allocationIndex = allocation->freeRegionCount - 1;

    // Only allocatable allocations participate in the sorted list
    if (allocation->availableForAllocation) {
        // find the slot that keeps the list sorted by descending size
        for (Uint32 i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1) {
            if (allocation->allocator->sortedFreeRegions[i]->size < size) {
                // this is where the new region should go
                break;
            }

            insertionIndex += 1;
        }

        if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity) {
            allocation->allocator->sortedFreeRegionCapacity *= 2;
            allocation->allocator->sortedFreeRegions = SDL_realloc(
                allocation->allocator->sortedFreeRegions,
                sizeof(VulkanMemoryFreeRegion *) * allocation->allocator->sortedFreeRegionCapacity);
        }

        // perform insertion sort
        if (allocation->allocator->sortedFreeRegionCount > 0 && (Uint32)insertionIndex != allocation->allocator->sortedFreeRegionCount) {
            for (Sint32 i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1) {
                allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1];
                allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }

        allocation->allocator->sortedFreeRegionCount += 1;
        allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion;
        newFreeRegion->sortedIndex = insertionIndex;
    }

    SDL_UnlockMutex(renderer->allocatorLock);
}
  1307. static VulkanMemoryUsedRegion *VULKAN_INTERNAL_NewMemoryUsedRegion(
  1308. VulkanRenderer *renderer,
  1309. VulkanMemoryAllocation *allocation,
  1310. VkDeviceSize offset,
  1311. VkDeviceSize size,
  1312. VkDeviceSize resourceOffset,
  1313. VkDeviceSize resourceSize,
  1314. VkDeviceSize alignment)
  1315. {
  1316. VulkanMemoryUsedRegion *memoryUsedRegion;
  1317. SDL_LockMutex(renderer->allocatorLock);
  1318. if (allocation->usedRegionCount == allocation->usedRegionCapacity) {
  1319. allocation->usedRegionCapacity *= 2;
  1320. allocation->usedRegions = SDL_realloc(
  1321. allocation->usedRegions,
  1322. allocation->usedRegionCapacity * sizeof(VulkanMemoryUsedRegion *));
  1323. }
  1324. memoryUsedRegion = SDL_malloc(sizeof(VulkanMemoryUsedRegion));
  1325. memoryUsedRegion->allocation = allocation;
  1326. memoryUsedRegion->offset = offset;
  1327. memoryUsedRegion->size = size;
  1328. memoryUsedRegion->resourceOffset = resourceOffset;
  1329. memoryUsedRegion->resourceSize = resourceSize;
  1330. memoryUsedRegion->alignment = alignment;
  1331. allocation->usedSpace += size;
  1332. allocation->usedRegions[allocation->usedRegionCount] = memoryUsedRegion;
  1333. allocation->usedRegionCount += 1;
  1334. SDL_UnlockMutex(renderer->allocatorLock);
  1335. return memoryUsedRegion;
  1336. }
/* Removes a used region from its allocation (swap-remove), converts its span
 * back into a free region, flags the renderer to check for now-empty
 * allocations, and frees the region struct.
 */
static void VULKAN_INTERNAL_RemoveMemoryUsedRegion(
    VulkanRenderer *renderer,
    VulkanMemoryUsedRegion *usedRegion)
{
    Uint32 i;

    SDL_LockMutex(renderer->allocatorLock);

    // find this region in the allocation's used list
    for (i = 0; i < usedRegion->allocation->usedRegionCount; i += 1) {
        if (usedRegion->allocation->usedRegions[i] == usedRegion) {
            // plug the hole
            if (i != usedRegion->allocation->usedRegionCount - 1) {
                usedRegion->allocation->usedRegions[i] = usedRegion->allocation->usedRegions[usedRegion->allocation->usedRegionCount - 1];
            }

            break;
        }
    }

    usedRegion->allocation->usedSpace -= usedRegion->size;
    usedRegion->allocation->usedRegionCount -= 1;

    // give the span back to the free list (may coalesce with neighbors)
    VULKAN_INTERNAL_NewMemoryFreeRegion(
        renderer,
        usedRegion->allocation,
        usedRegion->offset,
        usedRegion->size);

    // an allocation with no used regions can be freed back to the driver later
    if (usedRegion->allocation->usedRegionCount == 0) {
        renderer->checkEmptyAllocations = true;
    }

    SDL_free(usedRegion);

    SDL_UnlockMutex(renderer->allocatorLock);
}
  1365. static bool VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1366. Uint32 memoryTypeIndex,
  1367. const Uint32 *memoryTypeIndexArray,
  1368. Uint32 count)
  1369. {
  1370. Uint32 i = 0;
  1371. for (i = 0; i < count; i += 1) {
  1372. if (memoryTypeIndexArray[i] == memoryTypeIndex) {
  1373. return false;
  1374. }
  1375. }
  1376. return true;
  1377. }
  1378. /* Returns an array of memory type indices in order of preference.
  1379. * Memory types are requested with the following three guidelines:
  1380. *
  1381. * Required: Absolutely necessary
  1382. * Preferred: Nice to have, but not necessary
  1383. * Tolerable: Can be allowed if there are no other options
  1384. *
  1385. * We return memory types in this order:
  1386. * 1. Required and preferred. This is the best category.
  1387. * 2. Required only.
  1388. * 3. Required, preferred, and tolerable.
  1389. * 4. Required and tolerable. This is the worst category.
  1390. */
  1391. static Uint32 *VULKAN_INTERNAL_FindBestMemoryTypes(
  1392. VulkanRenderer *renderer,
  1393. Uint32 typeFilter,
  1394. VkMemoryPropertyFlags requiredProperties,
  1395. VkMemoryPropertyFlags preferredProperties,
  1396. VkMemoryPropertyFlags tolerableProperties,
  1397. Uint32 *pCount)
  1398. {
  1399. Uint32 i;
  1400. Uint32 index = 0;
  1401. Uint32 *result = SDL_malloc(sizeof(Uint32) * renderer->memoryProperties.memoryTypeCount);
  1402. // required + preferred + !tolerable
  1403. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1404. if ((typeFilter & (1 << i)) &&
  1405. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1406. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties &&
  1407. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) {
  1408. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1409. i,
  1410. result,
  1411. index)) {
  1412. result[index] = i;
  1413. index += 1;
  1414. }
  1415. }
  1416. }
  1417. // required + !preferred + !tolerable
  1418. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1419. if ((typeFilter & (1 << i)) &&
  1420. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1421. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 &&
  1422. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) {
  1423. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1424. i,
  1425. result,
  1426. index)) {
  1427. result[index] = i;
  1428. index += 1;
  1429. }
  1430. }
  1431. }
  1432. // required + preferred + tolerable
  1433. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1434. if ((typeFilter & (1 << i)) &&
  1435. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1436. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties &&
  1437. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) {
  1438. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1439. i,
  1440. result,
  1441. index)) {
  1442. result[index] = i;
  1443. index += 1;
  1444. }
  1445. }
  1446. }
  1447. // required + !preferred + tolerable
  1448. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1449. if ((typeFilter & (1 << i)) &&
  1450. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1451. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 &&
  1452. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) {
  1453. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1454. i,
  1455. result,
  1456. index)) {
  1457. result[index] = i;
  1458. index += 1;
  1459. }
  1460. }
  1461. }
  1462. *pCount = index;
  1463. return result;
  1464. }
  1465. static Uint32 *VULKAN_INTERNAL_FindBestBufferMemoryTypes(
  1466. VulkanRenderer *renderer,
  1467. VkBuffer buffer,
  1468. VkMemoryPropertyFlags requiredMemoryProperties,
  1469. VkMemoryPropertyFlags preferredMemoryProperties,
  1470. VkMemoryPropertyFlags tolerableMemoryProperties,
  1471. VkMemoryRequirements *pMemoryRequirements,
  1472. Uint32 *pCount)
  1473. {
  1474. renderer->vkGetBufferMemoryRequirements(
  1475. renderer->logicalDevice,
  1476. buffer,
  1477. pMemoryRequirements);
  1478. return VULKAN_INTERNAL_FindBestMemoryTypes(
  1479. renderer,
  1480. pMemoryRequirements->memoryTypeBits,
  1481. requiredMemoryProperties,
  1482. preferredMemoryProperties,
  1483. tolerableMemoryProperties,
  1484. pCount);
  1485. }
  1486. static Uint32 *VULKAN_INTERNAL_FindBestImageMemoryTypes(
  1487. VulkanRenderer *renderer,
  1488. VkImage image,
  1489. VkMemoryPropertyFlags preferredMemoryPropertyFlags,
  1490. VkMemoryRequirements *pMemoryRequirements,
  1491. Uint32 *pCount)
  1492. {
  1493. renderer->vkGetImageMemoryRequirements(
  1494. renderer->logicalDevice,
  1495. image,
  1496. pMemoryRequirements);
  1497. return VULKAN_INTERNAL_FindBestMemoryTypes(
  1498. renderer,
  1499. pMemoryRequirements->memoryTypeBits,
  1500. 0,
  1501. preferredMemoryPropertyFlags,
  1502. 0,
  1503. pCount);
  1504. }
  1505. static void VULKAN_INTERNAL_DeallocateMemory(
  1506. VulkanRenderer *renderer,
  1507. VulkanMemorySubAllocator *allocator,
  1508. Uint32 allocationIndex)
  1509. {
  1510. Uint32 i;
  1511. VulkanMemoryAllocation *allocation = allocator->allocations[allocationIndex];
  1512. SDL_LockMutex(renderer->allocatorLock);
  1513. // If this allocation was marked for defrag, cancel that
  1514. for (i = 0; i < renderer->allocationsToDefragCount; i += 1) {
  1515. if (allocation == renderer->allocationsToDefrag[i]) {
  1516. renderer->allocationsToDefrag[i] = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
  1517. renderer->allocationsToDefragCount -= 1;
  1518. break;
  1519. }
  1520. }
  1521. for (i = 0; i < allocation->freeRegionCount; i += 1) {
  1522. VULKAN_INTERNAL_RemoveMemoryFreeRegion(
  1523. renderer,
  1524. allocation->freeRegions[i]);
  1525. }
  1526. SDL_free(allocation->freeRegions);
  1527. /* no need to iterate used regions because deallocate
  1528. * only happens when there are 0 used regions
  1529. */
  1530. SDL_free(allocation->usedRegions);
  1531. renderer->vkFreeMemory(
  1532. renderer->logicalDevice,
  1533. allocation->memory,
  1534. NULL);
  1535. SDL_DestroyMutex(allocation->memoryLock);
  1536. SDL_free(allocation);
  1537. if (allocationIndex != allocator->allocationCount - 1) {
  1538. allocator->allocations[allocationIndex] = allocator->allocations[allocator->allocationCount - 1];
  1539. }
  1540. allocator->allocationCount -= 1;
  1541. SDL_UnlockMutex(renderer->allocatorLock);
  1542. }
  1543. static Uint8 VULKAN_INTERNAL_AllocateMemory(
  1544. VulkanRenderer *renderer,
  1545. Uint32 memoryTypeIndex,
  1546. VkDeviceSize allocationSize,
  1547. Uint8 isHostVisible,
  1548. VulkanMemoryAllocation **pMemoryAllocation)
  1549. {
  1550. VulkanMemoryAllocation *allocation;
  1551. VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
  1552. VkMemoryAllocateInfo allocInfo;
  1553. VkResult result;
  1554. allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  1555. allocInfo.pNext = NULL;
  1556. allocInfo.memoryTypeIndex = memoryTypeIndex;
  1557. allocInfo.allocationSize = allocationSize;
  1558. allocation = SDL_malloc(sizeof(VulkanMemoryAllocation));
  1559. allocation->size = allocationSize;
  1560. allocation->freeSpace = 0; // added by FreeRegions
  1561. allocation->usedSpace = 0; // added by UsedRegions
  1562. allocation->memoryLock = SDL_CreateMutex();
  1563. allocator->allocationCount += 1;
  1564. allocator->allocations = SDL_realloc(
  1565. allocator->allocations,
  1566. sizeof(VulkanMemoryAllocation *) * allocator->allocationCount);
  1567. allocator->allocations[allocator->allocationCount - 1] = allocation;
  1568. allocInfo.pNext = NULL;
  1569. allocation->availableForAllocation = 1;
  1570. allocation->usedRegions = SDL_malloc(sizeof(VulkanMemoryUsedRegion *));
  1571. allocation->usedRegionCount = 0;
  1572. allocation->usedRegionCapacity = 1;
  1573. allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion *));
  1574. allocation->freeRegionCount = 0;
  1575. allocation->freeRegionCapacity = 1;
  1576. allocation->allocator = allocator;
  1577. result = renderer->vkAllocateMemory(
  1578. renderer->logicalDevice,
  1579. &allocInfo,
  1580. NULL,
  1581. &allocation->memory);
  1582. if (result != VK_SUCCESS) {
  1583. // Uh oh, we couldn't allocate, time to clean up
  1584. SDL_free(allocation->freeRegions);
  1585. allocator->allocationCount -= 1;
  1586. allocator->allocations = SDL_realloc(
  1587. allocator->allocations,
  1588. sizeof(VulkanMemoryAllocation *) * allocator->allocationCount);
  1589. SDL_free(allocation);
  1590. return 0;
  1591. }
  1592. // Persistent mapping for host-visible memory
  1593. if (isHostVisible) {
  1594. result = renderer->vkMapMemory(
  1595. renderer->logicalDevice,
  1596. allocation->memory,
  1597. 0,
  1598. VK_WHOLE_SIZE,
  1599. 0,
  1600. (void **)&allocation->mapPointer);
  1601. CHECK_VULKAN_ERROR_AND_RETURN(result, vkMapMemory, 0);
  1602. } else {
  1603. allocation->mapPointer = NULL;
  1604. }
  1605. VULKAN_INTERNAL_NewMemoryFreeRegion(
  1606. renderer,
  1607. allocation,
  1608. 0,
  1609. allocation->size);
  1610. *pMemoryAllocation = allocation;
  1611. return 1;
  1612. }
  1613. static Uint8 VULKAN_INTERNAL_BindBufferMemory(
  1614. VulkanRenderer *renderer,
  1615. VulkanMemoryUsedRegion *usedRegion,
  1616. VkDeviceSize alignedOffset,
  1617. VkBuffer buffer)
  1618. {
  1619. VkResult vulkanResult;
  1620. SDL_LockMutex(usedRegion->allocation->memoryLock);
  1621. vulkanResult = renderer->vkBindBufferMemory(
  1622. renderer->logicalDevice,
  1623. buffer,
  1624. usedRegion->allocation->memory,
  1625. alignedOffset);
  1626. SDL_UnlockMutex(usedRegion->allocation->memoryLock);
  1627. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkBindBufferMemory, 0);
  1628. return 1;
  1629. }
  1630. static Uint8 VULKAN_INTERNAL_BindImageMemory(
  1631. VulkanRenderer *renderer,
  1632. VulkanMemoryUsedRegion *usedRegion,
  1633. VkDeviceSize alignedOffset,
  1634. VkImage image)
  1635. {
  1636. VkResult vulkanResult;
  1637. SDL_LockMutex(usedRegion->allocation->memoryLock);
  1638. vulkanResult = renderer->vkBindImageMemory(
  1639. renderer->logicalDevice,
  1640. image,
  1641. usedRegion->allocation->memory,
  1642. alignedOffset);
  1643. SDL_UnlockMutex(usedRegion->allocation->memoryLock);
  1644. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkBindImageMemory, 0);
  1645. return 1;
  1646. }
/* Binds a single resource (exactly one of `buffer` or `image`) to device
 * memory of the given memory type. Sub-allocates from an existing free
 * region when one fits; otherwise allocates a fresh block of device memory.
 *
 * Returns:
 *   1 - success; *pMemoryUsedRegion receives the bound region
 *   0 - invalid arguments, or vkBind{Buffer,Image}Memory failed
 *   2 - vkAllocateMemory failed (out of memory); the caller is responsible
 *       for handling this, e.g. by retrying with another memory type
 */
static Uint8 VULKAN_INTERNAL_BindResourceMemory(
    VulkanRenderer *renderer,
    Uint32 memoryTypeIndex,
    VkMemoryRequirements *memoryRequirements,
    VkDeviceSize resourceSize, // may be different from requirements size!
    bool dedicated,            // the entire memory allocation should be used for this resource
    VkBuffer buffer,           // may be VK_NULL_HANDLE
    VkImage image,             // may be VK_NULL_HANDLE
    VulkanMemoryUsedRegion **pMemoryUsedRegion)
{
    VulkanMemoryAllocation *allocation;
    VulkanMemorySubAllocator *allocator;
    VulkanMemoryFreeRegion *region;
    VulkanMemoryFreeRegion *selectedRegion;
    VulkanMemoryUsedRegion *usedRegion;

    VkDeviceSize requiredSize, allocationSize;
    VkDeviceSize alignedOffset = 0;
    VkDeviceSize newRegionSize, newRegionOffset;

    Uint8 isHostVisible, smallAllocation, allocationResult;
    Sint32 i;

    // Whether this memory type is CPU-mappable; forwarded to
    // VULKAN_INTERNAL_AllocateMemory below.
    isHostVisible =
        (renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags &
         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
    requiredSize = memoryRequirements->size;
    // Small resources are only sub-allocated from SMALL_ALLOCATION_SIZE pages
    // and large ones only from large pages (see the filter in the loop below).
    smallAllocation = requiredSize <= SMALL_ALLOCATION_THRESHOLD;

    if ((buffer == VK_NULL_HANDLE && image == VK_NULL_HANDLE) ||
        (buffer != VK_NULL_HANDLE && image != VK_NULL_HANDLE)) {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "BindResourceMemory must be given either a VulkanBuffer or a VulkanTexture");
        return 0;
    }

    SDL_LockMutex(renderer->allocatorLock);

    selectedRegion = NULL;

    if (dedicated) {
        // Force an allocation
        allocationSize = requiredSize;
    } else {
        // Search for a suitable existing free region
        // (sorted regions are scanned from the end, i.e. largest-last order)
        for (i = allocator->sortedFreeRegionCount - 1; i >= 0; i -= 1) {
            region = allocator->sortedFreeRegions[i];

            if (smallAllocation && region->allocation->size != SMALL_ALLOCATION_SIZE) {
                // region is not in a small allocation
                continue;
            }

            if (!smallAllocation && region->allocation->size == SMALL_ALLOCATION_SIZE) {
                // allocation is not small and current region is in a small allocation
                continue;
            }

            // Round the region start up to the resource's required alignment
            // and check that the aligned resource still fits in the region.
            alignedOffset = VULKAN_INTERNAL_NextHighestAlignment(
                region->offset,
                memoryRequirements->alignment);

            if (alignedOffset + requiredSize <= region->offset + region->size) {
                selectedRegion = region;
                break;
            }
        }

        if (selectedRegion != NULL) {
            // Sub-allocate out of the chosen free region. The used region
            // starts at the region's original offset and its size includes
            // any alignment padding (alignedOffset - region->offset).
            region = selectedRegion;
            allocation = region->allocation;

            usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
                renderer,
                allocation,
                region->offset,
                requiredSize + (alignedOffset - region->offset),
                alignedOffset,
                resourceSize,
                memoryRequirements->alignment);

            usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

            // Whatever is left past the resource becomes a new free region.
            newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize);
            newRegionOffset = alignedOffset + requiredSize;

            // remove and add modified region to re-sort
            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

            // if size is 0, no need to re-insert
            if (newRegionSize != 0) {
                VULKAN_INTERNAL_NewMemoryFreeRegion(
                    renderer,
                    allocation,
                    newRegionOffset,
                    newRegionSize);
            }

            // Allocator bookkeeping is done; release the lock before binding.
            SDL_UnlockMutex(renderer->allocatorLock);

            if (buffer != VK_NULL_HANDLE) {
                if (!VULKAN_INTERNAL_BindBufferMemory(
                        renderer,
                        usedRegion,
                        alignedOffset,
                        buffer)) {
                    // Bind failed: return the region to the allocator.
                    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                        renderer,
                        usedRegion);

                    return 0;
                }
            } else if (image != VK_NULL_HANDLE) {
                if (!VULKAN_INTERNAL_BindImageMemory(
                        renderer,
                        usedRegion,
                        alignedOffset,
                        image)) {
                    // Bind failed: return the region to the allocator.
                    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                        renderer,
                        usedRegion);

                    return 0;
                }
            }

            *pMemoryUsedRegion = usedRegion;
            return 1;
        }

        // No suitable free regions exist, allocate a new memory region
        if (
            renderer->allocationsToDefragCount == 0 &&
            !renderer->defragInProgress) {
            // Mark currently fragmented allocations for defrag
            VULKAN_INTERNAL_MarkAllocationsForDefrag(renderer);
        }

        if (requiredSize > SMALL_ALLOCATION_THRESHOLD) {
            // allocate a page of required size aligned to LARGE_ALLOCATION_INCREMENT increments
            allocationSize =
                VULKAN_INTERNAL_NextHighestAlignment(requiredSize, LARGE_ALLOCATION_INCREMENT);
        } else {
            allocationSize = SMALL_ALLOCATION_SIZE;
        }
    }

    allocationResult = VULKAN_INTERNAL_AllocateMemory(
        renderer,
        memoryTypeIndex,
        allocationSize,
        isHostVisible,
        &allocation);

    // Uh oh, we're out of memory
    if (allocationResult == 0) {
        SDL_UnlockMutex(renderer->allocatorLock);

        // Responsibility of the caller to handle being out of memory
        return 2;
    }

    // A fresh allocation starts with a single free region covering the whole
    // block; carve the resource out of its front (offset 0, so alignment is
    // trivially satisfied).
    usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
        renderer,
        allocation,
        0,
        requiredSize,
        0,
        resourceSize,
        memoryRequirements->alignment);

    usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

    region = allocation->freeRegions[0];

    newRegionOffset = region->offset + requiredSize;
    newRegionSize = region->size - requiredSize;

    VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

    if (newRegionSize != 0) {
        VULKAN_INTERNAL_NewMemoryFreeRegion(
            renderer,
            allocation,
            newRegionOffset,
            newRegionSize);
    }

    SDL_UnlockMutex(renderer->allocatorLock);

    if (buffer != VK_NULL_HANDLE) {
        if (!VULKAN_INTERNAL_BindBufferMemory(
                renderer,
                usedRegion,
                0,
                buffer)) {
            // Bind failed: return the region to the allocator.
            VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                renderer,
                usedRegion);

            return 0;
        }
    } else if (image != VK_NULL_HANDLE) {
        if (!VULKAN_INTERNAL_BindImageMemory(
                renderer,
                usedRegion,
                0,
                image)) {
            // Bind failed: return the region to the allocator.
            VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                renderer,
                usedRegion);

            return 0;
        }
    }

    *pMemoryUsedRegion = usedRegion;
    return 1;
}
  1828. static Uint8 VULKAN_INTERNAL_BindMemoryForImage(
  1829. VulkanRenderer *renderer,
  1830. VkImage image,
  1831. VulkanMemoryUsedRegion **usedRegion)
  1832. {
  1833. Uint8 bindResult = 0;
  1834. Uint32 memoryTypeCount = 0;
  1835. Uint32 *memoryTypesToTry = NULL;
  1836. Uint32 selectedMemoryTypeIndex = 0;
  1837. Uint32 i;
  1838. VkMemoryPropertyFlags preferredMemoryPropertyFlags;
  1839. VkMemoryRequirements memoryRequirements;
  1840. /* Vulkan memory types have several memory properties.
  1841. *
  1842. * Unlike buffers, images are always optimally stored device-local,
  1843. * so that is the only property we prefer here.
  1844. *
  1845. * If memory is constrained, it is fine for the texture to not
  1846. * be device-local.
  1847. */
  1848. preferredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  1849. memoryTypesToTry = VULKAN_INTERNAL_FindBestImageMemoryTypes(
  1850. renderer,
  1851. image,
  1852. preferredMemoryPropertyFlags,
  1853. &memoryRequirements,
  1854. &memoryTypeCount);
  1855. for (i = 0; i < memoryTypeCount; i += 1) {
  1856. bindResult = VULKAN_INTERNAL_BindResourceMemory(
  1857. renderer,
  1858. memoryTypesToTry[i],
  1859. &memoryRequirements,
  1860. memoryRequirements.size,
  1861. false,
  1862. VK_NULL_HANDLE,
  1863. image,
  1864. usedRegion);
  1865. if (bindResult == 1) {
  1866. selectedMemoryTypeIndex = memoryTypesToTry[i];
  1867. break;
  1868. }
  1869. }
  1870. SDL_free(memoryTypesToTry);
  1871. // Check for warnings on success
  1872. if (bindResult == 1) {
  1873. if (!renderer->outOfDeviceLocalMemoryWarning) {
  1874. if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
  1875. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating textures on host-local memory!");
  1876. renderer->outOfDeviceLocalMemoryWarning = 1;
  1877. }
  1878. }
  1879. }
  1880. return bindResult;
  1881. }
  1882. static Uint8 VULKAN_INTERNAL_BindMemoryForBuffer(
  1883. VulkanRenderer *renderer,
  1884. VkBuffer buffer,
  1885. VkDeviceSize size,
  1886. VulkanBufferType type,
  1887. bool dedicated,
  1888. VulkanMemoryUsedRegion **usedRegion)
  1889. {
  1890. Uint8 bindResult = 0;
  1891. Uint32 memoryTypeCount = 0;
  1892. Uint32 *memoryTypesToTry = NULL;
  1893. Uint32 selectedMemoryTypeIndex = 0;
  1894. Uint32 i;
  1895. VkMemoryPropertyFlags requiredMemoryPropertyFlags = 0;
  1896. VkMemoryPropertyFlags preferredMemoryPropertyFlags = 0;
  1897. VkMemoryPropertyFlags tolerableMemoryPropertyFlags = 0;
  1898. VkMemoryRequirements memoryRequirements;
  1899. /* Buffers need to be optimally bound to a memory type
  1900. * based on their use case and the architecture of the system.
  1901. *
  1902. * It is important to understand the distinction between device and host.
  1903. *
  1904. * On a traditional high-performance desktop computer,
  1905. * the "device" would be the GPU, and the "host" would be the CPU.
  1906. * Memory being copied between these two must cross the PCI bus.
  1907. * On these systems we have to be concerned about bandwidth limitations
  1908. * and causing memory stalls, so we have taken a great deal of care
  1909. * to structure this API to guide the client towards optimal usage.
  1910. *
  1911. * Other kinds of devices do not necessarily have this distinction.
  1912. * On an iPhone or Nintendo Switch, all memory is accessible both to the
  1913. * GPU and the CPU at all times. These kinds of systems are known as
  1914. * UMA, or Unified Memory Architecture. A desktop computer using the
  1915. * CPU's integrated graphics can also be thought of as UMA.
  1916. *
  1917. * Vulkan memory types have several memory properties.
  1918. * The relevant memory properties are as follows:
  1919. *
  1920. * DEVICE_LOCAL:
  1921. * This memory is on-device and most efficient for device access.
  1922. * On UMA systems all memory is device-local.
  1923. * If memory is not device-local, then it is host-local.
  1924. *
  1925. * HOST_VISIBLE:
  1926. * This memory can be mapped for host access, meaning we can obtain
  1927. * a pointer to directly access the memory.
  1928. *
  1929. * HOST_COHERENT:
  1930. * Host-coherent memory does not require cache management operations
  1931. * when mapped, so we always set this alongside HOST_VISIBLE
  1932. * to avoid extra record keeping.
  1933. *
  1934. * HOST_CACHED:
  1935. * Host-cached memory is faster to access than uncached memory
  1936. * but memory of this type might not always be available.
  1937. *
  1938. * GPU buffers, like vertex buffers, indirect buffers, etc
  1939. * are optimally stored in device-local memory.
  1940. * However, if device-local memory is low, these buffers
  1941. * can be accessed from host-local memory with a performance penalty.
  1942. *
  1943. * Uniform buffers must be host-visible and coherent because
  1944. * the client uses them to quickly push small amounts of data.
  1945. * We prefer uniform buffers to also be device-local because
  1946. * they are accessed by shaders, but the amount of memory
  1947. * that is both device-local and host-visible
  1948. * is often constrained, particularly on low-end devices.
  1949. *
  1950. * Transfer buffers must be host-visible and coherent because
  1951. * the client uses them to stage data to be transferred
  1952. * to device-local memory, or to read back data transferred
  1953. * from the device. We prefer the cache bit for performance
  1954. * but it isn't strictly necessary. We tolerate device-local
  1955. * memory in this situation because, as mentioned above,
  1956. * on certain devices all memory is device-local, and even
  1957. * though the transfer isn't strictly necessary it is still
  1958. * useful for correctly timelining data.
  1959. */
  1960. if (type == VULKAN_BUFFER_TYPE_GPU) {
  1961. preferredMemoryPropertyFlags |=
  1962. VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  1963. } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
  1964. requiredMemoryPropertyFlags |=
  1965. VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
  1966. VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
  1967. preferredMemoryPropertyFlags |=
  1968. VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  1969. } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
  1970. requiredMemoryPropertyFlags |=
  1971. VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
  1972. VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
  1973. preferredMemoryPropertyFlags |=
  1974. VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
  1975. tolerableMemoryPropertyFlags |=
  1976. VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  1977. } else {
  1978. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer type!");
  1979. return 0;
  1980. }
  1981. memoryTypesToTry = VULKAN_INTERNAL_FindBestBufferMemoryTypes(
  1982. renderer,
  1983. buffer,
  1984. requiredMemoryPropertyFlags,
  1985. preferredMemoryPropertyFlags,
  1986. tolerableMemoryPropertyFlags,
  1987. &memoryRequirements,
  1988. &memoryTypeCount);
  1989. for (i = 0; i < memoryTypeCount; i += 1) {
  1990. bindResult = VULKAN_INTERNAL_BindResourceMemory(
  1991. renderer,
  1992. memoryTypesToTry[i],
  1993. &memoryRequirements,
  1994. size,
  1995. dedicated,
  1996. buffer,
  1997. VK_NULL_HANDLE,
  1998. usedRegion);
  1999. if (bindResult == 1) {
  2000. selectedMemoryTypeIndex = memoryTypesToTry[i];
  2001. break;
  2002. }
  2003. }
  2004. SDL_free(memoryTypesToTry);
  2005. // Check for warnings on success
  2006. if (bindResult == 1) {
  2007. if (type == VULKAN_BUFFER_TYPE_GPU) {
  2008. if (!renderer->outOfDeviceLocalMemoryWarning) {
  2009. if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
  2010. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating buffers on host-local memory, expect degraded performance!");
  2011. renderer->outOfDeviceLocalMemoryWarning = 1;
  2012. }
  2013. }
  2014. } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
  2015. if (!renderer->outofBARMemoryWarning) {
  2016. if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
  2017. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of BAR memory, allocating uniform buffers on host-local memory, expect degraded performance!");
  2018. renderer->outofBARMemoryWarning = 1;
  2019. }
  2020. }
  2021. } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
  2022. if (!renderer->integratedMemoryNotification) {
  2023. if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
  2024. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Integrated memory detected, allocating TransferBuffers on device-local memory!");
  2025. renderer->integratedMemoryNotification = 1;
  2026. }
  2027. }
  2028. }
  2029. }
  2030. return bindResult;
  2031. }
  2032. // Resource tracking
/* Registers `resource` with the enclosing function's `commandBuffer` so it is
 * reference-counted for the lifetime of that command buffer:
 *   1. scan the tracked `array` and return early if already present,
 *   2. grow the array by one slot when full,
 *   3. append the resource and bump its atomic reference count.
 *
 * NOTE: the duplicate check expands to a bare `return;`, so this macro may
 * only be used as the body of a void function — which is also why it is
 * deliberately NOT wrapped in do { ... } while (0).
 */
#define TRACK_RESOURCE(resource, type, array, count, capacity) \
    for (Sint32 i = commandBuffer->count - 1; i >= 0; i -= 1) { \
        if (commandBuffer->array[i] == resource) {              \
            return;                                             \
        }                                                       \
    }                                                           \
                                                                \
    if (commandBuffer->count == commandBuffer->capacity) {      \
        commandBuffer->capacity += 1;                           \
        commandBuffer->array = SDL_realloc(                     \
            commandBuffer->array,                               \
            commandBuffer->capacity * sizeof(type));            \
    }                                                           \
    commandBuffer->array[commandBuffer->count] = resource;      \
    commandBuffer->count += 1;                                  \
    SDL_AtomicIncRef(&resource->referenceCount);
// Track a buffer so it stays referenced until this command buffer completes.
static void VULKAN_INTERNAL_TrackBuffer(
    VulkanCommandBuffer *commandBuffer,
    VulkanBuffer *buffer)
{
    TRACK_RESOURCE(
        buffer,
        VulkanBuffer *,
        usedBuffers,
        usedBufferCount,
        usedBufferCapacity)
}
// Track a texture so it stays referenced until this command buffer completes.
static void VULKAN_INTERNAL_TrackTexture(
    VulkanCommandBuffer *commandBuffer,
    VulkanTexture *texture)
{
    TRACK_RESOURCE(
        texture,
        VulkanTexture *,
        usedTextures,
        usedTextureCount,
        usedTextureCapacity)
}
// Track a sampler so it stays referenced until this command buffer completes.
static void VULKAN_INTERNAL_TrackSampler(
    VulkanCommandBuffer *commandBuffer,
    VulkanSampler *sampler)
{
    TRACK_RESOURCE(
        sampler,
        VulkanSampler *,
        usedSamplers,
        usedSamplerCount,
        usedSamplerCapacity)
}
// Track a graphics pipeline so it stays referenced until this command buffer completes.
static void VULKAN_INTERNAL_TrackGraphicsPipeline(
    VulkanCommandBuffer *commandBuffer,
    VulkanGraphicsPipeline *graphicsPipeline)
{
    TRACK_RESOURCE(
        graphicsPipeline,
        VulkanGraphicsPipeline *,
        usedGraphicsPipelines,
        usedGraphicsPipelineCount,
        usedGraphicsPipelineCapacity)
}
// Track a compute pipeline so it stays referenced until this command buffer completes.
static void VULKAN_INTERNAL_TrackComputePipeline(
    VulkanCommandBuffer *commandBuffer,
    VulkanComputePipeline *computePipeline)
{
    TRACK_RESOURCE(
        computePipeline,
        VulkanComputePipeline *,
        usedComputePipelines,
        usedComputePipelineCount,
        usedComputePipelineCapacity)
}
  2104. static void VULKAN_INTERNAL_TrackFramebuffer(
  2105. VulkanCommandBuffer *commandBuffer,
  2106. VulkanFramebuffer *framebuffer)
  2107. {
  2108. TRACK_RESOURCE(
  2109. framebuffer,
  2110. VulkanFramebuffer *,
  2111. usedFramebuffers,
  2112. usedFramebufferCount,
  2113. usedFramebufferCapacity);
  2114. }
  2115. static void VULKAN_INTERNAL_TrackUniformBuffer(
  2116. VulkanCommandBuffer *commandBuffer,
  2117. VulkanUniformBuffer *uniformBuffer)
  2118. {
  2119. for (Sint32 i = commandBuffer->usedUniformBufferCount - 1; i >= 0; i -= 1) {
  2120. if (commandBuffer->usedUniformBuffers[i] == uniformBuffer) {
  2121. return;
  2122. }
  2123. }
  2124. if (commandBuffer->usedUniformBufferCount == commandBuffer->usedUniformBufferCapacity) {
  2125. commandBuffer->usedUniformBufferCapacity += 1;
  2126. commandBuffer->usedUniformBuffers = SDL_realloc(
  2127. commandBuffer->usedUniformBuffers,
  2128. commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));
  2129. }
  2130. commandBuffer->usedUniformBuffers[commandBuffer->usedUniformBufferCount] = uniformBuffer;
  2131. commandBuffer->usedUniformBufferCount += 1;
  2132. VULKAN_INTERNAL_TrackBuffer(
  2133. commandBuffer,
  2134. uniformBuffer->buffer);
  2135. }
  2136. #undef TRACK_RESOURCE
  2137. // Memory Barriers
  2138. /*
  2139. * In Vulkan, we must manually synchronize operations that write to resources on the GPU
  2140. * so that read-after-write, write-after-read, and write-after-write hazards do not occur.
  2141. * Additionally, textures are required to be in specific layouts for specific use cases.
  2142. * Both of these tasks are accomplished with vkCmdPipelineBarrier.
  2143. *
  2144. * To insert the correct barriers, we keep track of "usage modes" for buffers and textures.
  2145. * These indicate the current usage of that resource on the command buffer.
  2146. * The transition from one usage mode to another indicates how the barrier should be constructed.
  2147. *
  2148. * Pipeline barriers cannot be inserted during a render pass, but they can be inserted
  2149. * during a compute or copy pass.
  2150. *
  2151. * This means that the "default" usage mode of any given resource should be that it should be
  2152. * ready for a graphics-read operation, because we cannot barrier during a render pass.
  2153. * In the case where a resource is only used in compute, its default usage mode can be compute-read.
  2154. * This strategy allows us to avoid expensive record keeping of command buffer/resource usage mode pairs,
  2155. * and it fully covers synchronization between all combinations of stages.
  2156. *
  2157. * In Upload and Copy functions, we transition the resource immediately before and after the copy command.
  2158. *
  2159. * When binding a resource for compute, we transition when the Bind functions are called.
  2160. * If a bind slot containing a resource is overwritten, we transition the resource in that slot back to its default.
  2161. * When EndComputePass is called we transition all bound resources back to their default state.
  2162. *
  2163. * When binding a texture as a render pass attachment, we transition the resource on BeginRenderPass
  2164. * and transition it back to its default on EndRenderPass.
  2165. *
  2166. * This strategy imposes certain limitations on resource usage flags.
  2167. * For example, a texture cannot have both the SAMPLER and GRAPHICS_STORAGE usage flags,
  2168. * because then it is impossible for the backend to infer which default usage mode the texture should use.
  2169. *
  2170. * Sync hazards can be detected by setting VK_KHRONOS_VALIDATION_VALIDATE_SYNC=1 when using validation layers.
  2171. */
  2172. static void VULKAN_INTERNAL_BufferMemoryBarrier(
  2173. VulkanRenderer *renderer,
  2174. VulkanCommandBuffer *commandBuffer,
  2175. VulkanBufferUsageMode sourceUsageMode,
  2176. VulkanBufferUsageMode destinationUsageMode,
  2177. VulkanBuffer *buffer)
  2178. {
  2179. VkPipelineStageFlags srcStages = 0;
  2180. VkPipelineStageFlags dstStages = 0;
  2181. VkBufferMemoryBarrier memoryBarrier;
  2182. memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  2183. memoryBarrier.pNext = NULL;
  2184. memoryBarrier.srcAccessMask = 0;
  2185. memoryBarrier.dstAccessMask = 0;
  2186. memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2187. memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2188. memoryBarrier.buffer = buffer->buffer;
  2189. memoryBarrier.offset = 0;
  2190. memoryBarrier.size = buffer->size;
  2191. if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
  2192. srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2193. memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2194. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
  2195. srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2196. memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2197. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
  2198. srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2199. memoryBarrier.srcAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
  2200. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
  2201. srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2202. memoryBarrier.srcAccessMask = VK_ACCESS_INDEX_READ_BIT;
  2203. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
  2204. srcStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
  2205. memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
  2206. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
  2207. srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2208. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2209. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
  2210. srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2211. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2212. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
  2213. srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2214. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2215. } else {
  2216. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer source barrier type!");
  2217. return;
  2218. }
  2219. if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
  2220. dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2221. memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2222. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
  2223. dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2224. memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2225. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
  2226. dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2227. memoryBarrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
  2228. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
  2229. dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2230. memoryBarrier.dstAccessMask = VK_ACCESS_INDEX_READ_BIT;
  2231. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
  2232. dstStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
  2233. memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
  2234. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
  2235. dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2236. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2237. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
  2238. dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2239. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2240. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
  2241. dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2242. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2243. } else {
  2244. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer destination barrier type!");
  2245. return;
  2246. }
  2247. renderer->vkCmdPipelineBarrier(
  2248. commandBuffer->commandBuffer,
  2249. srcStages,
  2250. dstStages,
  2251. 0,
  2252. 0,
  2253. NULL,
  2254. 1,
  2255. &memoryBarrier,
  2256. 0,
  2257. NULL);
  2258. buffer->transitioned = true;
  2259. }
  2260. static void VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
  2261. VulkanRenderer *renderer,
  2262. VulkanCommandBuffer *commandBuffer,
  2263. VulkanTextureUsageMode sourceUsageMode,
  2264. VulkanTextureUsageMode destinationUsageMode,
  2265. VulkanTextureSubresource *textureSubresource)
  2266. {
  2267. VkPipelineStageFlags srcStages = 0;
  2268. VkPipelineStageFlags dstStages = 0;
  2269. VkImageMemoryBarrier memoryBarrier;
  2270. memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2271. memoryBarrier.pNext = NULL;
  2272. memoryBarrier.srcAccessMask = 0;
  2273. memoryBarrier.dstAccessMask = 0;
  2274. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  2275. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  2276. memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2277. memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2278. memoryBarrier.image = textureSubresource->parent->image;
  2279. memoryBarrier.subresourceRange.aspectMask = textureSubresource->parent->aspectFlags;
  2280. memoryBarrier.subresourceRange.baseArrayLayer = textureSubresource->layer;
  2281. memoryBarrier.subresourceRange.layerCount = 1;
  2282. memoryBarrier.subresourceRange.baseMipLevel = textureSubresource->level;
  2283. memoryBarrier.subresourceRange.levelCount = 1;
  2284. if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED) {
  2285. srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  2286. memoryBarrier.srcAccessMask = 0;
  2287. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  2288. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
  2289. srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2290. memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2291. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2292. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
  2293. srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2294. memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2295. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2296. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
  2297. srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2298. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2299. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  2300. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
  2301. srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2302. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2303. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
  2304. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
  2305. srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2306. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2307. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
  2308. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
  2309. srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2310. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2311. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
  2312. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
  2313. srcStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  2314. memoryBarrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  2315. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  2316. } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
  2317. srcStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  2318. memoryBarrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  2319. memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  2320. } else {
  2321. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture source barrier type!");
  2322. return;
  2323. }
  2324. if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
  2325. dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2326. memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2327. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2328. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
  2329. dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2330. memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2331. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2332. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
  2333. dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2334. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2335. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  2336. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
  2337. dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2338. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2339. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
  2340. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
  2341. dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2342. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2343. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
  2344. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
  2345. dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2346. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2347. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
  2348. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
  2349. dstStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  2350. memoryBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  2351. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  2352. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
  2353. dstStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  2354. memoryBarrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  2355. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  2356. } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_PRESENT) {
  2357. dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
  2358. memoryBarrier.dstAccessMask = 0;
  2359. memoryBarrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
  2360. } else {
  2361. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture destination barrier type!");
  2362. return;
  2363. }
  2364. renderer->vkCmdPipelineBarrier(
  2365. commandBuffer->commandBuffer,
  2366. srcStages,
  2367. dstStages,
  2368. 0,
  2369. 0,
  2370. NULL,
  2371. 0,
  2372. NULL,
  2373. 1,
  2374. &memoryBarrier);
  2375. }
  2376. static VulkanBufferUsageMode VULKAN_INTERNAL_DefaultBufferUsageMode(
  2377. VulkanBuffer *buffer)
  2378. {
  2379. // NOTE: order matters here!
  2380. if (buffer->usage & SDL_GPU_BUFFERUSAGE_VERTEX) {
  2381. return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
  2382. } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_INDEX) {
  2383. return VULKAN_BUFFER_USAGE_MODE_INDEX_READ;
  2384. } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_INDIRECT) {
  2385. return VULKAN_BUFFER_USAGE_MODE_INDIRECT;
  2386. } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ) {
  2387. return VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ;
  2388. } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ) {
  2389. return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ;
  2390. } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE) {
  2391. return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
  2392. } else {
  2393. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Buffer has no default usage mode!");
  2394. return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
  2395. }
  2396. }
  2397. static VulkanTextureUsageMode VULKAN_INTERNAL_DefaultTextureUsageMode(
  2398. VulkanTexture *texture)
  2399. {
  2400. // NOTE: order matters here!
  2401. // NOTE: graphics storage bits and sampler bit are mutually exclusive!
  2402. if (texture->usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
  2403. return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
  2404. } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ) {
  2405. return VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ;
  2406. } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
  2407. return VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT;
  2408. } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
  2409. return VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT;
  2410. } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ) {
  2411. return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ;
  2412. } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE) {
  2413. return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
  2414. } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE) {
  2415. return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
  2416. } else {
  2417. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Texture has no default usage mode!");
  2418. return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
  2419. }
  2420. }
  2421. static void VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  2422. VulkanRenderer *renderer,
  2423. VulkanCommandBuffer *commandBuffer,
  2424. VulkanBufferUsageMode destinationUsageMode,
  2425. VulkanBuffer *buffer)
  2426. {
  2427. VULKAN_INTERNAL_BufferMemoryBarrier(
  2428. renderer,
  2429. commandBuffer,
  2430. VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
  2431. destinationUsageMode,
  2432. buffer);
  2433. }
  2434. static void VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  2435. VulkanRenderer *renderer,
  2436. VulkanCommandBuffer *commandBuffer,
  2437. VulkanBufferUsageMode sourceUsageMode,
  2438. VulkanBuffer *buffer)
  2439. {
  2440. VULKAN_INTERNAL_BufferMemoryBarrier(
  2441. renderer,
  2442. commandBuffer,
  2443. sourceUsageMode,
  2444. VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
  2445. buffer);
  2446. }
  2447. static void VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  2448. VulkanRenderer *renderer,
  2449. VulkanCommandBuffer *commandBuffer,
  2450. VulkanTextureUsageMode destinationUsageMode,
  2451. VulkanTextureSubresource *textureSubresource)
  2452. {
  2453. VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
  2454. renderer,
  2455. commandBuffer,
  2456. VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
  2457. destinationUsageMode,
  2458. textureSubresource);
  2459. }
  2460. static void VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
  2461. VulkanRenderer *renderer,
  2462. VulkanCommandBuffer *commandBuffer,
  2463. VulkanTextureUsageMode destinationUsageMode,
  2464. VulkanTexture *texture)
  2465. {
  2466. for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
  2467. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  2468. renderer,
  2469. commandBuffer,
  2470. destinationUsageMode,
  2471. &texture->subresources[i]);
  2472. }
  2473. }
  2474. static void VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  2475. VulkanRenderer *renderer,
  2476. VulkanCommandBuffer *commandBuffer,
  2477. VulkanTextureUsageMode sourceUsageMode,
  2478. VulkanTextureSubresource *textureSubresource)
  2479. {
  2480. VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
  2481. renderer,
  2482. commandBuffer,
  2483. sourceUsageMode,
  2484. VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
  2485. textureSubresource);
  2486. }
  2487. static void VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  2488. VulkanRenderer *renderer,
  2489. VulkanCommandBuffer *commandBuffer,
  2490. VulkanTextureUsageMode sourceUsageMode,
  2491. VulkanTexture *texture)
  2492. {
  2493. // FIXME: could optimize this barrier
  2494. for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
  2495. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  2496. renderer,
  2497. commandBuffer,
  2498. sourceUsageMode,
  2499. &texture->subresources[i]);
  2500. }
  2501. }
  2502. // Resource Disposal
  2503. static void VULKAN_INTERNAL_ReleaseFramebuffer(
  2504. VulkanRenderer *renderer,
  2505. VulkanFramebuffer *framebuffer)
  2506. {
  2507. SDL_LockMutex(renderer->disposeLock);
  2508. EXPAND_ARRAY_IF_NEEDED(
  2509. renderer->framebuffersToDestroy,
  2510. VulkanFramebuffer *,
  2511. renderer->framebuffersToDestroyCount + 1,
  2512. renderer->framebuffersToDestroyCapacity,
  2513. renderer->framebuffersToDestroyCapacity * 2);
  2514. renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer;
  2515. renderer->framebuffersToDestroyCount += 1;
  2516. SDL_UnlockMutex(renderer->disposeLock);
  2517. }
  2518. static void VULKAN_INTERNAL_DestroyFramebuffer(
  2519. VulkanRenderer *renderer,
  2520. VulkanFramebuffer *framebuffer)
  2521. {
  2522. renderer->vkDestroyFramebuffer(
  2523. renderer->logicalDevice,
  2524. framebuffer->framebuffer,
  2525. NULL);
  2526. SDL_free(framebuffer);
  2527. }
// Accumulator passed through the framebuffer hash-table iteration when
// looking for cached framebuffers that reference a VkImageView that is
// about to be destroyed.
// NOTE: field order matters — this struct is initialized positionally in
// VULKAN_INTERNAL_RemoveFramebuffersContainingView.
typedef struct CheckOneFramebufferForRemovalData
{
    Uint32 keysToRemoveCapacity;            // allocated slots in keysToRemove
    Uint32 keysToRemoveCount;               // slots currently in use
    FramebufferHashTableKey **keysToRemove; // keys whose framebuffers reference `view`
    VkImageView view;                       // the view being destroyed
} CheckOneFramebufferForRemovalData;
  2535. static bool SDLCALL CheckOneFramebufferForRemoval(void *userdata, const SDL_HashTable *table, const void *vkey, const void *vvalue)
  2536. {
  2537. CheckOneFramebufferForRemovalData *data = (CheckOneFramebufferForRemovalData *) userdata;
  2538. FramebufferHashTableKey *key = (FramebufferHashTableKey *) vkey;
  2539. VkImageView view = data->view;
  2540. bool remove = false;
  2541. for (Uint32 i = 0; i < key->numColorTargets; i += 1) {
  2542. if (key->colorAttachmentViews[i] == view) {
  2543. remove = true;
  2544. }
  2545. }
  2546. for (Uint32 i = 0; i < key->numResolveAttachments; i += 1) {
  2547. if (key->resolveAttachmentViews[i] == view) {
  2548. remove = true;
  2549. }
  2550. }
  2551. if (key->depthStencilAttachmentView == view) {
  2552. remove = true;
  2553. }
  2554. if (remove) {
  2555. if (data->keysToRemoveCount == data->keysToRemoveCapacity) {
  2556. data->keysToRemoveCapacity *= 2;
  2557. void *ptr = SDL_realloc(data->keysToRemove, data->keysToRemoveCapacity * sizeof(FramebufferHashTableKey *));
  2558. if (!ptr) {
  2559. return false; // ugh, stop iterating. We're in trouble.
  2560. }
  2561. data->keysToRemove = (FramebufferHashTableKey **) ptr;
  2562. }
  2563. data->keysToRemove[data->keysToRemoveCount] = key;
  2564. data->keysToRemoveCount++;
  2565. }
  2566. return true; // keep iterating.
  2567. }
  2568. static void VULKAN_INTERNAL_RemoveFramebuffersContainingView(
  2569. VulkanRenderer *renderer,
  2570. VkImageView view)
  2571. {
  2572. // Can't remove while iterating!
  2573. CheckOneFramebufferForRemovalData data = { 8, 0, NULL, view };
  2574. data.keysToRemove = (FramebufferHashTableKey **) SDL_malloc(data.keysToRemoveCapacity * sizeof(FramebufferHashTableKey *));
  2575. if (!data.keysToRemove) {
  2576. return; // uhoh.
  2577. }
  2578. SDL_LockMutex(renderer->framebufferFetchLock);
  2579. SDL_IterateHashTable(renderer->framebufferHashTable, CheckOneFramebufferForRemoval, &data);
  2580. for (Uint32 i = 0; i < data.keysToRemoveCount; i += 1) {
  2581. SDL_RemoveFromHashTable(renderer->framebufferHashTable, (void *)data.keysToRemove[i]);
  2582. }
  2583. SDL_UnlockMutex(renderer->framebufferFetchLock);
  2584. SDL_free(data.keysToRemove);
  2585. }
// Immediately destroys a texture: all subresource views, the full view,
// the VkImage (unless externally owned), and its memory region.
// Order matters: cached framebuffers referencing a view must be evicted
// before that view is destroyed.
static void VULKAN_INTERNAL_DestroyTexture(
    VulkanRenderer *renderer,
    VulkanTexture *texture)
{
    // Clean up subresources
    for (Uint32 subresourceIndex = 0; subresourceIndex < texture->subresourceCount; subresourceIndex += 1) {
        if (texture->subresources[subresourceIndex].renderTargetViews != NULL) {
            // First evict every cached framebuffer referencing any of the
            // per-depth-slice render target views...
            for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) {
                VULKAN_INTERNAL_RemoveFramebuffersContainingView(
                    renderer,
                    texture->subresources[subresourceIndex].renderTargetViews[depthIndex]);
            }
            // ...then destroy the views themselves.
            for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) {
                renderer->vkDestroyImageView(
                    renderer->logicalDevice,
                    texture->subresources[subresourceIndex].renderTargetViews[depthIndex],
                    NULL);
            }
            SDL_free(texture->subresources[subresourceIndex].renderTargetViews);
        }
        if (texture->subresources[subresourceIndex].computeWriteView != VK_NULL_HANDLE) {
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].computeWriteView,
                NULL);
        }
        if (texture->subresources[subresourceIndex].depthStencilView != VK_NULL_HANDLE) {
            // Depth-stencil views may also be referenced by cached framebuffers.
            VULKAN_INTERNAL_RemoveFramebuffersContainingView(
                renderer,
                texture->subresources[subresourceIndex].depthStencilView);
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].depthStencilView,
                NULL);
        }
    }
    SDL_free(texture->subresources);
    if (texture->fullView) {
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            texture->fullView,
            NULL);
    }
    /* Don't free an externally managed VkImage (e.g. XR swapchain images) */
    if (texture->image && !texture->externallyManaged) {
        renderer->vkDestroyImage(
            renderer->logicalDevice,
            texture->image,
            NULL);
    }
    // Swapchain-style textures may have no backing allocation of their own.
    if (texture->usedRegion) {
        VULKAN_INTERNAL_RemoveMemoryUsedRegion(
            renderer,
            texture->usedRegion);
    }
    SDL_free(texture);
}
  2643. static void VULKAN_INTERNAL_DestroyBuffer(
  2644. VulkanRenderer *renderer,
  2645. VulkanBuffer *buffer)
  2646. {
  2647. renderer->vkDestroyBuffer(
  2648. renderer->logicalDevice,
  2649. buffer->buffer,
  2650. NULL);
  2651. VULKAN_INTERNAL_RemoveMemoryUsedRegion(
  2652. renderer,
  2653. buffer->usedRegion);
  2654. SDL_free(buffer);
  2655. }
  2656. static void VULKAN_INTERNAL_DestroyCommandPool(
  2657. VulkanRenderer *renderer,
  2658. VulkanCommandPool *commandPool)
  2659. {
  2660. Uint32 i;
  2661. VulkanCommandBuffer *commandBuffer;
  2662. renderer->vkDestroyCommandPool(
  2663. renderer->logicalDevice,
  2664. commandPool->commandPool,
  2665. NULL);
  2666. for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1) {
  2667. commandBuffer = commandPool->inactiveCommandBuffers[i];
  2668. SDL_free(commandBuffer->presentDatas);
  2669. SDL_free(commandBuffer->waitSemaphores);
  2670. SDL_free(commandBuffer->signalSemaphores);
  2671. SDL_free(commandBuffer->usedBuffers);
  2672. SDL_free(commandBuffer->usedTextures);
  2673. SDL_free(commandBuffer->usedSamplers);
  2674. SDL_free(commandBuffer->usedGraphicsPipelines);
  2675. SDL_free(commandBuffer->usedComputePipelines);
  2676. SDL_free(commandBuffer->usedFramebuffers);
  2677. SDL_free(commandBuffer->usedUniformBuffers);
  2678. SDL_free(commandBuffer);
  2679. }
  2680. SDL_free(commandPool->inactiveCommandBuffers);
  2681. SDL_free(commandPool);
  2682. }
  2683. static void VULKAN_INTERNAL_DestroyDescriptorSetLayout(
  2684. VulkanRenderer *renderer,
  2685. DescriptorSetLayout *layout)
  2686. {
  2687. if (layout == NULL) {
  2688. return;
  2689. }
  2690. if (layout->descriptorSetLayout != VK_NULL_HANDLE) {
  2691. renderer->vkDestroyDescriptorSetLayout(
  2692. renderer->logicalDevice,
  2693. layout->descriptorSetLayout,
  2694. NULL);
  2695. }
  2696. SDL_free(layout);
  2697. }
  2698. static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
  2699. VulkanRenderer *renderer,
  2700. VulkanGraphicsPipeline *graphicsPipeline)
  2701. {
  2702. renderer->vkDestroyPipeline(
  2703. renderer->logicalDevice,
  2704. graphicsPipeline->pipeline,
  2705. NULL);
  2706. (void)SDL_AtomicDecRef(&graphicsPipeline->vertexShader->referenceCount);
  2707. (void)SDL_AtomicDecRef(&graphicsPipeline->fragmentShader->referenceCount);
  2708. SDL_free(graphicsPipeline);
  2709. }
  2710. static void VULKAN_INTERNAL_DestroyComputePipeline(
  2711. VulkanRenderer *renderer,
  2712. VulkanComputePipeline *computePipeline)
  2713. {
  2714. if (computePipeline->pipeline != VK_NULL_HANDLE) {
  2715. renderer->vkDestroyPipeline(
  2716. renderer->logicalDevice,
  2717. computePipeline->pipeline,
  2718. NULL);
  2719. }
  2720. if (computePipeline->shaderModule != VK_NULL_HANDLE) {
  2721. renderer->vkDestroyShaderModule(
  2722. renderer->logicalDevice,
  2723. computePipeline->shaderModule,
  2724. NULL);
  2725. }
  2726. SDL_free(computePipeline);
  2727. }
  2728. static void VULKAN_INTERNAL_DestroyShader(
  2729. VulkanRenderer *renderer,
  2730. VulkanShader *vulkanShader)
  2731. {
  2732. renderer->vkDestroyShaderModule(
  2733. renderer->logicalDevice,
  2734. vulkanShader->shaderModule,
  2735. NULL);
  2736. SDL_free(vulkanShader->entrypointName);
  2737. SDL_free(vulkanShader);
  2738. }
  2739. static void VULKAN_INTERNAL_DestroySampler(
  2740. VulkanRenderer *renderer,
  2741. VulkanSampler *vulkanSampler)
  2742. {
  2743. renderer->vkDestroySampler(
  2744. renderer->logicalDevice,
  2745. vulkanSampler->sampler,
  2746. NULL);
  2747. SDL_free(vulkanSampler);
  2748. }
// Tears down per-image swapchain state: texture containers (views +
// wrapper textures) and both semaphore sets. The VkSwapchainKHR itself is
// destroyed separately in VULKAN_INTERNAL_DestroySwapchain.
static void VULKAN_INTERNAL_DestroySwapchainImage(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    Uint32 i;
    if (windowData == NULL) {
        return;
    }
    for (i = 0; i < windowData->imageCount; i += 1) {
        // Evict cached framebuffers referencing the swapchain view before
        // destroying the view (swapchain textures have exactly one
        // subresource with one render target view).
        VULKAN_INTERNAL_RemoveFramebuffersContainingView(
            renderer,
            windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0]);
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0],
            NULL);
        SDL_free(windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews);
        SDL_free(windowData->textureContainers[i].activeTexture->subresources);
        SDL_free(windowData->textureContainers[i].activeTexture);
    }
    SDL_free(windowData->textureContainers);
    windowData->textureContainers = NULL;
    // imageAvailableSemaphore is a fixed per-frame array...
    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->imageAvailableSemaphore[i]) {
            renderer->vkDestroySemaphore(
                renderer->logicalDevice,
                windowData->imageAvailableSemaphore[i],
                NULL);
            windowData->imageAvailableSemaphore[i] = VK_NULL_HANDLE;
        }
    }
    // ...while renderFinishedSemaphore is heap-allocated per swapchain image.
    for (i = 0; i < windowData->imageCount; i += 1) {
        if (windowData->renderFinishedSemaphore[i]) {
            renderer->vkDestroySemaphore(
                renderer->logicalDevice,
                windowData->renderFinishedSemaphore[i],
                NULL);
            windowData->renderFinishedSemaphore[i] = VK_NULL_HANDLE;
        }
    }
    SDL_free(windowData->renderFinishedSemaphore);
    windowData->renderFinishedSemaphore = NULL;
    windowData->imageCount = 0;
}
  2793. static void VULKAN_INTERNAL_DestroySwapchain(
  2794. VulkanRenderer *renderer,
  2795. WindowData *windowData)
  2796. {
  2797. if (windowData == NULL) {
  2798. return;
  2799. }
  2800. VULKAN_INTERNAL_DestroySwapchainImage(renderer, windowData);
  2801. if (windowData->swapchain) {
  2802. renderer->vkDestroySwapchainKHR(
  2803. renderer->logicalDevice,
  2804. windowData->swapchain,
  2805. NULL);
  2806. windowData->swapchain = VK_NULL_HANDLE;
  2807. }
  2808. }
  2809. static void VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(
  2810. VulkanRenderer *renderer,
  2811. VulkanGraphicsPipelineResourceLayout *resourceLayout)
  2812. {
  2813. if (resourceLayout->pipelineLayout != VK_NULL_HANDLE) {
  2814. renderer->vkDestroyPipelineLayout(
  2815. renderer->logicalDevice,
  2816. resourceLayout->pipelineLayout,
  2817. NULL);
  2818. }
  2819. SDL_free(resourceLayout);
  2820. }
  2821. static void VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(
  2822. VulkanRenderer *renderer,
  2823. VulkanComputePipelineResourceLayout *resourceLayout)
  2824. {
  2825. if (resourceLayout->pipelineLayout != VK_NULL_HANDLE) {
  2826. renderer->vkDestroyPipelineLayout(
  2827. renderer->logicalDevice,
  2828. resourceLayout->pipelineLayout,
  2829. NULL);
  2830. }
  2831. SDL_free(resourceLayout);
  2832. }
  2833. static void VULKAN_INTERNAL_DestroyDescriptorSetCache(
  2834. VulkanRenderer *renderer,
  2835. DescriptorSetCache *descriptorSetCache)
  2836. {
  2837. for (Uint32 i = 0; i < descriptorSetCache->poolCount; i += 1) {
  2838. for (Uint32 j = 0; j < descriptorSetCache->pools[i].poolCount; j += 1) {
  2839. renderer->vkDestroyDescriptorPool(
  2840. renderer->logicalDevice,
  2841. descriptorSetCache->pools[i].descriptorPools[j],
  2842. NULL);
  2843. }
  2844. SDL_free(descriptorSetCache->pools[i].descriptorSets);
  2845. SDL_free(descriptorSetCache->pools[i].descriptorPools);
  2846. }
  2847. SDL_free(descriptorSetCache->pools);
  2848. SDL_free(descriptorSetCache);
  2849. }
  2850. // Hashtable functions
  2851. static Uint32 SDLCALL VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashFunction(void *userdata, const void *key)
  2852. {
  2853. GraphicsPipelineResourceLayoutHashTableKey *hashTableKey = (GraphicsPipelineResourceLayoutHashTableKey *)key;
  2854. /* The algorithm for this hashing function
  2855. * is taken from Josh Bloch's "Effective Java".
  2856. * (https://stackoverflow.com/a/113600/12492383)
  2857. */
  2858. const Uint32 hashFactor = 31;
  2859. Uint32 result = 1;
  2860. result = result * hashFactor + hashTableKey->vertexSamplerCount;
  2861. result = result * hashFactor + hashTableKey->vertexStorageBufferCount;
  2862. result = result * hashFactor + hashTableKey->vertexStorageTextureCount;
  2863. result = result * hashFactor + hashTableKey->vertexUniformBufferCount;
  2864. result = result * hashFactor + hashTableKey->fragmentSamplerCount;
  2865. result = result * hashFactor + hashTableKey->fragmentStorageBufferCount;
  2866. result = result * hashFactor + hashTableKey->fragmentStorageTextureCount;
  2867. result = result * hashFactor + hashTableKey->fragmentUniformBufferCount;
  2868. return result;
  2869. }
  2870. static bool SDLCALL VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
  2871. {
  2872. return SDL_memcmp(aKey, bKey, sizeof(GraphicsPipelineResourceLayoutHashTableKey)) == 0;
  2873. }
  2874. static void SDLCALL VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashDestroy(void *userdata, const void *key, const void *value)
  2875. {
  2876. VulkanRenderer *renderer = (VulkanRenderer *)userdata;
  2877. VulkanGraphicsPipelineResourceLayout *resourceLayout = (VulkanGraphicsPipelineResourceLayout *)value;
  2878. VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(renderer, resourceLayout);
  2879. SDL_free((void *)key);
  2880. }
  2881. static Uint32 SDLCALL VULKAN_INTERNAL_ComputePipelineResourceLayoutHashFunction(void *userdata, const void *key)
  2882. {
  2883. ComputePipelineResourceLayoutHashTableKey *hashTableKey = (ComputePipelineResourceLayoutHashTableKey *)key;
  2884. /* The algorithm for this hashing function
  2885. * is taken from Josh Bloch's "Effective Java".
  2886. * (https://stackoverflow.com/a/113600/12492383)
  2887. */
  2888. const Uint32 hashFactor = 31;
  2889. Uint32 result = 1;
  2890. result = result * hashFactor + hashTableKey->samplerCount;
  2891. result = result * hashFactor + hashTableKey->readonlyStorageTextureCount;
  2892. result = result * hashFactor + hashTableKey->readonlyStorageBufferCount;
  2893. result = result * hashFactor + hashTableKey->readWriteStorageTextureCount;
  2894. result = result * hashFactor + hashTableKey->readWriteStorageBufferCount;
  2895. result = result * hashFactor + hashTableKey->uniformBufferCount;
  2896. return result;
  2897. }
  2898. static bool SDLCALL VULKAN_INTERNAL_ComputePipelineResourceLayoutHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
  2899. {
  2900. return SDL_memcmp(aKey, bKey, sizeof(ComputePipelineResourceLayoutHashTableKey)) == 0;
  2901. }
  2902. static void SDLCALL VULKAN_INTERNAL_ComputePipelineResourceLayoutHashDestroy(void *userdata, const void *key, const void *value)
  2903. {
  2904. VulkanRenderer *renderer = (VulkanRenderer *)userdata;
  2905. VulkanComputePipelineResourceLayout *resourceLayout = (VulkanComputePipelineResourceLayout *)value;
  2906. VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(renderer, resourceLayout);
  2907. SDL_free((void *)key);
  2908. }
  2909. static Uint32 SDLCALL VULKAN_INTERNAL_DescriptorSetLayoutHashFunction(void *userdata, const void *key)
  2910. {
  2911. DescriptorSetLayoutHashTableKey *hashTableKey = (DescriptorSetLayoutHashTableKey *)key;
  2912. /* The algorithm for this hashing function
  2913. * is taken from Josh Bloch's "Effective Java".
  2914. * (https://stackoverflow.com/a/113600/12492383)
  2915. */
  2916. const Uint32 hashFactor = 31;
  2917. Uint32 result = 1;
  2918. result = result * hashFactor + hashTableKey->shaderStage;
  2919. result = result * hashFactor + hashTableKey->samplerCount;
  2920. result = result * hashFactor + hashTableKey->storageTextureCount;
  2921. result = result * hashFactor + hashTableKey->storageBufferCount;
  2922. result = result * hashFactor + hashTableKey->writeStorageTextureCount;
  2923. result = result * hashFactor + hashTableKey->writeStorageBufferCount;
  2924. result = result * hashFactor + hashTableKey->uniformBufferCount;
  2925. return result;
  2926. }
  2927. static bool SDLCALL VULKAN_INTERNAL_DescriptorSetLayoutHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
  2928. {
  2929. return SDL_memcmp(aKey, bKey, sizeof(DescriptorSetLayoutHashTableKey)) == 0;
  2930. }
  2931. static void SDLCALL VULKAN_INTERNAL_DescriptorSetLayoutHashDestroy(void *userdata, const void *key, const void *value)
  2932. {
  2933. VulkanRenderer *renderer = (VulkanRenderer *)userdata;
  2934. DescriptorSetLayout *layout = (DescriptorSetLayout *)value;
  2935. VULKAN_INTERNAL_DestroyDescriptorSetLayout(renderer, layout);
  2936. SDL_free((void *)key);
  2937. }
  2938. static Uint32 SDLCALL VULKAN_INTERNAL_CommandPoolHashFunction(void *userdata, const void *key)
  2939. {
  2940. return (Uint32)((CommandPoolHashTableKey *)key)->threadID;
  2941. }
  2942. static bool SDLCALL VULKAN_INTERNAL_CommandPoolHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
  2943. {
  2944. CommandPoolHashTableKey *a = (CommandPoolHashTableKey *)aKey;
  2945. CommandPoolHashTableKey *b = (CommandPoolHashTableKey *)bKey;
  2946. return a->threadID == b->threadID;
  2947. }
  2948. static void SDLCALL VULKAN_INTERNAL_CommandPoolHashDestroy(void *userdata, const void *key, const void *value)
  2949. {
  2950. VulkanRenderer *renderer = (VulkanRenderer *)userdata;
  2951. VulkanCommandPool *pool = (VulkanCommandPool *)value;
  2952. VULKAN_INTERNAL_DestroyCommandPool(renderer, pool);
  2953. SDL_free((void *)key);
  2954. }
  2955. static Uint32 SDLCALL VULKAN_INTERNAL_RenderPassHashFunction(void *userdata, const void *key)
  2956. {
  2957. RenderPassHashTableKey *hashTableKey = (RenderPassHashTableKey *)key;
  2958. /* The algorithm for this hashing function
  2959. * is taken from Josh Bloch's "Effective Java".
  2960. * (https://stackoverflow.com/a/113600/12492383)
  2961. */
  2962. const Uint32 hashFactor = 31;
  2963. Uint32 result = 1;
  2964. for (Uint32 i = 0; i < hashTableKey->numColorTargets; i += 1) {
  2965. result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].loadOp;
  2966. result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].storeOp;
  2967. result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].format;
  2968. }
  2969. for (Uint32 i = 0; i < hashTableKey->numResolveTargets; i += 1) {
  2970. result = result * hashFactor + hashTableKey->resolveTargetFormats[i];
  2971. }
  2972. result = result * hashFactor + hashTableKey->depthStencilTargetDescription.loadOp;
  2973. result = result * hashFactor + hashTableKey->depthStencilTargetDescription.storeOp;
  2974. result = result * hashFactor + hashTableKey->depthStencilTargetDescription.stencilLoadOp;
  2975. result = result * hashFactor + hashTableKey->depthStencilTargetDescription.stencilStoreOp;
  2976. result = result * hashFactor + hashTableKey->depthStencilTargetDescription.format;
  2977. result = result * hashFactor + hashTableKey->sampleCount;
  2978. return result;
  2979. }
  2980. static bool SDLCALL VULKAN_INTERNAL_RenderPassHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
  2981. {
  2982. RenderPassHashTableKey *a = (RenderPassHashTableKey *)aKey;
  2983. RenderPassHashTableKey *b = (RenderPassHashTableKey *)bKey;
  2984. if (a->numColorTargets != b->numColorTargets) {
  2985. return 0;
  2986. }
  2987. if (a->numResolveTargets != b->numResolveTargets) {
  2988. return 0;
  2989. }
  2990. if (a->sampleCount != b->sampleCount) {
  2991. return 0;
  2992. }
  2993. for (Uint32 i = 0; i < a->numColorTargets; i += 1) {
  2994. if (a->colorTargetDescriptions[i].format != b->colorTargetDescriptions[i].format) {
  2995. return 0;
  2996. }
  2997. if (a->colorTargetDescriptions[i].loadOp != b->colorTargetDescriptions[i].loadOp) {
  2998. return 0;
  2999. }
  3000. if (a->colorTargetDescriptions[i].storeOp != b->colorTargetDescriptions[i].storeOp) {
  3001. return 0;
  3002. }
  3003. }
  3004. for (Uint32 i = 0; i < a->numResolveTargets; i += 1) {
  3005. if (a->resolveTargetFormats[i] != b->resolveTargetFormats[i]) {
  3006. return 0;
  3007. }
  3008. }
  3009. if (a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format) {
  3010. return 0;
  3011. }
  3012. if (a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp) {
  3013. return 0;
  3014. }
  3015. if (a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp) {
  3016. return 0;
  3017. }
  3018. if (a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp) {
  3019. return 0;
  3020. }
  3021. if (a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp) {
  3022. return 0;
  3023. }
  3024. return 1;
  3025. }
  3026. static void SDLCALL VULKAN_INTERNAL_RenderPassHashDestroy(void *userdata, const void *key, const void *value)
  3027. {
  3028. VulkanRenderer *renderer = (VulkanRenderer *)userdata;
  3029. VulkanRenderPassHashTableValue *renderPassWrapper = (VulkanRenderPassHashTableValue *)value;
  3030. renderer->vkDestroyRenderPass(
  3031. renderer->logicalDevice,
  3032. renderPassWrapper->handle,
  3033. NULL);
  3034. SDL_free(renderPassWrapper);
  3035. SDL_free((void *)key);
  3036. }
  3037. static Uint32 SDLCALL VULKAN_INTERNAL_FramebufferHashFunction(void *userdata, const void *key)
  3038. {
  3039. FramebufferHashTableKey *hashTableKey = (FramebufferHashTableKey *)key;
  3040. /* The algorithm for this hashing function
  3041. * is taken from Josh Bloch's "Effective Java".
  3042. * (https://stackoverflow.com/a/113600/12492383)
  3043. */
  3044. const Uint32 hashFactor = 31;
  3045. Uint32 result = 1;
  3046. for (Uint32 i = 0; i < hashTableKey->numColorTargets; i += 1) {
  3047. result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->colorAttachmentViews[i];
  3048. }
  3049. for (Uint32 i = 0; i < hashTableKey->numResolveAttachments; i += 1) {
  3050. result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->resolveAttachmentViews[i];
  3051. }
  3052. result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->depthStencilAttachmentView;
  3053. result = result * hashFactor + hashTableKey->width;
  3054. result = result * hashFactor + hashTableKey->height;
  3055. return result;
  3056. }
  3057. static bool SDLCALL VULKAN_INTERNAL_FramebufferHashKeyMatch(void *userdata, const void *aKey, const void *bKey)
  3058. {
  3059. FramebufferHashTableKey *a = (FramebufferHashTableKey *)aKey;
  3060. FramebufferHashTableKey *b = (FramebufferHashTableKey *)bKey;
  3061. if (a->numColorTargets != b->numColorTargets) {
  3062. return 0;
  3063. }
  3064. if (a->numResolveAttachments != b->numResolveAttachments) {
  3065. return 0;
  3066. }
  3067. for (Uint32 i = 0; i < a->numColorTargets; i += 1) {
  3068. if (a->colorAttachmentViews[i] != b->colorAttachmentViews[i]) {
  3069. return 0;
  3070. }
  3071. }
  3072. for (Uint32 i = 0; i < a->numResolveAttachments; i += 1) {
  3073. if (a->resolveAttachmentViews[i] != b->resolveAttachmentViews[i]) {
  3074. return 0;
  3075. }
  3076. }
  3077. if (a->depthStencilAttachmentView != b->depthStencilAttachmentView) {
  3078. return 0;
  3079. }
  3080. if (a->width != b->width) {
  3081. return 0;
  3082. }
  3083. if (a->height != b->height) {
  3084. return 0;
  3085. }
  3086. return 1;
  3087. }
  3088. static void SDLCALL VULKAN_INTERNAL_FramebufferHashDestroy(void *userdata, const void *key, const void *value)
  3089. {
  3090. VulkanRenderer *renderer = (VulkanRenderer *)userdata;
  3091. VulkanFramebuffer *framebuffer = (VulkanFramebuffer *)value;
  3092. VULKAN_INTERNAL_ReleaseFramebuffer(renderer, framebuffer);
  3093. SDL_free((void *)key);
  3094. }
  3095. // Descriptor pools
  3096. static bool VULKAN_INTERNAL_AllocateDescriptorSets(
  3097. VulkanRenderer *renderer,
  3098. VkDescriptorPool descriptorPool,
  3099. VkDescriptorSetLayout descriptorSetLayout,
  3100. Uint32 descriptorSetCount,
  3101. VkDescriptorSet *descriptorSetArray)
  3102. {
  3103. VkDescriptorSetAllocateInfo descriptorSetAllocateInfo;
  3104. VkDescriptorSetLayout *descriptorSetLayouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount);
  3105. VkResult vulkanResult;
  3106. Uint32 i;
  3107. for (i = 0; i < descriptorSetCount; i += 1) {
  3108. descriptorSetLayouts[i] = descriptorSetLayout;
  3109. }
  3110. descriptorSetAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
  3111. descriptorSetAllocateInfo.pNext = NULL;
  3112. descriptorSetAllocateInfo.descriptorPool = descriptorPool;
  3113. descriptorSetAllocateInfo.descriptorSetCount = descriptorSetCount;
  3114. descriptorSetAllocateInfo.pSetLayouts = descriptorSetLayouts;
  3115. vulkanResult = renderer->vkAllocateDescriptorSets(
  3116. renderer->logicalDevice,
  3117. &descriptorSetAllocateInfo,
  3118. descriptorSetArray);
  3119. SDL_stack_free(descriptorSetLayouts);
  3120. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkAllocateDescriptorSets, false);
  3121. return true;
  3122. }
// Grows a DescriptorSetPool by one VkDescriptorPool sized for
// DESCRIPTOR_POOL_SIZE sets of the given layout, then pre-allocates all
// DESCRIPTOR_POOL_SIZE descriptor sets from the new pool.
// Returns false (with the error set) if pool creation or set allocation fails.
//
// NOTE(review): each category's loop below indexes descriptorPoolSizes
// relative to its own category only (Category 2 and 3 restart near index 0).
// This does not clobber earlier entries only because a given
// DescriptorSetLayout has nonzero counts in at most one category — see the
// "mutually exclusive" note on VULKAN_INTERNAL_FetchDescriptorSetLayout.
// Verify that invariant before changing layout construction.
static bool VULKAN_INTERNAL_AllocateDescriptorsFromPool(
    VulkanRenderer *renderer,
    DescriptorSetLayout *descriptorSetLayout,
    DescriptorSetPool *descriptorSetPool)
{
    // Worst-case sizing: one pool-size slot per possible binding in any stage.
    VkDescriptorPoolSize descriptorPoolSizes[
        MAX_TEXTURE_SAMPLERS_PER_STAGE +
        MAX_STORAGE_TEXTURES_PER_STAGE +
        MAX_STORAGE_BUFFERS_PER_STAGE +
        MAX_COMPUTE_WRITE_TEXTURES +
        MAX_COMPUTE_WRITE_BUFFERS +
        MAX_UNIFORM_BUFFERS_PER_STAGE];
    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    VkDescriptorPool pool;
    VkResult vulkanResult;
    // Category 1: read-only resources (samplers, then storage textures, then storage buffers)
    for (Uint32 i = 0; i < descriptorSetLayout->samplerCount; i += 1) {
        descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
    }
    for (Uint32 i = descriptorSetLayout->samplerCount; i < descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount; i += 1) {
        descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring the storage image as a sampled image, because shaders are stupid.
        descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
    }
    for (Uint32 i = descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount; i < descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount + descriptorSetLayout->storageBufferCount; i += 1) {
        descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
    }
    // Category 2: write resources (indices restart at 0 — relies on the
    // mutual-exclusivity invariant described above)
    for (Uint32 i = 0; i < descriptorSetLayout->writeStorageTextureCount; i += 1) {
        descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
    }
    for (Uint32 i = descriptorSetLayout->writeStorageTextureCount; i < descriptorSetLayout->writeStorageTextureCount + descriptorSetLayout->writeStorageBufferCount; i += 1) {
        descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
    }
    // Category 3: uniform buffers (indices also restart at 0)
    for (Uint32 i = 0; i < descriptorSetLayout->uniformBufferCount; i += 1) {
        descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
    }
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = NULL;
    descriptorPoolInfo.flags = 0;
    descriptorPoolInfo.maxSets = DESCRIPTOR_POOL_SIZE;
    // Because at most one category is populated, this sum equals the number
    // of pool-size entries actually written above.
    descriptorPoolInfo.poolSizeCount =
        descriptorSetLayout->samplerCount +
        descriptorSetLayout->storageTextureCount +
        descriptorSetLayout->storageBufferCount +
        descriptorSetLayout->writeStorageTextureCount +
        descriptorSetLayout->writeStorageBufferCount +
        descriptorSetLayout->uniformBufferCount;
    descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
    vulkanResult = renderer->vkCreateDescriptorPool(
        renderer->logicalDevice,
        &descriptorPoolInfo,
        NULL,
        &pool);
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDescriptorPool, false);
    // Append the new pool; the pool is now owned by descriptorSetPool and will
    // be destroyed with it even if set allocation below fails.
    // NOTE(review): SDL_realloc results are not checked here — matches the
    // file's prevailing OOM policy, but worth confirming.
    descriptorSetPool->poolCount += 1;
    descriptorSetPool->descriptorPools = SDL_realloc(
        descriptorSetPool->descriptorPools,
        sizeof(VkDescriptorPool) * descriptorSetPool->poolCount);
    descriptorSetPool->descriptorPools[descriptorSetPool->poolCount - 1] = pool;
    // Grow the flat descriptor-set array and fill the new tail slots.
    descriptorSetPool->descriptorSets = SDL_realloc(
        descriptorSetPool->descriptorSets,
        sizeof(VkDescriptorSet) * descriptorSetPool->poolCount * DESCRIPTOR_POOL_SIZE);
    if (!VULKAN_INTERNAL_AllocateDescriptorSets(
            renderer,
            pool,
            descriptorSetLayout->descriptorSetLayout,
            DESCRIPTOR_POOL_SIZE,
            &descriptorSetPool->descriptorSets[descriptorSetPool->descriptorSetCount])) {
        return false;
    }
    descriptorSetPool->descriptorSetCount += DESCRIPTOR_POOL_SIZE;
    return true;
}
// NOTE: these categories should be mutually exclusive
//
// Returns a cached (or newly created) DescriptorSetLayout for one shader
// stage with the given per-category binding counts. The cache lookup and
// insertion are serialized by descriptorSetLayoutFetchLock; the returned
// layout is owned by descriptorSetLayoutHashTable.
// Returns NULL (with the Vulkan error set) if vkCreateDescriptorSetLayout fails.
//
// Within a set, bindings are numbered 0..N-1. Each category's loop below
// numbers its bindings starting from (near) 0, which is only correct because
// callers pass nonzero counts for at most one category per layout.
static DescriptorSetLayout *VULKAN_INTERNAL_FetchDescriptorSetLayout(
    VulkanRenderer *renderer,
    VkShaderStageFlagBits shaderStage,
    // Category 1: read resources
    Uint32 samplerCount,
    Uint32 storageTextureCount,
    Uint32 storageBufferCount,
    // Category 2: write resources
    Uint32 writeStorageTextureCount,
    Uint32 writeStorageBufferCount,
    // Category 3: uniform buffers
    Uint32 uniformBufferCount)
{
    DescriptorSetLayoutHashTableKey key;
    SDL_zero(key);
    DescriptorSetLayout *layout = NULL;
    key.shaderStage = shaderStage;
    key.samplerCount = samplerCount;
    key.storageTextureCount = storageTextureCount;
    key.storageBufferCount = storageBufferCount;
    key.writeStorageTextureCount = writeStorageTextureCount;
    key.writeStorageBufferCount = writeStorageBufferCount;
    key.uniformBufferCount = uniformBufferCount;
    SDL_LockMutex(renderer->descriptorSetLayoutFetchLock);
    // Fast path: an identical layout was created before.
    if (SDL_FindInHashTable(
            renderer->descriptorSetLayoutHashTable,
            (const void *)&key,
            (const void **)&layout)) {
        SDL_UnlockMutex(renderer->descriptorSetLayoutFetchLock);
        return layout;
    }
    VkDescriptorSetLayout descriptorSetLayout;
    // Worst-case binding count across all categories.
    VkDescriptorSetLayoutBinding descriptorSetLayoutBindings[
        MAX_TEXTURE_SAMPLERS_PER_STAGE +
        MAX_STORAGE_TEXTURES_PER_STAGE +
        MAX_STORAGE_BUFFERS_PER_STAGE +
        MAX_COMPUTE_WRITE_TEXTURES +
        MAX_COMPUTE_WRITE_BUFFERS];
    VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo;
    descriptorSetLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    descriptorSetLayoutCreateInfo.pNext = NULL;
    descriptorSetLayoutCreateInfo.flags = 0;
    // Category 1: samplers, then storage textures, then storage buffers.
    for (Uint32 i = 0; i < samplerCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }
    for (Uint32 i = samplerCount; i < samplerCount + storageTextureCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring the storage image as a sampled image, because shaders are stupid.
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }
    for (Uint32 i = samplerCount + storageTextureCount; i < samplerCount + storageTextureCount + storageBufferCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }
    // Category 2: write resources (binding numbers restart at 0 — safe only
    // because of the mutual-exclusivity note above).
    for (Uint32 i = 0; i < writeStorageTextureCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }
    for (Uint32 i = writeStorageTextureCount; i < writeStorageTextureCount + writeStorageBufferCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }
    // Category 3: uniform buffers (binding numbers restart at 0 as well).
    for (Uint32 i = 0; i < uniformBufferCount; i += 1) {
        descriptorSetLayoutBindings[i].binding = i;
        descriptorSetLayoutBindings[i].descriptorCount = 1;
        descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        descriptorSetLayoutBindings[i].stageFlags = shaderStage;
        descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
    }
    descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
    descriptorSetLayoutCreateInfo.bindingCount =
        samplerCount +
        storageTextureCount +
        storageBufferCount +
        writeStorageTextureCount +
        writeStorageBufferCount +
        uniformBufferCount;
    VkResult vulkanResult = renderer->vkCreateDescriptorSetLayout(
        renderer->logicalDevice,
        &descriptorSetLayoutCreateInfo,
        NULL,
        &descriptorSetLayout);
    if (vulkanResult != VK_SUCCESS) {
        // Unlock before the macro returns NULL with the error set.
        SDL_UnlockMutex(renderer->descriptorSetLayoutFetchLock);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDescriptorSetLayout, NULL);
    }
    layout = SDL_malloc(sizeof(DescriptorSetLayout));
    layout->descriptorSetLayout = descriptorSetLayout;
    layout->samplerCount = samplerCount;
    layout->storageBufferCount = storageBufferCount;
    layout->storageTextureCount = storageTextureCount;
    layout->writeStorageBufferCount = writeStorageBufferCount;
    layout->writeStorageTextureCount = writeStorageTextureCount;
    layout->uniformBufferCount = uniformBufferCount;
    layout->ID = SDL_AtomicIncRef(&renderer->layoutResourceID);
    // Publish into the cache; the hash table takes ownership of key and layout.
    DescriptorSetLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(DescriptorSetLayoutHashTableKey));
    SDL_memcpy(allocedKey, &key, sizeof(DescriptorSetLayoutHashTableKey));
    SDL_InsertIntoHashTable(
        renderer->descriptorSetLayoutHashTable,
        (const void *)allocedKey,
        (const void *)layout, true);
    SDL_UnlockMutex(renderer->descriptorSetLayoutFetchLock);
    return layout;
}
  3325. static VulkanGraphicsPipelineResourceLayout *VULKAN_INTERNAL_FetchGraphicsPipelineResourceLayout(
  3326. VulkanRenderer *renderer,
  3327. VulkanShader *vertexShader,
  3328. VulkanShader *fragmentShader)
  3329. {
  3330. GraphicsPipelineResourceLayoutHashTableKey key;
  3331. SDL_zero(key);
  3332. VulkanGraphicsPipelineResourceLayout *pipelineResourceLayout = NULL;
  3333. key.vertexSamplerCount = vertexShader->numSamplers;
  3334. key.vertexStorageTextureCount = vertexShader->numStorageTextures;
  3335. key.vertexStorageBufferCount = vertexShader->numStorageBuffers;
  3336. key.vertexUniformBufferCount = vertexShader->numUniformBuffers;
  3337. key.fragmentSamplerCount = fragmentShader->numSamplers;
  3338. key.fragmentStorageTextureCount = fragmentShader->numStorageTextures;
  3339. key.fragmentStorageBufferCount = fragmentShader->numStorageBuffers;
  3340. key.fragmentUniformBufferCount = fragmentShader->numUniformBuffers;
  3341. SDL_LockMutex(renderer->graphicsPipelineLayoutFetchLock);
  3342. if (SDL_FindInHashTable(
  3343. renderer->graphicsPipelineResourceLayoutHashTable,
  3344. (const void *)&key,
  3345. (const void **)&pipelineResourceLayout)) {
  3346. SDL_UnlockMutex(renderer->graphicsPipelineLayoutFetchLock);
  3347. return pipelineResourceLayout;
  3348. }
  3349. VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
  3350. VkDescriptorSetLayout descriptorSetLayouts[4];
  3351. VkResult vulkanResult;
  3352. pipelineResourceLayout = SDL_calloc(1, sizeof(VulkanGraphicsPipelineResourceLayout));
  3353. pipelineResourceLayout->descriptorSetLayouts[0] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3354. renderer,
  3355. VK_SHADER_STAGE_VERTEX_BIT,
  3356. vertexShader->numSamplers,
  3357. vertexShader->numStorageTextures,
  3358. vertexShader->numStorageBuffers,
  3359. 0,
  3360. 0,
  3361. 0);
  3362. pipelineResourceLayout->descriptorSetLayouts[1] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3363. renderer,
  3364. VK_SHADER_STAGE_VERTEX_BIT,
  3365. 0,
  3366. 0,
  3367. 0,
  3368. 0,
  3369. 0,
  3370. vertexShader->numUniformBuffers);
  3371. pipelineResourceLayout->descriptorSetLayouts[2] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3372. renderer,
  3373. VK_SHADER_STAGE_FRAGMENT_BIT,
  3374. fragmentShader->numSamplers,
  3375. fragmentShader->numStorageTextures,
  3376. fragmentShader->numStorageBuffers,
  3377. 0,
  3378. 0,
  3379. 0);
  3380. pipelineResourceLayout->descriptorSetLayouts[3] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3381. renderer,
  3382. VK_SHADER_STAGE_FRAGMENT_BIT,
  3383. 0,
  3384. 0,
  3385. 0,
  3386. 0,
  3387. 0,
  3388. fragmentShader->numUniformBuffers);
  3389. descriptorSetLayouts[0] = pipelineResourceLayout->descriptorSetLayouts[0]->descriptorSetLayout;
  3390. descriptorSetLayouts[1] = pipelineResourceLayout->descriptorSetLayouts[1]->descriptorSetLayout;
  3391. descriptorSetLayouts[2] = pipelineResourceLayout->descriptorSetLayouts[2]->descriptorSetLayout;
  3392. descriptorSetLayouts[3] = pipelineResourceLayout->descriptorSetLayouts[3]->descriptorSetLayout;
  3393. pipelineResourceLayout->vertexSamplerCount = vertexShader->numSamplers;
  3394. pipelineResourceLayout->vertexStorageTextureCount = vertexShader->numStorageTextures;
  3395. pipelineResourceLayout->vertexStorageBufferCount = vertexShader->numStorageBuffers;
  3396. pipelineResourceLayout->vertexUniformBufferCount = vertexShader->numUniformBuffers;
  3397. pipelineResourceLayout->fragmentSamplerCount = fragmentShader->numSamplers;
  3398. pipelineResourceLayout->fragmentStorageTextureCount = fragmentShader->numStorageTextures;
  3399. pipelineResourceLayout->fragmentStorageBufferCount = fragmentShader->numStorageBuffers;
  3400. pipelineResourceLayout->fragmentUniformBufferCount = fragmentShader->numUniformBuffers;
  3401. // Create the pipeline layout
  3402. pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  3403. pipelineLayoutCreateInfo.pNext = NULL;
  3404. pipelineLayoutCreateInfo.flags = 0;
  3405. pipelineLayoutCreateInfo.setLayoutCount = 4;
  3406. pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts;
  3407. pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
  3408. pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
  3409. vulkanResult = renderer->vkCreatePipelineLayout(
  3410. renderer->logicalDevice,
  3411. &pipelineLayoutCreateInfo,
  3412. NULL,
  3413. &pipelineResourceLayout->pipelineLayout);
  3414. if (vulkanResult != VK_SUCCESS) {
  3415. VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(renderer, pipelineResourceLayout);
  3416. SDL_UnlockMutex(renderer->graphicsPipelineLayoutFetchLock);
  3417. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreatePipelineLayout, NULL);
  3418. }
  3419. GraphicsPipelineResourceLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(GraphicsPipelineResourceLayoutHashTableKey));
  3420. SDL_memcpy(allocedKey, &key, sizeof(GraphicsPipelineResourceLayoutHashTableKey));
  3421. SDL_InsertIntoHashTable(
  3422. renderer->graphicsPipelineResourceLayoutHashTable,
  3423. (const void *)allocedKey,
  3424. (const void *)pipelineResourceLayout, true);
  3425. SDL_UnlockMutex(renderer->graphicsPipelineLayoutFetchLock);
  3426. return pipelineResourceLayout;
  3427. }
  3428. static VulkanComputePipelineResourceLayout *VULKAN_INTERNAL_FetchComputePipelineResourceLayout(
  3429. VulkanRenderer *renderer,
  3430. const SDL_GPUComputePipelineCreateInfo *createinfo)
  3431. {
  3432. ComputePipelineResourceLayoutHashTableKey key;
  3433. SDL_zero(key);
  3434. VulkanComputePipelineResourceLayout *pipelineResourceLayout = NULL;
  3435. key.samplerCount = createinfo->num_samplers;
  3436. key.readonlyStorageTextureCount = createinfo->num_readonly_storage_textures;
  3437. key.readonlyStorageBufferCount = createinfo->num_readonly_storage_buffers;
  3438. key.readWriteStorageTextureCount = createinfo->num_readwrite_storage_textures;
  3439. key.readWriteStorageBufferCount = createinfo->num_readwrite_storage_buffers;
  3440. key.uniformBufferCount = createinfo->num_uniform_buffers;
  3441. SDL_LockMutex(renderer->computePipelineLayoutFetchLock);
  3442. if (SDL_FindInHashTable(
  3443. renderer->computePipelineResourceLayoutHashTable,
  3444. (const void *)&key,
  3445. (const void **)&pipelineResourceLayout)) {
  3446. SDL_UnlockMutex(renderer->computePipelineLayoutFetchLock);
  3447. return pipelineResourceLayout;
  3448. }
  3449. VkDescriptorSetLayout descriptorSetLayouts[3];
  3450. VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
  3451. VkResult vulkanResult;
  3452. pipelineResourceLayout = SDL_calloc(1, sizeof(VulkanComputePipelineResourceLayout));
  3453. pipelineResourceLayout->descriptorSetLayouts[0] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3454. renderer,
  3455. VK_SHADER_STAGE_COMPUTE_BIT,
  3456. createinfo->num_samplers,
  3457. createinfo->num_readonly_storage_textures,
  3458. createinfo->num_readonly_storage_buffers,
  3459. 0,
  3460. 0,
  3461. 0);
  3462. pipelineResourceLayout->descriptorSetLayouts[1] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3463. renderer,
  3464. VK_SHADER_STAGE_COMPUTE_BIT,
  3465. 0,
  3466. 0,
  3467. 0,
  3468. createinfo->num_readwrite_storage_textures,
  3469. createinfo->num_readwrite_storage_buffers,
  3470. 0);
  3471. pipelineResourceLayout->descriptorSetLayouts[2] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
  3472. renderer,
  3473. VK_SHADER_STAGE_COMPUTE_BIT,
  3474. 0,
  3475. 0,
  3476. 0,
  3477. 0,
  3478. 0,
  3479. createinfo->num_uniform_buffers);
  3480. descriptorSetLayouts[0] = pipelineResourceLayout->descriptorSetLayouts[0]->descriptorSetLayout;
  3481. descriptorSetLayouts[1] = pipelineResourceLayout->descriptorSetLayouts[1]->descriptorSetLayout;
  3482. descriptorSetLayouts[2] = pipelineResourceLayout->descriptorSetLayouts[2]->descriptorSetLayout;
  3483. pipelineResourceLayout->numSamplers = createinfo->num_samplers;
  3484. pipelineResourceLayout->numReadonlyStorageTextures = createinfo->num_readonly_storage_textures;
  3485. pipelineResourceLayout->numReadonlyStorageBuffers = createinfo->num_readonly_storage_buffers;
  3486. pipelineResourceLayout->numReadWriteStorageTextures = createinfo->num_readwrite_storage_textures;
  3487. pipelineResourceLayout->numReadWriteStorageBuffers = createinfo->num_readwrite_storage_buffers;
  3488. pipelineResourceLayout->numUniformBuffers = createinfo->num_uniform_buffers;
  3489. // Create the pipeline layout
  3490. pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  3491. pipelineLayoutCreateInfo.pNext = NULL;
  3492. pipelineLayoutCreateInfo.flags = 0;
  3493. pipelineLayoutCreateInfo.setLayoutCount = 3;
  3494. pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts;
  3495. pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
  3496. pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
  3497. vulkanResult = renderer->vkCreatePipelineLayout(
  3498. renderer->logicalDevice,
  3499. &pipelineLayoutCreateInfo,
  3500. NULL,
  3501. &pipelineResourceLayout->pipelineLayout);
  3502. if (vulkanResult != VK_SUCCESS) {
  3503. VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(renderer, pipelineResourceLayout);
  3504. SDL_UnlockMutex(renderer->computePipelineLayoutFetchLock);
  3505. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreatePipelineLayout, NULL);
  3506. }
  3507. ComputePipelineResourceLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(ComputePipelineResourceLayoutHashTableKey));
  3508. SDL_memcpy(allocedKey, &key, sizeof(ComputePipelineResourceLayoutHashTableKey));
  3509. SDL_InsertIntoHashTable(
  3510. renderer->computePipelineResourceLayoutHashTable,
  3511. (const void *)allocedKey,
  3512. (const void *)pipelineResourceLayout, true);
  3513. SDL_UnlockMutex(renderer->computePipelineLayoutFetchLock);
  3514. return pipelineResourceLayout;
  3515. }
  3516. // Data Buffer
  3517. static VulkanBuffer *VULKAN_INTERNAL_CreateBuffer(
  3518. VulkanRenderer *renderer,
  3519. VkDeviceSize size,
  3520. SDL_GPUBufferUsageFlags usageFlags,
  3521. VulkanBufferType type,
  3522. bool dedicated,
  3523. const char *debugName)
  3524. {
  3525. VulkanBuffer *buffer;
  3526. VkResult vulkanResult;
  3527. VkBufferCreateInfo createinfo;
  3528. VkBufferUsageFlags vulkanUsageFlags = 0;
  3529. Uint8 bindResult;
  3530. if (usageFlags & SDL_GPU_BUFFERUSAGE_VERTEX) {
  3531. vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
  3532. }
  3533. if (usageFlags & SDL_GPU_BUFFERUSAGE_INDEX) {
  3534. vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
  3535. }
  3536. if (usageFlags & (SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ |
  3537. SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ |
  3538. SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE)) {
  3539. vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
  3540. }
  3541. if (usageFlags & SDL_GPU_BUFFERUSAGE_INDIRECT) {
  3542. vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
  3543. }
  3544. if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
  3545. vulkanUsageFlags |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
  3546. } else {
  3547. // GPU buffers need transfer bits for defrag, transfer buffers need them for transfers
  3548. vulkanUsageFlags |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
  3549. }
  3550. buffer = SDL_calloc(1, sizeof(VulkanBuffer));
  3551. buffer->size = size;
  3552. buffer->usage = usageFlags;
  3553. buffer->type = type;
  3554. buffer->markedForDestroy = false;
  3555. buffer->transitioned = false;
  3556. createinfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  3557. createinfo.pNext = NULL;
  3558. createinfo.flags = 0;
  3559. createinfo.size = size;
  3560. createinfo.usage = vulkanUsageFlags;
  3561. createinfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  3562. createinfo.queueFamilyIndexCount = 1;
  3563. createinfo.pQueueFamilyIndices = &renderer->queueFamilyIndex;
  3564. // Set transfer bits so we can defrag
  3565. createinfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
  3566. vulkanResult = renderer->vkCreateBuffer(
  3567. renderer->logicalDevice,
  3568. &createinfo,
  3569. NULL,
  3570. &buffer->buffer);
  3571. if (vulkanResult != VK_SUCCESS) {
  3572. SDL_free(buffer);
  3573. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateBuffer, NULL);
  3574. }
  3575. bindResult = VULKAN_INTERNAL_BindMemoryForBuffer(
  3576. renderer,
  3577. buffer->buffer,
  3578. buffer->size,
  3579. buffer->type,
  3580. dedicated,
  3581. &buffer->usedRegion);
  3582. if (bindResult != 1) {
  3583. renderer->vkDestroyBuffer(
  3584. renderer->logicalDevice,
  3585. buffer->buffer,
  3586. NULL);
  3587. SDL_free(buffer);
  3588. SET_STRING_ERROR_AND_RETURN("Failed to bind memory for buffer!", NULL);
  3589. }
  3590. buffer->usedRegion->vulkanBuffer = buffer; // lol
  3591. SDL_SetAtomicInt(&buffer->referenceCount, 0);
  3592. if (renderer->debugMode && renderer->supportsDebugUtils && debugName != NULL) {
  3593. VkDebugUtilsObjectNameInfoEXT nameInfo;
  3594. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  3595. nameInfo.pNext = NULL;
  3596. nameInfo.pObjectName = debugName;
  3597. nameInfo.objectType = VK_OBJECT_TYPE_BUFFER;
  3598. nameInfo.objectHandle = (uint64_t)buffer->buffer;
  3599. renderer->vkSetDebugUtilsObjectNameEXT(
  3600. renderer->logicalDevice,
  3601. &nameInfo);
  3602. }
  3603. return buffer;
  3604. }
  3605. static VulkanBufferContainer *VULKAN_INTERNAL_CreateBufferContainer(
  3606. VulkanRenderer *renderer,
  3607. VkDeviceSize size,
  3608. SDL_GPUBufferUsageFlags usageFlags,
  3609. VulkanBufferType type,
  3610. bool dedicated,
  3611. const char *debugName)
  3612. {
  3613. VulkanBufferContainer *bufferContainer;
  3614. VulkanBuffer *buffer;
  3615. buffer = VULKAN_INTERNAL_CreateBuffer(
  3616. renderer,
  3617. size,
  3618. usageFlags,
  3619. type,
  3620. dedicated,
  3621. debugName);
  3622. if (buffer == NULL) {
  3623. return NULL;
  3624. }
  3625. bufferContainer = SDL_calloc(1, sizeof(VulkanBufferContainer));
  3626. bufferContainer->activeBuffer = buffer;
  3627. buffer->container = bufferContainer;
  3628. buffer->containerIndex = 0;
  3629. bufferContainer->bufferCapacity = 1;
  3630. bufferContainer->bufferCount = 1;
  3631. bufferContainer->buffers = SDL_calloc(bufferContainer->bufferCapacity, sizeof(VulkanBuffer *));
  3632. bufferContainer->buffers[0] = bufferContainer->activeBuffer;
  3633. bufferContainer->dedicated = dedicated;
  3634. bufferContainer->debugName = NULL;
  3635. if (debugName != NULL) {
  3636. bufferContainer->debugName = SDL_strdup(debugName);
  3637. }
  3638. return bufferContainer;
  3639. }
  3640. // Texture Subresource Utilities
  3641. static Uint32 VULKAN_INTERNAL_GetTextureSubresourceIndex(
  3642. Uint32 mipLevel,
  3643. Uint32 layer,
  3644. Uint32 numLevels)
  3645. {
  3646. return mipLevel + (layer * numLevels);
  3647. }
  3648. static VulkanTextureSubresource *VULKAN_INTERNAL_FetchTextureSubresource(
  3649. VulkanTextureContainer *textureContainer,
  3650. Uint32 layer,
  3651. Uint32 level)
  3652. {
  3653. Uint32 index = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  3654. level,
  3655. layer,
  3656. textureContainer->header.info.num_levels);
  3657. return &textureContainer->activeTexture->subresources[index];
  3658. }
  3659. static bool VULKAN_INTERNAL_CreateRenderTargetView(
  3660. VulkanRenderer *renderer,
  3661. VulkanTexture *texture,
  3662. Uint32 layerOrDepth,
  3663. Uint32 level,
  3664. VkFormat format,
  3665. VkComponentMapping swizzle,
  3666. VkImageView *pView)
  3667. {
  3668. VkResult vulkanResult;
  3669. VkImageViewCreateInfo imageViewCreateInfo;
  3670. // create framebuffer compatible views for RenderTarget
  3671. imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  3672. imageViewCreateInfo.pNext = NULL;
  3673. imageViewCreateInfo.flags = 0;
  3674. imageViewCreateInfo.image = texture->image;
  3675. imageViewCreateInfo.format = format;
  3676. imageViewCreateInfo.components = swizzle;
  3677. imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
  3678. imageViewCreateInfo.subresourceRange.baseMipLevel = level;
  3679. imageViewCreateInfo.subresourceRange.levelCount = 1;
  3680. imageViewCreateInfo.subresourceRange.baseArrayLayer = layerOrDepth;
  3681. imageViewCreateInfo.subresourceRange.layerCount = 1;
  3682. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
  3683. vulkanResult = renderer->vkCreateImageView(
  3684. renderer->logicalDevice,
  3685. &imageViewCreateInfo,
  3686. NULL,
  3687. pView);
  3688. if (vulkanResult != VK_SUCCESS) {
  3689. *pView = (VkImageView)VK_NULL_HANDLE;
  3690. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImageView, false);
  3691. }
  3692. return true;
  3693. }
  3694. static bool VULKAN_INTERNAL_CreateSubresourceView(
  3695. VulkanRenderer *renderer,
  3696. const SDL_GPUTextureCreateInfo *createinfo,
  3697. VulkanTexture *texture,
  3698. Uint32 layer,
  3699. Uint32 level,
  3700. VkComponentMapping swizzle,
  3701. VkImageView *pView)
  3702. {
  3703. VkResult vulkanResult;
  3704. VkImageViewCreateInfo imageViewCreateInfo;
  3705. // create framebuffer compatible views for RenderTarget
  3706. imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  3707. imageViewCreateInfo.pNext = NULL;
  3708. imageViewCreateInfo.flags = 0;
  3709. imageViewCreateInfo.image = texture->image;
  3710. imageViewCreateInfo.format = SDLToVK_TextureFormat[createinfo->format];
  3711. imageViewCreateInfo.components = swizzle;
  3712. imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
  3713. imageViewCreateInfo.subresourceRange.baseMipLevel = level;
  3714. imageViewCreateInfo.subresourceRange.levelCount = 1;
  3715. imageViewCreateInfo.subresourceRange.baseArrayLayer = layer;
  3716. imageViewCreateInfo.subresourceRange.layerCount = 1;
  3717. imageViewCreateInfo.viewType = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? VK_IMAGE_VIEW_TYPE_3D : VK_IMAGE_VIEW_TYPE_2D;
  3718. vulkanResult = renderer->vkCreateImageView(
  3719. renderer->logicalDevice,
  3720. &imageViewCreateInfo,
  3721. NULL,
  3722. pView);
  3723. if (vulkanResult != VK_SUCCESS) {
  3724. *pView = (VkImageView)VK_NULL_HANDLE;
  3725. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImageView, false);
  3726. }
  3727. return true;
  3728. }
  3729. // Swapchain
  3730. static bool VULKAN_INTERNAL_QuerySwapchainSupport(
  3731. VulkanRenderer *renderer,
  3732. VkPhysicalDevice physicalDevice,
  3733. VkSurfaceKHR surface,
  3734. SwapchainSupportDetails *outputDetails)
  3735. {
  3736. VkResult result;
  3737. VkBool32 supportsPresent;
  3738. renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
  3739. physicalDevice,
  3740. renderer->queueFamilyIndex,
  3741. surface,
  3742. &supportsPresent);
  3743. // Initialize these in case anything fails
  3744. outputDetails->formats = NULL;
  3745. outputDetails->formatsLength = 0;
  3746. outputDetails->presentModes = NULL;
  3747. outputDetails->presentModesLength = 0;
  3748. if (!supportsPresent) {
  3749. SET_STRING_ERROR_AND_RETURN("This surface does not support presenting!", false);
  3750. }
  3751. // Run the device surface queries
  3752. result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  3753. physicalDevice,
  3754. surface,
  3755. &outputDetails->capabilities);
  3756. CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceCapabilitiesKHR, false);
  3757. if (!(outputDetails->capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
  3758. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Opaque presentation unsupported! Expect weird transparency bugs!");
  3759. }
  3760. result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
  3761. physicalDevice,
  3762. surface,
  3763. &outputDetails->formatsLength,
  3764. NULL);
  3765. if (result != VK_SUCCESS) {
  3766. // Make sure the driver didn't mess up this value.
  3767. outputDetails->formatsLength = 0;
  3768. CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceFormatsKHR, false);
  3769. }
  3770. result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
  3771. physicalDevice,
  3772. surface,
  3773. &outputDetails->presentModesLength,
  3774. NULL);
  3775. if (result != VK_SUCCESS) {
  3776. // Make sure the driver didn't mess up this value.
  3777. outputDetails->presentModesLength = 0;
  3778. // Reset this one, too.
  3779. outputDetails->formatsLength = 0;
  3780. CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfacePresentModesKHR, false);
  3781. }
  3782. // Generate the arrays, if applicable
  3783. if (outputDetails->formatsLength != 0) {
  3784. outputDetails->formats = (VkSurfaceFormatKHR *)SDL_malloc(
  3785. sizeof(VkSurfaceFormatKHR) * outputDetails->formatsLength);
  3786. if (!outputDetails->formats) { // OOM
  3787. outputDetails->formatsLength = 0;
  3788. outputDetails->presentModesLength = 0;
  3789. return false;
  3790. }
  3791. result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
  3792. physicalDevice,
  3793. surface,
  3794. &outputDetails->formatsLength,
  3795. outputDetails->formats);
  3796. if (result != VK_SUCCESS) {
  3797. SDL_free(outputDetails->formats);
  3798. outputDetails->formats = NULL;
  3799. outputDetails->formatsLength = 0;
  3800. outputDetails->presentModesLength = 0;
  3801. CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceFormatsKHR, false);
  3802. }
  3803. }
  3804. if (outputDetails->presentModesLength != 0) {
  3805. outputDetails->presentModes = (VkPresentModeKHR *)SDL_malloc(
  3806. sizeof(VkPresentModeKHR) * outputDetails->presentModesLength);
  3807. if (!outputDetails->presentModes) { // OOM
  3808. SDL_free(outputDetails->formats);
  3809. outputDetails->formats = NULL;
  3810. outputDetails->formatsLength = 0;
  3811. outputDetails->presentModesLength = 0;
  3812. return false;
  3813. }
  3814. result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
  3815. physicalDevice,
  3816. surface,
  3817. &outputDetails->presentModesLength,
  3818. outputDetails->presentModes);
  3819. if (result != VK_SUCCESS) {
  3820. SDL_free(outputDetails->formats);
  3821. SDL_free(outputDetails->presentModes);
  3822. outputDetails->formats = NULL;
  3823. outputDetails->presentModes = NULL;
  3824. outputDetails->formatsLength = 0;
  3825. outputDetails->presentModesLength = 0;
  3826. CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfacePresentModesKHR, false);
  3827. }
  3828. }
  3829. /* If we made it here, all the queries were successful. This does NOT
  3830. * necessarily mean there are any supported formats or present modes!
  3831. */
  3832. return true;
  3833. }
  3834. static bool VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  3835. VkFormat desiredFormat,
  3836. VkColorSpaceKHR desiredColorSpace,
  3837. VkSurfaceFormatKHR *availableFormats,
  3838. Uint32 availableFormatsLength)
  3839. {
  3840. Uint32 i;
  3841. for (i = 0; i < availableFormatsLength; i += 1) {
  3842. if (availableFormats[i].format == desiredFormat &&
  3843. availableFormats[i].colorSpace == desiredColorSpace) {
  3844. return true;
  3845. }
  3846. }
  3847. return false;
  3848. }
  3849. static bool VULKAN_INTERNAL_VerifySwapPresentMode(
  3850. VkPresentModeKHR presentMode,
  3851. const VkPresentModeKHR *availablePresentModes,
  3852. Uint32 availablePresentModesLength)
  3853. {
  3854. Uint32 i;
  3855. for (i = 0; i < availablePresentModesLength; i += 1) {
  3856. if (availablePresentModes[i] == presentMode) {
  3857. return true;
  3858. }
  3859. }
  3860. return false;
  3861. }
  3862. /* It would be nice if VULKAN_INTERNAL_CreateSwapchain could return a bool.
  3863. * Unfortunately, some Win32 NVIDIA drivers are stupid
  3864. * and will return surface extents of (0, 0)
  3865. * in certain edge cases, and the swapchain extents are not allowed to be 0.
  3866. * In this case, the client probably still wants to claim the window
  3867. * or recreate the swapchain, so we should return 2 to indicate retry.
  3868. * -cosmonaut
  3869. */
  3870. #define VULKAN_INTERNAL_TRY_AGAIN 2
/* Creates (or recreates) the swapchain for a claimed window.
 *
 * Returns true on success, false on failure, or VULKAN_INTERNAL_TRY_AGAIN (2)
 * when the surface reported a (0, 0) extent (see the comment above the macro)
 * and the caller should retry instead of treating it as a hard error.
 *
 * On success this also (re)builds windowData's texture containers, the
 * per-frame imageAvailable semaphores, and the per-image renderFinished
 * semaphores, and clears needsSwapchainRecreate.
 */
static Uint32 VULKAN_INTERNAL_CreateSwapchain(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    VkResult vulkanResult;
    VkSwapchainCreateInfoKHR swapchainCreateInfo;
    VkImage *swapchainImages;
    VkSemaphoreCreateInfo semaphoreCreateInfo;
    SwapchainSupportDetails swapchainSupportDetails;
    bool hasValidSwapchainComposition, hasValidPresentMode;
    VkCompositeAlphaFlagsKHR compositeAlphaFlag = 0;
    Uint32 i;
    // A fresh swapchain restarts frame-pacing bookkeeping.
    windowData->frameCounter = 0;
    // Query capabilities, formats, and present modes for this surface.
    // On success we own swapchainSupportDetails.formats/presentModes and must
    // free them on every exit path below.
    if (!VULKAN_INTERNAL_QuerySwapchainSupport(
            renderer,
            renderer->physicalDevice,
            windowData->surface,
            &swapchainSupportDetails)) {
        return false;
    }
    // Verify that we can use the requested composition and present mode
    windowData->format = SwapchainCompositionToFormat[windowData->swapchainComposition];
    windowData->colorSpace = SwapchainCompositionToColorSpace[windowData->swapchainComposition];
    windowData->swapchainSwizzle = SwapchainCompositionSwizzle[windowData->swapchainComposition];
    windowData->usingFallbackFormat = false;
    hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
        windowData->format,
        windowData->colorSpace,
        swapchainSupportDetails.formats,
        swapchainSupportDetails.formatsLength);
    if (!hasValidSwapchainComposition) {
        // Let's try again with the fallback format...
        windowData->format = SwapchainCompositionToFallbackFormat[windowData->swapchainComposition];
        windowData->usingFallbackFormat = true;
        hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
            windowData->format,
            windowData->colorSpace,
            swapchainSupportDetails.formats,
            swapchainSupportDetails.formatsLength);
    }
    hasValidPresentMode = VULKAN_INTERNAL_VerifySwapPresentMode(
        SDLToVK_PresentMode[windowData->presentMode],
        swapchainSupportDetails.presentModes,
        swapchainSupportDetails.presentModesLength);
    if (!hasValidSwapchainComposition || !hasValidPresentMode) {
        // Free the support arrays before reporting the specific failure.
        if (swapchainSupportDetails.formatsLength > 0) {
            SDL_free(swapchainSupportDetails.formats);
        }
        if (swapchainSupportDetails.presentModesLength > 0) {
            SDL_free(swapchainSupportDetails.presentModes);
        }
        if (!hasValidSwapchainComposition) {
            SET_STRING_ERROR_AND_RETURN("Device does not support requested swapchain composition!", false);
        }
        if (!hasValidPresentMode) {
            SET_STRING_ERROR_AND_RETURN("Device does not support requested present_mode!", false);
        }
        return false;
    }
    // NVIDIA + Win32 can return 0 extent when the window is minimized. Try again!
    if (swapchainSupportDetails.capabilities.currentExtent.width == 0 ||
        swapchainSupportDetails.capabilities.currentExtent.height == 0) {
        if (swapchainSupportDetails.formatsLength > 0) {
            SDL_free(swapchainSupportDetails.formats);
        }
        if (swapchainSupportDetails.presentModesLength > 0) {
            SDL_free(swapchainSupportDetails.presentModes);
        }
        return VULKAN_INTERNAL_TRY_AGAIN;
    }
    Uint32 requestedImageCount = renderer->allowedFramesInFlight;
#ifdef SDL_PLATFORM_APPLE
    // On Apple platforms the surface dictates the extent.
    windowData->width = swapchainSupportDetails.capabilities.currentExtent.width;
    windowData->height = swapchainSupportDetails.capabilities.currentExtent.height;
#else
    // Elsewhere, clamp the requested size into the surface's allowed range.
    windowData->width = SDL_clamp(
        windowData->swapchainCreateWidth,
        swapchainSupportDetails.capabilities.minImageExtent.width,
        swapchainSupportDetails.capabilities.maxImageExtent.width);
    windowData->height = SDL_clamp(windowData->swapchainCreateHeight,
                                   swapchainSupportDetails.capabilities.minImageExtent.height,
                                   swapchainSupportDetails.capabilities.maxImageExtent.height);
#endif
    // Clamp the image count into the surface's [min, max] range
    // (maxImageCount == 0 means "no maximum").
    if (swapchainSupportDetails.capabilities.maxImageCount > 0 &&
        requestedImageCount > swapchainSupportDetails.capabilities.maxImageCount) {
        requestedImageCount = swapchainSupportDetails.capabilities.maxImageCount;
    }
    if (requestedImageCount < swapchainSupportDetails.capabilities.minImageCount) {
        requestedImageCount = swapchainSupportDetails.capabilities.minImageCount;
    }
    if (windowData->presentMode == SDL_GPU_PRESENTMODE_MAILBOX) {
        /* Required for proper triple-buffering.
         * Note that this is below the above maxImageCount check!
         * If the driver advertises MAILBOX but does not support 3 swap
         * images, it's not real mailbox support, so let it fail hard.
         * -flibit
         */
        requestedImageCount = SDL_max(requestedImageCount, 3);
    }
    // Default to opaque, if available, followed by inherit, and overwrite with a value that supports transparency, if necessary.
    if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) {
        compositeAlphaFlag = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
    } else if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) {
        compositeAlphaFlag = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
    }
    if ((windowData->window->flags & SDL_WINDOW_TRANSPARENT) || !compositeAlphaFlag) {
        if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR) {
            compositeAlphaFlag = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;
        } else if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR) {
            compositeAlphaFlag = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR;
        } else if (swapchainSupportDetails.capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) {
            compositeAlphaFlag = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
        } else {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "SDL_WINDOW_TRANSPARENT flag set, but no suitable swapchain composite alpha value supported!");
        }
    }
    swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchainCreateInfo.pNext = NULL;
    swapchainCreateInfo.flags = 0;
    swapchainCreateInfo.surface = windowData->surface;
    swapchainCreateInfo.minImageCount = requestedImageCount;
    swapchainCreateInfo.imageFormat = windowData->format;
    swapchainCreateInfo.imageColorSpace = windowData->colorSpace;
    swapchainCreateInfo.imageExtent.width = windowData->width;
    swapchainCreateInfo.imageExtent.height = windowData->height;
    swapchainCreateInfo.imageArrayLayers = 1;
    // TRANSFER_DST is needed so blit/copy-to-swapchain works.
    swapchainCreateInfo.imageUsage =
        VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
        VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    swapchainCreateInfo.queueFamilyIndexCount = 0;
    swapchainCreateInfo.pQueueFamilyIndices = NULL;
#ifdef SDL_PLATFORM_ANDROID
    // Android: ignore the surface pre-rotation and let the compositor handle it.
    swapchainCreateInfo.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
#else
    swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform;
#endif
    swapchainCreateInfo.compositeAlpha = compositeAlphaFlag;
    swapchainCreateInfo.presentMode = SDLToVK_PresentMode[windowData->presentMode];
    swapchainCreateInfo.clipped = VK_TRUE;
    // The old swapchain could belong to a surface that no longer exists due to app switching.
    swapchainCreateInfo.oldSwapchain = windowData->needsSurfaceRecreate ? (VkSwapchainKHR)0 : windowData->swapchain;
    vulkanResult = renderer->vkCreateSwapchainKHR(
        renderer->logicalDevice,
        &swapchainCreateInfo,
        NULL,
        &windowData->swapchain);
    // The old swapchain (retired by oldSwapchain above) must be destroyed
    // regardless of whether creation succeeded.
    if (swapchainCreateInfo.oldSwapchain != VK_NULL_HANDLE) {
        renderer->vkDestroySwapchainKHR(renderer->logicalDevice, swapchainCreateInfo.oldSwapchain, NULL);
    }
    // Support arrays are no longer needed past this point.
    if (swapchainSupportDetails.formatsLength > 0) {
        SDL_free(swapchainSupportDetails.formats);
    }
    if (swapchainSupportDetails.presentModesLength > 0) {
        SDL_free(swapchainSupportDetails.presentModes);
    }
    if (vulkanResult != VK_SUCCESS) {
        windowData->swapchain = VK_NULL_HANDLE;
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSwapchainKHR, false);
    }
    // First call: query the image count the driver actually created.
    vulkanResult = renderer->vkGetSwapchainImagesKHR(
        renderer->logicalDevice,
        windowData->swapchain,
        &windowData->imageCount,
        NULL);
    // NOTE(review): on failure this returns without destroying the new
    // swapchain — presumably cleaned up on window release; verify.
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkGetSwapchainImagesKHR, false);
    windowData->textureContainers = SDL_malloc(
        sizeof(VulkanTextureContainer) * windowData->imageCount);
    if (!windowData->textureContainers) { // OOM
        renderer->vkDestroySwapchainKHR(
            renderer->logicalDevice,
            windowData->swapchain,
            NULL);
        windowData->swapchain = VK_NULL_HANDLE;
        return false;
    }
    swapchainImages = SDL_stack_alloc(VkImage, windowData->imageCount);
    // Second call: fetch the actual image handles.
    vulkanResult = renderer->vkGetSwapchainImagesKHR(
        renderer->logicalDevice,
        windowData->swapchain,
        &windowData->imageCount,
        swapchainImages);
    // NOTE(review): this early return leaks textureContainers (and the
    // stack-alloc'd swapchainImages if SDL_stack_alloc fell back to the
    // heap) — pre-existing; confirm before changing.
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkGetSwapchainImagesKHR, false);
    for (i = 0; i < windowData->imageCount; i += 1) {
        // Initialize dummy container
        SDL_zero(windowData->textureContainers[i]);
        windowData->textureContainers[i].canBeCycled = false;
        windowData->textureContainers[i].header.info.width = windowData->width;
        windowData->textureContainers[i].header.info.height = windowData->height;
        windowData->textureContainers[i].header.info.layer_count_or_depth = 1;
        windowData->textureContainers[i].header.info.format = SwapchainCompositionToSDLFormat(
            windowData->swapchainComposition,
            windowData->usingFallbackFormat);
        windowData->textureContainers[i].header.info.type = SDL_GPU_TEXTURETYPE_2D;
        windowData->textureContainers[i].header.info.num_levels = 1;
        windowData->textureContainers[i].header.info.sample_count = SDL_GPU_SAMPLECOUNT_1;
        windowData->textureContainers[i].header.info.usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET;
        // NOTE(review): the SDL_mallocs below are not checked for OOM —
        // pre-existing in this loop.
        windowData->textureContainers[i].activeTexture = SDL_malloc(sizeof(VulkanTexture));
        windowData->textureContainers[i].activeTexture->image = swapchainImages[i];
        // Swapchain memory is managed by the driver
        windowData->textureContainers[i].activeTexture->usedRegion = NULL;
        windowData->textureContainers[i].activeTexture->swizzle = windowData->swapchainSwizzle;
        windowData->textureContainers[i].activeTexture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
        windowData->textureContainers[i].activeTexture->depth = 1;
        windowData->textureContainers[i].activeTexture->usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET;
        windowData->textureContainers[i].activeTexture->container = &windowData->textureContainers[i];
        SDL_SetAtomicInt(&windowData->textureContainers[i].activeTexture->referenceCount, 0);
        // Create slice
        windowData->textureContainers[i].activeTexture->subresourceCount = 1;
        windowData->textureContainers[i].activeTexture->subresources = SDL_malloc(sizeof(VulkanTextureSubresource));
        windowData->textureContainers[i].activeTexture->subresources[0].parent = windowData->textureContainers[i].activeTexture;
        windowData->textureContainers[i].activeTexture->subresources[0].layer = 0;
        windowData->textureContainers[i].activeTexture->subresources[0].level = 0;
        windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews = SDL_malloc(sizeof(VkImageView));
        if (!VULKAN_INTERNAL_CreateRenderTargetView(
                renderer,
                windowData->textureContainers[i].activeTexture,
                0,
                0,
                windowData->format,
                windowData->swapchainSwizzle,
                &windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0])) {
            renderer->vkDestroySwapchainKHR(
                renderer->logicalDevice,
                windowData->swapchain,
                NULL);
            windowData->swapchain = VK_NULL_HANDLE;
            return false;
        }
    }
    SDL_stack_free(swapchainImages);
    semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreCreateInfo.pNext = NULL;
    semaphoreCreateInfo.flags = 0;
    // One acquire semaphore per frame in flight.
    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        vulkanResult = renderer->vkCreateSemaphore(
            renderer->logicalDevice,
            &semaphoreCreateInfo,
            NULL,
            &windowData->imageAvailableSemaphore[i]);
        if (vulkanResult != VK_SUCCESS) {
            renderer->vkDestroySwapchainKHR(
                renderer->logicalDevice,
                windowData->swapchain,
                NULL);
            windowData->swapchain = VK_NULL_HANDLE;
            CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSemaphore, false);
        }
        windowData->inFlightFences[i] = NULL;
    }
    // One present semaphore per swapchain image.
    // NOTE(review): this SDL_malloc result is not checked — pre-existing.
    windowData->renderFinishedSemaphore = SDL_malloc(
        sizeof(VkSemaphore) * windowData->imageCount);
    for (i = 0; i < windowData->imageCount; i += 1) {
        vulkanResult = renderer->vkCreateSemaphore(
            renderer->logicalDevice,
            &semaphoreCreateInfo,
            NULL,
            &windowData->renderFinishedSemaphore[i]);
        if (vulkanResult != VK_SUCCESS) {
            renderer->vkDestroySwapchainKHR(
                renderer->logicalDevice,
                windowData->swapchain,
                NULL);
            windowData->swapchain = VK_NULL_HANDLE;
            CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSemaphore, false);
        }
    }
    windowData->needsSwapchainRecreate = false;
    return true;
}
  4141. // Command Buffers
  4142. static bool VULKAN_INTERNAL_BeginCommandBuffer(
  4143. VulkanRenderer *renderer,
  4144. VulkanCommandBuffer *commandBuffer)
  4145. {
  4146. VkCommandBufferBeginInfo beginInfo;
  4147. VkResult result;
  4148. beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  4149. beginInfo.pNext = NULL;
  4150. beginInfo.flags = 0;
  4151. beginInfo.pInheritanceInfo = NULL;
  4152. beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  4153. result = renderer->vkBeginCommandBuffer(
  4154. commandBuffer->commandBuffer,
  4155. &beginInfo);
  4156. CHECK_VULKAN_ERROR_AND_RETURN(result, vkBeginCommandBuffer, false);
  4157. return true;
  4158. }
  4159. static bool VULKAN_INTERNAL_EndCommandBuffer(
  4160. VulkanRenderer *renderer,
  4161. VulkanCommandBuffer *commandBuffer)
  4162. {
  4163. VkResult result = renderer->vkEndCommandBuffer(
  4164. commandBuffer->commandBuffer);
  4165. CHECK_VULKAN_ERROR_AND_RETURN(result, vkEndCommandBuffer, false);
  4166. return true;
  4167. }
/* Tears down the entire Vulkan device: windows, pooled resources, caches,
 * memory allocations, mutexes, and finally the device/instance themselves.
 * The teardown order below is deliberate — resources must be released before
 * the allocator, and everything before vkDestroyDevice.
 */
static void VULKAN_DestroyDevice(
    SDL_GPUDevice *device)
{
    VulkanRenderer *renderer = (VulkanRenderer *)device->driverData;
    VulkanMemorySubAllocator *allocator;
    // Drain the GPU before touching anything.
    VULKAN_Wait(device->driverData);
    // Release windows in reverse claim order; ReleaseWindow shrinks the array.
    for (Sint32 i = renderer->claimedWindowCount - 1; i >= 0; i -= 1) {
        VULKAN_ReleaseWindow(device->driverData, renderer->claimedWindows[i]->window);
    }
    SDL_free(renderer->claimedWindows);
    // Wait again: releasing windows may have queued more GPU work.
    VULKAN_Wait(device->driverData);
    SDL_free(renderer->submittedCommandBuffers);
    // Destroy pooled uniform buffers (buffer object + wrapper struct each).
    for (Uint32 i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
        VULKAN_INTERNAL_DestroyBuffer(
            renderer,
            renderer->uniformBufferPool[i]->buffer);
        SDL_free(renderer->uniformBufferPool[i]);
    }
    SDL_free(renderer->uniformBufferPool);
    for (Uint32 i = 0; i < renderer->descriptorSetCachePoolCount; i += 1) {
        VULKAN_INTERNAL_DestroyDescriptorSetCache(
            renderer,
            renderer->descriptorSetCachePool[i]);
    }
    SDL_free(renderer->descriptorSetCachePool);
    for (Uint32 i = 0; i < renderer->fencePool.availableFenceCount; i += 1) {
        renderer->vkDestroyFence(
            renderer->logicalDevice,
            renderer->fencePool.availableFences[i]->fence,
            NULL);
        SDL_free(renderer->fencePool.availableFences[i]);
    }
    SDL_free(renderer->fencePool.availableFences);
    SDL_DestroyMutex(renderer->fencePool.lock);
    // Hash tables own their values via destructors registered at creation
    // time (not visible here) — presumably; verify against setup code.
    SDL_DestroyHashTable(renderer->commandPoolHashTable);
    SDL_DestroyHashTable(renderer->renderPassHashTable);
    SDL_DestroyHashTable(renderer->framebufferHashTable);
    SDL_DestroyHashTable(renderer->graphicsPipelineResourceLayoutHashTable);
    SDL_DestroyHashTable(renderer->computePipelineResourceLayoutHashTable);
    SDL_DestroyHashTable(renderer->descriptorSetLayoutHashTable);
    // Tear down every sub-allocator: used regions first, then the backing
    // allocations, both iterated in reverse because removal compacts arrays.
    for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
        allocator = &renderer->memoryAllocator->subAllocators[i];
        for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
            for (Sint32 k = allocator->allocations[j]->usedRegionCount - 1; k >= 0; k -= 1) {
                VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                    renderer,
                    allocator->allocations[j]->usedRegions[k]);
            }
            VULKAN_INTERNAL_DeallocateMemory(
                renderer,
                allocator,
                j);
        }
        SDL_free(renderer->memoryAllocator->subAllocators[i].allocations);
        SDL_free(renderer->memoryAllocator->subAllocators[i].sortedFreeRegions);
    }
    SDL_free(renderer->memoryAllocator);
    // Deferred-destruction queues (contents already destroyed by this point).
    SDL_free(renderer->texturesToDestroy);
    SDL_free(renderer->buffersToDestroy);
    SDL_free(renderer->graphicsPipelinesToDestroy);
    SDL_free(renderer->computePipelinesToDestroy);
    SDL_free(renderer->shadersToDestroy);
    SDL_free(renderer->samplersToDestroy);
    SDL_free(renderer->framebuffersToDestroy);
    SDL_free(renderer->allocationsToDefrag);
    SDL_DestroyMutex(renderer->allocatorLock);
    SDL_DestroyMutex(renderer->disposeLock);
    SDL_DestroyMutex(renderer->submitLock);
    SDL_DestroyMutex(renderer->acquireCommandBufferLock);
    SDL_DestroyMutex(renderer->acquireUniformBufferLock);
    SDL_DestroyMutex(renderer->renderPassFetchLock);
    SDL_DestroyMutex(renderer->framebufferFetchLock);
    SDL_DestroyMutex(renderer->graphicsPipelineLayoutFetchLock);
    SDL_DestroyMutex(renderer->computePipelineLayoutFetchLock);
    SDL_DestroyMutex(renderer->descriptorSetLayoutFetchLock);
    SDL_DestroyMutex(renderer->windowLock);
    // Device before instance, then release everything host-side.
    renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
    renderer->vkDestroyInstance(renderer->instance, NULL);
    SDL_DestroyProperties(renderer->props);
    SDL_free(renderer);
    SDL_free(device);
    SDL_Vulkan_UnloadLibrary();
}
  4251. static SDL_PropertiesID VULKAN_GetDeviceProperties(
  4252. SDL_GPUDevice *device)
  4253. {
  4254. VulkanRenderer *renderer = (VulkanRenderer *)device->driverData;
  4255. return renderer->props;
  4256. }
  4257. static DescriptorSetCache *VULKAN_INTERNAL_AcquireDescriptorSetCache(
  4258. VulkanRenderer *renderer)
  4259. {
  4260. DescriptorSetCache *cache;
  4261. if (renderer->descriptorSetCachePoolCount == 0) {
  4262. cache = SDL_malloc(sizeof(DescriptorSetCache));
  4263. cache->poolCount = 0;
  4264. cache->pools = NULL;
  4265. } else {
  4266. cache = renderer->descriptorSetCachePool[renderer->descriptorSetCachePoolCount - 1];
  4267. renderer->descriptorSetCachePoolCount -= 1;
  4268. }
  4269. return cache;
  4270. }
  4271. static void VULKAN_INTERNAL_ReturnDescriptorSetCacheToPool(
  4272. VulkanRenderer *renderer,
  4273. DescriptorSetCache *descriptorSetCache)
  4274. {
  4275. EXPAND_ARRAY_IF_NEEDED(
  4276. renderer->descriptorSetCachePool,
  4277. DescriptorSetCache *,
  4278. renderer->descriptorSetCachePoolCount + 1,
  4279. renderer->descriptorSetCachePoolCapacity,
  4280. renderer->descriptorSetCachePoolCapacity * 2);
  4281. renderer->descriptorSetCachePool[renderer->descriptorSetCachePoolCount] = descriptorSetCache;
  4282. renderer->descriptorSetCachePoolCount += 1;
  4283. for (Uint32 i = 0; i < descriptorSetCache->poolCount; i += 1) {
  4284. descriptorSetCache->pools[i].descriptorSetIndex = 0;
  4285. }
  4286. }
  4287. static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet(
  4288. VulkanRenderer *renderer,
  4289. VulkanCommandBuffer *vulkanCommandBuffer,
  4290. DescriptorSetLayout *descriptorSetLayout)
  4291. {
  4292. // Grow the pool to meet the descriptor set layout ID
  4293. if (descriptorSetLayout->ID >= vulkanCommandBuffer->descriptorSetCache->poolCount) {
  4294. vulkanCommandBuffer->descriptorSetCache->pools = SDL_realloc(
  4295. vulkanCommandBuffer->descriptorSetCache->pools,
  4296. sizeof(DescriptorSetPool) * (descriptorSetLayout->ID + 1));
  4297. for (Uint32 i = vulkanCommandBuffer->descriptorSetCache->poolCount; i < descriptorSetLayout->ID + 1; i += 1) {
  4298. SDL_zero(vulkanCommandBuffer->descriptorSetCache->pools[i]);
  4299. }
  4300. vulkanCommandBuffer->descriptorSetCache->poolCount = descriptorSetLayout->ID + 1;
  4301. }
  4302. DescriptorSetPool *pool =
  4303. &vulkanCommandBuffer->descriptorSetCache->pools[descriptorSetLayout->ID];
  4304. if (pool->descriptorSetIndex == pool->descriptorSetCount) {
  4305. if (!VULKAN_INTERNAL_AllocateDescriptorsFromPool(
  4306. renderer,
  4307. descriptorSetLayout,
  4308. pool)) {
  4309. return VK_NULL_HANDLE;
  4310. }
  4311. }
  4312. VkDescriptorSet descriptorSet = pool->descriptorSets[pool->descriptorSetIndex];
  4313. pool->descriptorSetIndex += 1;
  4314. return descriptorSet;
  4315. }
  4316. static void VULKAN_INTERNAL_BindGraphicsDescriptorSets(
  4317. VulkanRenderer *renderer,
  4318. VulkanCommandBuffer *commandBuffer)
  4319. {
  4320. VulkanGraphicsPipelineResourceLayout *resourceLayout;
  4321. DescriptorSetLayout *descriptorSetLayout;
  4322. VkWriteDescriptorSet writeDescriptorSets[
  4323. (MAX_TEXTURE_SAMPLERS_PER_STAGE +
  4324. MAX_STORAGE_TEXTURES_PER_STAGE +
  4325. MAX_STORAGE_BUFFERS_PER_STAGE +
  4326. MAX_UNIFORM_BUFFERS_PER_STAGE) * 2];
  4327. VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE * 2];
  4328. VkDescriptorImageInfo imageInfos[(MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE) * 2];
  4329. Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE * 2];
  4330. Uint32 writeCount = 0;
  4331. Uint32 bufferInfoCount = 0;
  4332. Uint32 imageInfoCount = 0;
  4333. Uint32 dynamicOffsetCount = 0;
  4334. if (
  4335. !commandBuffer->needVertexBufferBind &&
  4336. !commandBuffer->needNewVertexResourceDescriptorSet &&
  4337. !commandBuffer->needNewVertexUniformDescriptorSet &&
  4338. !commandBuffer->needNewVertexUniformOffsets &&
  4339. !commandBuffer->needNewFragmentResourceDescriptorSet &&
  4340. !commandBuffer->needNewFragmentUniformDescriptorSet &&
  4341. !commandBuffer->needNewFragmentUniformOffsets
  4342. ) {
  4343. return;
  4344. }
  4345. if (commandBuffer->needVertexBufferBind && commandBuffer->vertexBufferCount > 0) {
  4346. renderer->vkCmdBindVertexBuffers(
  4347. commandBuffer->commandBuffer,
  4348. 0,
  4349. commandBuffer->vertexBufferCount,
  4350. commandBuffer->vertexBuffers,
  4351. commandBuffer->vertexBufferOffsets);
  4352. commandBuffer->needVertexBufferBind = false;
  4353. }
  4354. resourceLayout = commandBuffer->currentGraphicsPipeline->resourceLayout;
  4355. if (commandBuffer->needNewVertexResourceDescriptorSet) {
  4356. descriptorSetLayout = resourceLayout->descriptorSetLayouts[0];
  4357. commandBuffer->vertexResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
  4358. renderer,
  4359. commandBuffer,
  4360. descriptorSetLayout);
  4361. for (Uint32 i = 0; i < resourceLayout->vertexSamplerCount; i += 1) {
  4362. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4363. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4364. currentWriteDescriptorSet->pNext = NULL;
  4365. currentWriteDescriptorSet->descriptorCount = 1;
  4366. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  4367. currentWriteDescriptorSet->dstArrayElement = 0;
  4368. currentWriteDescriptorSet->dstBinding = i;
  4369. currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
  4370. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4371. currentWriteDescriptorSet->pBufferInfo = NULL;
  4372. imageInfos[imageInfoCount].sampler = commandBuffer->vertexSamplerBindings[i];
  4373. imageInfos[imageInfoCount].imageView = commandBuffer->vertexSamplerTextureViewBindings[i];
  4374. imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  4375. currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
  4376. writeCount += 1;
  4377. imageInfoCount += 1;
  4378. }
  4379. for (Uint32 i = 0; i < resourceLayout->vertexStorageTextureCount; i += 1) {
  4380. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4381. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4382. currentWriteDescriptorSet->pNext = NULL;
  4383. currentWriteDescriptorSet->descriptorCount = 1;
  4384. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring a storage image as a sampled image, because shaders are stupid.
  4385. currentWriteDescriptorSet->dstArrayElement = 0;
  4386. currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + i;
  4387. currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
  4388. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4389. currentWriteDescriptorSet->pBufferInfo = NULL;
  4390. imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
  4391. imageInfos[imageInfoCount].imageView = commandBuffer->vertexStorageTextureViewBindings[i];
  4392. imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  4393. currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
  4394. writeCount += 1;
  4395. imageInfoCount += 1;
  4396. }
  4397. for (Uint32 i = 0; i < resourceLayout->vertexStorageBufferCount; i += 1) {
  4398. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4399. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4400. currentWriteDescriptorSet->pNext = NULL;
  4401. currentWriteDescriptorSet->descriptorCount = 1;
  4402. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  4403. currentWriteDescriptorSet->dstArrayElement = 0;
  4404. currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + resourceLayout->vertexStorageTextureCount + i;
  4405. currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
  4406. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4407. currentWriteDescriptorSet->pImageInfo = NULL;
  4408. bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexStorageBufferBindings[i];
  4409. bufferInfos[bufferInfoCount].offset = 0;
  4410. bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;
  4411. currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
  4412. writeCount += 1;
  4413. bufferInfoCount += 1;
  4414. }
  4415. commandBuffer->needNewVertexResourceDescriptorSet = false;
  4416. }
  4417. if (commandBuffer->needNewVertexUniformDescriptorSet) {
  4418. descriptorSetLayout = resourceLayout->descriptorSetLayouts[1];
  4419. commandBuffer->vertexUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
  4420. renderer,
  4421. commandBuffer,
  4422. descriptorSetLayout);
  4423. for (Uint32 i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) {
  4424. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4425. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4426. currentWriteDescriptorSet->pNext = NULL;
  4427. currentWriteDescriptorSet->descriptorCount = 1;
  4428. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  4429. currentWriteDescriptorSet->dstArrayElement = 0;
  4430. currentWriteDescriptorSet->dstBinding = i;
  4431. currentWriteDescriptorSet->dstSet = commandBuffer->vertexUniformDescriptorSet;
  4432. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4433. currentWriteDescriptorSet->pImageInfo = NULL;
  4434. bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexUniformBuffers[i]->buffer->buffer;
  4435. bufferInfos[bufferInfoCount].offset = 0;
  4436. bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;
  4437. currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
  4438. writeCount += 1;
  4439. bufferInfoCount += 1;
  4440. }
  4441. commandBuffer->needNewVertexUniformDescriptorSet = false;
  4442. }
  4443. for (Uint32 i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) {
  4444. dynamicOffsets[dynamicOffsetCount] = commandBuffer->vertexUniformBuffers[i]->drawOffset;
  4445. dynamicOffsetCount += 1;
  4446. }
  4447. if (commandBuffer->needNewFragmentResourceDescriptorSet) {
  4448. descriptorSetLayout = resourceLayout->descriptorSetLayouts[2];
  4449. commandBuffer->fragmentResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
  4450. renderer,
  4451. commandBuffer,
  4452. descriptorSetLayout);
  4453. for (Uint32 i = 0; i < resourceLayout->fragmentSamplerCount; i += 1) {
  4454. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4455. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4456. currentWriteDescriptorSet->pNext = NULL;
  4457. currentWriteDescriptorSet->descriptorCount = 1;
  4458. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  4459. currentWriteDescriptorSet->dstArrayElement = 0;
  4460. currentWriteDescriptorSet->dstBinding = i;
  4461. currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
  4462. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4463. currentWriteDescriptorSet->pBufferInfo = NULL;
  4464. imageInfos[imageInfoCount].sampler = commandBuffer->fragmentSamplerBindings[i];
  4465. imageInfos[imageInfoCount].imageView = commandBuffer->fragmentSamplerTextureViewBindings[i];
  4466. imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  4467. currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
  4468. writeCount += 1;
  4469. imageInfoCount += 1;
  4470. }
  4471. for (Uint32 i = 0; i < resourceLayout->fragmentStorageTextureCount; i += 1) {
  4472. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4473. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4474. currentWriteDescriptorSet->pNext = NULL;
  4475. currentWriteDescriptorSet->descriptorCount = 1;
  4476. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring a storage image as a sampled image, because shaders are stupid.
  4477. currentWriteDescriptorSet->dstArrayElement = 0;
  4478. currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + i;
  4479. currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
  4480. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4481. currentWriteDescriptorSet->pBufferInfo = NULL;
  4482. imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
  4483. imageInfos[imageInfoCount].imageView = commandBuffer->fragmentStorageTextureViewBindings[i];
  4484. imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  4485. currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
  4486. writeCount += 1;
  4487. imageInfoCount += 1;
  4488. }
  4489. for (Uint32 i = 0; i < resourceLayout->fragmentStorageBufferCount; i += 1) {
  4490. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4491. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4492. currentWriteDescriptorSet->pNext = NULL;
  4493. currentWriteDescriptorSet->descriptorCount = 1;
  4494. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  4495. currentWriteDescriptorSet->dstArrayElement = 0;
  4496. currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + resourceLayout->fragmentStorageTextureCount + i;
  4497. currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
  4498. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4499. currentWriteDescriptorSet->pImageInfo = NULL;
  4500. bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentStorageBufferBindings[i];
  4501. bufferInfos[bufferInfoCount].offset = 0;
  4502. bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;
  4503. currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
  4504. writeCount += 1;
  4505. bufferInfoCount += 1;
  4506. }
  4507. commandBuffer->needNewFragmentResourceDescriptorSet = false;
  4508. }
  4509. if (commandBuffer->needNewFragmentUniformDescriptorSet) {
  4510. descriptorSetLayout = resourceLayout->descriptorSetLayouts[3];
  4511. commandBuffer->fragmentUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
  4512. renderer,
  4513. commandBuffer,
  4514. descriptorSetLayout);
  4515. for (Uint32 i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) {
  4516. VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
  4517. currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  4518. currentWriteDescriptorSet->pNext = NULL;
  4519. currentWriteDescriptorSet->descriptorCount = 1;
  4520. currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  4521. currentWriteDescriptorSet->dstArrayElement = 0;
  4522. currentWriteDescriptorSet->dstBinding = i;
  4523. currentWriteDescriptorSet->dstSet = commandBuffer->fragmentUniformDescriptorSet;
  4524. currentWriteDescriptorSet->pTexelBufferView = NULL;
  4525. currentWriteDescriptorSet->pImageInfo = NULL;
  4526. bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentUniformBuffers[i]->buffer->buffer;
  4527. bufferInfos[bufferInfoCount].offset = 0;
  4528. bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;
  4529. currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
  4530. writeCount += 1;
  4531. bufferInfoCount += 1;
  4532. }
  4533. commandBuffer->needNewFragmentUniformDescriptorSet = false;
  4534. }
  4535. for (Uint32 i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) {
  4536. dynamicOffsets[dynamicOffsetCount] = commandBuffer->fragmentUniformBuffers[i]->drawOffset;
  4537. dynamicOffsetCount += 1;
  4538. }
  4539. renderer->vkUpdateDescriptorSets(
  4540. renderer->logicalDevice,
  4541. writeCount,
  4542. writeDescriptorSets,
  4543. 0,
  4544. NULL);
  4545. VkDescriptorSet sets[4];
  4546. sets[0] = commandBuffer->vertexResourceDescriptorSet;
  4547. sets[1] = commandBuffer->vertexUniformDescriptorSet;
  4548. sets[2] = commandBuffer->fragmentResourceDescriptorSet;
  4549. sets[3] = commandBuffer->fragmentUniformDescriptorSet;
  4550. renderer->vkCmdBindDescriptorSets(
  4551. commandBuffer->commandBuffer,
  4552. VK_PIPELINE_BIND_POINT_GRAPHICS,
  4553. resourceLayout->pipelineLayout,
  4554. 0,
  4555. 4,
  4556. sets,
  4557. dynamicOffsetCount,
  4558. dynamicOffsets);
  4559. commandBuffer->needNewVertexUniformOffsets = false;
  4560. commandBuffer->needNewFragmentUniformOffsets = false;
  4561. }
  4562. static void VULKAN_DrawIndexedPrimitives(
  4563. SDL_GPUCommandBuffer *commandBuffer,
  4564. Uint32 numIndices,
  4565. Uint32 numInstances,
  4566. Uint32 firstIndex,
  4567. Sint32 vertexOffset,
  4568. Uint32 firstInstance)
  4569. {
  4570. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4571. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4572. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4573. renderer->vkCmdDrawIndexed(
  4574. vulkanCommandBuffer->commandBuffer,
  4575. numIndices,
  4576. numInstances,
  4577. firstIndex,
  4578. vertexOffset,
  4579. firstInstance);
  4580. }
  4581. static void VULKAN_DrawPrimitives(
  4582. SDL_GPUCommandBuffer *commandBuffer,
  4583. Uint32 numVertices,
  4584. Uint32 numInstances,
  4585. Uint32 firstVertex,
  4586. Uint32 firstInstance)
  4587. {
  4588. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4589. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4590. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4591. renderer->vkCmdDraw(
  4592. vulkanCommandBuffer->commandBuffer,
  4593. numVertices,
  4594. numInstances,
  4595. firstVertex,
  4596. firstInstance);
  4597. }
  4598. static void VULKAN_DrawPrimitivesIndirect(
  4599. SDL_GPUCommandBuffer *commandBuffer,
  4600. SDL_GPUBuffer *buffer,
  4601. Uint32 offset,
  4602. Uint32 drawCount)
  4603. {
  4604. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4605. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4606. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer;
  4607. Uint32 pitch = sizeof(SDL_GPUIndirectDrawCommand);
  4608. Uint32 i;
  4609. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4610. if (renderer->supportsMultiDrawIndirect) {
  4611. // Real multi-draw!
  4612. renderer->vkCmdDrawIndirect(
  4613. vulkanCommandBuffer->commandBuffer,
  4614. vulkanBuffer->buffer,
  4615. offset,
  4616. drawCount,
  4617. pitch);
  4618. } else {
  4619. // Fake multi-draw...
  4620. for (i = 0; i < drawCount; i += 1) {
  4621. renderer->vkCmdDrawIndirect(
  4622. vulkanCommandBuffer->commandBuffer,
  4623. vulkanBuffer->buffer,
  4624. offset + (pitch * i),
  4625. 1,
  4626. pitch);
  4627. }
  4628. }
  4629. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  4630. }
  4631. static void VULKAN_DrawIndexedPrimitivesIndirect(
  4632. SDL_GPUCommandBuffer *commandBuffer,
  4633. SDL_GPUBuffer *buffer,
  4634. Uint32 offset,
  4635. Uint32 drawCount)
  4636. {
  4637. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4638. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4639. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer;
  4640. Uint32 pitch = sizeof(SDL_GPUIndexedIndirectDrawCommand);
  4641. Uint32 i;
  4642. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4643. if (renderer->supportsMultiDrawIndirect) {
  4644. // Real multi-draw!
  4645. renderer->vkCmdDrawIndexedIndirect(
  4646. vulkanCommandBuffer->commandBuffer,
  4647. vulkanBuffer->buffer,
  4648. offset,
  4649. drawCount,
  4650. pitch);
  4651. } else {
  4652. // Fake multi-draw...
  4653. for (i = 0; i < drawCount; i += 1) {
  4654. renderer->vkCmdDrawIndexedIndirect(
  4655. vulkanCommandBuffer->commandBuffer,
  4656. vulkanBuffer->buffer,
  4657. offset + (pitch * i),
  4658. 1,
  4659. pitch);
  4660. }
  4661. }
  4662. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  4663. }
  4664. // Debug Naming
  4665. static void VULKAN_INTERNAL_SetBufferName(
  4666. VulkanRenderer *renderer,
  4667. VulkanBuffer *buffer,
  4668. const char *text)
  4669. {
  4670. VkDebugUtilsObjectNameInfoEXT nameInfo;
  4671. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4672. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  4673. nameInfo.pNext = NULL;
  4674. nameInfo.pObjectName = text;
  4675. nameInfo.objectType = VK_OBJECT_TYPE_BUFFER;
  4676. nameInfo.objectHandle = (uint64_t)buffer->buffer;
  4677. renderer->vkSetDebugUtilsObjectNameEXT(
  4678. renderer->logicalDevice,
  4679. &nameInfo);
  4680. }
  4681. }
  4682. static void VULKAN_SetBufferName(
  4683. SDL_GPURenderer *driverData,
  4684. SDL_GPUBuffer *buffer,
  4685. const char *text)
  4686. {
  4687. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  4688. VulkanBufferContainer *container = (VulkanBufferContainer *)buffer;
  4689. size_t textLength = SDL_strlen(text) + 1;
  4690. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4691. container->debugName = SDL_realloc(
  4692. container->debugName,
  4693. textLength);
  4694. SDL_utf8strlcpy(
  4695. container->debugName,
  4696. text,
  4697. textLength);
  4698. for (Uint32 i = 0; i < container->bufferCount; i += 1) {
  4699. VULKAN_INTERNAL_SetBufferName(
  4700. renderer,
  4701. container->buffers[i],
  4702. text);
  4703. }
  4704. }
  4705. }
  4706. static void VULKAN_INTERNAL_SetTextureName(
  4707. VulkanRenderer *renderer,
  4708. VulkanTexture *texture,
  4709. const char *text)
  4710. {
  4711. VkDebugUtilsObjectNameInfoEXT nameInfo;
  4712. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4713. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  4714. nameInfo.pNext = NULL;
  4715. nameInfo.pObjectName = text;
  4716. nameInfo.objectType = VK_OBJECT_TYPE_IMAGE;
  4717. nameInfo.objectHandle = (uint64_t)texture->image;
  4718. renderer->vkSetDebugUtilsObjectNameEXT(
  4719. renderer->logicalDevice,
  4720. &nameInfo);
  4721. }
  4722. }
  4723. static void VULKAN_SetTextureName(
  4724. SDL_GPURenderer *driverData,
  4725. SDL_GPUTexture *texture,
  4726. const char *text)
  4727. {
  4728. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  4729. VulkanTextureContainer *container = (VulkanTextureContainer *)texture;
  4730. size_t textLength = SDL_strlen(text) + 1;
  4731. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4732. container->debugName = SDL_realloc(
  4733. container->debugName,
  4734. textLength);
  4735. SDL_utf8strlcpy(
  4736. container->debugName,
  4737. text,
  4738. textLength);
  4739. for (Uint32 i = 0; i < container->textureCount; i += 1) {
  4740. VULKAN_INTERNAL_SetTextureName(
  4741. renderer,
  4742. container->textures[i],
  4743. text);
  4744. }
  4745. }
  4746. }
  4747. static void VULKAN_InsertDebugLabel(
  4748. SDL_GPUCommandBuffer *commandBuffer,
  4749. const char *text)
  4750. {
  4751. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4752. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4753. VkDebugUtilsLabelEXT labelInfo;
  4754. if (renderer->supportsDebugUtils) {
  4755. SDL_zero(labelInfo);
  4756. labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  4757. labelInfo.pLabelName = text;
  4758. renderer->vkCmdInsertDebugUtilsLabelEXT(
  4759. vulkanCommandBuffer->commandBuffer,
  4760. &labelInfo);
  4761. }
  4762. }
  4763. static void VULKAN_PushDebugGroup(
  4764. SDL_GPUCommandBuffer *commandBuffer,
  4765. const char *name)
  4766. {
  4767. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4768. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4769. VkDebugUtilsLabelEXT labelInfo;
  4770. if (renderer->supportsDebugUtils) {
  4771. SDL_zero(labelInfo);
  4772. labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  4773. labelInfo.pLabelName = name;
  4774. renderer->vkCmdBeginDebugUtilsLabelEXT(
  4775. vulkanCommandBuffer->commandBuffer,
  4776. &labelInfo);
  4777. }
  4778. }
  4779. static void VULKAN_PopDebugGroup(
  4780. SDL_GPUCommandBuffer *commandBuffer)
  4781. {
  4782. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4783. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  4784. if (renderer->supportsDebugUtils) {
  4785. renderer->vkCmdEndDebugUtilsLabelEXT(vulkanCommandBuffer->commandBuffer);
  4786. }
  4787. }
  4788. static VulkanTexture *VULKAN_INTERNAL_CreateTexture(
  4789. VulkanRenderer *renderer,
  4790. bool transitionToDefaultLayout,
  4791. const SDL_GPUTextureCreateInfo *createinfo)
  4792. {
  4793. VkResult vulkanResult;
  4794. VkImageCreateInfo imageCreateInfo;
  4795. VkImageCreateFlags imageCreateFlags = 0;
  4796. VkImageViewCreateInfo imageViewCreateInfo;
  4797. Uint8 bindResult;
  4798. VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
  4799. Uint32 layerCount = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? 1 : createinfo->layer_count_or_depth;
  4800. Uint32 depth = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? createinfo->layer_count_or_depth : 1;
  4801. VulkanTexture *texture = SDL_calloc(1, sizeof(VulkanTexture));
  4802. texture->swizzle = SwizzleForSDLFormat(createinfo->format);
  4803. texture->depth = depth;
  4804. texture->usage = createinfo->usage;
  4805. SDL_SetAtomicInt(&texture->referenceCount, 0);
  4806. if (IsDepthFormat(createinfo->format)) {
  4807. texture->aspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
  4808. if (IsStencilFormat(createinfo->format)) {
  4809. texture->aspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
  4810. }
  4811. } else {
  4812. texture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
  4813. }
  4814. if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE || createinfo->type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
  4815. imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
  4816. } else if (createinfo->type == SDL_GPU_TEXTURETYPE_3D) {
  4817. imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
  4818. }
  4819. if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_SAMPLER |
  4820. SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ |
  4821. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ)) {
  4822. vkUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
  4823. }
  4824. if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
  4825. vkUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  4826. }
  4827. if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
  4828. vkUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  4829. }
  4830. if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE |
  4831. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
  4832. vkUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT;
  4833. }
  4834. imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  4835. imageCreateInfo.pNext = NULL;
  4836. imageCreateInfo.flags = imageCreateFlags;
  4837. imageCreateInfo.imageType = createinfo->type == SDL_GPU_TEXTURETYPE_3D ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
  4838. imageCreateInfo.format = SDLToVK_TextureFormat[createinfo->format];
  4839. imageCreateInfo.extent.width = createinfo->width;
  4840. imageCreateInfo.extent.height = createinfo->height;
  4841. imageCreateInfo.extent.depth = depth;
  4842. imageCreateInfo.mipLevels = createinfo->num_levels;
  4843. imageCreateInfo.arrayLayers = layerCount;
  4844. imageCreateInfo.samples = SDLToVK_SampleCount[createinfo->sample_count];
  4845. imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
  4846. imageCreateInfo.usage = vkUsageFlags;
  4847. imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  4848. imageCreateInfo.queueFamilyIndexCount = 0;
  4849. imageCreateInfo.pQueueFamilyIndices = NULL;
  4850. imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  4851. vulkanResult = renderer->vkCreateImage(
  4852. renderer->logicalDevice,
  4853. &imageCreateInfo,
  4854. NULL,
  4855. &texture->image);
  4856. if (vulkanResult != VK_SUCCESS) {
  4857. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4858. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImage, NULL);
  4859. }
  4860. bindResult = VULKAN_INTERNAL_BindMemoryForImage(
  4861. renderer,
  4862. texture->image,
  4863. &texture->usedRegion);
  4864. if (bindResult != 1) {
  4865. renderer->vkDestroyImage(
  4866. renderer->logicalDevice,
  4867. texture->image,
  4868. NULL);
  4869. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4870. SET_STRING_ERROR_AND_RETURN("Unable to bind memory for texture!", NULL);
  4871. }
  4872. texture->usedRegion->vulkanTexture = texture; // lol
  4873. if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_SAMPLER | SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ | SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ)) {
  4874. imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  4875. imageViewCreateInfo.pNext = NULL;
  4876. imageViewCreateInfo.flags = 0;
  4877. imageViewCreateInfo.image = texture->image;
  4878. imageViewCreateInfo.format = SDLToVK_TextureFormat[createinfo->format];
  4879. imageViewCreateInfo.components = texture->swizzle;
  4880. imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags & ~VK_IMAGE_ASPECT_STENCIL_BIT; // Can't sample stencil values
  4881. imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
  4882. imageViewCreateInfo.subresourceRange.levelCount = createinfo->num_levels;
  4883. imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
  4884. imageViewCreateInfo.subresourceRange.layerCount = layerCount;
  4885. if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE) {
  4886. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
  4887. } else if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
  4888. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
  4889. } else if (createinfo->type == SDL_GPU_TEXTURETYPE_3D) {
  4890. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D;
  4891. } else if (createinfo->type == SDL_GPU_TEXTURETYPE_2D_ARRAY) {
  4892. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
  4893. } else {
  4894. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
  4895. }
  4896. vulkanResult = renderer->vkCreateImageView(
  4897. renderer->logicalDevice,
  4898. &imageViewCreateInfo,
  4899. NULL,
  4900. &texture->fullView);
  4901. if (vulkanResult != VK_SUCCESS) {
  4902. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4903. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, "vkCreateImageView", NULL);
  4904. }
  4905. }
  4906. // Define slices
  4907. texture->subresourceCount = layerCount * createinfo->num_levels;
  4908. texture->subresources = SDL_calloc(
  4909. texture->subresourceCount,
  4910. sizeof(VulkanTextureSubresource));
  4911. for (Uint32 i = 0; i < layerCount; i += 1) {
  4912. for (Uint32 j = 0; j < createinfo->num_levels; j += 1) {
  4913. Uint32 subresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  4914. j,
  4915. i,
  4916. createinfo->num_levels);
  4917. if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
  4918. texture->subresources[subresourceIndex].renderTargetViews = SDL_malloc(
  4919. depth * sizeof(VkImageView));
  4920. if (depth > 1) {
  4921. for (Uint32 k = 0; k < depth; k += 1) {
  4922. if (!VULKAN_INTERNAL_CreateRenderTargetView(
  4923. renderer,
  4924. texture,
  4925. k,
  4926. j,
  4927. SDLToVK_TextureFormat[createinfo->format],
  4928. texture->swizzle,
  4929. &texture->subresources[subresourceIndex].renderTargetViews[k])) {
  4930. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4931. return NULL;
  4932. }
  4933. }
  4934. } else {
  4935. if (!VULKAN_INTERNAL_CreateRenderTargetView(
  4936. renderer,
  4937. texture,
  4938. i,
  4939. j,
  4940. SDLToVK_TextureFormat[createinfo->format],
  4941. texture->swizzle,
  4942. &texture->subresources[subresourceIndex].renderTargetViews[0])) {
  4943. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4944. return NULL;
  4945. }
  4946. }
  4947. }
  4948. if ((createinfo->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE) || (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
  4949. if (!VULKAN_INTERNAL_CreateSubresourceView(
  4950. renderer,
  4951. createinfo,
  4952. texture,
  4953. i,
  4954. j,
  4955. texture->swizzle,
  4956. &texture->subresources[subresourceIndex].computeWriteView)) {
  4957. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4958. return NULL;
  4959. }
  4960. }
  4961. if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
  4962. if (!VULKAN_INTERNAL_CreateSubresourceView(
  4963. renderer,
  4964. createinfo,
  4965. texture,
  4966. i,
  4967. j,
  4968. texture->swizzle,
  4969. &texture->subresources[subresourceIndex].depthStencilView)) {
  4970. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  4971. return NULL;
  4972. }
  4973. }
  4974. texture->subresources[subresourceIndex].parent = texture;
  4975. texture->subresources[subresourceIndex].layer = i;
  4976. texture->subresources[subresourceIndex].level = j;
  4977. }
  4978. }
  4979. // Set debug name if applicable
  4980. if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING)) {
  4981. VkDebugUtilsObjectNameInfoEXT nameInfo;
  4982. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  4983. nameInfo.pNext = NULL;
  4984. nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING, NULL);
  4985. nameInfo.objectType = VK_OBJECT_TYPE_IMAGE;
  4986. nameInfo.objectHandle = (uint64_t)texture->image;
  4987. renderer->vkSetDebugUtilsObjectNameEXT(
  4988. renderer->logicalDevice,
  4989. &nameInfo);
  4990. }
  4991. if (transitionToDefaultLayout) {
  4992. // Let's transition to the default barrier state, because for some reason Vulkan doesn't let us do that with initialLayout.
  4993. VulkanCommandBuffer *barrierCommandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
  4994. VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  4995. renderer,
  4996. barrierCommandBuffer,
  4997. VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
  4998. texture);
  4999. VULKAN_INTERNAL_TrackTexture(barrierCommandBuffer, texture);
  5000. if (!VULKAN_Submit((SDL_GPUCommandBuffer *)barrierCommandBuffer)) {
  5001. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  5002. return NULL;
  5003. }
  5004. }
  5005. return texture;
  5006. }
  5007. static void VULKAN_INTERNAL_CycleActiveBuffer(
  5008. VulkanRenderer *renderer,
  5009. VulkanBufferContainer *container)
  5010. {
  5011. VulkanBuffer *buffer;
  5012. // If a previously-cycled buffer is available, we can use that.
  5013. for (Uint32 i = 0; i < container->bufferCount; i += 1) {
  5014. buffer = container->buffers[i];
  5015. if (SDL_GetAtomicInt(&buffer->referenceCount) == 0) {
  5016. container->activeBuffer = buffer;
  5017. return;
  5018. }
  5019. }
  5020. // No buffer handle is available, create a new one.
  5021. buffer = VULKAN_INTERNAL_CreateBuffer(
  5022. renderer,
  5023. container->activeBuffer->size,
  5024. container->activeBuffer->usage,
  5025. container->activeBuffer->type,
  5026. container->dedicated,
  5027. container->debugName);
  5028. if (!buffer) {
  5029. return;
  5030. }
  5031. EXPAND_ARRAY_IF_NEEDED(
  5032. container->buffers,
  5033. VulkanBuffer *,
  5034. container->bufferCount + 1,
  5035. container->bufferCapacity,
  5036. container->bufferCapacity * 2);
  5037. container->buffers[container->bufferCount] = buffer;
  5038. buffer->container = container;
  5039. buffer->containerIndex = container->bufferCount;
  5040. container->bufferCount += 1;
  5041. container->activeBuffer = buffer;
  5042. }
  5043. static void VULKAN_INTERNAL_CycleActiveTexture(
  5044. VulkanRenderer *renderer,
  5045. VulkanCommandBuffer *commandBuffer,
  5046. VulkanTextureContainer *container)
  5047. {
  5048. VulkanTexture *texture;
  5049. // If a previously-cycled texture is available, we can use that.
  5050. for (Uint32 i = 0; i < container->textureCount; i += 1) {
  5051. texture = container->textures[i];
  5052. if (SDL_GetAtomicInt(&texture->referenceCount) == 0) {
  5053. container->activeTexture = texture;
  5054. return;
  5055. }
  5056. }
  5057. // No texture is available, generate a new one.
  5058. texture = VULKAN_INTERNAL_CreateTexture(
  5059. renderer,
  5060. false,
  5061. &container->header.info);
  5062. VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  5063. renderer,
  5064. commandBuffer,
  5065. VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
  5066. texture);
  5067. if (!texture) {
  5068. return;
  5069. }
  5070. EXPAND_ARRAY_IF_NEEDED(
  5071. container->textures,
  5072. VulkanTexture *,
  5073. container->textureCount + 1,
  5074. container->textureCapacity,
  5075. container->textureCapacity * 2);
  5076. container->textures[container->textureCount] = texture;
  5077. texture->container = container;
  5078. texture->containerIndex = container->textureCount;
  5079. container->textureCount += 1;
  5080. container->activeTexture = texture;
  5081. }
  5082. static VulkanBuffer *VULKAN_INTERNAL_PrepareBufferForWrite(
  5083. VulkanRenderer *renderer,
  5084. VulkanCommandBuffer *commandBuffer,
  5085. VulkanBufferContainer *bufferContainer,
  5086. bool cycle,
  5087. VulkanBufferUsageMode destinationUsageMode)
  5088. {
  5089. if (
  5090. cycle &&
  5091. SDL_GetAtomicInt(&bufferContainer->activeBuffer->referenceCount) > 0) {
  5092. VULKAN_INTERNAL_CycleActiveBuffer(
  5093. renderer,
  5094. bufferContainer);
  5095. }
  5096. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  5097. renderer,
  5098. commandBuffer,
  5099. destinationUsageMode,
  5100. bufferContainer->activeBuffer);
  5101. return bufferContainer->activeBuffer;
  5102. }
  5103. static VulkanTextureSubresource *VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  5104. VulkanRenderer *renderer,
  5105. VulkanCommandBuffer *commandBuffer,
  5106. VulkanTextureContainer *textureContainer,
  5107. Uint32 layer,
  5108. Uint32 level,
  5109. bool cycle,
  5110. VulkanTextureUsageMode destinationUsageMode)
  5111. {
  5112. VulkanTextureSubresource *textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  5113. textureContainer,
  5114. layer,
  5115. level);
  5116. if (
  5117. cycle &&
  5118. textureContainer->canBeCycled &&
  5119. SDL_GetAtomicInt(&textureContainer->activeTexture->referenceCount) > 0) {
  5120. VULKAN_INTERNAL_CycleActiveTexture(
  5121. renderer,
  5122. commandBuffer,
  5123. textureContainer);
  5124. textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  5125. textureContainer,
  5126. layer,
  5127. level);
  5128. }
  5129. // always do barrier because of layout transitions
  5130. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  5131. renderer,
  5132. commandBuffer,
  5133. destinationUsageMode,
  5134. textureSubresource);
  5135. return textureSubresource;
  5136. }
  5137. static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
  5138. VulkanRenderer *renderer,
  5139. const SDL_GPUColorTargetInfo *colorTargetInfos,
  5140. Uint32 numColorTargets,
  5141. const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
  5142. {
  5143. VkResult vulkanResult;
  5144. VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
  5145. VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
  5146. VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS];
  5147. VkAttachmentReference depthStencilAttachmentReference;
  5148. VkRenderPassCreateInfo renderPassCreateInfo;
  5149. VkSubpassDescription subpass;
  5150. VkRenderPass renderPass;
  5151. Uint32 i;
  5152. Uint32 attachmentDescriptionCount = 0;
  5153. Uint32 colorAttachmentReferenceCount = 0;
  5154. Uint32 resolveReferenceCount = 0;
  5155. for (i = 0; i < numColorTargets; i += 1) {
  5156. VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
  5157. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5158. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[container->header.info.format];
  5159. attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[container->header.info.sample_count];
  5160. attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[colorTargetInfos[i].load_op];
  5161. attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[colorTargetInfos[i].store_op];
  5162. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5163. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5164. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5165. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5166. colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
  5167. colorAttachmentReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5168. attachmentDescriptionCount += 1;
  5169. if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
  5170. VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
  5171. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5172. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[resolveContainer->header.info.format];
  5173. attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[resolveContainer->header.info.sample_count];
  5174. attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; // The texture will be overwritten anyway
  5175. attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_STORE; // Always store the resolve texture
  5176. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5177. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5178. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5179. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5180. resolveReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
  5181. resolveReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5182. attachmentDescriptionCount += 1;
  5183. resolveReferenceCount += 1;
  5184. } else {
  5185. resolveReferences[colorAttachmentReferenceCount].attachment = VK_ATTACHMENT_UNUSED;
  5186. }
  5187. colorAttachmentReferenceCount += 1;
  5188. }
  5189. subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  5190. subpass.flags = 0;
  5191. subpass.inputAttachmentCount = 0;
  5192. subpass.pInputAttachments = NULL;
  5193. subpass.colorAttachmentCount = numColorTargets;
  5194. subpass.pColorAttachments = colorAttachmentReferences;
  5195. subpass.preserveAttachmentCount = 0;
  5196. subpass.pPreserveAttachments = NULL;
  5197. if (depthStencilTargetInfo == NULL) {
  5198. subpass.pDepthStencilAttachment = NULL;
  5199. } else {
  5200. VulkanTextureContainer *container = (VulkanTextureContainer *)depthStencilTargetInfo->texture;
  5201. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5202. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[container->header.info.format];
  5203. attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[container->header.info.sample_count];
  5204. attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[depthStencilTargetInfo->load_op];
  5205. attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[depthStencilTargetInfo->store_op];
  5206. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = SDLToVK_LoadOp[depthStencilTargetInfo->stencil_load_op];
  5207. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = SDLToVK_StoreOp[depthStencilTargetInfo->stencil_store_op];
  5208. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5209. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5210. depthStencilAttachmentReference.attachment = attachmentDescriptionCount;
  5211. depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5212. subpass.pDepthStencilAttachment = &depthStencilAttachmentReference;
  5213. attachmentDescriptionCount += 1;
  5214. }
  5215. if (resolveReferenceCount > 0) {
  5216. subpass.pResolveAttachments = resolveReferences;
  5217. } else {
  5218. subpass.pResolveAttachments = NULL;
  5219. }
  5220. renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  5221. renderPassCreateInfo.pNext = NULL;
  5222. renderPassCreateInfo.flags = 0;
  5223. renderPassCreateInfo.pAttachments = attachmentDescriptions;
  5224. renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
  5225. renderPassCreateInfo.subpassCount = 1;
  5226. renderPassCreateInfo.pSubpasses = &subpass;
  5227. renderPassCreateInfo.dependencyCount = 0;
  5228. renderPassCreateInfo.pDependencies = NULL;
  5229. vulkanResult = renderer->vkCreateRenderPass(
  5230. renderer->logicalDevice,
  5231. &renderPassCreateInfo,
  5232. NULL,
  5233. &renderPass);
  5234. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateRenderPass, VK_NULL_HANDLE);
  5235. return renderPass;
  5236. }
  5237. static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
  5238. VulkanRenderer *renderer,
  5239. SDL_GPUGraphicsPipelineTargetInfo targetInfo,
  5240. VkSampleCountFlagBits sampleCount)
  5241. {
  5242. VkAttachmentDescription attachmentDescriptions[MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
  5243. VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
  5244. VkAttachmentReference depthStencilAttachmentReference;
  5245. SDL_GPUColorTargetDescription attachmentDescription;
  5246. VkSubpassDescription subpass;
  5247. VkRenderPassCreateInfo renderPassCreateInfo;
  5248. VkRenderPass renderPass;
  5249. VkResult result;
  5250. Uint32 attachmentDescriptionCount = 0;
  5251. Uint32 colorAttachmentReferenceCount = 0;
  5252. Uint32 i;
  5253. for (i = 0; i < targetInfo.num_color_targets; i += 1) {
  5254. attachmentDescription = targetInfo.color_target_descriptions[i];
  5255. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5256. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[attachmentDescription.format];
  5257. attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
  5258. attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5259. attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5260. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5261. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5262. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5263. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5264. colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
  5265. colorAttachmentReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5266. attachmentDescriptionCount += 1;
  5267. colorAttachmentReferenceCount += 1;
  5268. }
  5269. subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  5270. subpass.flags = 0;
  5271. subpass.inputAttachmentCount = 0;
  5272. subpass.pInputAttachments = NULL;
  5273. subpass.colorAttachmentCount = targetInfo.num_color_targets;
  5274. subpass.pColorAttachments = colorAttachmentReferences;
  5275. subpass.preserveAttachmentCount = 0;
  5276. subpass.pPreserveAttachments = NULL;
  5277. if (targetInfo.has_depth_stencil_target) {
  5278. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5279. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[targetInfo.depth_stencil_format];
  5280. attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
  5281. attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5282. attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5283. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5284. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5285. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5286. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5287. depthStencilAttachmentReference.attachment = attachmentDescriptionCount;
  5288. depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5289. subpass.pDepthStencilAttachment = &depthStencilAttachmentReference;
  5290. attachmentDescriptionCount += 1;
  5291. } else {
  5292. subpass.pDepthStencilAttachment = NULL;
  5293. }
  5294. // Resolve attachments aren't needed for transient passes
  5295. subpass.pResolveAttachments = NULL;
  5296. renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  5297. renderPassCreateInfo.pNext = NULL;
  5298. renderPassCreateInfo.flags = 0;
  5299. renderPassCreateInfo.pAttachments = attachmentDescriptions;
  5300. renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
  5301. renderPassCreateInfo.subpassCount = 1;
  5302. renderPassCreateInfo.pSubpasses = &subpass;
  5303. renderPassCreateInfo.dependencyCount = 0;
  5304. renderPassCreateInfo.pDependencies = NULL;
  5305. result = renderer->vkCreateRenderPass(
  5306. renderer->logicalDevice,
  5307. &renderPassCreateInfo,
  5308. NULL,
  5309. &renderPass);
  5310. CHECK_VULKAN_ERROR_AND_RETURN(result, vkCreateRenderPass, VK_NULL_HANDLE);
  5311. return renderPass;
  5312. }
  5313. static SDL_GPUGraphicsPipeline *VULKAN_CreateGraphicsPipeline(
  5314. SDL_GPURenderer *driverData,
  5315. const SDL_GPUGraphicsPipelineCreateInfo *createinfo)
  5316. {
  5317. VkResult vulkanResult;
  5318. Uint32 i;
  5319. VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline *)SDL_malloc(sizeof(VulkanGraphicsPipeline));
  5320. VkGraphicsPipelineCreateInfo vkPipelineCreateInfo;
  5321. VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2];
  5322. VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo;
  5323. VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, createinfo->vertex_input_state.num_vertex_buffers);
  5324. VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, createinfo->vertex_input_state.num_vertex_attributes);
  5325. VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo;
  5326. VkPipelineViewportStateCreateInfo viewportStateCreateInfo;
  5327. VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo;
  5328. VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo;
  5329. VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo;
  5330. VkStencilOpState frontStencilState;
  5331. VkStencilOpState backStencilState;
  5332. VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo;
  5333. VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc(
  5334. VkPipelineColorBlendAttachmentState,
  5335. createinfo->target_info.num_color_targets);
  5336. static const VkDynamicState dynamicStates[] = {
  5337. VK_DYNAMIC_STATE_VIEWPORT,
  5338. VK_DYNAMIC_STATE_SCISSOR,
  5339. VK_DYNAMIC_STATE_BLEND_CONSTANTS,
  5340. VK_DYNAMIC_STATE_STENCIL_REFERENCE
  5341. };
  5342. VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo;
  5343. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5344. // Create a "compatible" render pass
  5345. VkRenderPass transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass(
  5346. renderer,
  5347. createinfo->target_info,
  5348. SDLToVK_SampleCount[createinfo->multisample_state.sample_count]);
  5349. // Dynamic state
  5350. dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
  5351. dynamicStateCreateInfo.pNext = NULL;
  5352. dynamicStateCreateInfo.flags = 0;
  5353. dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates);
  5354. dynamicStateCreateInfo.pDynamicStates = dynamicStates;
  5355. // Shader stages
  5356. graphicsPipeline->vertexShader = (VulkanShader *)createinfo->vertex_shader;
  5357. SDL_AtomicIncRef(&graphicsPipeline->vertexShader->referenceCount);
  5358. shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
  5359. shaderStageCreateInfos[0].pNext = NULL;
  5360. shaderStageCreateInfos[0].flags = 0;
  5361. shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
  5362. shaderStageCreateInfos[0].module = graphicsPipeline->vertexShader->shaderModule;
  5363. shaderStageCreateInfos[0].pName = graphicsPipeline->vertexShader->entrypointName;
  5364. shaderStageCreateInfos[0].pSpecializationInfo = NULL;
  5365. graphicsPipeline->fragmentShader = (VulkanShader *)createinfo->fragment_shader;
  5366. SDL_AtomicIncRef(&graphicsPipeline->fragmentShader->referenceCount);
  5367. shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
  5368. shaderStageCreateInfos[1].pNext = NULL;
  5369. shaderStageCreateInfos[1].flags = 0;
  5370. shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
  5371. shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShader->shaderModule;
  5372. shaderStageCreateInfos[1].pName = graphicsPipeline->fragmentShader->entrypointName;
  5373. shaderStageCreateInfos[1].pSpecializationInfo = NULL;
  5374. if (renderer->debugMode) {
  5375. if (graphicsPipeline->vertexShader->stage != SDL_GPU_SHADERSTAGE_VERTEX) {
  5376. SDL_assert_release(!"CreateGraphicsPipeline was passed a fragment shader for the vertex stage");
  5377. }
  5378. if (graphicsPipeline->fragmentShader->stage != SDL_GPU_SHADERSTAGE_FRAGMENT) {
  5379. SDL_assert_release(!"CreateGraphicsPipeline was passed a vertex shader for the fragment stage");
  5380. }
  5381. }
  5382. // Vertex input
  5383. for (i = 0; i < createinfo->vertex_input_state.num_vertex_buffers; i += 1) {
  5384. vertexInputBindingDescriptions[i].binding = createinfo->vertex_input_state.vertex_buffer_descriptions[i].slot;
  5385. vertexInputBindingDescriptions[i].inputRate = SDLToVK_VertexInputRate[createinfo->vertex_input_state.vertex_buffer_descriptions[i].input_rate];
  5386. vertexInputBindingDescriptions[i].stride = createinfo->vertex_input_state.vertex_buffer_descriptions[i].pitch;
  5387. }
  5388. for (i = 0; i < createinfo->vertex_input_state.num_vertex_attributes; i += 1) {
  5389. vertexInputAttributeDescriptions[i].binding = createinfo->vertex_input_state.vertex_attributes[i].buffer_slot;
  5390. vertexInputAttributeDescriptions[i].format = SDLToVK_VertexFormat[createinfo->vertex_input_state.vertex_attributes[i].format];
  5391. vertexInputAttributeDescriptions[i].location = createinfo->vertex_input_state.vertex_attributes[i].location;
  5392. vertexInputAttributeDescriptions[i].offset = createinfo->vertex_input_state.vertex_attributes[i].offset;
  5393. }
  5394. vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  5395. vertexInputStateCreateInfo.pNext = NULL;
  5396. vertexInputStateCreateInfo.flags = 0;
  5397. vertexInputStateCreateInfo.vertexBindingDescriptionCount = createinfo->vertex_input_state.num_vertex_buffers;
  5398. vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions;
  5399. vertexInputStateCreateInfo.vertexAttributeDescriptionCount = createinfo->vertex_input_state.num_vertex_attributes;
  5400. vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions;
  5401. // Topology
  5402. inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
  5403. inputAssemblyStateCreateInfo.pNext = NULL;
  5404. inputAssemblyStateCreateInfo.flags = 0;
  5405. inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE;
  5406. inputAssemblyStateCreateInfo.topology = SDLToVK_PrimitiveType[createinfo->primitive_type];
  5407. graphicsPipeline->primitiveType = createinfo->primitive_type;
  5408. // Viewport
  5409. // NOTE: viewport and scissor are dynamic, and must be set using the command buffer
  5410. viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
  5411. viewportStateCreateInfo.pNext = NULL;
  5412. viewportStateCreateInfo.flags = 0;
  5413. viewportStateCreateInfo.viewportCount = 1;
  5414. viewportStateCreateInfo.pViewports = NULL;
  5415. viewportStateCreateInfo.scissorCount = 1;
  5416. viewportStateCreateInfo.pScissors = NULL;
  5417. // Rasterization
  5418. rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
  5419. rasterizationStateCreateInfo.pNext = NULL;
  5420. rasterizationStateCreateInfo.flags = 0;
  5421. rasterizationStateCreateInfo.depthClampEnable = !createinfo->rasterizer_state.enable_depth_clip;
  5422. rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE;
  5423. rasterizationStateCreateInfo.polygonMode = SDLToVK_PolygonMode(
  5424. renderer,
  5425. createinfo->rasterizer_state.fill_mode);
  5426. rasterizationStateCreateInfo.cullMode = SDLToVK_CullMode[createinfo->rasterizer_state.cull_mode];
  5427. rasterizationStateCreateInfo.frontFace = SDLToVK_FrontFace[createinfo->rasterizer_state.front_face];
  5428. rasterizationStateCreateInfo.depthBiasEnable =
  5429. createinfo->rasterizer_state.enable_depth_bias;
  5430. rasterizationStateCreateInfo.depthBiasConstantFactor =
  5431. createinfo->rasterizer_state.depth_bias_constant_factor;
  5432. rasterizationStateCreateInfo.depthBiasClamp =
  5433. createinfo->rasterizer_state.depth_bias_clamp;
  5434. rasterizationStateCreateInfo.depthBiasSlopeFactor =
  5435. createinfo->rasterizer_state.depth_bias_slope_factor;
  5436. rasterizationStateCreateInfo.lineWidth = 1.0f;
  5437. // Multisample
  5438. Uint32 sampleMask = 0xFFFFFFFF;
  5439. multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
  5440. multisampleStateCreateInfo.pNext = NULL;
  5441. multisampleStateCreateInfo.flags = 0;
  5442. multisampleStateCreateInfo.rasterizationSamples = SDLToVK_SampleCount[createinfo->multisample_state.sample_count];
  5443. multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE;
  5444. multisampleStateCreateInfo.minSampleShading = 1.0f;
  5445. multisampleStateCreateInfo.pSampleMask = &sampleMask;
  5446. multisampleStateCreateInfo.alphaToCoverageEnable = createinfo->multisample_state.enable_alpha_to_coverage;
  5447. multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE;
  5448. // Depth Stencil State
  5449. frontStencilState.failOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.fail_op];
  5450. frontStencilState.passOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.pass_op];
  5451. frontStencilState.depthFailOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.depth_fail_op];
  5452. frontStencilState.compareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.front_stencil_state.compare_op];
  5453. frontStencilState.compareMask =
  5454. createinfo->depth_stencil_state.compare_mask;
  5455. frontStencilState.writeMask =
  5456. createinfo->depth_stencil_state.write_mask;
  5457. frontStencilState.reference = 0;
  5458. backStencilState.failOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.fail_op];
  5459. backStencilState.passOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.pass_op];
  5460. backStencilState.depthFailOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.depth_fail_op];
  5461. backStencilState.compareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.back_stencil_state.compare_op];
  5462. backStencilState.compareMask =
  5463. createinfo->depth_stencil_state.compare_mask;
  5464. backStencilState.writeMask =
  5465. createinfo->depth_stencil_state.write_mask;
  5466. backStencilState.reference = 0;
  5467. depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
  5468. depthStencilStateCreateInfo.pNext = NULL;
  5469. depthStencilStateCreateInfo.flags = 0;
  5470. depthStencilStateCreateInfo.depthTestEnable =
  5471. createinfo->depth_stencil_state.enable_depth_test;
  5472. depthStencilStateCreateInfo.depthWriteEnable =
  5473. createinfo->depth_stencil_state.enable_depth_write;
  5474. depthStencilStateCreateInfo.depthCompareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.compare_op];
  5475. depthStencilStateCreateInfo.depthBoundsTestEnable = VK_FALSE;
  5476. depthStencilStateCreateInfo.stencilTestEnable =
  5477. createinfo->depth_stencil_state.enable_stencil_test;
  5478. depthStencilStateCreateInfo.front = frontStencilState;
  5479. depthStencilStateCreateInfo.back = backStencilState;
  5480. depthStencilStateCreateInfo.minDepthBounds = 0; // unused
  5481. depthStencilStateCreateInfo.maxDepthBounds = 0; // unused
  5482. // Color Blend
  5483. for (i = 0; i < createinfo->target_info.num_color_targets; i += 1) {
  5484. SDL_GPUColorTargetBlendState blendState = createinfo->target_info.color_target_descriptions[i].blend_state;
  5485. SDL_GPUColorComponentFlags colorWriteMask = blendState.enable_color_write_mask ?
  5486. blendState.color_write_mask :
  5487. 0xF;
  5488. colorBlendAttachmentStates[i].blendEnable =
  5489. blendState.enable_blend;
  5490. colorBlendAttachmentStates[i].srcColorBlendFactor = SDLToVK_BlendFactor[blendState.src_color_blendfactor];
  5491. colorBlendAttachmentStates[i].dstColorBlendFactor = SDLToVK_BlendFactor[blendState.dst_color_blendfactor];
  5492. colorBlendAttachmentStates[i].colorBlendOp = SDLToVK_BlendOp[blendState.color_blend_op];
  5493. colorBlendAttachmentStates[i].srcAlphaBlendFactor = SDLToVK_BlendFactor[blendState.src_alpha_blendfactor];
  5494. colorBlendAttachmentStates[i].dstAlphaBlendFactor = SDLToVK_BlendFactor[blendState.dst_alpha_blendfactor];
  5495. colorBlendAttachmentStates[i].alphaBlendOp = SDLToVK_BlendOp[blendState.alpha_blend_op];
  5496. colorBlendAttachmentStates[i].colorWriteMask =
  5497. colorWriteMask;
  5498. }
  5499. colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
  5500. colorBlendStateCreateInfo.pNext = NULL;
  5501. colorBlendStateCreateInfo.flags = 0;
  5502. colorBlendStateCreateInfo.attachmentCount =
  5503. createinfo->target_info.num_color_targets;
  5504. colorBlendStateCreateInfo.pAttachments =
  5505. colorBlendAttachmentStates;
  5506. colorBlendStateCreateInfo.blendConstants[0] = 1.0f;
  5507. colorBlendStateCreateInfo.blendConstants[1] = 1.0f;
  5508. colorBlendStateCreateInfo.blendConstants[2] = 1.0f;
  5509. colorBlendStateCreateInfo.blendConstants[3] = 1.0f;
  5510. // We don't support LogicOp, so this is easy.
  5511. colorBlendStateCreateInfo.logicOpEnable = VK_FALSE;
  5512. colorBlendStateCreateInfo.logicOp = 0;
  5513. // Pipeline Layout
  5514. graphicsPipeline->resourceLayout =
  5515. VULKAN_INTERNAL_FetchGraphicsPipelineResourceLayout(
  5516. renderer,
  5517. graphicsPipeline->vertexShader,
  5518. graphicsPipeline->fragmentShader);
  5519. if (graphicsPipeline->resourceLayout == NULL) {
  5520. SDL_stack_free(vertexInputBindingDescriptions);
  5521. SDL_stack_free(vertexInputAttributeDescriptions);
  5522. SDL_stack_free(colorBlendAttachmentStates);
  5523. SDL_free(graphicsPipeline);
  5524. SET_STRING_ERROR_AND_RETURN("Failed to initialize pipeline resource layout!", NULL);
  5525. }
  5526. // Pipeline
  5527. vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
  5528. vkPipelineCreateInfo.pNext = NULL;
  5529. vkPipelineCreateInfo.flags = 0;
  5530. vkPipelineCreateInfo.stageCount = 2;
  5531. vkPipelineCreateInfo.pStages = shaderStageCreateInfos;
  5532. vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo;
  5533. vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
  5534. vkPipelineCreateInfo.pTessellationState = VK_NULL_HANDLE;
  5535. vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
  5536. vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
  5537. vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
  5538. vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo;
  5539. vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
  5540. vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
  5541. vkPipelineCreateInfo.layout = graphicsPipeline->resourceLayout->pipelineLayout;
  5542. vkPipelineCreateInfo.renderPass = transientRenderPass;
  5543. vkPipelineCreateInfo.subpass = 0;
  5544. vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
  5545. vkPipelineCreateInfo.basePipelineIndex = 0;
  5546. // TODO: enable pipeline caching
  5547. vulkanResult = renderer->vkCreateGraphicsPipelines(
  5548. renderer->logicalDevice,
  5549. VK_NULL_HANDLE,
  5550. 1,
  5551. &vkPipelineCreateInfo,
  5552. NULL,
  5553. &graphicsPipeline->pipeline);
  5554. SDL_stack_free(vertexInputBindingDescriptions);
  5555. SDL_stack_free(vertexInputAttributeDescriptions);
  5556. SDL_stack_free(colorBlendAttachmentStates);
  5557. renderer->vkDestroyRenderPass(
  5558. renderer->logicalDevice,
  5559. transientRenderPass,
  5560. NULL);
  5561. if (vulkanResult != VK_SUCCESS) {
  5562. SDL_free(graphicsPipeline);
  5563. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateGraphicsPipelines, NULL);
  5564. }
  5565. SDL_SetAtomicInt(&graphicsPipeline->referenceCount, 0);
  5566. if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_GRAPHICSPIPELINE_CREATE_NAME_STRING)) {
  5567. VkDebugUtilsObjectNameInfoEXT nameInfo;
  5568. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  5569. nameInfo.pNext = NULL;
  5570. nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_GRAPHICSPIPELINE_CREATE_NAME_STRING, NULL);
  5571. nameInfo.objectType = VK_OBJECT_TYPE_PIPELINE;
  5572. nameInfo.objectHandle = (uint64_t)graphicsPipeline->pipeline;
  5573. renderer->vkSetDebugUtilsObjectNameEXT(
  5574. renderer->logicalDevice,
  5575. &nameInfo);
  5576. }
  5577. // Put this data in the pipeline we can do validation in gpu.c
  5578. graphicsPipeline->header.num_vertex_samplers = graphicsPipeline->resourceLayout->vertexSamplerCount;
  5579. graphicsPipeline->header.num_vertex_storage_buffers = graphicsPipeline->resourceLayout->vertexStorageBufferCount;
  5580. graphicsPipeline->header.num_vertex_storage_textures = graphicsPipeline->resourceLayout->vertexStorageTextureCount;
  5581. graphicsPipeline->header.num_vertex_uniform_buffers = graphicsPipeline->resourceLayout->vertexUniformBufferCount;
  5582. graphicsPipeline->header.num_fragment_samplers = graphicsPipeline->resourceLayout->fragmentSamplerCount;
  5583. graphicsPipeline->header.num_fragment_storage_buffers = graphicsPipeline->resourceLayout->fragmentStorageBufferCount;
  5584. graphicsPipeline->header.num_fragment_storage_textures = graphicsPipeline->resourceLayout->fragmentStorageTextureCount;
  5585. graphicsPipeline->header.num_fragment_uniform_buffers = graphicsPipeline->resourceLayout->fragmentUniformBufferCount;
  5586. return (SDL_GPUGraphicsPipeline *)graphicsPipeline;
  5587. }
  5588. static bool VULKAN_INTERNAL_IsValidShaderBytecode(
  5589. const Uint8 *code,
  5590. size_t codeSize)
  5591. {
  5592. // SPIR-V bytecode has a 4 byte header containing 0x07230203. SPIR-V is
  5593. // defined as a stream of words and not a stream of bytes so both byte
  5594. // orders need to be considered.
  5595. //
  5596. // FIXME: It is uncertain if drivers are able to load both byte orders. If
  5597. // needed we may need to do an optional swizzle internally so apps can
  5598. // continue to treat shader code as an opaque blob.
  5599. if (codeSize < 4 || code == NULL) {
  5600. return false;
  5601. }
  5602. const Uint32 magic = 0x07230203;
  5603. const Uint32 magicInv = 0x03022307;
  5604. return SDL_memcmp(code, &magic, 4) == 0 || SDL_memcmp(code, &magicInv, 4) == 0;
  5605. }
// Creates a compute pipeline from SPIR-V bytecode.
// Validates the shader format and magic number, builds a shader module,
// fetches (or creates) the matching resource layout, and creates the
// VkPipeline. Returns NULL (with the SDL error string set) on failure.
static SDL_GPUComputePipeline *VULKAN_CreateComputePipeline(
    SDL_GPURenderer *driverData,
    const SDL_GPUComputePipelineCreateInfo *createinfo)
{
    VkShaderModuleCreateInfo shaderModuleCreateInfo;
    VkComputePipelineCreateInfo vkShaderCreateInfo;
    VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo;
    VkResult vulkanResult;
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanComputePipeline *vulkanComputePipeline;

    // The Vulkan backend only consumes SPIR-V.
    if (createinfo->format != SDL_GPU_SHADERFORMAT_SPIRV) {
        SET_STRING_ERROR_AND_RETURN("Incompatible shader format for Vulkan!", NULL);
    }

    // Reject blobs that don't start with the SPIR-V magic number.
    if (!VULKAN_INTERNAL_IsValidShaderBytecode(createinfo->code, createinfo->code_size)) {
        SET_STRING_ERROR_AND_RETURN("The provided shader code is not valid SPIR-V!", NULL);
    }

    vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline));
    shaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    shaderModuleCreateInfo.pNext = NULL;
    shaderModuleCreateInfo.flags = 0;
    shaderModuleCreateInfo.codeSize = createinfo->code_size;
    shaderModuleCreateInfo.pCode = (Uint32 *)createinfo->code;

    vulkanResult = renderer->vkCreateShaderModule(
        renderer->logicalDevice,
        &shaderModuleCreateInfo,
        NULL,
        &vulkanComputePipeline->shaderModule);

    if (vulkanResult != VK_SUCCESS) {
        SDL_free(vulkanComputePipeline);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateShaderModule, NULL);
    }

    pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    pipelineShaderStageCreateInfo.pNext = NULL;
    pipelineShaderStageCreateInfo.flags = 0;
    pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
    pipelineShaderStageCreateInfo.module = vulkanComputePipeline->shaderModule;
    pipelineShaderStageCreateInfo.pName = createinfo->entrypoint;
    pipelineShaderStageCreateInfo.pSpecializationInfo = NULL;

    vulkanComputePipeline->resourceLayout = VULKAN_INTERNAL_FetchComputePipelineResourceLayout(
        renderer,
        createinfo);

    if (vulkanComputePipeline->resourceLayout == NULL) {
        // NOTE(review): no error string is set on this path — presumably the
        // fetch helper sets one itself; verify.
        renderer->vkDestroyShaderModule(
            renderer->logicalDevice,
            vulkanComputePipeline->shaderModule,
            NULL);
        SDL_free(vulkanComputePipeline);
        return NULL;
    }

    vkShaderCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
    vkShaderCreateInfo.pNext = NULL;
    vkShaderCreateInfo.flags = 0;
    vkShaderCreateInfo.stage = pipelineShaderStageCreateInfo;
    vkShaderCreateInfo.layout = vulkanComputePipeline->resourceLayout->pipelineLayout;
    vkShaderCreateInfo.basePipelineHandle = (VkPipeline)VK_NULL_HANDLE;
    vkShaderCreateInfo.basePipelineIndex = 0;

    vulkanResult = renderer->vkCreateComputePipelines(
        renderer->logicalDevice,
        (VkPipelineCache)VK_NULL_HANDLE, // TODO: pipeline caching not enabled here
        1,
        &vkShaderCreateInfo,
        NULL,
        &vulkanComputePipeline->pipeline);

    if (vulkanResult != VK_SUCCESS) {
        // Destroys the shader module and frees the pipeline struct.
        VULKAN_INTERNAL_DestroyComputePipeline(renderer, vulkanComputePipeline);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateComputePipeline, NULL);
        // NOTE(review): likely unreachable if the macro returns on failure;
        // kept as a safety net.
        return NULL;
    }

    SDL_SetAtomicInt(&vulkanComputePipeline->referenceCount, 0);

    // Apply a debug name to the pipeline if one was requested and the
    // debug-utils extension is available.
    if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_COMPUTEPIPELINE_CREATE_NAME_STRING)) {
        VkDebugUtilsObjectNameInfoEXT nameInfo;
        nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
        nameInfo.pNext = NULL;
        nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_COMPUTEPIPELINE_CREATE_NAME_STRING, NULL);
        nameInfo.objectType = VK_OBJECT_TYPE_PIPELINE;
        nameInfo.objectHandle = (uint64_t)vulkanComputePipeline->pipeline;

        renderer->vkSetDebugUtilsObjectNameEXT(
            renderer->logicalDevice,
            &nameInfo);
    }

    // Track these here for debug layer
    vulkanComputePipeline->header.numSamplers = vulkanComputePipeline->resourceLayout->numSamplers;
    vulkanComputePipeline->header.numReadonlyStorageTextures = vulkanComputePipeline->resourceLayout->numReadonlyStorageTextures;
    vulkanComputePipeline->header.numReadonlyStorageBuffers = vulkanComputePipeline->resourceLayout->numReadonlyStorageBuffers;
    vulkanComputePipeline->header.numReadWriteStorageTextures = vulkanComputePipeline->resourceLayout->numReadWriteStorageTextures;
    vulkanComputePipeline->header.numReadWriteStorageBuffers = vulkanComputePipeline->resourceLayout->numReadWriteStorageBuffers;
    vulkanComputePipeline->header.numUniformBuffers = vulkanComputePipeline->resourceLayout->numUniformBuffers;

    return (SDL_GPUComputePipeline *)vulkanComputePipeline;
}
  5695. static SDL_GPUSampler *VULKAN_CreateSampler(
  5696. SDL_GPURenderer *driverData,
  5697. const SDL_GPUSamplerCreateInfo *createinfo)
  5698. {
  5699. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5700. VulkanSampler *vulkanSampler = SDL_malloc(sizeof(VulkanSampler));
  5701. VkResult vulkanResult;
  5702. VkSamplerCreateInfo vkSamplerCreateInfo;
  5703. vkSamplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  5704. vkSamplerCreateInfo.pNext = NULL;
  5705. vkSamplerCreateInfo.flags = 0;
  5706. vkSamplerCreateInfo.magFilter = SDLToVK_Filter[createinfo->mag_filter];
  5707. vkSamplerCreateInfo.minFilter = SDLToVK_Filter[createinfo->min_filter];
  5708. vkSamplerCreateInfo.mipmapMode = SDLToVK_SamplerMipmapMode[createinfo->mipmap_mode];
  5709. vkSamplerCreateInfo.addressModeU = SDLToVK_SamplerAddressMode[createinfo->address_mode_u];
  5710. vkSamplerCreateInfo.addressModeV = SDLToVK_SamplerAddressMode[createinfo->address_mode_v];
  5711. vkSamplerCreateInfo.addressModeW = SDLToVK_SamplerAddressMode[createinfo->address_mode_w];
  5712. vkSamplerCreateInfo.mipLodBias = createinfo->mip_lod_bias;
  5713. vkSamplerCreateInfo.anisotropyEnable = createinfo->enable_anisotropy;
  5714. vkSamplerCreateInfo.maxAnisotropy = createinfo->max_anisotropy;
  5715. vkSamplerCreateInfo.compareEnable = createinfo->enable_compare;
  5716. vkSamplerCreateInfo.compareOp = SDLToVK_CompareOp[createinfo->compare_op];
  5717. vkSamplerCreateInfo.minLod = createinfo->min_lod;
  5718. vkSamplerCreateInfo.maxLod = createinfo->max_lod;
  5719. vkSamplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // arbitrary, unused
  5720. vkSamplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
  5721. vulkanResult = renderer->vkCreateSampler(
  5722. renderer->logicalDevice,
  5723. &vkSamplerCreateInfo,
  5724. NULL,
  5725. &vulkanSampler->sampler);
  5726. if (vulkanResult != VK_SUCCESS) {
  5727. SDL_free(vulkanSampler);
  5728. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSampler, NULL);
  5729. }
  5730. SDL_SetAtomicInt(&vulkanSampler->referenceCount, 0);
  5731. if (renderer->debugMode && renderer->supportsDebugUtils && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_SAMPLER_CREATE_NAME_STRING)) {
  5732. VkDebugUtilsObjectNameInfoEXT nameInfo;
  5733. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  5734. nameInfo.pNext = NULL;
  5735. nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_SAMPLER_CREATE_NAME_STRING, NULL);
  5736. nameInfo.objectType = VK_OBJECT_TYPE_SAMPLER;
  5737. nameInfo.objectHandle = (uint64_t)vulkanSampler->sampler;
  5738. renderer->vkSetDebugUtilsObjectNameEXT(
  5739. renderer->logicalDevice,
  5740. &nameInfo);
  5741. }
  5742. return (SDL_GPUSampler *)vulkanSampler;
  5743. }
  5744. static SDL_GPUShader *VULKAN_CreateShader(
  5745. SDL_GPURenderer *driverData,
  5746. const SDL_GPUShaderCreateInfo *createinfo)
  5747. {
  5748. VulkanShader *vulkanShader;
  5749. VkResult vulkanResult;
  5750. VkShaderModuleCreateInfo vkShaderModuleCreateInfo;
  5751. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5752. if (!VULKAN_INTERNAL_IsValidShaderBytecode(createinfo->code, createinfo->code_size)) {
  5753. SET_STRING_ERROR_AND_RETURN("The provided shader code is not valid SPIR-V!", NULL);
  5754. }
  5755. vulkanShader = SDL_malloc(sizeof(VulkanShader));
  5756. vkShaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  5757. vkShaderModuleCreateInfo.pNext = NULL;
  5758. vkShaderModuleCreateInfo.flags = 0;
  5759. vkShaderModuleCreateInfo.codeSize = createinfo->code_size;
  5760. vkShaderModuleCreateInfo.pCode = (Uint32 *)createinfo->code;
  5761. vulkanResult = renderer->vkCreateShaderModule(
  5762. renderer->logicalDevice,
  5763. &vkShaderModuleCreateInfo,
  5764. NULL,
  5765. &vulkanShader->shaderModule);
  5766. if (vulkanResult != VK_SUCCESS) {
  5767. SDL_free(vulkanShader);
  5768. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateShaderModule, NULL);
  5769. }
  5770. const char *entrypoint = createinfo->entrypoint;
  5771. if (!entrypoint) {
  5772. entrypoint = "main";
  5773. }
  5774. vulkanShader->entrypointName = SDL_strdup(entrypoint);
  5775. vulkanShader->stage = createinfo->stage;
  5776. vulkanShader->numSamplers = createinfo->num_samplers;
  5777. vulkanShader->numStorageTextures = createinfo->num_storage_textures;
  5778. vulkanShader->numStorageBuffers = createinfo->num_storage_buffers;
  5779. vulkanShader->numUniformBuffers = createinfo->num_uniform_buffers;
  5780. SDL_SetAtomicInt(&vulkanShader->referenceCount, 0);
  5781. if (renderer->debugMode && SDL_HasProperty(createinfo->props, SDL_PROP_GPU_SHADER_CREATE_NAME_STRING)) {
  5782. VkDebugUtilsObjectNameInfoEXT nameInfo;
  5783. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  5784. nameInfo.pNext = NULL;
  5785. nameInfo.pObjectName = SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_SHADER_CREATE_NAME_STRING, NULL);
  5786. nameInfo.objectType = VK_OBJECT_TYPE_SHADER_MODULE;
  5787. nameInfo.objectHandle = (uint64_t)vulkanShader->shaderModule;
  5788. renderer->vkSetDebugUtilsObjectNameEXT(
  5789. renderer->logicalDevice,
  5790. &nameInfo);
  5791. }
  5792. return (SDL_GPUShader *)vulkanShader;
  5793. }
  5794. static bool VULKAN_SupportsSampleCount(
  5795. SDL_GPURenderer *driverData,
  5796. SDL_GPUTextureFormat format,
  5797. SDL_GPUSampleCount sampleCount)
  5798. {
  5799. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5800. VkSampleCountFlags bits = IsDepthFormat(format) ? renderer->physicalDeviceProperties.properties.limits.framebufferDepthSampleCounts : renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
  5801. VkSampleCountFlagBits vkSampleCount = SDLToVK_SampleCount[sampleCount];
  5802. return !!(bits & vkSampleCount);
  5803. }
  5804. static SDL_GPUTexture *VULKAN_CreateTexture(
  5805. SDL_GPURenderer *driverData,
  5806. const SDL_GPUTextureCreateInfo *createinfo)
  5807. {
  5808. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5809. VulkanTexture *texture;
  5810. VulkanTextureContainer *container;
  5811. texture = VULKAN_INTERNAL_CreateTexture(
  5812. renderer,
  5813. true,
  5814. createinfo);
  5815. if (texture == NULL) {
  5816. return NULL;
  5817. }
  5818. container = SDL_malloc(sizeof(VulkanTextureContainer));
  5819. // Copy properties so we don't lose information when the client destroys them
  5820. container->header.info = *createinfo;
  5821. container->header.info.props = SDL_CreateProperties();
  5822. if (createinfo->props) {
  5823. SDL_CopyProperties(createinfo->props, container->header.info.props);
  5824. }
  5825. container->canBeCycled = true;
  5826. container->activeTexture = texture;
  5827. container->textureCapacity = 1;
  5828. container->textureCount = 1;
  5829. container->textures = SDL_malloc(
  5830. container->textureCapacity * sizeof(VulkanTexture *));
  5831. container->textures[0] = container->activeTexture;
  5832. container->debugName = NULL;
  5833. if (SDL_HasProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING)) {
  5834. container->debugName = SDL_strdup(SDL_GetStringProperty(createinfo->props, SDL_PROP_GPU_TEXTURE_CREATE_NAME_STRING, NULL));
  5835. }
  5836. texture->container = container;
  5837. texture->containerIndex = 0;
  5838. return (SDL_GPUTexture *)container;
  5839. }
  5840. static SDL_GPUBuffer *VULKAN_CreateBuffer(
  5841. SDL_GPURenderer *driverData,
  5842. SDL_GPUBufferUsageFlags usageFlags,
  5843. Uint32 size,
  5844. const char *debugName)
  5845. {
  5846. return (SDL_GPUBuffer *)VULKAN_INTERNAL_CreateBufferContainer(
  5847. (VulkanRenderer *)driverData,
  5848. (VkDeviceSize)size,
  5849. usageFlags,
  5850. VULKAN_BUFFER_TYPE_GPU,
  5851. false,
  5852. debugName);
  5853. }
  5854. static VulkanUniformBuffer *VULKAN_INTERNAL_CreateUniformBuffer(
  5855. VulkanRenderer *renderer,
  5856. Uint32 size)
  5857. {
  5858. VulkanUniformBuffer *uniformBuffer = SDL_calloc(1, sizeof(VulkanUniformBuffer));
  5859. uniformBuffer->buffer = VULKAN_INTERNAL_CreateBuffer(
  5860. renderer,
  5861. (VkDeviceSize)size,
  5862. 0,
  5863. VULKAN_BUFFER_TYPE_UNIFORM,
  5864. false,
  5865. NULL);
  5866. uniformBuffer->drawOffset = 0;
  5867. uniformBuffer->writeOffset = 0;
  5868. uniformBuffer->buffer->uniformBufferForDefrag = uniformBuffer;
  5869. return uniformBuffer;
  5870. }
  5871. static SDL_GPUTransferBuffer *VULKAN_CreateTransferBuffer(
  5872. SDL_GPURenderer *driverData,
  5873. SDL_GPUTransferBufferUsage usage,
  5874. Uint32 size,
  5875. const char *debugName)
  5876. {
  5877. return (SDL_GPUTransferBuffer *)VULKAN_INTERNAL_CreateBufferContainer(
  5878. (VulkanRenderer *)driverData,
  5879. (VkDeviceSize)size,
  5880. 0,
  5881. VULKAN_BUFFER_TYPE_TRANSFER,
  5882. true, // Dedicated allocations preserve the data even if a defrag is triggered.
  5883. debugName);
  5884. }
// Queues a texture for deferred destruction. The texture is appended to
// renderer->texturesToDestroy under disposeLock; the actual Vulkan teardown
// happens later in the dispose pass.
static void VULKAN_INTERNAL_ReleaseTexture(
    VulkanRenderer *renderer,
    VulkanTexture *vulkanTexture)
{
    // Idempotent: a texture already queued for destruction is skipped.
    if (vulkanTexture->markedForDestroy) {
        return;
    }

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array as needed (capacity-doubling).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->texturesToDestroy,
        VulkanTexture *,
        renderer->texturesToDestroyCount + 1,
        renderer->texturesToDestroyCapacity,
        renderer->texturesToDestroyCapacity * 2);

    renderer->texturesToDestroy[renderer->texturesToDestroyCount] = vulkanTexture;
    renderer->texturesToDestroyCount += 1;

    vulkanTexture->markedForDestroy = true;

    SDL_UnlockMutex(renderer->disposeLock);
}
  5904. static void VULKAN_ReleaseTexture(
  5905. SDL_GPURenderer *driverData,
  5906. SDL_GPUTexture *texture)
  5907. {
  5908. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5909. VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)texture;
  5910. Uint32 i;
  5911. SDL_LockMutex(renderer->disposeLock);
  5912. for (i = 0; i < vulkanTextureContainer->textureCount; i += 1) {
  5913. VULKAN_INTERNAL_ReleaseTexture(renderer, vulkanTextureContainer->textures[i]);
  5914. }
  5915. SDL_DestroyProperties(vulkanTextureContainer->header.info.props);
  5916. // Containers are just client handles, so we can destroy immediately
  5917. SDL_free(vulkanTextureContainer->debugName);
  5918. SDL_free(vulkanTextureContainer->textures);
  5919. SDL_free(vulkanTextureContainer);
  5920. SDL_UnlockMutex(renderer->disposeLock);
  5921. }
// Queues a sampler for deferred destruction on the renderer's dispose list.
static void VULKAN_ReleaseSampler(
    SDL_GPURenderer *driverData,
    SDL_GPUSampler *sampler)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanSampler *vulkanSampler = (VulkanSampler *)sampler;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array as needed (capacity-doubling).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->samplersToDestroy,
        VulkanSampler *,
        renderer->samplersToDestroyCount + 1,
        renderer->samplersToDestroyCapacity,
        renderer->samplersToDestroyCapacity * 2);

    renderer->samplersToDestroy[renderer->samplersToDestroyCount] = vulkanSampler;
    renderer->samplersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
// Queues a buffer for deferred destruction on the renderer's dispose list.
static void VULKAN_INTERNAL_ReleaseBuffer(
    VulkanRenderer *renderer,
    VulkanBuffer *vulkanBuffer)
{
    // Idempotent: a buffer already queued for destruction is skipped.
    if (vulkanBuffer->markedForDestroy) {
        return;
    }

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array as needed (capacity-doubling).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->buffersToDestroy,
        VulkanBuffer *,
        renderer->buffersToDestroyCount + 1,
        renderer->buffersToDestroyCapacity,
        renderer->buffersToDestroyCapacity * 2);

    renderer->buffersToDestroy[renderer->buffersToDestroyCount] = vulkanBuffer;
    renderer->buffersToDestroyCount += 1;

    vulkanBuffer->markedForDestroy = true;
    // Detach from the (soon-to-be-freed) client container.
    vulkanBuffer->container = NULL;

    SDL_UnlockMutex(renderer->disposeLock);
}
  5959. static void VULKAN_INTERNAL_ReleaseBufferContainer(
  5960. VulkanRenderer *renderer,
  5961. VulkanBufferContainer *bufferContainer)
  5962. {
  5963. Uint32 i;
  5964. SDL_LockMutex(renderer->disposeLock);
  5965. for (i = 0; i < bufferContainer->bufferCount; i += 1) {
  5966. VULKAN_INTERNAL_ReleaseBuffer(renderer, bufferContainer->buffers[i]);
  5967. }
  5968. // Containers are just client handles, so we can free immediately
  5969. if (bufferContainer->debugName != NULL) {
  5970. SDL_free(bufferContainer->debugName);
  5971. bufferContainer->debugName = NULL;
  5972. }
  5973. SDL_free(bufferContainer->buffers);
  5974. SDL_free(bufferContainer);
  5975. SDL_UnlockMutex(renderer->disposeLock);
  5976. }
  5977. static void VULKAN_ReleaseBuffer(
  5978. SDL_GPURenderer *driverData,
  5979. SDL_GPUBuffer *buffer)
  5980. {
  5981. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5982. VulkanBufferContainer *vulkanBufferContainer = (VulkanBufferContainer *)buffer;
  5983. VULKAN_INTERNAL_ReleaseBufferContainer(
  5984. renderer,
  5985. vulkanBufferContainer);
  5986. }
  5987. static void VULKAN_ReleaseTransferBuffer(
  5988. SDL_GPURenderer *driverData,
  5989. SDL_GPUTransferBuffer *transferBuffer)
  5990. {
  5991. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5992. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;
  5993. VULKAN_INTERNAL_ReleaseBufferContainer(
  5994. renderer,
  5995. transferBufferContainer);
  5996. }
// Queues a shader for deferred destruction on the renderer's dispose list.
static void VULKAN_ReleaseShader(
    SDL_GPURenderer *driverData,
    SDL_GPUShader *shader)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanShader *vulkanShader = (VulkanShader *)shader;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array as needed (capacity-doubling).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->shadersToDestroy,
        VulkanShader *,
        renderer->shadersToDestroyCount + 1,
        renderer->shadersToDestroyCapacity,
        renderer->shadersToDestroyCapacity * 2);

    renderer->shadersToDestroy[renderer->shadersToDestroyCount] = vulkanShader;
    renderer->shadersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
// Queues a compute pipeline for deferred destruction on the dispose list.
static void VULKAN_ReleaseComputePipeline(
    SDL_GPURenderer *driverData,
    SDL_GPUComputePipeline *computePipeline)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array as needed (capacity-doubling).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->computePipelinesToDestroy,
        VulkanComputePipeline *,
        renderer->computePipelinesToDestroyCount + 1,
        renderer->computePipelinesToDestroyCapacity,
        renderer->computePipelinesToDestroyCapacity * 2);

    renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
    renderer->computePipelinesToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
// Queues a graphics pipeline for deferred destruction on the dispose list.
static void VULKAN_ReleaseGraphicsPipeline(
    SDL_GPURenderer *driverData,
    SDL_GPUGraphicsPipeline *graphicsPipeline)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline *)graphicsPipeline;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array as needed (capacity-doubling).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->graphicsPipelinesToDestroy,
        VulkanGraphicsPipeline *,
        renderer->graphicsPipelinesToDestroyCount + 1,
        renderer->graphicsPipelinesToDestroyCapacity,
        renderer->graphicsPipelinesToDestroyCapacity * 2);

    renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
    renderer->graphicsPipelinesToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
  6048. // Command Buffer render state
// Returns a VkRenderPass compatible with the given color/depth target set.
// Render passes are cached in renderer->renderPassHashTable, keyed on
// formats, load/store ops, resolve formats, and sample count; a cache miss
// creates and caches a new pass. Returns VK_NULL_HANDLE on failure.
static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
    VulkanRenderer *renderer,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
{
    VulkanRenderPassHashTableValue *renderPassWrapper = NULL;
    VkRenderPass renderPassHandle;
    RenderPassHashTableKey key;
    Uint32 i;

    SDL_zero(key);

    for (i = 0; i < numColorTargets; i += 1) {
        key.colorTargetDescriptions[i].format = SDLToVK_TextureFormat[((VulkanTextureContainer *)colorTargetInfos[i].texture)->header.info.format];
        key.colorTargetDescriptions[i].loadOp = colorTargetInfos[i].load_op;
        key.colorTargetDescriptions[i].storeOp = colorTargetInfos[i].store_op;

        // NOTE(review): resolve targets are keyed on resolve_texture != NULL
        // here, while the framebuffer cache keys on a resolving store_op —
        // confirm these conditions always coincide for valid submissions.
        if (colorTargetInfos[i].resolve_texture != NULL) {
            key.resolveTargetFormats[key.numResolveTargets] = SDLToVK_TextureFormat[((VulkanTextureContainer *)colorTargetInfos[i].resolve_texture)->header.info.format];
            key.numResolveTargets += 1;
        }
    }

    // Sample count: taken from the first color target, or from the depth
    // target when rendering depth-only; defaults to 1x.
    key.sampleCount = VK_SAMPLE_COUNT_1_BIT;
    if (numColorTargets > 0) {
        key.sampleCount = SDLToVK_SampleCount[((VulkanTextureContainer *)colorTargetInfos[0].texture)->header.info.sample_count];
    } else if (numColorTargets == 0 && depthStencilTargetInfo != NULL) {
        key.sampleCount = SDLToVK_SampleCount[((VulkanTextureContainer *)depthStencilTargetInfo->texture)->header.info.sample_count];
    }

    key.numColorTargets = numColorTargets;

    if (depthStencilTargetInfo == NULL) {
        // No depth target: fill the slot with inert values so keys hash
        // consistently.
        key.depthStencilTargetDescription.format = 0;
        key.depthStencilTargetDescription.loadOp = SDL_GPU_LOADOP_DONT_CARE;
        key.depthStencilTargetDescription.storeOp = SDL_GPU_STOREOP_DONT_CARE;
        key.depthStencilTargetDescription.stencilLoadOp = SDL_GPU_LOADOP_DONT_CARE;
        key.depthStencilTargetDescription.stencilStoreOp = SDL_GPU_STOREOP_DONT_CARE;
    } else {
        key.depthStencilTargetDescription.format = SDLToVK_TextureFormat[((VulkanTextureContainer *)depthStencilTargetInfo->texture)->header.info.format];
        key.depthStencilTargetDescription.loadOp = depthStencilTargetInfo->load_op;
        key.depthStencilTargetDescription.storeOp = depthStencilTargetInfo->store_op;
        key.depthStencilTargetDescription.stencilLoadOp = depthStencilTargetInfo->stencil_load_op;
        key.depthStencilTargetDescription.stencilStoreOp = depthStencilTargetInfo->stencil_store_op;
    }

    SDL_LockMutex(renderer->renderPassFetchLock);

    bool result = SDL_FindInHashTable(
        renderer->renderPassHashTable,
        (const void *)&key,
        (const void **)&renderPassWrapper);

    if (result) {
        SDL_UnlockMutex(renderer->renderPassFetchLock);
        return renderPassWrapper->handle;
    }

    // Cache miss: create the pass while holding the lock so concurrent
    // fetchers can't insert duplicates.
    renderPassHandle = VULKAN_INTERNAL_CreateRenderPass(
        renderer,
        colorTargetInfos,
        numColorTargets,
        depthStencilTargetInfo);

    if (renderPassHandle == VK_NULL_HANDLE) {
        SDL_UnlockMutex(renderer->renderPassFetchLock);
        return VK_NULL_HANDLE;
    }

    // Have to malloc the key to store it in the hashtable
    RenderPassHashTableKey *allocedKey = SDL_malloc(sizeof(RenderPassHashTableKey));
    SDL_memcpy(allocedKey, &key, sizeof(RenderPassHashTableKey));

    renderPassWrapper = SDL_malloc(sizeof(VulkanRenderPassHashTableValue));
    renderPassWrapper->handle = renderPassHandle;

    SDL_InsertIntoHashTable(
        renderer->renderPassHashTable,
        (const void *)allocedKey,
        (const void *)renderPassWrapper, true);

    SDL_UnlockMutex(renderer->renderPassFetchLock);
    return renderPassHandle;
}
  6119. static VulkanFramebuffer *VULKAN_INTERNAL_FetchFramebuffer(
  6120. VulkanRenderer *renderer,
  6121. VkRenderPass renderPass,
  6122. const SDL_GPUColorTargetInfo *colorTargetInfos,
  6123. Uint32 numColorTargets,
  6124. const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo,
  6125. Uint32 width,
  6126. Uint32 height)
  6127. {
  6128. VulkanFramebuffer *vulkanFramebuffer = NULL;
  6129. VkFramebufferCreateInfo framebufferInfo;
  6130. VkResult result;
  6131. VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
  6132. FramebufferHashTableKey key;
  6133. Uint32 attachmentCount = 0;
  6134. Uint32 i;
  6135. SDL_zero(imageViewAttachments);
  6136. SDL_zero(key);
  6137. key.numColorTargets = numColorTargets;
  6138. for (i = 0; i < numColorTargets; i += 1) {
  6139. VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
  6140. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6141. container,
  6142. container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
  6143. colorTargetInfos[i].mip_level);
  6144. Uint32 rtvIndex =
  6145. container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorTargetInfos[i].layer_or_depth_plane : 0;
  6146. key.colorAttachmentViews[i] = subresource->renderTargetViews[rtvIndex];
  6147. if (colorTargetInfos[i].resolve_texture != NULL) {
  6148. VulkanTextureContainer *resolveTextureContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
  6149. VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6150. resolveTextureContainer,
  6151. colorTargetInfos[i].layer_or_depth_plane,
  6152. colorTargetInfos[i].mip_level);
  6153. key.resolveAttachmentViews[key.numResolveAttachments] = resolveSubresource->renderTargetViews[0];
  6154. key.numResolveAttachments += 1;
  6155. }
  6156. }
  6157. if (depthStencilTargetInfo == NULL) {
  6158. key.depthStencilAttachmentView = VK_NULL_HANDLE;
  6159. } else {
  6160. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6161. (VulkanTextureContainer *)depthStencilTargetInfo->texture,
  6162. depthStencilTargetInfo->layer,
  6163. depthStencilTargetInfo->mip_level);
  6164. key.depthStencilAttachmentView = subresource->depthStencilView;
  6165. }
  6166. key.width = width;
  6167. key.height = height;
  6168. SDL_LockMutex(renderer->framebufferFetchLock);
  6169. bool findResult = SDL_FindInHashTable(
  6170. renderer->framebufferHashTable,
  6171. (const void *)&key,
  6172. (const void **)&vulkanFramebuffer);
  6173. if (findResult) {
  6174. SDL_UnlockMutex(renderer->framebufferFetchLock);
  6175. return vulkanFramebuffer;
  6176. }
  6177. vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer));
  6178. SDL_SetAtomicInt(&vulkanFramebuffer->referenceCount, 0);
  6179. // Create a new framebuffer
  6180. for (i = 0; i < numColorTargets; i += 1) {
  6181. VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
  6182. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6183. container,
  6184. container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
  6185. colorTargetInfos[i].mip_level);
  6186. Uint32 rtvIndex =
  6187. container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorTargetInfos[i].layer_or_depth_plane : 0;
  6188. imageViewAttachments[attachmentCount] = subresource->renderTargetViews[rtvIndex];
  6189. attachmentCount += 1;
  6190. if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
  6191. VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
  6192. VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6193. resolveContainer,
  6194. colorTargetInfos[i].resolve_layer,
  6195. colorTargetInfos[i].resolve_mip_level);
  6196. imageViewAttachments[attachmentCount] = resolveSubresource->renderTargetViews[0];
  6197. attachmentCount += 1;
  6198. }
  6199. }
  6200. if (depthStencilTargetInfo != NULL) {
  6201. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6202. (VulkanTextureContainer *)depthStencilTargetInfo->texture,
  6203. depthStencilTargetInfo->layer,
  6204. depthStencilTargetInfo->mip_level);
  6205. imageViewAttachments[attachmentCount] = subresource->depthStencilView;
  6206. attachmentCount += 1;
  6207. }
  6208. framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
  6209. framebufferInfo.pNext = NULL;
  6210. framebufferInfo.flags = 0;
  6211. framebufferInfo.renderPass = renderPass;
  6212. framebufferInfo.attachmentCount = attachmentCount;
  6213. framebufferInfo.pAttachments = imageViewAttachments;
  6214. framebufferInfo.width = key.width;
  6215. framebufferInfo.height = key.height;
  6216. framebufferInfo.layers = 1;
  6217. result = renderer->vkCreateFramebuffer(
  6218. renderer->logicalDevice,
  6219. &framebufferInfo,
  6220. NULL,
  6221. &vulkanFramebuffer->framebuffer);
  6222. if (result == VK_SUCCESS) {
  6223. // Have to malloc the key to store it in the hashtable
  6224. FramebufferHashTableKey *allocedKey = SDL_malloc(sizeof(FramebufferHashTableKey));
  6225. SDL_memcpy(allocedKey, &key, sizeof(FramebufferHashTableKey));
  6226. SDL_InsertIntoHashTable(
  6227. renderer->framebufferHashTable,
  6228. (const void *)allocedKey,
  6229. (const void *)vulkanFramebuffer, true);
  6230. } else {
  6231. SDL_free(vulkanFramebuffer);
  6232. SDL_UnlockMutex(renderer->framebufferFetchLock);
  6233. CHECK_VULKAN_ERROR_AND_RETURN(result, vkCreateFramebuffer, NULL);
  6234. }
  6235. SDL_UnlockMutex(renderer->framebufferFetchLock);
  6236. return vulkanFramebuffer;
  6237. }
  6238. static void VULKAN_INTERNAL_SetCurrentViewport(
  6239. VulkanCommandBuffer *commandBuffer,
  6240. const SDL_GPUViewport *viewport)
  6241. {
  6242. VulkanCommandBuffer *vulkanCommandBuffer = commandBuffer;
  6243. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6244. vulkanCommandBuffer->currentViewport.x = viewport->x;
  6245. vulkanCommandBuffer->currentViewport.width = viewport->w;
  6246. vulkanCommandBuffer->currentViewport.minDepth = viewport->min_depth;
  6247. vulkanCommandBuffer->currentViewport.maxDepth = viewport->max_depth;
  6248. // Viewport flip for consistency with other backends
  6249. vulkanCommandBuffer->currentViewport.y = viewport->y + viewport->h;
  6250. vulkanCommandBuffer->currentViewport.height = -viewport->h;
  6251. renderer->vkCmdSetViewport(
  6252. vulkanCommandBuffer->commandBuffer,
  6253. 0,
  6254. 1,
  6255. &vulkanCommandBuffer->currentViewport);
  6256. }
  6257. static void VULKAN_SetViewport(
  6258. SDL_GPUCommandBuffer *commandBuffer,
  6259. const SDL_GPUViewport *viewport)
  6260. {
  6261. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6262. VULKAN_INTERNAL_SetCurrentViewport(
  6263. vulkanCommandBuffer,
  6264. viewport);
  6265. }
  6266. static void VULKAN_INTERNAL_SetCurrentScissor(
  6267. VulkanCommandBuffer *vulkanCommandBuffer,
  6268. const SDL_Rect *scissor)
  6269. {
  6270. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6271. vulkanCommandBuffer->currentScissor.offset.x = scissor->x;
  6272. vulkanCommandBuffer->currentScissor.offset.y = scissor->y;
  6273. vulkanCommandBuffer->currentScissor.extent.width = scissor->w;
  6274. vulkanCommandBuffer->currentScissor.extent.height = scissor->h;
  6275. renderer->vkCmdSetScissor(
  6276. vulkanCommandBuffer->commandBuffer,
  6277. 0,
  6278. 1,
  6279. &vulkanCommandBuffer->currentScissor);
  6280. }
  6281. static void VULKAN_SetScissor(
  6282. SDL_GPUCommandBuffer *commandBuffer,
  6283. const SDL_Rect *scissor)
  6284. {
  6285. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6286. VULKAN_INTERNAL_SetCurrentScissor(
  6287. vulkanCommandBuffer,
  6288. scissor);
  6289. }
  6290. static void VULKAN_INTERNAL_SetCurrentBlendConstants(
  6291. VulkanCommandBuffer *vulkanCommandBuffer,
  6292. SDL_FColor blendConstants)
  6293. {
  6294. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6295. vulkanCommandBuffer->blendConstants[0] = blendConstants.r;
  6296. vulkanCommandBuffer->blendConstants[1] = blendConstants.g;
  6297. vulkanCommandBuffer->blendConstants[2] = blendConstants.b;
  6298. vulkanCommandBuffer->blendConstants[3] = blendConstants.a;
  6299. renderer->vkCmdSetBlendConstants(
  6300. vulkanCommandBuffer->commandBuffer,
  6301. vulkanCommandBuffer->blendConstants);
  6302. }
  6303. static void VULKAN_SetBlendConstants(
  6304. SDL_GPUCommandBuffer *commandBuffer,
  6305. SDL_FColor blendConstants)
  6306. {
  6307. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6308. VULKAN_INTERNAL_SetCurrentBlendConstants(
  6309. vulkanCommandBuffer,
  6310. blendConstants);
  6311. }
  6312. static void VULKAN_INTERNAL_SetCurrentStencilReference(
  6313. VulkanCommandBuffer *vulkanCommandBuffer,
  6314. Uint8 reference)
  6315. {
  6316. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6317. vulkanCommandBuffer->stencilRef = reference;
  6318. renderer->vkCmdSetStencilReference(
  6319. vulkanCommandBuffer->commandBuffer,
  6320. VK_STENCIL_FACE_FRONT_AND_BACK,
  6321. vulkanCommandBuffer->stencilRef);
  6322. }
  6323. static void VULKAN_SetStencilReference(
  6324. SDL_GPUCommandBuffer *commandBuffer,
  6325. Uint8 reference)
  6326. {
  6327. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6328. VULKAN_INTERNAL_SetCurrentStencilReference(
  6329. vulkanCommandBuffer,
  6330. reference);
  6331. }
  6332. static void VULKAN_BindVertexSamplers(
  6333. SDL_GPUCommandBuffer *commandBuffer,
  6334. Uint32 firstSlot,
  6335. const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
  6336. Uint32 numBindings)
  6337. {
  6338. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6339. for (Uint32 i = 0; i < numBindings; i += 1) {
  6340. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
  6341. VulkanSampler *sampler = (VulkanSampler *)textureSamplerBindings[i].sampler;
  6342. if (vulkanCommandBuffer->vertexSamplerBindings[firstSlot + i] != sampler->sampler) {
  6343. VULKAN_INTERNAL_TrackSampler(
  6344. vulkanCommandBuffer,
  6345. (VulkanSampler *)textureSamplerBindings[i].sampler);
  6346. vulkanCommandBuffer->vertexSamplerBindings[firstSlot + i] = sampler->sampler;
  6347. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6348. }
  6349. if (vulkanCommandBuffer->vertexSamplerTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
  6350. VULKAN_INTERNAL_TrackTexture(
  6351. vulkanCommandBuffer,
  6352. textureContainer->activeTexture);
  6353. vulkanCommandBuffer->vertexSamplerTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
  6354. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6355. }
  6356. }
  6357. }
  6358. static void VULKAN_BindVertexStorageTextures(
  6359. SDL_GPUCommandBuffer *commandBuffer,
  6360. Uint32 firstSlot,
  6361. SDL_GPUTexture *const *storageTextures,
  6362. Uint32 numBindings)
  6363. {
  6364. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6365. for (Uint32 i = 0; i < numBindings; i += 1) {
  6366. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
  6367. if (vulkanCommandBuffer->vertexStorageTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
  6368. VULKAN_INTERNAL_TrackTexture(
  6369. vulkanCommandBuffer,
  6370. textureContainer->activeTexture);
  6371. vulkanCommandBuffer->vertexStorageTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
  6372. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6373. }
  6374. }
  6375. }
  6376. static void VULKAN_BindVertexStorageBuffers(
  6377. SDL_GPUCommandBuffer *commandBuffer,
  6378. Uint32 firstSlot,
  6379. SDL_GPUBuffer *const *storageBuffers,
  6380. Uint32 numBindings)
  6381. {
  6382. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6383. for (Uint32 i = 0; i < numBindings; i += 1) {
  6384. VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
  6385. if (vulkanCommandBuffer->vertexStorageBufferBindings[firstSlot + i] != bufferContainer->activeBuffer->buffer) {
  6386. VULKAN_INTERNAL_TrackBuffer(
  6387. vulkanCommandBuffer,
  6388. bufferContainer->activeBuffer);
  6389. vulkanCommandBuffer->vertexStorageBufferBindings[firstSlot + i] = bufferContainer->activeBuffer->buffer;
  6390. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6391. }
  6392. }
  6393. }
  6394. static void VULKAN_BindFragmentSamplers(
  6395. SDL_GPUCommandBuffer *commandBuffer,
  6396. Uint32 firstSlot,
  6397. const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
  6398. Uint32 numBindings)
  6399. {
  6400. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6401. for (Uint32 i = 0; i < numBindings; i += 1) {
  6402. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
  6403. VulkanSampler *sampler = (VulkanSampler *)textureSamplerBindings[i].sampler;
  6404. if (vulkanCommandBuffer->fragmentSamplerBindings[firstSlot + i] != sampler->sampler) {
  6405. VULKAN_INTERNAL_TrackSampler(
  6406. vulkanCommandBuffer,
  6407. (VulkanSampler *)textureSamplerBindings[i].sampler);
  6408. vulkanCommandBuffer->fragmentSamplerBindings[firstSlot + i] = sampler->sampler;
  6409. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6410. }
  6411. if (vulkanCommandBuffer->fragmentSamplerTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
  6412. VULKAN_INTERNAL_TrackTexture(
  6413. vulkanCommandBuffer,
  6414. textureContainer->activeTexture);
  6415. vulkanCommandBuffer->fragmentSamplerTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
  6416. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6417. }
  6418. }
  6419. }
  6420. static void VULKAN_BindFragmentStorageTextures(
  6421. SDL_GPUCommandBuffer *commandBuffer,
  6422. Uint32 firstSlot,
  6423. SDL_GPUTexture *const *storageTextures,
  6424. Uint32 numBindings)
  6425. {
  6426. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6427. for (Uint32 i = 0; i < numBindings; i += 1) {
  6428. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
  6429. if (vulkanCommandBuffer->fragmentStorageTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
  6430. VULKAN_INTERNAL_TrackTexture(
  6431. vulkanCommandBuffer,
  6432. textureContainer->activeTexture);
  6433. vulkanCommandBuffer->fragmentStorageTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
  6434. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6435. }
  6436. }
  6437. }
  6438. static void VULKAN_BindFragmentStorageBuffers(
  6439. SDL_GPUCommandBuffer *commandBuffer,
  6440. Uint32 firstSlot,
  6441. SDL_GPUBuffer *const *storageBuffers,
  6442. Uint32 numBindings)
  6443. {
  6444. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6445. VulkanBufferContainer *bufferContainer;
  6446. Uint32 i;
  6447. for (i = 0; i < numBindings; i += 1) {
  6448. bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
  6449. if (vulkanCommandBuffer->fragmentStorageBufferBindings[firstSlot + i] != bufferContainer->activeBuffer->buffer) {
  6450. VULKAN_INTERNAL_TrackBuffer(
  6451. vulkanCommandBuffer,
  6452. bufferContainer->activeBuffer);
  6453. vulkanCommandBuffer->fragmentStorageBufferBindings[firstSlot + i] = bufferContainer->activeBuffer->buffer;
  6454. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6455. }
  6456. }
  6457. }
  6458. static VulkanUniformBuffer *VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6459. VulkanCommandBuffer *commandBuffer)
  6460. {
  6461. VulkanRenderer *renderer = commandBuffer->renderer;
  6462. VulkanUniformBuffer *uniformBuffer;
  6463. SDL_LockMutex(renderer->acquireUniformBufferLock);
  6464. if (renderer->uniformBufferPoolCount > 0) {
  6465. uniformBuffer = renderer->uniformBufferPool[renderer->uniformBufferPoolCount - 1];
  6466. renderer->uniformBufferPoolCount -= 1;
  6467. } else {
  6468. uniformBuffer = VULKAN_INTERNAL_CreateUniformBuffer(
  6469. renderer,
  6470. UNIFORM_BUFFER_SIZE);
  6471. }
  6472. SDL_UnlockMutex(renderer->acquireUniformBufferLock);
  6473. VULKAN_INTERNAL_TrackUniformBuffer(commandBuffer, uniformBuffer);
  6474. return uniformBuffer;
  6475. }
  6476. static void VULKAN_INTERNAL_ReturnUniformBufferToPool(
  6477. VulkanRenderer *renderer,
  6478. VulkanUniformBuffer *uniformBuffer)
  6479. {
  6480. if (renderer->uniformBufferPoolCount >= renderer->uniformBufferPoolCapacity) {
  6481. renderer->uniformBufferPoolCapacity *= 2;
  6482. renderer->uniformBufferPool = SDL_realloc(
  6483. renderer->uniformBufferPool,
  6484. renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));
  6485. }
  6486. renderer->uniformBufferPool[renderer->uniformBufferPoolCount] = uniformBuffer;
  6487. renderer->uniformBufferPoolCount += 1;
  6488. uniformBuffer->writeOffset = 0;
  6489. uniformBuffer->drawOffset = 0;
  6490. }
// Copies `length` bytes of uniform data into the current uniform buffer for
// the given stage and slot, acquiring buffers lazily and rolling over to a
// fresh buffer when the current one cannot hold another maximum-size block.
// Marks the appropriate "offsets dirty" flag so the next draw/dispatch
// rebinds with the new dynamic offset.
static void VULKAN_INTERNAL_PushUniformData(
    VulkanCommandBuffer *commandBuffer,
    VulkanUniformBufferStage uniformBufferStage,
    Uint32 slotIndex,
    const void *data,
    Uint32 length)
{
    // Round the write size up to the device's minimum UBO alignment so the
    // next push lands on a legal dynamic offset.
    Uint32 blockSize =
        VULKAN_INTERNAL_NextHighestAlignment32(
            length,
            commandBuffer->renderer->minUBOAlignment);
    VulkanUniformBuffer *uniformBuffer;

    // Select (lazily acquiring, if needed) the uniform buffer for this
    // stage/slot.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        if (commandBuffer->vertexUniformBuffers[slotIndex] == NULL) {
            commandBuffer->vertexUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->vertexUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        if (commandBuffer->fragmentUniformBuffers[slotIndex] == NULL) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->fragmentUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        if (commandBuffer->computeUniformBuffers[slotIndex] == NULL) {
            commandBuffer->computeUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->computeUniformBuffers[slotIndex];
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }

    // If there is no more room, acquire a new uniform buffer
    if (uniformBuffer->writeOffset + blockSize + MAX_UBO_SECTION_SIZE >= uniformBuffer->buffer->size) {
        uniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(commandBuffer);

        uniformBuffer->drawOffset = 0;
        uniformBuffer->writeOffset = 0;

        // Swapping buffers invalidates the stage's uniform descriptor set,
        // not just its offsets.
        if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
            commandBuffer->vertexUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewVertexUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewFragmentUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
            commandBuffer->computeUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewComputeUniformDescriptorSet = true;
        } else {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
            return;
        }
    }

    // The dynamic offset used by the next draw is wherever this write lands.
    uniformBuffer->drawOffset = uniformBuffer->writeOffset;

    // Uniform buffers are persistently mapped; write directly into the
    // mapped allocation at this buffer's region + current write offset.
    Uint8 *dst =
        uniformBuffer->buffer->usedRegion->allocation->mapPointer +
        uniformBuffer->buffer->usedRegion->resourceOffset +
        uniformBuffer->writeOffset;

    SDL_memcpy(
        dst,
        data,
        length);

    // Advance by the aligned block size (not the raw length).
    uniformBuffer->writeOffset += blockSize;

    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        commandBuffer->needNewVertexUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        commandBuffer->needNewFragmentUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        commandBuffer->needNewComputeUniformOffsets = true;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }
}
// Begins a render pass: sizes the framebuffer to the smallest attachment,
// transitions all color/resolve/depth-stencil subresources to attachment
// usage, looks up the matching VkRenderPass and VkFramebuffer, records
// vkCmdBeginRenderPass with the requested clear values, and resets the
// dynamic state (viewport, scissor, blend constants, stencil reference)
// to defaults covering the whole framebuffer.
static void VULKAN_BeginRenderPass(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VkRenderPass renderPass;
    VulkanFramebuffer *framebuffer;

    Uint32 w, h;
    VkClearValue *clearValues;
    Uint32 clearCount = 0;
    Uint32 totalColorAttachmentCount = 0;
    Uint32 i;
    SDL_GPUViewport defaultViewport;
    SDL_Rect defaultScissor;
    SDL_FColor defaultBlendConstants;
    Uint32 framebufferWidth = SDL_MAX_UINT32;
    Uint32 framebufferHeight = SDL_MAX_UINT32;

    // First pass over the color targets: compute the framebuffer extent as
    // the minimum of all attachment mip extents.
    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorTargetInfos[i].texture;

        w = textureContainer->header.info.width >> colorTargetInfos[i].mip_level;
        h = textureContainer->header.info.height >> colorTargetInfos[i].mip_level;

        // The framebuffer cannot be larger than the smallest attachment.
        if (w < framebufferWidth) {
            framebufferWidth = w;
        }

        if (h < framebufferHeight) {
            framebufferHeight = h;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilTargetInfo->texture;

        w = textureContainer->header.info.width >> depthStencilTargetInfo->mip_level;
        h = textureContainer->header.info.height >> depthStencilTargetInfo->mip_level;

        // The framebuffer cannot be larger than the smallest attachment.
        if (w < framebufferWidth) {
            framebufferWidth = w;
        }

        if (h < framebufferHeight) {
            framebufferHeight = h;
        }
    }

    // Second pass: transition each target subresource for attachment write
    // and record it on the command buffer for the end-of-pass transition.
    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorTargetInfos[i].texture;

        // 3D textures attach a depth plane, not an array layer, so layer 0.
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
            renderer,
            vulkanCommandBuffer,
            textureContainer,
            textureContainer->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
            colorTargetInfos[i].mip_level,
            colorTargetInfos[i].cycle,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);

        vulkanCommandBuffer->colorAttachmentSubresources[vulkanCommandBuffer->colorAttachmentSubresourceCount] = subresource;
        vulkanCommandBuffer->colorAttachmentSubresourceCount += 1;

        VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);

        totalColorAttachmentCount += 1;
        clearCount += 1;

        // A resolve target counts as an extra attachment (and clear slot).
        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
            VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
                renderer,
                vulkanCommandBuffer,
                resolveContainer,
                colorTargetInfos[i].resolve_layer,
                colorTargetInfos[i].resolve_mip_level,
                colorTargetInfos[i].cycle_resolve_texture,
                VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);

            vulkanCommandBuffer->resolveAttachmentSubresources[vulkanCommandBuffer->resolveAttachmentSubresourceCount] = resolveSubresource;
            vulkanCommandBuffer->resolveAttachmentSubresourceCount += 1;

            VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, resolveSubresource->parent);

            totalColorAttachmentCount += 1;
            clearCount += 1;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilTargetInfo->texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
            renderer,
            vulkanCommandBuffer,
            textureContainer,
            depthStencilTargetInfo->layer,
            depthStencilTargetInfo->mip_level,
            depthStencilTargetInfo->cycle,
            VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT);

        vulkanCommandBuffer->depthStencilAttachmentSubresource = subresource;

        VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
        clearCount += 1;
    }

    // Fetch required render objects
    renderPass = VULKAN_INTERNAL_FetchRenderPass(
        renderer,
        colorTargetInfos,
        numColorTargets,
        depthStencilTargetInfo);

    if (renderPass == VK_NULL_HANDLE) {
        return;
    }

    framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
        renderer,
        renderPass,
        colorTargetInfos,
        numColorTargets,
        depthStencilTargetInfo,
        framebufferWidth,
        framebufferHeight);

    if (framebuffer == NULL) {
        return;
    }

    VULKAN_INTERNAL_TrackFramebuffer(vulkanCommandBuffer, framebuffer);

    // Set clear values
    // clearValues is indexed in attachment order: color, its optional
    // resolve (never cleared, but it still occupies a slot), then
    // depth-stencil last.
    clearValues = SDL_stack_alloc(VkClearValue, clearCount);

    int clearIndex = 0;
    for (i = 0; i < numColorTargets; i += 1) {
        clearValues[clearIndex].color.float32[0] = colorTargetInfos[i].clear_color.r;
        clearValues[clearIndex].color.float32[1] = colorTargetInfos[i].clear_color.g;
        clearValues[clearIndex].color.float32[2] = colorTargetInfos[i].clear_color.b;
        clearValues[clearIndex].color.float32[3] = colorTargetInfos[i].clear_color.a;
        clearIndex += 1;

        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            // Skip over the resolve texture, we're not clearing it
            clearIndex += 1;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        clearValues[totalColorAttachmentCount].depthStencil.depth =
            depthStencilTargetInfo->clear_depth;
        clearValues[totalColorAttachmentCount].depthStencil.stencil =
            depthStencilTargetInfo->clear_stencil;
    }

    VkRenderPassBeginInfo renderPassBeginInfo;
    renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    renderPassBeginInfo.pNext = NULL;
    renderPassBeginInfo.renderPass = renderPass;
    renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
    renderPassBeginInfo.pClearValues = clearValues;
    renderPassBeginInfo.clearValueCount = clearCount;
    renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
    renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
    renderPassBeginInfo.renderArea.offset.x = 0;
    renderPassBeginInfo.renderArea.offset.y = 0;

    renderer->vkCmdBeginRenderPass(
        vulkanCommandBuffer->commandBuffer,
        &renderPassBeginInfo,
        VK_SUBPASS_CONTENTS_INLINE);

    SDL_stack_free(clearValues);

    // Set sensible default states
    defaultViewport.x = 0;
    defaultViewport.y = 0;
    defaultViewport.w = (float)framebufferWidth;
    defaultViewport.h = (float)framebufferHeight;
    defaultViewport.min_depth = 0;
    defaultViewport.max_depth = 1;

    VULKAN_INTERNAL_SetCurrentViewport(
        vulkanCommandBuffer,
        &defaultViewport);

    defaultScissor.x = 0;
    defaultScissor.y = 0;
    defaultScissor.w = (Sint32)framebufferWidth;
    defaultScissor.h = (Sint32)framebufferHeight;

    VULKAN_INTERNAL_SetCurrentScissor(
        vulkanCommandBuffer,
        &defaultScissor);

    defaultBlendConstants.r = 1.0f;
    defaultBlendConstants.g = 1.0f;
    defaultBlendConstants.b = 1.0f;
    defaultBlendConstants.a = 1.0f;

    VULKAN_INTERNAL_SetCurrentBlendConstants(
        vulkanCommandBuffer,
        defaultBlendConstants);

    VULKAN_INTERNAL_SetCurrentStencilReference(
        vulkanCommandBuffer,
        0);
}
  6740. static void VULKAN_BindGraphicsPipeline(
  6741. SDL_GPUCommandBuffer *commandBuffer,
  6742. SDL_GPUGraphicsPipeline *graphicsPipeline)
  6743. {
  6744. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6745. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6746. VulkanGraphicsPipeline *pipeline = (VulkanGraphicsPipeline *)graphicsPipeline;
  6747. renderer->vkCmdBindPipeline(
  6748. vulkanCommandBuffer->commandBuffer,
  6749. VK_PIPELINE_BIND_POINT_GRAPHICS,
  6750. pipeline->pipeline);
  6751. vulkanCommandBuffer->currentGraphicsPipeline = pipeline;
  6752. VULKAN_INTERNAL_TrackGraphicsPipeline(vulkanCommandBuffer, pipeline);
  6753. // Acquire uniform buffers if necessary
  6754. for (Uint32 i = 0; i < pipeline->resourceLayout->vertexUniformBufferCount; i += 1) {
  6755. if (vulkanCommandBuffer->vertexUniformBuffers[i] == NULL) {
  6756. vulkanCommandBuffer->vertexUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6757. vulkanCommandBuffer);
  6758. }
  6759. }
  6760. for (Uint32 i = 0; i < pipeline->resourceLayout->fragmentUniformBufferCount; i += 1) {
  6761. if (vulkanCommandBuffer->fragmentUniformBuffers[i] == NULL) {
  6762. vulkanCommandBuffer->fragmentUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6763. vulkanCommandBuffer);
  6764. }
  6765. }
  6766. // Mark bindings as needed
  6767. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6768. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6769. vulkanCommandBuffer->needNewVertexUniformDescriptorSet = true;
  6770. vulkanCommandBuffer->needNewFragmentUniformDescriptorSet = true;
  6771. vulkanCommandBuffer->needNewVertexUniformOffsets = true;
  6772. vulkanCommandBuffer->needNewFragmentUniformOffsets = true;
  6773. }
  6774. static void VULKAN_BindVertexBuffers(
  6775. SDL_GPUCommandBuffer *commandBuffer,
  6776. Uint32 firstSlot,
  6777. const SDL_GPUBufferBinding *bindings,
  6778. Uint32 numBindings)
  6779. {
  6780. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6781. for (Uint32 i = 0; i < numBindings; i += 1) {
  6782. VulkanBuffer *buffer = ((VulkanBufferContainer *)bindings[i].buffer)->activeBuffer;
  6783. if (vulkanCommandBuffer->vertexBuffers[firstSlot + i] != buffer->buffer || vulkanCommandBuffer->vertexBufferOffsets[firstSlot + i] != bindings[i].offset) {
  6784. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, buffer);
  6785. vulkanCommandBuffer->vertexBuffers[firstSlot + i] = buffer->buffer;
  6786. vulkanCommandBuffer->vertexBufferOffsets[firstSlot + i] = bindings[i].offset;
  6787. vulkanCommandBuffer->needVertexBufferBind = true;
  6788. }
  6789. }
  6790. vulkanCommandBuffer->vertexBufferCount =
  6791. SDL_max(vulkanCommandBuffer->vertexBufferCount, firstSlot + numBindings);
  6792. }
  6793. static void VULKAN_BindIndexBuffer(
  6794. SDL_GPUCommandBuffer *commandBuffer,
  6795. const SDL_GPUBufferBinding *binding,
  6796. SDL_GPUIndexElementSize indexElementSize)
  6797. {
  6798. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6799. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6800. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)binding->buffer)->activeBuffer;
  6801. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  6802. renderer->vkCmdBindIndexBuffer(
  6803. vulkanCommandBuffer->commandBuffer,
  6804. vulkanBuffer->buffer,
  6805. (VkDeviceSize)binding->offset,
  6806. SDLToVK_IndexType[indexElementSize]);
  6807. }
  6808. static void VULKAN_PushVertexUniformData(
  6809. SDL_GPUCommandBuffer *commandBuffer,
  6810. Uint32 slotIndex,
  6811. const void *data,
  6812. Uint32 length)
  6813. {
  6814. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6815. VULKAN_INTERNAL_PushUniformData(
  6816. vulkanCommandBuffer,
  6817. VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
  6818. slotIndex,
  6819. data,
  6820. length);
  6821. }
  6822. static void VULKAN_PushFragmentUniformData(
  6823. SDL_GPUCommandBuffer *commandBuffer,
  6824. Uint32 slotIndex,
  6825. const void *data,
  6826. Uint32 length)
  6827. {
  6828. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6829. VULKAN_INTERNAL_PushUniformData(
  6830. vulkanCommandBuffer,
  6831. VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
  6832. slotIndex,
  6833. data,
  6834. length);
  6835. }
// Ends the current render pass: records vkCmdEndRenderPass, transitions
// every color/resolve/depth-stencil attachment subresource back to its
// default usage, then clears all per-render-pass pipeline and binding
// state on the command buffer so the next pass starts clean.
static void VULKAN_EndRenderPass(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    Uint32 i;

    renderer->vkCmdEndRenderPass(
        vulkanCommandBuffer->commandBuffer);

    // Return all attachments recorded by BeginRenderPass to default usage.
    for (i = 0; i < vulkanCommandBuffer->colorAttachmentSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
            vulkanCommandBuffer->colorAttachmentSubresources[i]);
    }
    vulkanCommandBuffer->colorAttachmentSubresourceCount = 0;

    for (i = 0; i < vulkanCommandBuffer->resolveAttachmentSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
            vulkanCommandBuffer->resolveAttachmentSubresources[i]);
    }
    vulkanCommandBuffer->resolveAttachmentSubresourceCount = 0;

    if (vulkanCommandBuffer->depthStencilAttachmentSubresource != NULL) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
            vulkanCommandBuffer->depthStencilAttachmentSubresource);
        vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;
    }

    // Descriptor sets are only valid for the pipeline/pass they were
    // allocated for; drop them along with the current pipeline.
    vulkanCommandBuffer->currentGraphicsPipeline = NULL;

    vulkanCommandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;

    // Reset bind state
    SDL_zeroa(vulkanCommandBuffer->colorAttachmentSubresources);
    SDL_zeroa(vulkanCommandBuffer->resolveAttachmentSubresources);
    vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;

    SDL_zeroa(vulkanCommandBuffer->vertexBuffers);
    SDL_zeroa(vulkanCommandBuffer->vertexBufferOffsets);
    vulkanCommandBuffer->vertexBufferCount = 0;

    SDL_zeroa(vulkanCommandBuffer->vertexSamplerBindings);
    SDL_zeroa(vulkanCommandBuffer->vertexSamplerTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->vertexStorageTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->vertexStorageBufferBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentSamplerBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentSamplerTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentStorageTextureViewBindings);
    SDL_zeroa(vulkanCommandBuffer->fragmentStorageBufferBindings);
}
// Begins a compute pass by preparing all read-write storage textures and
// buffers for compute writes: each resource is transitioned (and optionally
// cycled), recorded on the command buffer for end-of-pass cleanup, its view
// or handle stashed for descriptor writes, and tracked for lifetime.
static void VULKAN_BeginComputePass(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUStorageTextureReadWriteBinding *storageTextureBindings,
    Uint32 numStorageTextureBindings,
    const SDL_GPUStorageBufferReadWriteBinding *storageBufferBindings,
    Uint32 numStorageBufferBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanBufferContainer *bufferContainer;
    VulkanBuffer *buffer;
    Uint32 i;

    vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount = numStorageTextureBindings;

    for (i = 0; i < numStorageTextureBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextureBindings[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
            renderer,
            vulkanCommandBuffer,
            textureContainer,
            storageTextureBindings[i].layer,
            storageTextureBindings[i].mip_level,
            storageTextureBindings[i].cycle,
            VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE);

        // Parallel arrays: the subresource for cleanup, its write view for
        // the read-write descriptor set.
        vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i] = subresource;
        vulkanCommandBuffer->readWriteComputeStorageTextureViewBindings[i] = subresource->computeWriteView;

        VULKAN_INTERNAL_TrackTexture(
            vulkanCommandBuffer,
            subresource->parent);
    }

    for (i = 0; i < numStorageBufferBindings; i += 1) {
        bufferContainer = (VulkanBufferContainer *)storageBufferBindings[i].buffer;
        buffer = VULKAN_INTERNAL_PrepareBufferForWrite(
            renderer,
            vulkanCommandBuffer,
            bufferContainer,
            storageBufferBindings[i].cycle,
            VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE);

        // Parallel arrays: the VulkanBuffer for cleanup, its VkBuffer for
        // the read-write descriptor set.
        vulkanCommandBuffer->readWriteComputeStorageBuffers[i] = buffer;
        vulkanCommandBuffer->readWriteComputeStorageBufferBindings[i] = buffer->buffer;

        VULKAN_INTERNAL_TrackBuffer(
            vulkanCommandBuffer,
            buffer);
    }
}
  6933. static void VULKAN_BindComputePipeline(
  6934. SDL_GPUCommandBuffer *commandBuffer,
  6935. SDL_GPUComputePipeline *computePipeline)
  6936. {
  6937. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6938. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6939. VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;
  6940. renderer->vkCmdBindPipeline(
  6941. vulkanCommandBuffer->commandBuffer,
  6942. VK_PIPELINE_BIND_POINT_COMPUTE,
  6943. vulkanComputePipeline->pipeline);
  6944. vulkanCommandBuffer->currentComputePipeline = vulkanComputePipeline;
  6945. VULKAN_INTERNAL_TrackComputePipeline(vulkanCommandBuffer, vulkanComputePipeline);
  6946. // Acquire uniform buffers if necessary
  6947. for (Uint32 i = 0; i < vulkanComputePipeline->resourceLayout->numUniformBuffers; i += 1) {
  6948. if (vulkanCommandBuffer->computeUniformBuffers[i] == NULL) {
  6949. vulkanCommandBuffer->computeUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6950. vulkanCommandBuffer);
  6951. }
  6952. }
  6953. // Mark binding as needed
  6954. vulkanCommandBuffer->needNewComputeReadWriteDescriptorSet = true;
  6955. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  6956. vulkanCommandBuffer->needNewComputeUniformDescriptorSet = true;
  6957. vulkanCommandBuffer->needNewComputeUniformOffsets = true;
  6958. }
  6959. static void VULKAN_BindComputeSamplers(
  6960. SDL_GPUCommandBuffer *commandBuffer,
  6961. Uint32 firstSlot,
  6962. const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
  6963. Uint32 numBindings)
  6964. {
  6965. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6966. for (Uint32 i = 0; i < numBindings; i += 1) {
  6967. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
  6968. VulkanSampler *sampler = (VulkanSampler *)textureSamplerBindings[i].sampler;
  6969. if (vulkanCommandBuffer->computeSamplerBindings[firstSlot + i] != sampler->sampler) {
  6970. VULKAN_INTERNAL_TrackSampler(
  6971. vulkanCommandBuffer,
  6972. sampler);
  6973. vulkanCommandBuffer->computeSamplerBindings[firstSlot + i] = sampler->sampler;
  6974. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  6975. }
  6976. if (vulkanCommandBuffer->computeSamplerTextureViewBindings[firstSlot + i] != textureContainer->activeTexture->fullView) {
  6977. VULKAN_INTERNAL_TrackTexture(
  6978. vulkanCommandBuffer,
  6979. textureContainer->activeTexture);
  6980. vulkanCommandBuffer->computeSamplerTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
  6981. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  6982. }
  6983. }
  6984. }
// Binds read-only storage textures to the compute stage, starting at
// firstSlot. Unlike sampler bindings, these require usage transitions:
// a texture replaced in a slot is transitioned back to its default usage
// and the incoming texture is transitioned to compute-storage-read before
// its view is recorded for the read-only descriptor set.
static void VULKAN_BindComputeStorageTextures(
    SDL_GPUCommandBuffer *commandBuffer,
    Uint32 firstSlot,
    SDL_GPUTexture *const *storageTextures,
    Uint32 numBindings)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;

    for (Uint32 i = 0; i < numBindings; i += 1) {
        VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];

        // Rebinding the texture already in this slot is a no-op.
        if (vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] != textureContainer->activeTexture) {
            /* If a different texture as in this slot, transition it back to its default usage */
            if (vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] != NULL) {
                VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
                    renderer,
                    vulkanCommandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
                    vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i]);
            }

            /* Then transition the new texture and prepare it for binding */
            VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
                renderer,
                vulkanCommandBuffer,
                VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
                textureContainer->activeTexture);

            VULKAN_INTERNAL_TrackTexture(
                vulkanCommandBuffer,
                textureContainer->activeTexture);

            // Record both the texture (for later transition back) and its
            // view (for the descriptor write), then dirty the set.
            vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] = textureContainer->activeTexture;
            vulkanCommandBuffer->readOnlyComputeStorageTextureViewBindings[firstSlot + i] = textureContainer->activeTexture->fullView;
            vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
        }
    }
}
  7019. static void VULKAN_BindComputeStorageBuffers(
  7020. SDL_GPUCommandBuffer *commandBuffer,
  7021. Uint32 firstSlot,
  7022. SDL_GPUBuffer *const *storageBuffers,
  7023. Uint32 numBindings)
  7024. {
  7025. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7026. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7027. for (Uint32 i = 0; i < numBindings; i += 1) {
  7028. VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
  7029. if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] != bufferContainer->activeBuffer) {
  7030. /* If a different buffer was in this slot, transition it back to its default usage */
  7031. if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] != NULL) {
  7032. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7033. renderer,
  7034. vulkanCommandBuffer,
  7035. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
  7036. vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i]);
  7037. }
  7038. /* Then transition the new buffer and prepare it for binding */
  7039. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  7040. renderer,
  7041. vulkanCommandBuffer,
  7042. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
  7043. bufferContainer->activeBuffer);
  7044. VULKAN_INTERNAL_TrackBuffer(
  7045. vulkanCommandBuffer,
  7046. bufferContainer->activeBuffer);
  7047. vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] = bufferContainer->activeBuffer;
  7048. vulkanCommandBuffer->readOnlyComputeStorageBufferBindings[firstSlot + i] = bufferContainer->activeBuffer->buffer;
  7049. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  7050. }
  7051. }
  7052. }
  7053. static void VULKAN_PushComputeUniformData(
  7054. SDL_GPUCommandBuffer *commandBuffer,
  7055. Uint32 slotIndex,
  7056. const void *data,
  7057. Uint32 length)
  7058. {
  7059. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7060. VULKAN_INTERNAL_PushUniformData(
  7061. vulkanCommandBuffer,
  7062. VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE,
  7063. slotIndex,
  7064. data,
  7065. length);
  7066. }
// Flushes pending compute resource bindings into Vulkan descriptor sets and
// binds all three compute sets (read-only, read-write, uniform) plus the
// dynamic uniform offsets. No-op when no binding state is dirty.
// Called immediately before recording a dispatch.
static void VULKAN_INTERNAL_BindComputeDescriptorSets(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer)
{
    VulkanComputePipelineResourceLayout *resourceLayout;
    DescriptorSetLayout *descriptorSetLayout;
    // Scratch arrays sized for the worst case across all three descriptor
    // sets; writeCount/bufferInfoCount/imageInfoCount index into them and
    // are shared by every section below.
    VkWriteDescriptorSet writeDescriptorSets[
        MAX_TEXTURE_SAMPLERS_PER_STAGE +
        MAX_STORAGE_TEXTURES_PER_STAGE +
        MAX_STORAGE_BUFFERS_PER_STAGE +
        MAX_COMPUTE_WRITE_TEXTURES +
        MAX_COMPUTE_WRITE_BUFFERS +
        MAX_UNIFORM_BUFFERS_PER_STAGE];
    VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE + MAX_COMPUTE_WRITE_BUFFERS + MAX_UNIFORM_BUFFERS_PER_STAGE];
    VkDescriptorImageInfo imageInfos[MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE + MAX_COMPUTE_WRITE_TEXTURES];
    Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE];
    Uint32 writeCount = 0;
    Uint32 bufferInfoCount = 0;
    Uint32 imageInfoCount = 0;
    Uint32 dynamicOffsetCount = 0;

    // Fast path: nothing is dirty, so the currently bound sets and offsets
    // are still valid.
    if (
        !commandBuffer->needNewComputeReadOnlyDescriptorSet &&
        !commandBuffer->needNewComputeReadWriteDescriptorSet &&
        !commandBuffer->needNewComputeUniformDescriptorSet &&
        !commandBuffer->needNewComputeUniformOffsets
    ) {
        return;
    }

    resourceLayout = commandBuffer->currentComputePipeline->resourceLayout;

    // Set 0: read-only resources (combined image samplers, then sampled
    // storage textures, then read-only storage buffers), in that binding order.
    if (commandBuffer->needNewComputeReadOnlyDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[0];

        commandBuffer->computeReadOnlyDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        // Bindings [0, numSamplers): combined image samplers.
        for (Uint32 i = 0; i < resourceLayout->numSamplers; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = commandBuffer->computeSamplerBindings[i];
            imageInfos[imageInfoCount].imageView = commandBuffer->computeSamplerTextureViewBindings[i];
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        // Bindings [numSamplers, numSamplers + numReadonlyStorageTextures).
        for (Uint32 i = 0; i < resourceLayout->numReadonlyStorageTextures; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; // Yes, we are declaring the readonly storage texture as a sampled image, because shaders are stupid.
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->numSamplers + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
            imageInfos[imageInfoCount].imageView = commandBuffer->readOnlyComputeStorageTextureViewBindings[i];
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        // Bindings after samplers + read-only textures: read-only storage buffers.
        for (Uint32 i = 0; i < resourceLayout->numReadonlyStorageBuffers; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->numSamplers + resourceLayout->numReadonlyStorageTextures + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->readOnlyComputeStorageBufferBindings[i];
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewComputeReadOnlyDescriptorSet = false;
    }

    // Set 1: read-write resources (storage images, then storage buffers).
    if (commandBuffer->needNewComputeReadWriteDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[1];

        commandBuffer->computeReadWriteDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        // Bindings [0, numReadWriteStorageTextures): storage images.
        for (Uint32 i = 0; i < resourceLayout->numReadWriteStorageTextures; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadWriteDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
            imageInfos[imageInfoCount].imageView = commandBuffer->readWriteComputeStorageTextureViewBindings[i];
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        // Bindings after the storage images: read-write storage buffers.
        for (Uint32 i = 0; i < resourceLayout->numReadWriteStorageBuffers; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->numReadWriteStorageTextures + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadWriteDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->readWriteComputeStorageBufferBindings[i];
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewComputeReadWriteDescriptorSet = false;
    }

    // Set 2: dynamic uniform buffers, one binding per slot.
    if (commandBuffer->needNewComputeUniformDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[2];

        commandBuffer->computeUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        for (Uint32 i = 0; i < resourceLayout->numUniformBuffers; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeUniformDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            // The descriptor covers a fixed-size window; the actual draw data
            // location is selected via the dynamic offset below.
            bufferInfos[bufferInfoCount].buffer = commandBuffer->computeUniformBuffers[i]->buffer->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewComputeUniformDescriptorSet = false;
    }

    // Dynamic offsets are always (re)gathered, even when only offsets changed
    // and no descriptor set was rebuilt.
    for (Uint32 i = 0; i < resourceLayout->numUniformBuffers; i += 1) {
        dynamicOffsets[i] = commandBuffer->computeUniformBuffers[i]->drawOffset;
        dynamicOffsetCount += 1;
    }

    // One batched update for every descriptor write accumulated above.
    renderer->vkUpdateDescriptorSets(
        renderer->logicalDevice,
        writeCount,
        writeDescriptorSets,
        0,
        NULL);

    // Rebind all three sets together; sets not rebuilt this call keep their
    // previously fetched handles.
    VkDescriptorSet sets[3];
    sets[0] = commandBuffer->computeReadOnlyDescriptorSet;
    sets[1] = commandBuffer->computeReadWriteDescriptorSet;
    sets[2] = commandBuffer->computeUniformDescriptorSet;

    renderer->vkCmdBindDescriptorSets(
        commandBuffer->commandBuffer,
        VK_PIPELINE_BIND_POINT_COMPUTE,
        resourceLayout->pipelineLayout,
        0,
        3,
        sets,
        dynamicOffsetCount,
        dynamicOffsets);

    commandBuffer->needNewComputeUniformOffsets = false;
}
  7253. static void VULKAN_DispatchCompute(
  7254. SDL_GPUCommandBuffer *commandBuffer,
  7255. Uint32 groupcountX,
  7256. Uint32 groupcountY,
  7257. Uint32 groupcountZ)
  7258. {
  7259. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7260. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7261. VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer);
  7262. renderer->vkCmdDispatch(
  7263. vulkanCommandBuffer->commandBuffer,
  7264. groupcountX,
  7265. groupcountY,
  7266. groupcountZ);
  7267. }
  7268. static void VULKAN_DispatchComputeIndirect(
  7269. SDL_GPUCommandBuffer *commandBuffer,
  7270. SDL_GPUBuffer *buffer,
  7271. Uint32 offset)
  7272. {
  7273. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7274. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7275. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer;
  7276. VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer);
  7277. renderer->vkCmdDispatchIndirect(
  7278. vulkanCommandBuffer->commandBuffer,
  7279. vulkanBuffer->buffer,
  7280. offset);
  7281. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  7282. }
  7283. static void VULKAN_EndComputePass(
  7284. SDL_GPUCommandBuffer *commandBuffer)
  7285. {
  7286. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7287. Uint32 i;
  7288. for (i = 0; i < vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount; i += 1) {
  7289. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7290. vulkanCommandBuffer->renderer,
  7291. vulkanCommandBuffer,
  7292. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
  7293. vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i]);
  7294. vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i] = NULL;
  7295. }
  7296. vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount = 0;
  7297. for (i = 0; i < MAX_COMPUTE_WRITE_BUFFERS; i += 1) {
  7298. if (vulkanCommandBuffer->readWriteComputeStorageBuffers[i] != NULL) {
  7299. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7300. vulkanCommandBuffer->renderer,
  7301. vulkanCommandBuffer,
  7302. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
  7303. vulkanCommandBuffer->readWriteComputeStorageBuffers[i]);
  7304. vulkanCommandBuffer->readWriteComputeStorageBuffers[i] = NULL;
  7305. }
  7306. }
  7307. for (i = 0; i < MAX_STORAGE_TEXTURES_PER_STAGE; i += 1) {
  7308. if (vulkanCommandBuffer->readOnlyComputeStorageTextures[i] != NULL) {
  7309. VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  7310. vulkanCommandBuffer->renderer,
  7311. vulkanCommandBuffer,
  7312. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
  7313. vulkanCommandBuffer->readOnlyComputeStorageTextures[i]);
  7314. vulkanCommandBuffer->readOnlyComputeStorageTextures[i] = NULL;
  7315. }
  7316. }
  7317. for (i = 0; i < MAX_STORAGE_BUFFERS_PER_STAGE; i += 1) {
  7318. if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] != NULL) {
  7319. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7320. vulkanCommandBuffer->renderer,
  7321. vulkanCommandBuffer,
  7322. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
  7323. vulkanCommandBuffer->readOnlyComputeStorageBuffers[i]);
  7324. vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] = NULL;
  7325. }
  7326. }
  7327. // we don't need a barrier for sampler resources because sampler state is always the default if sampler bit is set
  7328. SDL_zeroa(vulkanCommandBuffer->computeSamplerTextureViewBindings);
  7329. SDL_zeroa(vulkanCommandBuffer->computeSamplerBindings);
  7330. SDL_zeroa(vulkanCommandBuffer->readWriteComputeStorageTextureViewBindings);
  7331. SDL_zeroa(vulkanCommandBuffer->readWriteComputeStorageBufferBindings);
  7332. vulkanCommandBuffer->currentComputePipeline = NULL;
  7333. vulkanCommandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
  7334. vulkanCommandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
  7335. vulkanCommandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
  7336. }
  7337. static void *VULKAN_MapTransferBuffer(
  7338. SDL_GPURenderer *driverData,
  7339. SDL_GPUTransferBuffer *transferBuffer,
  7340. bool cycle)
  7341. {
  7342. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  7343. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;
  7344. if (
  7345. cycle &&
  7346. SDL_GetAtomicInt(&transferBufferContainer->activeBuffer->referenceCount) > 0) {
  7347. VULKAN_INTERNAL_CycleActiveBuffer(
  7348. renderer,
  7349. transferBufferContainer);
  7350. }
  7351. Uint8 *bufferPointer =
  7352. transferBufferContainer->activeBuffer->usedRegion->allocation->mapPointer +
  7353. transferBufferContainer->activeBuffer->usedRegion->resourceOffset;
  7354. return bufferPointer;
  7355. }
  7356. static void VULKAN_UnmapTransferBuffer(
  7357. SDL_GPURenderer *driverData,
  7358. SDL_GPUTransferBuffer *transferBuffer)
  7359. {
  7360. // no-op because transfer buffers are persistently mapped
  7361. (void)driverData;
  7362. (void)transferBuffer;
  7363. }
  7364. static void VULKAN_BeginCopyPass(
  7365. SDL_GPUCommandBuffer *commandBuffer)
  7366. {
  7367. // no-op
  7368. (void)commandBuffer;
  7369. }
  7370. static void VULKAN_UploadToTexture(
  7371. SDL_GPUCommandBuffer *commandBuffer,
  7372. const SDL_GPUTextureTransferInfo *source,
  7373. const SDL_GPUTextureRegion *destination,
  7374. bool cycle)
  7375. {
  7376. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7377. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7378. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transfer_buffer;
  7379. VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)destination->texture;
  7380. VulkanTextureSubresource *vulkanTextureSubresource;
  7381. VkBufferImageCopy imageCopy;
  7382. // Note that the transfer buffer does not need a barrier, as it is synced by the client
  7383. vulkanTextureSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  7384. renderer,
  7385. vulkanCommandBuffer,
  7386. vulkanTextureContainer,
  7387. destination->layer,
  7388. destination->mip_level,
  7389. cycle,
  7390. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);
  7391. imageCopy.imageExtent.width = destination->w;
  7392. imageCopy.imageExtent.height = destination->h;
  7393. imageCopy.imageExtent.depth = destination->d;
  7394. imageCopy.imageOffset.x = destination->x;
  7395. imageCopy.imageOffset.y = destination->y;
  7396. imageCopy.imageOffset.z = destination->z;
  7397. imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags;
  7398. imageCopy.imageSubresource.baseArrayLayer = destination->layer;
  7399. imageCopy.imageSubresource.layerCount = 1;
  7400. imageCopy.imageSubresource.mipLevel = destination->mip_level;
  7401. imageCopy.bufferOffset = source->offset;
  7402. imageCopy.bufferRowLength = source->pixels_per_row;
  7403. imageCopy.bufferImageHeight = source->rows_per_layer;
  7404. renderer->vkCmdCopyBufferToImage(
  7405. vulkanCommandBuffer->commandBuffer,
  7406. transferBufferContainer->activeBuffer->buffer,
  7407. vulkanTextureSubresource->parent->image,
  7408. VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
  7409. 1,
  7410. &imageCopy);
  7411. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7412. renderer,
  7413. vulkanCommandBuffer,
  7414. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  7415. vulkanTextureSubresource);
  7416. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
  7417. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent);
  7418. }
  7419. static void VULKAN_UploadToBuffer(
  7420. SDL_GPUCommandBuffer *commandBuffer,
  7421. const SDL_GPUTransferBufferLocation *source,
  7422. const SDL_GPUBufferRegion *destination,
  7423. bool cycle)
  7424. {
  7425. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7426. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7427. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transfer_buffer;
  7428. VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)destination->buffer;
  7429. VkBufferCopy bufferCopy;
  7430. // Note that the transfer buffer does not need a barrier, as it is synced by the client
  7431. VulkanBuffer *vulkanBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
  7432. renderer,
  7433. vulkanCommandBuffer,
  7434. bufferContainer,
  7435. cycle,
  7436. VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);
  7437. bufferCopy.srcOffset = source->offset;
  7438. bufferCopy.dstOffset = destination->offset;
  7439. bufferCopy.size = destination->size;
  7440. renderer->vkCmdCopyBuffer(
  7441. vulkanCommandBuffer->commandBuffer,
  7442. transferBufferContainer->activeBuffer->buffer,
  7443. vulkanBuffer->buffer,
  7444. 1,
  7445. &bufferCopy);
  7446. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7447. renderer,
  7448. vulkanCommandBuffer,
  7449. VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
  7450. vulkanBuffer);
  7451. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
  7452. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  7453. }
  7454. // Readback
  7455. static void VULKAN_DownloadFromTexture(
  7456. SDL_GPUCommandBuffer *commandBuffer,
  7457. const SDL_GPUTextureRegion *source,
  7458. const SDL_GPUTextureTransferInfo *destination)
  7459. {
  7460. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7461. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7462. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)source->texture;
  7463. VulkanTextureSubresource *vulkanTextureSubresource;
  7464. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transfer_buffer;
  7465. VkBufferImageCopy imageCopy;
  7466. vulkanTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  7467. textureContainer,
  7468. source->layer,
  7469. source->mip_level);
  7470. // Note that the transfer buffer does not need a barrier, as it is synced by the client
  7471. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  7472. renderer,
  7473. vulkanCommandBuffer,
  7474. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7475. vulkanTextureSubresource);
  7476. imageCopy.imageExtent.width = source->w;
  7477. imageCopy.imageExtent.height = source->h;
  7478. imageCopy.imageExtent.depth = source->d;
  7479. imageCopy.imageOffset.x = source->x;
  7480. imageCopy.imageOffset.y = source->y;
  7481. imageCopy.imageOffset.z = source->z;
  7482. imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags;
  7483. imageCopy.imageSubresource.baseArrayLayer = source->layer;
  7484. imageCopy.imageSubresource.layerCount = 1;
  7485. imageCopy.imageSubresource.mipLevel = source->mip_level;
  7486. imageCopy.bufferOffset = destination->offset;
  7487. imageCopy.bufferRowLength = destination->pixels_per_row;
  7488. imageCopy.bufferImageHeight = destination->rows_per_layer;
  7489. renderer->vkCmdCopyImageToBuffer(
  7490. vulkanCommandBuffer->commandBuffer,
  7491. vulkanTextureSubresource->parent->image,
  7492. VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
  7493. transferBufferContainer->activeBuffer->buffer,
  7494. 1,
  7495. &imageCopy);
  7496. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7497. renderer,
  7498. vulkanCommandBuffer,
  7499. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7500. vulkanTextureSubresource);
  7501. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
  7502. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent);
  7503. }
  7504. static void VULKAN_DownloadFromBuffer(
  7505. SDL_GPUCommandBuffer *commandBuffer,
  7506. const SDL_GPUBufferRegion *source,
  7507. const SDL_GPUTransferBufferLocation *destination)
  7508. {
  7509. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7510. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7511. VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)source->buffer;
  7512. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transfer_buffer;
  7513. VkBufferCopy bufferCopy;
  7514. // Note that transfer buffer does not need a barrier, as it is synced by the client
  7515. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  7516. renderer,
  7517. vulkanCommandBuffer,
  7518. VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
  7519. bufferContainer->activeBuffer);
  7520. bufferCopy.srcOffset = source->offset;
  7521. bufferCopy.dstOffset = destination->offset;
  7522. bufferCopy.size = source->size;
  7523. renderer->vkCmdCopyBuffer(
  7524. vulkanCommandBuffer->commandBuffer,
  7525. bufferContainer->activeBuffer->buffer,
  7526. transferBufferContainer->activeBuffer->buffer,
  7527. 1,
  7528. &bufferCopy);
  7529. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7530. renderer,
  7531. vulkanCommandBuffer,
  7532. VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
  7533. bufferContainer->activeBuffer);
  7534. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
  7535. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, bufferContainer->activeBuffer);
  7536. }
  7537. static void VULKAN_CopyTextureToTexture(
  7538. SDL_GPUCommandBuffer *commandBuffer,
  7539. const SDL_GPUTextureLocation *source,
  7540. const SDL_GPUTextureLocation *destination,
  7541. Uint32 w,
  7542. Uint32 h,
  7543. Uint32 d,
  7544. bool cycle)
  7545. {
  7546. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7547. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7548. VulkanTextureSubresource *srcSubresource;
  7549. VulkanTextureSubresource *dstSubresource;
  7550. VkImageCopy imageCopy;
  7551. srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  7552. (VulkanTextureContainer *)source->texture,
  7553. source->layer,
  7554. source->mip_level);
  7555. dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  7556. renderer,
  7557. vulkanCommandBuffer,
  7558. (VulkanTextureContainer *)destination->texture,
  7559. destination->layer,
  7560. destination->mip_level,
  7561. cycle,
  7562. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);
  7563. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  7564. renderer,
  7565. vulkanCommandBuffer,
  7566. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7567. srcSubresource);
  7568. imageCopy.srcOffset.x = source->x;
  7569. imageCopy.srcOffset.y = source->y;
  7570. imageCopy.srcOffset.z = source->z;
  7571. imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
  7572. imageCopy.srcSubresource.baseArrayLayer = source->layer;
  7573. imageCopy.srcSubresource.layerCount = 1;
  7574. imageCopy.srcSubresource.mipLevel = source->mip_level;
  7575. imageCopy.dstOffset.x = destination->x;
  7576. imageCopy.dstOffset.y = destination->y;
  7577. imageCopy.dstOffset.z = destination->z;
  7578. imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
  7579. imageCopy.dstSubresource.baseArrayLayer = destination->layer;
  7580. imageCopy.dstSubresource.layerCount = 1;
  7581. imageCopy.dstSubresource.mipLevel = destination->mip_level;
  7582. imageCopy.extent.width = w;
  7583. imageCopy.extent.height = h;
  7584. imageCopy.extent.depth = d;
  7585. renderer->vkCmdCopyImage(
  7586. vulkanCommandBuffer->commandBuffer,
  7587. srcSubresource->parent->image,
  7588. VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
  7589. dstSubresource->parent->image,
  7590. VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
  7591. 1,
  7592. &imageCopy);
  7593. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7594. renderer,
  7595. vulkanCommandBuffer,
  7596. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7597. srcSubresource);
  7598. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7599. renderer,
  7600. vulkanCommandBuffer,
  7601. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  7602. dstSubresource);
  7603. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
  7604. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
  7605. }
  7606. static void VULKAN_CopyBufferToBuffer(
  7607. SDL_GPUCommandBuffer *commandBuffer,
  7608. const SDL_GPUBufferLocation *source,
  7609. const SDL_GPUBufferLocation *destination,
  7610. Uint32 size,
  7611. bool cycle)
  7612. {
  7613. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7614. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7615. VulkanBufferContainer *srcContainer = (VulkanBufferContainer *)source->buffer;
  7616. VulkanBufferContainer *dstContainer = (VulkanBufferContainer *)destination->buffer;
  7617. VkBufferCopy bufferCopy;
  7618. VulkanBuffer *dstBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
  7619. renderer,
  7620. vulkanCommandBuffer,
  7621. dstContainer,
  7622. cycle,
  7623. VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);
  7624. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  7625. renderer,
  7626. vulkanCommandBuffer,
  7627. VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
  7628. srcContainer->activeBuffer);
  7629. bufferCopy.srcOffset = source->offset;
  7630. bufferCopy.dstOffset = destination->offset;
  7631. bufferCopy.size = size;
  7632. renderer->vkCmdCopyBuffer(
  7633. vulkanCommandBuffer->commandBuffer,
  7634. srcContainer->activeBuffer->buffer,
  7635. dstBuffer->buffer,
  7636. 1,
  7637. &bufferCopy);
  7638. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7639. renderer,
  7640. vulkanCommandBuffer,
  7641. VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
  7642. srcContainer->activeBuffer);
  7643. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7644. renderer,
  7645. vulkanCommandBuffer,
  7646. VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
  7647. dstBuffer);
  7648. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, srcContainer->activeBuffer);
  7649. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, dstBuffer);
  7650. }
  7651. static void VULKAN_GenerateMipmaps(
  7652. SDL_GPUCommandBuffer *commandBuffer,
  7653. SDL_GPUTexture *texture)
  7654. {
  7655. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7656. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  7657. VulkanTextureContainer *container = (VulkanTextureContainer *)texture;
  7658. VulkanTextureSubresource *srcTextureSubresource;
  7659. VulkanTextureSubresource *dstTextureSubresource;
  7660. VkImageBlit blit;
  7661. // Blit each slice sequentially. Barriers, barriers everywhere!
  7662. for (Uint32 layerOrDepthIndex = 0; layerOrDepthIndex < container->header.info.layer_count_or_depth; layerOrDepthIndex += 1)
  7663. for (Uint32 level = 1; level < container->header.info.num_levels; level += 1) {
  7664. Uint32 layer = container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : layerOrDepthIndex;
  7665. Uint32 depth = container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? layerOrDepthIndex : 0;
  7666. Uint32 srcSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  7667. level - 1,
  7668. layer,
  7669. container->header.info.num_levels);
  7670. Uint32 dstSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  7671. level,
  7672. layer,
  7673. container->header.info.num_levels);
  7674. srcTextureSubresource = &container->activeTexture->subresources[srcSubresourceIndex];
  7675. dstTextureSubresource = &container->activeTexture->subresources[dstSubresourceIndex];
  7676. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  7677. renderer,
  7678. vulkanCommandBuffer,
  7679. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7680. srcTextureSubresource);
  7681. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  7682. renderer,
  7683. vulkanCommandBuffer,
  7684. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  7685. dstTextureSubresource);
  7686. blit.srcOffsets[0].x = 0;
  7687. blit.srcOffsets[0].y = 0;
  7688. blit.srcOffsets[0].z = depth;
  7689. blit.srcOffsets[1].x = container->header.info.width >> (level - 1);
  7690. blit.srcOffsets[1].y = container->header.info.height >> (level - 1);
  7691. blit.srcOffsets[1].z = depth + 1;
  7692. blit.dstOffsets[0].x = 0;
  7693. blit.dstOffsets[0].y = 0;
  7694. blit.dstOffsets[0].z = depth;
  7695. blit.dstOffsets[1].x = container->header.info.width >> level;
  7696. blit.dstOffsets[1].y = container->header.info.height >> level;
  7697. blit.dstOffsets[1].z = depth + 1;
  7698. blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  7699. blit.srcSubresource.baseArrayLayer = layer;
  7700. blit.srcSubresource.layerCount = 1;
  7701. blit.srcSubresource.mipLevel = level - 1;
  7702. blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  7703. blit.dstSubresource.baseArrayLayer = layer;
  7704. blit.dstSubresource.layerCount = 1;
  7705. blit.dstSubresource.mipLevel = level;
  7706. renderer->vkCmdBlitImage(
  7707. vulkanCommandBuffer->commandBuffer,
  7708. container->activeTexture->image,
  7709. VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
  7710. container->activeTexture->image,
  7711. VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
  7712. 1,
  7713. &blit,
  7714. VK_FILTER_LINEAR);
  7715. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7716. renderer,
  7717. vulkanCommandBuffer,
  7718. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7719. srcTextureSubresource);
  7720. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7721. renderer,
  7722. vulkanCommandBuffer,
  7723. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  7724. dstTextureSubresource);
  7725. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcTextureSubresource->parent);
  7726. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstTextureSubresource->parent);
  7727. }
  7728. }
  7729. static void VULKAN_EndCopyPass(
  7730. SDL_GPUCommandBuffer *commandBuffer)
  7731. {
  7732. // no-op
  7733. (void)commandBuffer;
  7734. }
// Blits a region of a source texture into a destination texture, with an
// optional destination clear, horizontal/vertical flipping, and the
// requested scaling filter.
static void VULKAN_Blit(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUBlitInfo *info)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    TextureCommonHeader *srcHeader = (TextureCommonHeader *)info->source.texture;
    TextureCommonHeader *dstHeader = (TextureCommonHeader *)info->destination.texture;
    VkImageBlit region;
    // For 3D textures layer_or_depth_plane addresses a depth plane (layer 0);
    // for all other texture types it addresses an array layer (depth 0).
    Uint32 srcLayer = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : info->source.layer_or_depth_plane;
    Uint32 srcDepth = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? info->source.layer_or_depth_plane : 0;
    Uint32 dstLayer = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : info->destination.layer_or_depth_plane;
    Uint32 dstDepth = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? info->destination.layer_or_depth_plane : 0;
    int32_t swap;

    // Using BeginRenderPass to clear because vkCmdClearColorImage requires barriers anyway
    if (info->load_op == SDL_GPU_LOADOP_CLEAR) {
        SDL_GPUColorTargetInfo targetInfo;
        SDL_zero(targetInfo);
        targetInfo.texture = info->destination.texture;
        targetInfo.mip_level = info->destination.mip_level;
        targetInfo.layer_or_depth_plane = info->destination.layer_or_depth_plane;
        targetInfo.load_op = SDL_GPU_LOADOP_CLEAR;
        targetInfo.store_op = SDL_GPU_STOREOP_STORE;
        targetInfo.clear_color = info->clear_color;
        targetInfo.cycle = info->cycle;
        VULKAN_BeginRenderPass(
            commandBuffer,
            &targetInfo,
            1,
            NULL);
        VULKAN_EndRenderPass(commandBuffer);
    }

    VulkanTextureSubresource *srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        (VulkanTextureContainer *)info->source.texture,
        srcLayer,
        info->source.mip_level);

    // May cycle in a fresh destination texture, so it must run before the
    // source is transitioned below.
    VulkanTextureSubresource *dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        (VulkanTextureContainer *)info->destination.texture,
        dstLayer,
        info->destination.mip_level,
        info->cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    region.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
    region.srcSubresource.baseArrayLayer = srcSubresource->layer;
    region.srcSubresource.layerCount = 1;
    region.srcSubresource.mipLevel = srcSubresource->level;
    region.srcOffsets[0].x = info->source.x;
    region.srcOffsets[0].y = info->source.y;
    region.srcOffsets[0].z = srcDepth;
    region.srcOffsets[1].x = info->source.x + info->source.w;
    region.srcOffsets[1].y = info->source.y + info->source.h;
    region.srcOffsets[1].z = srcDepth + 1;

    // vkCmdBlitImage mirrors the image when the source offsets are reversed,
    // so flipping reduces to swapping the corner coordinates.
    if (info->flip_mode & SDL_FLIP_HORIZONTAL) {
        // flip the x positions
        swap = region.srcOffsets[0].x;
        region.srcOffsets[0].x = region.srcOffsets[1].x;
        region.srcOffsets[1].x = swap;
    }

    if (info->flip_mode & SDL_FLIP_VERTICAL) {
        // flip the y positions
        swap = region.srcOffsets[0].y;
        region.srcOffsets[0].y = region.srcOffsets[1].y;
        region.srcOffsets[1].y = swap;
    }

    region.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
    region.dstSubresource.baseArrayLayer = dstSubresource->layer;
    region.dstSubresource.layerCount = 1;
    region.dstSubresource.mipLevel = dstSubresource->level;
    region.dstOffsets[0].x = info->destination.x;
    region.dstOffsets[0].y = info->destination.y;
    region.dstOffsets[0].z = dstDepth;
    region.dstOffsets[1].x = info->destination.x + info->destination.w;
    region.dstOffsets[1].y = info->destination.y + info->destination.h;
    region.dstOffsets[1].z = dstDepth + 1;

    renderer->vkCmdBlitImage(
        vulkanCommandBuffer->commandBuffer,
        srcSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &region,
        SDLToVK_Filter[info->filter]);

    // Restore both subresources to their default usage states.
    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        dstSubresource);

    // Keep both textures alive until this command buffer retires.
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
}
  7838. static bool VULKAN_INTERNAL_AllocateCommandBuffer(
  7839. VulkanRenderer *renderer,
  7840. VulkanCommandPool *vulkanCommandPool)
  7841. {
  7842. VkCommandBufferAllocateInfo allocateInfo;
  7843. VkResult vulkanResult;
  7844. VkCommandBuffer commandBufferHandle;
  7845. VulkanCommandBuffer *commandBuffer;
  7846. vulkanCommandPool->inactiveCommandBufferCapacity += 1;
  7847. vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
  7848. vulkanCommandPool->inactiveCommandBuffers,
  7849. sizeof(VulkanCommandBuffer *) *
  7850. vulkanCommandPool->inactiveCommandBufferCapacity);
  7851. allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  7852. allocateInfo.pNext = NULL;
  7853. allocateInfo.commandPool = vulkanCommandPool->commandPool;
  7854. allocateInfo.commandBufferCount = 1;
  7855. allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  7856. vulkanResult = renderer->vkAllocateCommandBuffers(
  7857. renderer->logicalDevice,
  7858. &allocateInfo,
  7859. &commandBufferHandle);
  7860. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkAllocateCommandBuffers, false);
  7861. commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
  7862. commandBuffer->renderer = renderer;
  7863. commandBuffer->commandPool = vulkanCommandPool;
  7864. commandBuffer->commandBuffer = commandBufferHandle;
  7865. commandBuffer->inFlightFence = VK_NULL_HANDLE;
  7866. // Presentation tracking
  7867. commandBuffer->presentDataCapacity = 1;
  7868. commandBuffer->presentDataCount = 0;
  7869. commandBuffer->presentDatas = SDL_malloc(
  7870. commandBuffer->presentDataCapacity * sizeof(VulkanPresentData));
  7871. commandBuffer->waitSemaphoreCapacity = 1;
  7872. commandBuffer->waitSemaphoreCount = 0;
  7873. commandBuffer->waitSemaphores = SDL_malloc(
  7874. commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));
  7875. commandBuffer->signalSemaphoreCapacity = 1;
  7876. commandBuffer->signalSemaphoreCount = 0;
  7877. commandBuffer->signalSemaphores = SDL_malloc(
  7878. commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));
  7879. // Resource bind tracking
  7880. commandBuffer->needVertexBufferBind = false;
  7881. commandBuffer->needNewVertexResourceDescriptorSet = true;
  7882. commandBuffer->needNewVertexUniformDescriptorSet = true;
  7883. commandBuffer->needNewVertexUniformOffsets = true;
  7884. commandBuffer->needNewFragmentResourceDescriptorSet = true;
  7885. commandBuffer->needNewFragmentUniformDescriptorSet = true;
  7886. commandBuffer->needNewFragmentUniformOffsets = true;
  7887. commandBuffer->needNewComputeReadWriteDescriptorSet = true;
  7888. commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  7889. commandBuffer->needNewComputeUniformDescriptorSet = true;
  7890. commandBuffer->needNewComputeUniformOffsets = true;
  7891. commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
  7892. commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
  7893. commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
  7894. commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;
  7895. commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
  7896. commandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
  7897. commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
  7898. // Resource tracking
  7899. commandBuffer->usedBufferCapacity = 4;
  7900. commandBuffer->usedBufferCount = 0;
  7901. commandBuffer->usedBuffers = SDL_malloc(
  7902. commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer *));
  7903. commandBuffer->usedTextureCapacity = 4;
  7904. commandBuffer->usedTextureCount = 0;
  7905. commandBuffer->usedTextures = SDL_malloc(
  7906. commandBuffer->usedTextureCapacity * sizeof(VulkanTexture *));
  7907. commandBuffer->usedSamplerCapacity = 4;
  7908. commandBuffer->usedSamplerCount = 0;
  7909. commandBuffer->usedSamplers = SDL_malloc(
  7910. commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler *));
  7911. commandBuffer->usedGraphicsPipelineCapacity = 4;
  7912. commandBuffer->usedGraphicsPipelineCount = 0;
  7913. commandBuffer->usedGraphicsPipelines = SDL_malloc(
  7914. commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline *));
  7915. commandBuffer->usedComputePipelineCapacity = 4;
  7916. commandBuffer->usedComputePipelineCount = 0;
  7917. commandBuffer->usedComputePipelines = SDL_malloc(
  7918. commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline *));
  7919. commandBuffer->usedFramebufferCapacity = 4;
  7920. commandBuffer->usedFramebufferCount = 0;
  7921. commandBuffer->usedFramebuffers = SDL_malloc(
  7922. commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer *));
  7923. commandBuffer->usedUniformBufferCapacity = 4;
  7924. commandBuffer->usedUniformBufferCount = 0;
  7925. commandBuffer->usedUniformBuffers = SDL_malloc(
  7926. commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));
  7927. commandBuffer->swapchainRequested = false;
  7928. // Pool it!
  7929. vulkanCommandPool->inactiveCommandBuffers[vulkanCommandPool->inactiveCommandBufferCount] = commandBuffer;
  7930. vulkanCommandPool->inactiveCommandBufferCount += 1;
  7931. return true;
  7932. }
  7933. static VulkanCommandPool *VULKAN_INTERNAL_FetchCommandPool(
  7934. VulkanRenderer *renderer,
  7935. SDL_ThreadID threadID)
  7936. {
  7937. VulkanCommandPool *vulkanCommandPool = NULL;
  7938. VkCommandPoolCreateInfo commandPoolCreateInfo;
  7939. VkResult vulkanResult;
  7940. CommandPoolHashTableKey key;
  7941. key.threadID = threadID;
  7942. bool result = SDL_FindInHashTable(
  7943. renderer->commandPoolHashTable,
  7944. (const void *)&key,
  7945. (const void **)&vulkanCommandPool);
  7946. if (result) {
  7947. return vulkanCommandPool;
  7948. }
  7949. vulkanCommandPool = (VulkanCommandPool *)SDL_malloc(sizeof(VulkanCommandPool));
  7950. commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  7951. commandPoolCreateInfo.pNext = NULL;
  7952. commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  7953. commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
  7954. vulkanResult = renderer->vkCreateCommandPool(
  7955. renderer->logicalDevice,
  7956. &commandPoolCreateInfo,
  7957. NULL,
  7958. &vulkanCommandPool->commandPool);
  7959. if (vulkanResult != VK_SUCCESS) {
  7960. SDL_free(vulkanCommandPool);
  7961. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateCommandPool, NULL);
  7962. return NULL;
  7963. }
  7964. vulkanCommandPool->threadID = threadID;
  7965. vulkanCommandPool->inactiveCommandBufferCapacity = 0;
  7966. vulkanCommandPool->inactiveCommandBufferCount = 0;
  7967. vulkanCommandPool->inactiveCommandBuffers = NULL;
  7968. if (!VULKAN_INTERNAL_AllocateCommandBuffer(
  7969. renderer,
  7970. vulkanCommandPool)) {
  7971. VULKAN_INTERNAL_DestroyCommandPool(renderer, vulkanCommandPool);
  7972. return NULL;
  7973. }
  7974. CommandPoolHashTableKey *allocedKey = SDL_malloc(sizeof(CommandPoolHashTableKey));
  7975. allocedKey->threadID = threadID;
  7976. SDL_InsertIntoHashTable(
  7977. renderer->commandPoolHashTable,
  7978. (const void *)allocedKey,
  7979. (const void *)vulkanCommandPool, true);
  7980. return vulkanCommandPool;
  7981. }
  7982. static VulkanCommandBuffer *VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
  7983. VulkanRenderer *renderer,
  7984. SDL_ThreadID threadID)
  7985. {
  7986. VulkanCommandPool *commandPool =
  7987. VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
  7988. VulkanCommandBuffer *commandBuffer;
  7989. if (commandPool == NULL) {
  7990. return NULL;
  7991. }
  7992. if (commandPool->inactiveCommandBufferCount == 0) {
  7993. if (!VULKAN_INTERNAL_AllocateCommandBuffer(
  7994. renderer,
  7995. commandPool)) {
  7996. return NULL;
  7997. }
  7998. }
  7999. commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];
  8000. commandPool->inactiveCommandBufferCount -= 1;
  8001. return commandBuffer;
  8002. }
  8003. static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(
  8004. SDL_GPURenderer *driverData)
  8005. {
  8006. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8007. VkResult result;
  8008. Uint32 i;
  8009. SDL_ThreadID threadID = SDL_GetCurrentThreadID();
  8010. SDL_LockMutex(renderer->acquireCommandBufferLock);
  8011. VulkanCommandBuffer *commandBuffer =
  8012. VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, threadID);
  8013. DescriptorSetCache *descriptorSetCache =
  8014. VULKAN_INTERNAL_AcquireDescriptorSetCache(renderer);
  8015. SDL_UnlockMutex(renderer->acquireCommandBufferLock);
  8016. if (commandBuffer == NULL) {
  8017. return NULL;
  8018. }
  8019. commandBuffer->descriptorSetCache = descriptorSetCache;
  8020. // Reset state
  8021. commandBuffer->currentComputePipeline = NULL;
  8022. commandBuffer->currentGraphicsPipeline = NULL;
  8023. SDL_zeroa(commandBuffer->colorAttachmentSubresources);
  8024. SDL_zeroa(commandBuffer->resolveAttachmentSubresources);
  8025. commandBuffer->depthStencilAttachmentSubresource = NULL;
  8026. commandBuffer->colorAttachmentSubresourceCount = 0;
  8027. commandBuffer->resolveAttachmentSubresourceCount = 0;
  8028. for (i = 0; i < MAX_UNIFORM_BUFFERS_PER_STAGE; i += 1) {
  8029. commandBuffer->vertexUniformBuffers[i] = NULL;
  8030. commandBuffer->fragmentUniformBuffers[i] = NULL;
  8031. commandBuffer->computeUniformBuffers[i] = NULL;
  8032. }
  8033. commandBuffer->needVertexBufferBind = false;
  8034. commandBuffer->needNewVertexResourceDescriptorSet = true;
  8035. commandBuffer->needNewVertexUniformDescriptorSet = true;
  8036. commandBuffer->needNewVertexUniformOffsets = true;
  8037. commandBuffer->needNewFragmentResourceDescriptorSet = true;
  8038. commandBuffer->needNewFragmentUniformDescriptorSet = true;
  8039. commandBuffer->needNewFragmentUniformOffsets = true;
  8040. commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  8041. commandBuffer->needNewComputeUniformDescriptorSet = true;
  8042. commandBuffer->needNewComputeUniformOffsets = true;
  8043. commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
  8044. commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
  8045. commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
  8046. commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;
  8047. commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
  8048. commandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
  8049. commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
  8050. SDL_zeroa(commandBuffer->vertexBuffers);
  8051. SDL_zeroa(commandBuffer->vertexBufferOffsets);
  8052. commandBuffer->vertexBufferCount = 0;
  8053. SDL_zeroa(commandBuffer->vertexSamplerTextureViewBindings);
  8054. SDL_zeroa(commandBuffer->vertexSamplerBindings);
  8055. SDL_zeroa(commandBuffer->vertexStorageTextureViewBindings);
  8056. SDL_zeroa(commandBuffer->vertexStorageBufferBindings);
  8057. SDL_zeroa(commandBuffer->fragmentSamplerTextureViewBindings);
  8058. SDL_zeroa(commandBuffer->fragmentSamplerBindings);
  8059. SDL_zeroa(commandBuffer->fragmentStorageTextureViewBindings);
  8060. SDL_zeroa(commandBuffer->fragmentStorageBufferBindings);
  8061. SDL_zeroa(commandBuffer->readWriteComputeStorageTextureSubresources);
  8062. commandBuffer->readWriteComputeStorageTextureSubresourceCount = 0;
  8063. SDL_zeroa(commandBuffer->readWriteComputeStorageBuffers);
  8064. SDL_zeroa(commandBuffer->computeSamplerTextureViewBindings);
  8065. SDL_zeroa(commandBuffer->computeSamplerBindings);
  8066. SDL_zeroa(commandBuffer->readOnlyComputeStorageTextureViewBindings);
  8067. SDL_zeroa(commandBuffer->readOnlyComputeStorageBufferBindings);
  8068. SDL_zeroa(commandBuffer->readOnlyComputeStorageTextures);
  8069. SDL_zeroa(commandBuffer->readOnlyComputeStorageBuffers);
  8070. commandBuffer->autoReleaseFence = true;
  8071. commandBuffer->swapchainRequested = false;
  8072. commandBuffer->isDefrag = 0;
  8073. /* Reset the command buffer here to avoid resets being called
  8074. * from a separate thread than where the command buffer was acquired
  8075. */
  8076. result = renderer->vkResetCommandBuffer(
  8077. commandBuffer->commandBuffer,
  8078. VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
  8079. CHECK_VULKAN_ERROR_AND_RETURN(result, vkResetCommandBuffer, NULL);
  8080. if (!VULKAN_INTERNAL_BeginCommandBuffer(renderer, commandBuffer)) {
  8081. return NULL;
  8082. }
  8083. return (SDL_GPUCommandBuffer *)commandBuffer;
  8084. }
  8085. static bool VULKAN_QueryFence(
  8086. SDL_GPURenderer *driverData,
  8087. SDL_GPUFence *fence)
  8088. {
  8089. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8090. VkResult result;
  8091. result = renderer->vkGetFenceStatus(
  8092. renderer->logicalDevice,
  8093. ((VulkanFenceHandle *)fence)->fence);
  8094. if (result == VK_SUCCESS) {
  8095. return true;
  8096. } else if (result == VK_NOT_READY) {
  8097. return false;
  8098. } else {
  8099. SET_ERROR_AND_RETURN("vkGetFenceStatus: %s", VkErrorMessages(result), false);
  8100. }
  8101. }
// Pushes a fence handle back onto the renderer's reusable fence pool,
// growing the pool array if needed. Thread-safe via fencePool.lock.
static void VULKAN_INTERNAL_ReturnFenceToPool(
    VulkanRenderer *renderer,
    VulkanFenceHandle *fenceHandle)
{
    SDL_LockMutex(renderer->fencePool.lock);

    // Ensure room for one more entry; doubles the capacity when growing.
    EXPAND_ARRAY_IF_NEEDED(
        renderer->fencePool.availableFences,
        VulkanFenceHandle *,
        renderer->fencePool.availableFenceCount + 1,
        renderer->fencePool.availableFenceCapacity,
        renderer->fencePool.availableFenceCapacity * 2);

    renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount] = fenceHandle;
    renderer->fencePool.availableFenceCount += 1;

    SDL_UnlockMutex(renderer->fencePool.lock);
}
  8117. static void VULKAN_ReleaseFence(
  8118. SDL_GPURenderer *driverData,
  8119. SDL_GPUFence *fence)
  8120. {
  8121. VulkanFenceHandle *handle = (VulkanFenceHandle *)fence;
  8122. if (SDL_AtomicDecRef(&handle->referenceCount)) {
  8123. VULKAN_INTERNAL_ReturnFenceToPool((VulkanRenderer *)driverData, handle);
  8124. }
  8125. }
  8126. static WindowData *VULKAN_INTERNAL_FetchWindowData(
  8127. SDL_Window *window)
  8128. {
  8129. SDL_PropertiesID properties = SDL_GetWindowProperties(window);
  8130. return (WindowData *)SDL_GetPointerProperty(properties, WINDOW_PROPERTY_DATA, NULL);
  8131. }
  8132. static bool VULKAN_INTERNAL_OnWindowResize(void *userdata, SDL_Event *e)
  8133. {
  8134. SDL_Window *w = (SDL_Window *)userdata;
  8135. WindowData *data;
  8136. if (e->type == SDL_EVENT_WINDOW_PIXEL_SIZE_CHANGED && e->window.windowID == SDL_GetWindowID(w)) {
  8137. data = VULKAN_INTERNAL_FetchWindowData(w);
  8138. data->needsSwapchainRecreate = true;
  8139. data->swapchainCreateWidth = e->window.data1;
  8140. data->swapchainCreateHeight = e->window.data2;
  8141. }
  8142. return true;
  8143. }
  8144. static bool VULKAN_SupportsSwapchainComposition(
  8145. SDL_GPURenderer *driverData,
  8146. SDL_Window *window,
  8147. SDL_GPUSwapchainComposition swapchainComposition)
  8148. {
  8149. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8150. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8151. VkSurfaceKHR surface;
  8152. SwapchainSupportDetails supportDetails;
  8153. bool result = false;
  8154. if (windowData == NULL) {
  8155. SET_STRING_ERROR_AND_RETURN("Must claim window before querying swapchain composition support!", false);
  8156. }
  8157. surface = windowData->surface;
  8158. if (!surface) {
  8159. SET_STRING_ERROR_AND_RETURN("Window has no Vulkan surface", false);
  8160. }
  8161. if (VULKAN_INTERNAL_QuerySwapchainSupport(
  8162. renderer,
  8163. renderer->physicalDevice,
  8164. surface,
  8165. &supportDetails)) {
  8166. result = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  8167. SwapchainCompositionToFormat[swapchainComposition],
  8168. SwapchainCompositionToColorSpace[swapchainComposition],
  8169. supportDetails.formats,
  8170. supportDetails.formatsLength);
  8171. if (!result) {
  8172. // Let's try again with the fallback format...
  8173. result = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  8174. SwapchainCompositionToFallbackFormat[swapchainComposition],
  8175. SwapchainCompositionToColorSpace[swapchainComposition],
  8176. supportDetails.formats,
  8177. supportDetails.formatsLength);
  8178. }
  8179. SDL_free(supportDetails.formats);
  8180. SDL_free(supportDetails.presentModes);
  8181. }
  8182. return result;
  8183. }
  8184. static bool VULKAN_SupportsPresentMode(
  8185. SDL_GPURenderer *driverData,
  8186. SDL_Window *window,
  8187. SDL_GPUPresentMode presentMode)
  8188. {
  8189. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8190. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8191. VkSurfaceKHR surface;
  8192. SwapchainSupportDetails supportDetails;
  8193. bool result = false;
  8194. if (windowData == NULL) {
  8195. SET_STRING_ERROR_AND_RETURN("Must claim window before querying present mode support!", false);
  8196. }
  8197. surface = windowData->surface;
  8198. if (!surface) {
  8199. SET_STRING_ERROR_AND_RETURN("Window has no Vulkan surface", false);
  8200. }
  8201. if (VULKAN_INTERNAL_QuerySwapchainSupport(
  8202. renderer,
  8203. renderer->physicalDevice,
  8204. surface,
  8205. &supportDetails)) {
  8206. result = VULKAN_INTERNAL_VerifySwapPresentMode(
  8207. SDLToVK_PresentMode[presentMode],
  8208. supportDetails.presentModes,
  8209. supportDetails.presentModesLength);
  8210. SDL_free(supportDetails.formats);
  8211. SDL_free(supportDetails.presentModes);
  8212. }
  8213. return result;
  8214. }
// Claims a window for this device: allocates per-window state, creates the
// Vulkan surface and swapchain, and registers the window with the renderer.
// Re-claiming an already-claimed window just bumps its refcount.
static bool VULKAN_ClaimWindow(
    SDL_GPURenderer *driverData,
    SDL_Window *window)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);

    if (windowData == NULL) {
        windowData = (WindowData *)SDL_calloc(1, sizeof(WindowData));
        if (!windowData) {
            return false;
        }
        windowData->window = window;
        windowData->renderer = renderer;
        windowData->refcount = 1;
        windowData->presentMode = SDL_GPU_PRESENTMODE_VSYNC;
        windowData->swapchainComposition = SDL_GPU_SWAPCHAINCOMPOSITION_SDR;

        // On non-Apple platforms the swapchain capability currentExtent can be different from the window,
        // so we have to query the window size.
#ifndef SDL_PLATFORM_APPLE
        int w, h;
        SDL_SyncWindow(window);
        SDL_GetWindowSizeInPixels(window, &w, &h);
        windowData->swapchainCreateWidth = w;
        windowData->swapchainCreateHeight = h;
#endif

        SDL_VideoDevice *videoDevice = SDL_GetVideoDevice();
        if (!videoDevice) {
            SDL_free(windowData);
            return SDL_SetError("No video device found");
        }
        if (!videoDevice->Vulkan_CreateSurface) {
            SDL_free(windowData);
            return SDL_SetError("Video device does not implement Vulkan_CreateSurface");
        }

        // Each window must have its own surface.
        if (!videoDevice->Vulkan_CreateSurface(
                videoDevice,
                windowData->window,
                renderer->instance,
                NULL, // FIXME: VAllocationCallbacks
                &windowData->surface)) {
            SDL_free(windowData);
            return false;
        }

        Uint32 createSwapchainResult = VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
        if (createSwapchainResult == 1) {
            // Success: publish windowData and register in the claimed list.
            SDL_SetPointerProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA, windowData);

            SDL_LockMutex(renderer->windowLock);

            if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity) {
                renderer->claimedWindowCapacity *= 2;
                renderer->claimedWindows = SDL_realloc(
                    renderer->claimedWindows,
                    renderer->claimedWindowCapacity * sizeof(WindowData *));
            }

            renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
            renderer->claimedWindowCount += 1;

            SDL_UnlockMutex(renderer->windowLock);

            SDL_AddWindowEventWatch(SDL_WINDOW_EVENT_WATCH_NORMAL, VULKAN_INTERNAL_OnWindowResize, window);

            return true;
        } else if (createSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // NOTE(review): on this path windowData is neither stored in the
            // window properties nor added to claimedWindows, so a later
            // FetchWindowData returns NULL and this allocation appears to
            // leak — confirm whether a retry path re-registers it.
            windowData->needsSwapchainRecreate = true;
            return true;
        } else {
            // Failed to create swapchain, destroy surface and free data
            renderer->vkDestroySurfaceKHR(
                renderer->instance,
                windowData->surface,
                NULL);
            SDL_free(windowData);
            return false;
        }
    } else if (windowData->renderer == renderer) {
        // Already claimed by this device: just add a reference.
        ++windowData->refcount;
        return true;
    } else {
        SET_STRING_ERROR_AND_RETURN("Window already claimed", false);
    }
}
  8293. static void VULKAN_ReleaseWindow(
  8294. SDL_GPURenderer *driverData,
  8295. SDL_Window *window)
  8296. {
  8297. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8298. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8299. Uint32 i;
  8300. if (windowData == NULL) {
  8301. return;
  8302. }
  8303. if (windowData->renderer != renderer) {
  8304. SDL_SetError("Window not claimed by this device");
  8305. return;
  8306. }
  8307. if (windowData->refcount > 1) {
  8308. --windowData->refcount;
  8309. return;
  8310. }
  8311. VULKAN_Wait(driverData);
  8312. for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
  8313. if (windowData->inFlightFences[i] != NULL) {
  8314. VULKAN_ReleaseFence(
  8315. driverData,
  8316. windowData->inFlightFences[i]);
  8317. }
  8318. }
  8319. VULKAN_INTERNAL_DestroySwapchain(
  8320. (VulkanRenderer *)driverData,
  8321. windowData);
  8322. renderer->vkDestroySurfaceKHR(
  8323. renderer->instance,
  8324. windowData->surface,
  8325. NULL);
  8326. windowData->surface = VK_NULL_HANDLE;
  8327. SDL_LockMutex(renderer->windowLock);
  8328. for (i = 0; i < renderer->claimedWindowCount; i += 1) {
  8329. if (renderer->claimedWindows[i]->window == window) {
  8330. renderer->claimedWindows[i] = renderer->claimedWindows[renderer->claimedWindowCount - 1];
  8331. renderer->claimedWindowCount -= 1;
  8332. break;
  8333. }
  8334. }
  8335. SDL_UnlockMutex(renderer->windowLock);
  8336. SDL_free(windowData);
  8337. SDL_ClearProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA);
  8338. SDL_RemoveWindowEventWatch(SDL_WINDOW_EVENT_WATCH_NORMAL, VULKAN_INTERNAL_OnWindowResize, window);
  8339. }
// Tears down and rebuilds the swapchain for a claimed window.
// Returns 0 (false) on failure; otherwise forwards the result of
// VULKAN_INTERNAL_CreateSwapchain() — callers in this file also check that
// result against VULKAN_INTERNAL_TRY_AGAIN (the zero-extent edge case), so
// this is effectively a tri-state return, not a plain bool.
static Uint32 VULKAN_INTERNAL_RecreateSwapchain(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    Uint32 i;

    // The old swapchain may still be referenced by in-flight work; idle first.
    if (!VULKAN_Wait((SDL_GPURenderer *)renderer)) {
        return false;
    }

    // The per-frame fences guarded presents of the old swapchain; drop them.
    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->inFlightFences[i] != NULL) {
            VULKAN_ReleaseFence(
                (SDL_GPURenderer *)renderer,
                windowData->inFlightFences[i]);
            windowData->inFlightFences[i] = NULL;
        }
    }

#ifdef SDL_VIDEO_DRIVER_PRIVATE
    // Private platforms also invalidate the window, so don't try to preserve the surface/swapchain
    VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
#else
    // Keep the surface/swapchain handle alive so CreateSwapchain can pass the
    // old swapchain for resource reuse; only the images are destroyed here.
    VULKAN_INTERNAL_DestroySwapchainImage(renderer, windowData);
#endif

    return VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
}
  8364. static bool VULKAN_WaitForSwapchain(
  8365. SDL_GPURenderer *driverData,
  8366. SDL_Window *window)
  8367. {
  8368. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8369. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8370. if (windowData == NULL) {
  8371. SET_STRING_ERROR_AND_RETURN("Cannot wait for a swapchain from an unclaimed window!", false);
  8372. }
  8373. if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
  8374. if (!VULKAN_WaitForFences(
  8375. driverData,
  8376. true,
  8377. &windowData->inFlightFences[windowData->frameCounter],
  8378. 1)) {
  8379. return false;
  8380. }
  8381. }
  8382. return true;
  8383. }
// Acquires the next swapchain image for `window` and wires it into the
// command buffer's present/wait/signal bookkeeping.
// Contract (as visible in this function):
//   - *swapchainTexture is always reset to NULL first; on success it receives
//     the swapchain texture container.
//   - Returning true with *swapchainTexture still NULL is NOT an error: it
//     means "skip rendering this frame" (hidden window, zero-extent swapchain
//     recreate, or a non-blocking call whose frame fence has not signaled).
//   - Returning false is a real error (error string already set).
// `block` chooses between waiting on the frame-in-flight fence and bailing out.
static bool VULKAN_INTERNAL_AcquireSwapchainTexture(
    bool block,
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_Window *window,
    SDL_GPUTexture **swapchainTexture,
    Uint32 *swapchainTextureWidth,
    Uint32 *swapchainTextureHeight)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    Uint32 swapchainImageIndex;
    WindowData *windowData;
    VkResult acquireResult = VK_SUCCESS;
    VulkanTextureContainer *swapchainTextureContainer = NULL;
    VulkanPresentData *presentData;

    // Default every output so each early-out leaves them in a defined state.
    *swapchainTexture = NULL;
    if (swapchainTextureWidth) {
        *swapchainTextureWidth = 0;
    }
    if (swapchainTextureHeight) {
        *swapchainTextureHeight = 0;
    }

    windowData = VULKAN_INTERNAL_FetchWindowData(window);
    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Cannot acquire a swapchain texture from an unclaimed window!", false);
    }

    // The command buffer is flagged for cleanup when the swapchain is requested as a cleanup timing mechanism
    vulkanCommandBuffer->swapchainRequested = true;

    if (window->flags & SDL_WINDOW_HIDDEN) {
        // Edge case, texture is filled in with NULL but not an error
        return true;
    }

    if (windowData->needsSurfaceRecreate) {
        // The platform invalidated the surface (set e.g. on
        // VK_ERROR_SURFACE_LOST_KHR in VULKAN_Submit); destroy it and
        // recreate it through the video device.
        SDL_VideoDevice *videoDevice = SDL_GetVideoDevice();
        SDL_assert(videoDevice);
        SDL_assert(videoDevice->Vulkan_CreateSurface);

        renderer->vkDestroySurfaceKHR(
            renderer->instance,
            windowData->surface,
            NULL);

        if (!videoDevice->Vulkan_CreateSurface(
                videoDevice,
                windowData->window,
                renderer->instance,
                NULL, // FIXME: VAllocationCallbacks
                &windowData->surface)) {
            SET_STRING_ERROR_AND_RETURN("Failed to recreate Vulkan surface!", false);
        }
    }

    // If window data marked as needing swapchain recreate, try to recreate
    if (windowData->needsSwapchainRecreate) {
        Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateSwapchainResult) {
            return false;
        } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, texture is filled in with NULL but not an error.
            // Also drop the pending frame fence since no present will occur.
            if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
                VULKAN_ReleaseFence(
                    (SDL_GPURenderer *)renderer,
                    windowData->inFlightFences[windowData->frameCounter]);
                windowData->inFlightFences[windowData->frameCounter] = NULL;
            }
            return true;
        }

        // Unset this flag until after the swapchain has been recreated to let VULKAN_INTERNAL_CreateSwapchain()
        // know whether it needs to pass the old swapchain or not.
        windowData->needsSurfaceRecreate = false;
    }

    // Throttle against the in-flight fence for this frame slot.
    if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
        if (block) {
            // If we are blocking, just wait for the fence!
            if (!VULKAN_WaitForFences(
                    (SDL_GPURenderer *)renderer,
                    true,
                    &windowData->inFlightFences[windowData->frameCounter],
                    1)) {
                return false;
            }
        } else {
            // If we are not blocking and the least recent fence is not signaled,
            // return true to indicate that there is no error but rendering should be skipped.
            if (!VULKAN_QueryFence(
                    (SDL_GPURenderer *)renderer,
                    windowData->inFlightFences[windowData->frameCounter])) {
                return true;
            }
        }

        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            windowData->inFlightFences[windowData->frameCounter]);
        windowData->inFlightFences[windowData->frameCounter] = NULL;
    }

    // Finally, try to acquire!
    while (true) {
        acquireResult = renderer->vkAcquireNextImageKHR(
            renderer->logicalDevice,
            windowData->swapchain,
            SDL_MAX_UINT64,
            windowData->imageAvailableSemaphore[windowData->frameCounter],
            VK_NULL_HANDLE,
            &swapchainImageIndex);

        // VK_SUBOPTIMAL_KHR still delivered a usable image, so accept it.
        if (acquireResult == VK_SUCCESS || acquireResult == VK_SUBOPTIMAL_KHR) {
            break; // we got the next image!
        }

        // If acquisition is invalid, let's try to recreate
        Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateSwapchainResult) {
            return false;
        } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, texture is filled in with NULL but not an error
            return true;
        }
    }

    if (swapchainTextureWidth) {
        *swapchainTextureWidth = windowData->width;
    }

    if (swapchainTextureHeight) {
        *swapchainTextureHeight = windowData->height;
    }

    swapchainTextureContainer = &windowData->textureContainers[swapchainImageIndex];

    // We need a special execution dependency with pWaitDstStageMask or image transition can start before acquire finishes
    VkImageMemoryBarrier imageBarrier;
    imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    imageBarrier.pNext = NULL;
    imageBarrier.srcAccessMask = 0;
    imageBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.image = swapchainTextureContainer->activeTexture->image;
    imageBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imageBarrier.subresourceRange.baseMipLevel = 0;
    imageBarrier.subresourceRange.levelCount = 1;
    imageBarrier.subresourceRange.baseArrayLayer = 0;
    imageBarrier.subresourceRange.layerCount = 1;

    renderer->vkCmdPipelineBarrier(
        vulkanCommandBuffer->commandBuffer,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
        0,
        0,
        NULL,
        0,
        NULL,
        1,
        &imageBarrier);

    // Set up present struct
    if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity) {
        vulkanCommandBuffer->presentDataCapacity += 1;
        vulkanCommandBuffer->presentDatas = SDL_realloc(
            vulkanCommandBuffer->presentDatas,
            vulkanCommandBuffer->presentDataCapacity * sizeof(VulkanPresentData));
    }

    presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
    vulkanCommandBuffer->presentDataCount += 1;

    presentData->windowData = windowData;
    presentData->swapchainImageIndex = swapchainImageIndex;

    // Set up present semaphores: the submit waits on image-available before
    // rendering, and signals render-finished for vkQueuePresentKHR to wait on.
    if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity) {
        vulkanCommandBuffer->waitSemaphoreCapacity += 1;
        vulkanCommandBuffer->waitSemaphores = SDL_realloc(
            vulkanCommandBuffer->waitSemaphores,
            vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));
    }

    vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] =
        windowData->imageAvailableSemaphore[windowData->frameCounter];
    vulkanCommandBuffer->waitSemaphoreCount += 1;

    if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity) {
        vulkanCommandBuffer->signalSemaphoreCapacity += 1;
        vulkanCommandBuffer->signalSemaphores = SDL_realloc(
            vulkanCommandBuffer->signalSemaphores,
            vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));
    }

    vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] =
        windowData->renderFinishedSemaphore[swapchainImageIndex];
    vulkanCommandBuffer->signalSemaphoreCount += 1;

    *swapchainTexture = (SDL_GPUTexture *)swapchainTextureContainer;
    return true;
}
  8564. static bool VULKAN_AcquireSwapchainTexture(
  8565. SDL_GPUCommandBuffer *command_buffer,
  8566. SDL_Window *window,
  8567. SDL_GPUTexture **swapchain_texture,
  8568. Uint32 *swapchain_texture_width,
  8569. Uint32 *swapchain_texture_height
  8570. ) {
  8571. return VULKAN_INTERNAL_AcquireSwapchainTexture(
  8572. false,
  8573. command_buffer,
  8574. window,
  8575. swapchain_texture,
  8576. swapchain_texture_width,
  8577. swapchain_texture_height);
  8578. }
  8579. static bool VULKAN_WaitAndAcquireSwapchainTexture(
  8580. SDL_GPUCommandBuffer *command_buffer,
  8581. SDL_Window *window,
  8582. SDL_GPUTexture **swapchain_texture,
  8583. Uint32 *swapchain_texture_width,
  8584. Uint32 *swapchain_texture_height
  8585. ) {
  8586. return VULKAN_INTERNAL_AcquireSwapchainTexture(
  8587. true,
  8588. command_buffer,
  8589. window,
  8590. swapchain_texture,
  8591. swapchain_texture_width,
  8592. swapchain_texture_height);
  8593. }
  8594. static SDL_GPUTextureFormat VULKAN_GetSwapchainTextureFormat(
  8595. SDL_GPURenderer *driverData,
  8596. SDL_Window *window)
  8597. {
  8598. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8599. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8600. if (windowData == NULL) {
  8601. SET_STRING_ERROR_AND_RETURN("Cannot get swapchain format, window has not been claimed!", SDL_GPU_TEXTUREFORMAT_INVALID);
  8602. }
  8603. return SwapchainCompositionToSDLFormat(
  8604. windowData->swapchainComposition,
  8605. windowData->usingFallbackFormat);
  8606. }
  8607. static bool VULKAN_SetSwapchainParameters(
  8608. SDL_GPURenderer *driverData,
  8609. SDL_Window *window,
  8610. SDL_GPUSwapchainComposition swapchainComposition,
  8611. SDL_GPUPresentMode presentMode)
  8612. {
  8613. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8614. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8615. if (windowData == NULL) {
  8616. SET_STRING_ERROR_AND_RETURN("Cannot set swapchain parameters on unclaimed window!", false);
  8617. }
  8618. if (!VULKAN_SupportsSwapchainComposition(driverData, window, swapchainComposition)) {
  8619. SET_STRING_ERROR_AND_RETURN("Swapchain composition not supported!", false);
  8620. }
  8621. if (!VULKAN_SupportsPresentMode(driverData, window, presentMode)) {
  8622. SET_STRING_ERROR_AND_RETURN("Present mode not supported!", false);
  8623. }
  8624. windowData->presentMode = presentMode;
  8625. windowData->swapchainComposition = swapchainComposition;
  8626. Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
  8627. if (!recreateSwapchainResult) {
  8628. return false;
  8629. } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
  8630. // Edge case, swapchain extent is (0, 0) but this is not an error
  8631. windowData->needsSwapchainRecreate = true;
  8632. return true;
  8633. }
  8634. return true;
  8635. }
  8636. static bool VULKAN_SetAllowedFramesInFlight(
  8637. SDL_GPURenderer *driverData,
  8638. Uint32 allowedFramesInFlight)
  8639. {
  8640. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8641. renderer->allowedFramesInFlight = allowedFramesInFlight;
  8642. for (Uint32 i = 0; i < renderer->claimedWindowCount; i += 1) {
  8643. WindowData *windowData = renderer->claimedWindows[i];
  8644. Uint32 recreateResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
  8645. if (!recreateResult) {
  8646. return false;
  8647. } else if (recreateResult == VULKAN_INTERNAL_TRY_AGAIN) {
  8648. // Edge case, swapchain extent is (0, 0) but this is not an error
  8649. windowData->needsSwapchainRecreate = true;
  8650. }
  8651. }
  8652. return true;
  8653. }
  8654. // Submission structure
  8655. static VulkanFenceHandle *VULKAN_INTERNAL_AcquireFenceFromPool(
  8656. VulkanRenderer *renderer)
  8657. {
  8658. VulkanFenceHandle *handle;
  8659. VkFenceCreateInfo fenceCreateInfo;
  8660. VkFence fence;
  8661. VkResult vulkanResult;
  8662. if (renderer->fencePool.availableFenceCount == 0) {
  8663. // Create fence
  8664. fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
  8665. fenceCreateInfo.pNext = NULL;
  8666. fenceCreateInfo.flags = 0;
  8667. vulkanResult = renderer->vkCreateFence(
  8668. renderer->logicalDevice,
  8669. &fenceCreateInfo,
  8670. NULL,
  8671. &fence);
  8672. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateFence, NULL);
  8673. handle = SDL_malloc(sizeof(VulkanFenceHandle));
  8674. handle->fence = fence;
  8675. SDL_SetAtomicInt(&handle->referenceCount, 0);
  8676. return handle;
  8677. }
  8678. SDL_LockMutex(renderer->fencePool.lock);
  8679. handle = renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount - 1];
  8680. renderer->fencePool.availableFenceCount -= 1;
  8681. vulkanResult = renderer->vkResetFences(
  8682. renderer->logicalDevice,
  8683. 1,
  8684. &handle->fence);
  8685. SDL_UnlockMutex(renderer->fencePool.lock);
  8686. CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkResetFences, NULL);
  8687. return handle;
  8688. }
// Destroys deferred-release resources whose GPU reference counts have dropped
// to zero. Each list uses swap-remove (the last element replaces slot i),
// which is why every loop iterates back-to-front: the element swapped into
// slot i has already been visited. Resources still referenced by in-flight
// command buffers (refcount > 0) stay queued for a later pass.
static void VULKAN_INTERNAL_PerformPendingDestroys(
    VulkanRenderer *renderer)
{
    SDL_LockMutex(renderer->disposeLock);

    // Textures
    for (Sint32 i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->texturesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyTexture(
                renderer,
                renderer->texturesToDestroy[i]);

            renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
            renderer->texturesToDestroyCount -= 1;
        }
    }

    // Buffers
    for (Sint32 i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->buffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyBuffer(
                renderer,
                renderer->buffersToDestroy[i]);

            renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
            renderer->buffersToDestroyCount -= 1;
        }
    }

    // Graphics pipelines
    for (Sint32 i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyGraphicsPipeline(
                renderer,
                renderer->graphicsPipelinesToDestroy[i]);

            renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
            renderer->graphicsPipelinesToDestroyCount -= 1;
        }
    }

    // Compute pipelines
    for (Sint32 i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyComputePipeline(
                renderer,
                renderer->computePipelinesToDestroy[i]);

            renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
            renderer->computePipelinesToDestroyCount -= 1;
        }
    }

    // Shaders
    for (Sint32 i = renderer->shadersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->shadersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyShader(
                renderer,
                renderer->shadersToDestroy[i]);

            renderer->shadersToDestroy[i] = renderer->shadersToDestroy[renderer->shadersToDestroyCount - 1];
            renderer->shadersToDestroyCount -= 1;
        }
    }

    // Samplers
    for (Sint32 i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->samplersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroySampler(
                renderer,
                renderer->samplersToDestroy[i]);

            renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
            renderer->samplersToDestroyCount -= 1;
        }
    }

    // Framebuffers
    for (Sint32 i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->framebuffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyFramebuffer(
                renderer,
                renderer->framebuffersToDestroy[i]);

            renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
            renderer->framebuffersToDestroyCount -= 1;
        }
    }

    SDL_UnlockMutex(renderer->disposeLock);
}
// Recycles a command buffer once the GPU is done with it (or after a cancel):
// releases the auto-release fence, returns uniform buffers to the pool, drops
// the reference counts it held on resources, resets per-submit state, puts
// the command buffer back on its pool's inactive list, and (unless `cancel`)
// swap-removes it from the renderer's submitted list.
// NOTE(review): the submitted-list manipulation at the bottom is not locked
// here — all callers in this file hold renderer->submitLock around this call.
static void VULKAN_INTERNAL_CleanCommandBuffer(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    bool cancel)
{
    // Release the in-flight fence unless the caller took ownership of it
    // (VULKAN_SubmitAndAcquireFence and VULKAN_Cancel clear autoReleaseFence).
    if (commandBuffer->autoReleaseFence) {
        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            (SDL_GPUFence *)commandBuffer->inFlightFence);

        commandBuffer->inFlightFence = NULL;
    }

    // Uniform buffers are now available
    SDL_LockMutex(renderer->acquireUniformBufferLock);

    for (Sint32 i = 0; i < commandBuffer->usedUniformBufferCount; i += 1) {
        VULKAN_INTERNAL_ReturnUniformBufferToPool(
            renderer,
            commandBuffer->usedUniformBuffers[i]);
    }
    commandBuffer->usedUniformBufferCount = 0;

    SDL_UnlockMutex(renderer->acquireUniformBufferLock);

    // Decrement reference counts
    for (Sint32 i = 0; i < commandBuffer->usedBufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
    }
    commandBuffer->usedBufferCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedTextureCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedTextures[i]->referenceCount);
    }
    commandBuffer->usedTextureCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedSamplerCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
    }
    commandBuffer->usedSamplerCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
    }
    commandBuffer->usedGraphicsPipelineCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedComputePipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
    }
    commandBuffer->usedComputePipelineCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedFramebufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
    }
    commandBuffer->usedFramebufferCount = 0;

    // Reset presentation data
    commandBuffer->presentDataCount = 0;
    commandBuffer->waitSemaphoreCount = 0;
    commandBuffer->signalSemaphoreCount = 0;
    commandBuffer->swapchainRequested = false;

    // Reset defrag state
    if (commandBuffer->isDefrag) {
        renderer->defragInProgress = 0;
    }

    // Return command buffer to pool
    SDL_LockMutex(renderer->acquireCommandBufferLock);

    if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity) {
        commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
        commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc(
            commandBuffer->commandPool->inactiveCommandBuffers,
            commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer *));
    }

    commandBuffer->commandPool->inactiveCommandBuffers[commandBuffer->commandPool->inactiveCommandBufferCount] = commandBuffer;
    commandBuffer->commandPool->inactiveCommandBufferCount += 1;

    // Release descriptor set cache
    VULKAN_INTERNAL_ReturnDescriptorSetCacheToPool(
        renderer,
        commandBuffer->descriptorSetCache);
    commandBuffer->descriptorSetCache = NULL;

    SDL_UnlockMutex(renderer->acquireCommandBufferLock);

    // Remove this command buffer from the submitted list
    // (a canceled command buffer was never added to it).
    if (!cancel) {
        for (Uint32 i = 0; i < renderer->submittedCommandBufferCount; i += 1) {
            if (renderer->submittedCommandBuffers[i] == commandBuffer) {
                renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1];
                renderer->submittedCommandBufferCount -= 1;
            }
        }
    }
}
  8838. static bool VULKAN_WaitForFences(
  8839. SDL_GPURenderer *driverData,
  8840. bool waitAll,
  8841. SDL_GPUFence *const *fences,
  8842. Uint32 numFences)
  8843. {
  8844. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8845. VkFence *vkFences = SDL_stack_alloc(VkFence, numFences);
  8846. VkResult result;
  8847. for (Uint32 i = 0; i < numFences; i += 1) {
  8848. vkFences[i] = ((VulkanFenceHandle *)fences[i])->fence;
  8849. }
  8850. result = renderer->vkWaitForFences(
  8851. renderer->logicalDevice,
  8852. numFences,
  8853. vkFences,
  8854. waitAll,
  8855. SDL_MAX_UINT64);
  8856. CHECK_VULKAN_ERROR_AND_RETURN(result, vkWaitForFences, false);
  8857. SDL_stack_free(vkFences);
  8858. SDL_LockMutex(renderer->submitLock);
  8859. for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
  8860. result = renderer->vkGetFenceStatus(
  8861. renderer->logicalDevice,
  8862. renderer->submittedCommandBuffers[i]->inFlightFence->fence);
  8863. if (result == VK_SUCCESS) {
  8864. VULKAN_INTERNAL_CleanCommandBuffer(
  8865. renderer,
  8866. renderer->submittedCommandBuffers[i],
  8867. false);
  8868. }
  8869. }
  8870. VULKAN_INTERNAL_PerformPendingDestroys(renderer);
  8871. SDL_UnlockMutex(renderer->submitLock);
  8872. return true;
  8873. }
  8874. static bool VULKAN_Wait(
  8875. SDL_GPURenderer *driverData)
  8876. {
  8877. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8878. VulkanCommandBuffer *commandBuffer;
  8879. VkResult result;
  8880. Sint32 i;
  8881. SDL_LockMutex(renderer->submitLock);
  8882. result = renderer->vkDeviceWaitIdle(renderer->logicalDevice);
  8883. if (result != VK_SUCCESS) {
  8884. if (renderer->debugMode) {
  8885. SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s %s", "vkDeviceWaitIdle", VkErrorMessages(result));
  8886. }
  8887. SDL_SetError("%s %s", "vkDeviceWaitIdle", VkErrorMessages(result));
  8888. SDL_UnlockMutex(renderer->submitLock);
  8889. return false;
  8890. }
  8891. for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
  8892. commandBuffer = renderer->submittedCommandBuffers[i];
  8893. VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer, false);
  8894. }
  8895. VULKAN_INTERNAL_PerformPendingDestroys(renderer);
  8896. SDL_UnlockMutex(renderer->submitLock);
  8897. return true;
  8898. }
  8899. static SDL_GPUFence *VULKAN_SubmitAndAcquireFence(
  8900. SDL_GPUCommandBuffer *commandBuffer)
  8901. {
  8902. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  8903. vulkanCommandBuffer->autoReleaseFence = false;
  8904. if (!VULKAN_Submit(commandBuffer)) {
  8905. return NULL;
  8906. }
  8907. return (SDL_GPUFence *)vulkanCommandBuffer->inFlightFence;
  8908. }
  8909. static void VULKAN_INTERNAL_ReleaseCommandBuffer(VulkanCommandBuffer *vulkanCommandBuffer)
  8910. {
  8911. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  8912. if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity) {
  8913. renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1;
  8914. renderer->submittedCommandBuffers = SDL_realloc(
  8915. renderer->submittedCommandBuffers,
  8916. sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity);
  8917. }
  8918. renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = vulkanCommandBuffer;
  8919. renderer->submittedCommandBufferCount += 1;
  8920. }
// Ends and submits the command buffer to the unified queue, presents any
// swapchain images that were acquired on it, and opportunistically recycles
// finished work. Sequence: transition swapchain subresources to PRESENT
// usage, optionally kick off a defrag pass, end the command buffer, acquire
// an in-flight fence, vkQueueSubmit, vkQueuePresentKHR per acquired window,
// then clean up command buffers whose fences have signaled.
static bool VULKAN_Submit(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VkSubmitInfo submitInfo;
    VkPresentInfoKHR presentInfo;
    VulkanPresentData *presentData;
    VkResult vulkanResult, presentResult = VK_SUCCESS;
    VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT];
    Uint32 swapchainImageIndex;
    VulkanTextureSubresource *swapchainTextureSubresource;
    VulkanMemorySubAllocator *allocator;
    // Only run cleanup/defrag on submissions that requested a swapchain
    // (or on every submission when no window is claimed).
    bool performCleanups =
        (renderer->claimedWindowCount > 0 && vulkanCommandBuffer->swapchainRequested) ||
        renderer->claimedWindowCount == 0;

    SDL_LockMutex(renderer->submitLock);

    // FIXME: Can this just be permanent?
    for (Uint32 i = 0; i < MAX_PRESENT_COUNT; i += 1) {
        waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    }

    // Transition every to-be-presented swapchain image into PRESENT usage.
    for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
        swapchainImageIndex = vulkanCommandBuffer->presentDatas[j].swapchainImageIndex;
        swapchainTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
            &vulkanCommandBuffer->presentDatas[j].windowData->textureContainers[swapchainImageIndex],
            0,
            0);

        VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_PRESENT,
            swapchainTextureSubresource);
    }

    // At most one defrag pass is in flight at a time, piggybacked on this submit.
    if (performCleanups &&
        renderer->allocationsToDefragCount > 0 &&
        !renderer->defragInProgress) {
        if (!VULKAN_INTERNAL_DefragmentMemory(renderer, vulkanCommandBuffer))
        {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to defragment memory, likely OOM!");
        }
    }

    if (!VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer)) {
        SDL_UnlockMutex(renderer->submitLock);
        return false;
    }

    vulkanCommandBuffer->inFlightFence = VULKAN_INTERNAL_AcquireFenceFromPool(renderer);
    if (vulkanCommandBuffer->inFlightFence == NULL) {
        SDL_UnlockMutex(renderer->submitLock);
        return false;
    }

    // Command buffer has a reference to the in-flight fence
    (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount);

    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = NULL;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &vulkanCommandBuffer->commandBuffer;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.pWaitSemaphores = vulkanCommandBuffer->waitSemaphores;
    submitInfo.waitSemaphoreCount = vulkanCommandBuffer->waitSemaphoreCount;
    submitInfo.pSignalSemaphores = vulkanCommandBuffer->signalSemaphores;
    submitInfo.signalSemaphoreCount = vulkanCommandBuffer->signalSemaphoreCount;

    vulkanResult = renderer->vkQueueSubmit(
        renderer->unifiedQueue,
        1,
        &submitInfo,
        vulkanCommandBuffer->inFlightFence->fence);

    if (vulkanResult != VK_SUCCESS) {
        SDL_UnlockMutex(renderer->submitLock);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkQueueSubmit, false);
    }

    // Present, if applicable
    for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
        presentData = &vulkanCommandBuffer->presentDatas[j];

        presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
        presentInfo.pNext = NULL;
        presentInfo.pWaitSemaphores =
            &presentData->windowData->renderFinishedSemaphore[presentData->swapchainImageIndex];
        presentInfo.waitSemaphoreCount = 1;
        presentInfo.pSwapchains = &presentData->windowData->swapchain;
        presentInfo.swapchainCount = 1;
        presentInfo.pImageIndices = &presentData->swapchainImageIndex;
        presentInfo.pResults = NULL;

        presentResult = renderer->vkQueuePresentKHR(
            renderer->unifiedQueue,
            &presentInfo);

        // SUBOPTIMAL/OUT_OF_DATE are treated as "present happened (or was
        // queued), but recreate the swapchain before the next frame".
        if (presentResult == VK_SUCCESS || presentResult == VK_SUBOPTIMAL_KHR || presentResult == VK_ERROR_OUT_OF_DATE_KHR) {
            // If presenting, the swapchain is using the in-flight fence
            presentData->windowData->inFlightFences[presentData->windowData->frameCounter] = (SDL_GPUFence *)vulkanCommandBuffer->inFlightFence;

            (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount);

            // On the Android platform, VK_SUBOPTIMAL_KHR is returned whenever the device is rotated. We'll just ignore this for now.
#ifndef SDL_PLATFORM_ANDROID
            if (presentResult == VK_SUBOPTIMAL_KHR) {
                presentData->windowData->needsSwapchainRecreate = true;
            }
#endif
            if (presentResult == VK_ERROR_OUT_OF_DATE_KHR) {
                presentData->windowData->needsSwapchainRecreate = true;
            }
        } else if (presentResult == VK_ERROR_SURFACE_LOST_KHR) {
            // Android can destroy the surface at any time when the app goes into the background,
            // even after successfully acquiring a swapchain texture and before presenting it.
            presentData->windowData->needsSwapchainRecreate = true;
            presentData->windowData->needsSurfaceRecreate = true;
        } else {
            // Hard present failure: still record the command buffer as
            // submitted before bailing out with the error set by the macro.
            if (presentResult != VK_SUCCESS) {
                VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer);
                SDL_UnlockMutex(renderer->submitLock);
            }
            CHECK_VULKAN_ERROR_AND_RETURN(presentResult, vkQueuePresentKHR, false);
        }

        // Advance this window to its next frame-in-flight slot.
        presentData->windowData->frameCounter =
            (presentData->windowData->frameCounter + 1) % renderer->allowedFramesInFlight;
    }

    if (performCleanups) {
        // Recycle previously submitted command buffers whose fences signaled.
        for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
            vulkanResult = renderer->vkGetFenceStatus(
                renderer->logicalDevice,
                renderer->submittedCommandBuffers[i]->inFlightFence->fence);

            if (vulkanResult == VK_SUCCESS) {
                VULKAN_INTERNAL_CleanCommandBuffer(
                    renderer,
                    renderer->submittedCommandBuffers[i],
                    false);
            }
        }

        // Free device memory allocations that no longer hold any used regions.
        if (renderer->checkEmptyAllocations) {
            SDL_LockMutex(renderer->allocatorLock);

            for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
                allocator = &renderer->memoryAllocator->subAllocators[i];

                for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
                    if (allocator->allocations[j]->usedRegionCount == 0) {
                        VULKAN_INTERNAL_DeallocateMemory(
                            renderer,
                            allocator,
                            j);
                    }
                }
            }

            renderer->checkEmptyAllocations = false;

            SDL_UnlockMutex(renderer->allocatorLock);
        }

        VULKAN_INTERNAL_PerformPendingDestroys(renderer);
    }

    // Mark command buffer as submitted
    VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer);

    SDL_UnlockMutex(renderer->submitLock);

    return true;
}
  9069. static bool VULKAN_Cancel(
  9070. SDL_GPUCommandBuffer *commandBuffer)
  9071. {
  9072. VulkanRenderer *renderer;
  9073. VulkanCommandBuffer *vulkanCommandBuffer;
  9074. VkResult result;
  9075. vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  9076. renderer = vulkanCommandBuffer->renderer;
  9077. result = renderer->vkResetCommandBuffer(
  9078. vulkanCommandBuffer->commandBuffer,
  9079. VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
  9080. CHECK_VULKAN_ERROR_AND_RETURN(result, vkResetCommandBuffer, false);
  9081. vulkanCommandBuffer->autoReleaseFence = false;
  9082. SDL_LockMutex(renderer->submitLock);
  9083. VULKAN_INTERNAL_CleanCommandBuffer(renderer, vulkanCommandBuffer, true);
  9084. SDL_UnlockMutex(renderer->submitLock);
  9085. return true;
  9086. }
/* Moves every live resource out of one fragmented memory allocation so that
 * allocation can eventually be freed. Pops one allocation from the defrag
 * queue, creates a replacement buffer/texture for each used region, records
 * GPU copies into `commandBuffer`, re-points the public containers at the
 * new resources, and releases the old ones.
 *
 * Returns false (with the allocator lock released) if creating a replacement
 * resource fails; returns true otherwise. Note that on success the allocator
 * lock is held for the whole loop, so resource creation below must not try
 * to re-acquire it — TODO confirm the create helpers are lock-free here. */
static bool VULKAN_INTERNAL_DefragmentMemory(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer)
{
    renderer->defragInProgress = 1;
    commandBuffer->isDefrag = 1;

    SDL_LockMutex(renderer->allocatorLock);

    // Take the last allocation queued for defragmentation.
    VulkanMemoryAllocation *allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
    renderer->allocationsToDefragCount -= 1;

    /* For each used region in the allocation
     * create a new resource, copy the data
     * and re-point the resource containers
     */
    for (Uint32 i = 0; i < allocation->usedRegionCount; i += 1) {
        VulkanMemoryUsedRegion *currentRegion = allocation->usedRegions[i];

        if (currentRegion->isBuffer && !currentRegion->vulkanBuffer->markedForDestroy) {
            // Region holds a live buffer: build an identically-configured replacement.
            VulkanBuffer *newBuffer = VULKAN_INTERNAL_CreateBuffer(
                renderer,
                currentRegion->vulkanBuffer->size,
                currentRegion->vulkanBuffer->usage,
                currentRegion->vulkanBuffer->type,
                false,
                currentRegion->vulkanBuffer->container != NULL ? currentRegion->vulkanBuffer->container->debugName : NULL);

            if (newBuffer == NULL) {
                SDL_UnlockMutex(renderer->allocatorLock);
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to allocate defrag buffer!");
                return false;
            }

            // Copy buffer contents if necessary
            // (only GPU buffers that have actually been written need a copy).
            if (
                currentRegion->vulkanBuffer->type == VULKAN_BUFFER_TYPE_GPU && currentRegion->vulkanBuffer->transitioned) {
                VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
                    currentRegion->vulkanBuffer);

                VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
                    newBuffer);

                VkBufferCopy bufferCopy;
                bufferCopy.srcOffset = 0;
                bufferCopy.dstOffset = 0;
                bufferCopy.size = currentRegion->resourceSize;

                renderer->vkCmdCopyBuffer(
                    commandBuffer->commandBuffer,
                    currentRegion->vulkanBuffer->buffer,
                    newBuffer->buffer,
                    1,
                    &bufferCopy);

                VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
                    newBuffer);

                // Keep both buffers alive until this command buffer completes.
                VULKAN_INTERNAL_TrackBuffer(commandBuffer, currentRegion->vulkanBuffer);
                VULKAN_INTERNAL_TrackBuffer(commandBuffer, newBuffer);
            }

            // re-point original container to new buffer
            newBuffer->container = currentRegion->vulkanBuffer->container;
            newBuffer->containerIndex = currentRegion->vulkanBuffer->containerIndex;
            if (newBuffer->type == VULKAN_BUFFER_TYPE_UNIFORM) {
                // Uniform buffers are reached through their wrapper, not the container array.
                currentRegion->vulkanBuffer->uniformBufferForDefrag->buffer = newBuffer;
            } else {
                newBuffer->container->buffers[newBuffer->containerIndex] = newBuffer;
                if (newBuffer->container->activeBuffer == currentRegion->vulkanBuffer) {
                    newBuffer->container->activeBuffer = newBuffer;
                }
            }

            // Carry the uniform-buffer wrapper pointer over to the replacement.
            if (currentRegion->vulkanBuffer->uniformBufferForDefrag) {
                newBuffer->uniformBufferForDefrag = currentRegion->vulkanBuffer->uniformBufferForDefrag;
            }

            VULKAN_INTERNAL_ReleaseBuffer(renderer, currentRegion->vulkanBuffer);
        } else if (!currentRegion->isBuffer && !currentRegion->vulkanTexture->markedForDestroy) {
            // Region holds a live texture: recreate it from its original create info.
            VulkanTexture *newTexture = VULKAN_INTERNAL_CreateTexture(
                renderer,
                false,
                &currentRegion->vulkanTexture->container->header.info);

            if (newTexture == NULL) {
                SDL_UnlockMutex(renderer->allocatorLock);
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to allocate defrag buffer!");
                return false;
            }

            SDL_GPUTextureCreateInfo info = currentRegion->vulkanTexture->container->header.info;
            for (Uint32 subresourceIndex = 0; subresourceIndex < currentRegion->vulkanTexture->subresourceCount; subresourceIndex += 1) {
                // copy subresource if necessary
                VulkanTextureSubresource *srcSubresource = &currentRegion->vulkanTexture->subresources[subresourceIndex];
                VulkanTextureSubresource *dstSubresource = &newTexture->subresources[subresourceIndex];

                VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
                    srcSubresource);

                // Destination is a freshly created image, hence UNINITIALIZED source layout.
                VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
                    renderer,
                    commandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
                    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                    dstSubresource);

                VkImageCopy imageCopy;
                imageCopy.srcOffset.x = 0;
                imageCopy.srcOffset.y = 0;
                imageCopy.srcOffset.z = 0;
                imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
                imageCopy.srcSubresource.baseArrayLayer = srcSubresource->layer;
                imageCopy.srcSubresource.layerCount = 1;
                imageCopy.srcSubresource.mipLevel = srcSubresource->level;
                // Mip dimensions shrink by half per level, clamped to 1.
                imageCopy.extent.width = SDL_max(1, info.width >> srcSubresource->level);
                imageCopy.extent.height = SDL_max(1, info.height >> srcSubresource->level);
                imageCopy.extent.depth = info.type == SDL_GPU_TEXTURETYPE_3D ? info.layer_count_or_depth : 1;
                imageCopy.dstOffset.x = 0;
                imageCopy.dstOffset.y = 0;
                imageCopy.dstOffset.z = 0;
                imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
                imageCopy.dstSubresource.baseArrayLayer = dstSubresource->layer;
                imageCopy.dstSubresource.layerCount = 1;
                imageCopy.dstSubresource.mipLevel = dstSubresource->level;

                renderer->vkCmdCopyImage(
                    commandBuffer->commandBuffer,
                    currentRegion->vulkanTexture->image,
                    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                    newTexture->image,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                    1,
                    &imageCopy);

                VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
                    renderer,
                    commandBuffer,
                    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                    dstSubresource);

                // Keep both textures alive until this command buffer completes.
                VULKAN_INTERNAL_TrackTexture(commandBuffer, srcSubresource->parent);
                VULKAN_INTERNAL_TrackTexture(commandBuffer, dstSubresource->parent);
            }

            // re-point original container to new texture
            newTexture->container = currentRegion->vulkanTexture->container;
            newTexture->containerIndex = currentRegion->vulkanTexture->containerIndex;
            newTexture->container->textures[currentRegion->vulkanTexture->containerIndex] = newTexture;
            if (currentRegion->vulkanTexture == currentRegion->vulkanTexture->container->activeTexture) {
                newTexture->container->activeTexture = newTexture;
            }

            VULKAN_INTERNAL_ReleaseTexture(renderer, currentRegion->vulkanTexture);
        }
    }

    SDL_UnlockMutex(renderer->allocatorLock);

    return true;
}
  9234. // Format Info
  9235. static bool VULKAN_SupportsTextureFormat(
  9236. SDL_GPURenderer *driverData,
  9237. SDL_GPUTextureFormat format,
  9238. SDL_GPUTextureType type,
  9239. SDL_GPUTextureUsageFlags usage)
  9240. {
  9241. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  9242. VkFormat vulkanFormat = SDLToVK_TextureFormat[format];
  9243. VkImageUsageFlags vulkanUsage = 0;
  9244. VkImageCreateFlags createFlags = 0;
  9245. VkImageFormatProperties properties;
  9246. VkResult vulkanResult;
  9247. if (usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
  9248. vulkanUsage |= VK_IMAGE_USAGE_SAMPLED_BIT;
  9249. }
  9250. if (usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
  9251. vulkanUsage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  9252. }
  9253. if (usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
  9254. vulkanUsage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  9255. }
  9256. if (usage & (SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ |
  9257. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ |
  9258. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE |
  9259. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
  9260. vulkanUsage |= VK_IMAGE_USAGE_STORAGE_BIT;
  9261. }
  9262. if (type == SDL_GPU_TEXTURETYPE_CUBE || type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
  9263. createFlags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
  9264. }
  9265. vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
  9266. renderer->physicalDevice,
  9267. vulkanFormat,
  9268. (type == SDL_GPU_TEXTURETYPE_3D) ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D,
  9269. VK_IMAGE_TILING_OPTIMAL,
  9270. vulkanUsage,
  9271. createFlags,
  9272. &properties);
  9273. return vulkanResult == VK_SUCCESS;
  9274. }
  9275. // Device instantiation
  9276. static inline Uint8 CheckDeviceExtensions(
  9277. VkExtensionProperties *extensions,
  9278. Uint32 numExtensions,
  9279. VulkanExtensions *supports)
  9280. {
  9281. Uint32 i;
  9282. SDL_memset(supports, '\0', sizeof(VulkanExtensions));
  9283. for (i = 0; i < numExtensions; i += 1) {
  9284. const char *name = extensions[i].extensionName;
  9285. #define CHECK(ext) \
  9286. if (SDL_strcmp(name, "VK_" #ext) == 0) { \
  9287. supports->ext = 1; \
  9288. }
  9289. CHECK(KHR_swapchain)
  9290. else CHECK(KHR_maintenance1) else CHECK(KHR_driver_properties) else CHECK(KHR_portability_subset) else CHECK(MSFT_layered_driver) else CHECK(EXT_texture_compression_astc_hdr)
  9291. #undef CHECK
  9292. }
  9293. return (supports->KHR_swapchain &&
  9294. supports->KHR_maintenance1);
  9295. }
  9296. static inline Uint32 GetDeviceExtensionCount(VulkanExtensions *supports)
  9297. {
  9298. return (
  9299. supports->KHR_swapchain +
  9300. supports->KHR_maintenance1 +
  9301. supports->KHR_driver_properties +
  9302. supports->KHR_portability_subset +
  9303. supports->MSFT_layered_driver +
  9304. supports->EXT_texture_compression_astc_hdr);
  9305. }
  9306. static inline void CreateDeviceExtensionArray(
  9307. VulkanExtensions *supports,
  9308. const char **extensions)
  9309. {
  9310. Uint8 cur = 0;
  9311. #define CHECK(ext) \
  9312. if (supports->ext) { \
  9313. extensions[cur++] = "VK_" #ext; \
  9314. }
  9315. CHECK(KHR_swapchain)
  9316. CHECK(KHR_maintenance1)
  9317. CHECK(KHR_driver_properties)
  9318. CHECK(KHR_portability_subset)
  9319. CHECK(MSFT_layered_driver)
  9320. CHECK(EXT_texture_compression_astc_hdr)
  9321. #undef CHECK
  9322. }
  9323. static inline Uint8 SupportsInstanceExtension(
  9324. const char *ext,
  9325. VkExtensionProperties *availableExtensions,
  9326. Uint32 numAvailableExtensions)
  9327. {
  9328. Uint32 i;
  9329. for (i = 0; i < numAvailableExtensions; i += 1) {
  9330. if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0) {
  9331. return 1;
  9332. }
  9333. }
  9334. return 0;
  9335. }
  9336. static Uint8 VULKAN_INTERNAL_CheckInstanceExtensions(
  9337. const char **requiredExtensions,
  9338. Uint32 requiredExtensionsLength,
  9339. bool *supportsDebugUtils,
  9340. bool *supportsColorspace,
  9341. bool *supportsPhysicalDeviceProperties2,
  9342. int *firstUnsupportedExtensionIndex)
  9343. {
  9344. Uint32 extensionCount, i;
  9345. VkExtensionProperties *availableExtensions;
  9346. Uint8 allExtensionsSupported = 1;
  9347. vkEnumerateInstanceExtensionProperties(
  9348. NULL,
  9349. &extensionCount,
  9350. NULL);
  9351. availableExtensions = SDL_malloc(
  9352. extensionCount * sizeof(VkExtensionProperties));
  9353. vkEnumerateInstanceExtensionProperties(
  9354. NULL,
  9355. &extensionCount,
  9356. availableExtensions);
  9357. for (i = 0; i < requiredExtensionsLength; i += 1) {
  9358. if (!SupportsInstanceExtension(
  9359. requiredExtensions[i],
  9360. availableExtensions,
  9361. extensionCount)) {
  9362. allExtensionsSupported = 0;
  9363. *firstUnsupportedExtensionIndex = i;
  9364. break;
  9365. }
  9366. }
  9367. // This is optional, but nice to have!
  9368. *supportsDebugUtils = SupportsInstanceExtension(
  9369. VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
  9370. availableExtensions,
  9371. extensionCount);
  9372. // Also optional and nice to have!
  9373. *supportsColorspace = SupportsInstanceExtension(
  9374. VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
  9375. availableExtensions,
  9376. extensionCount);
  9377. // Only needed for KHR_driver_properties!
  9378. *supportsPhysicalDeviceProperties2 = SupportsInstanceExtension(
  9379. VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
  9380. availableExtensions,
  9381. extensionCount);
  9382. SDL_free(availableExtensions);
  9383. return allExtensionsSupported;
  9384. }
  9385. static Uint8 CheckOptInDeviceExtensions(VulkanFeatures *features,
  9386. Uint32 numExtensions,
  9387. VkExtensionProperties *availableExtensions,
  9388. const char **missingExtensionName) {
  9389. Uint8 supportsAll = 1;
  9390. for (Uint32 extensionIdx = 0; extensionIdx < features->additionalDeviceExtensionCount; extensionIdx++) {
  9391. bool found = false;
  9392. for (Uint32 searchIdx = 0; searchIdx < numExtensions; searchIdx++) {
  9393. if (SDL_strcmp(features->additionalDeviceExtensionNames[extensionIdx], availableExtensions[searchIdx].extensionName) == 0) {
  9394. found = true;
  9395. break;
  9396. }
  9397. }
  9398. if (!found) {
  9399. supportsAll = 0;
  9400. *missingExtensionName = features->additionalDeviceExtensionNames[extensionIdx];
  9401. break;
  9402. }
  9403. }
  9404. return supportsAll;
  9405. }
  9406. static Uint8 VULKAN_INTERNAL_CheckDeviceExtensions(
  9407. VulkanRenderer *renderer,
  9408. VulkanFeatures *features,
  9409. VkPhysicalDevice physicalDevice,
  9410. VulkanExtensions *physicalDeviceExtensions)
  9411. {
  9412. Uint32 extensionCount;
  9413. VkExtensionProperties *availableExtensions;
  9414. Uint8 allExtensionsSupported;
  9415. renderer->vkEnumerateDeviceExtensionProperties(
  9416. physicalDevice,
  9417. NULL,
  9418. &extensionCount,
  9419. NULL);
  9420. availableExtensions = (VkExtensionProperties *)SDL_malloc(
  9421. extensionCount * sizeof(VkExtensionProperties));
  9422. renderer->vkEnumerateDeviceExtensionProperties(
  9423. physicalDevice,
  9424. NULL,
  9425. &extensionCount,
  9426. availableExtensions);
  9427. allExtensionsSupported = CheckDeviceExtensions(
  9428. availableExtensions,
  9429. extensionCount,
  9430. physicalDeviceExtensions);
  9431. if (features->usesCustomVulkanOptions) {
  9432. const char *missingExtensionName;
  9433. if (!CheckOptInDeviceExtensions(features, extensionCount, availableExtensions, &missingExtensionName)) {
  9434. SDL_assert(missingExtensionName);
  9435. if (renderer->debugMode) {
  9436. SDL_LogError(SDL_LOG_CATEGORY_GPU,
  9437. "Required Vulkan device extension '%s' not supported",
  9438. missingExtensionName);
  9439. }
  9440. allExtensionsSupported = 0;
  9441. }
  9442. }
  9443. SDL_free(availableExtensions);
  9444. return allExtensionsSupported;
  9445. }
  9446. static Uint8 VULKAN_INTERNAL_CheckValidationLayers(
  9447. const char **validationLayers,
  9448. Uint32 validationLayersLength)
  9449. {
  9450. Uint32 layerCount;
  9451. VkLayerProperties *availableLayers;
  9452. Uint32 i, j;
  9453. Uint8 layerFound = 0;
  9454. vkEnumerateInstanceLayerProperties(&layerCount, NULL);
  9455. availableLayers = (VkLayerProperties *)SDL_malloc(
  9456. layerCount * sizeof(VkLayerProperties));
  9457. vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);
  9458. for (i = 0; i < validationLayersLength; i += 1) {
  9459. layerFound = 0;
  9460. for (j = 0; j < layerCount; j += 1) {
  9461. if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0) {
  9462. layerFound = 1;
  9463. break;
  9464. }
  9465. }
  9466. if (!layerFound) {
  9467. break;
  9468. }
  9469. }
  9470. SDL_free(availableLayers);
  9471. return layerFound;
  9472. }
// Compares one boolean feature member between the `requested` and
// `supported` feature structs (both are struct pointers). If the
// application requested a feature the device lacks, logs a verbose
// diagnostic naming the feature and sets `result` (a bool lvalue) to
// false. Used by the ValidateOptInVulkan1xFeatures helpers below;
// #undef'd after the last of them.
#define CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, feature, result) \
    if (requested->feature && !supported->feature) { \
        SDL_LogVerbose( \
            SDL_LOG_CATEGORY_GPU, \
            "SDL GPU Vulkan: Application requested unsupported physical device feature '" #feature "'"); \
        result = false; \
    }
  9480. static bool VULKAN_INTERNAL_ValidateOptInVulkan10Features(VkPhysicalDeviceFeatures *requested, VkPhysicalDeviceFeatures *supported)
  9481. {
  9482. if (requested && supported) {
  9483. bool result = true;
  9484. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, robustBufferAccess, result)
  9485. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, fullDrawIndexUint32, result)
  9486. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, imageCubeArray, result)
  9487. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, independentBlend, result)
  9488. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, geometryShader, result)
  9489. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, tessellationShader, result)
  9490. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sampleRateShading, result)
  9491. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, dualSrcBlend, result)
  9492. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, logicOp, result)
  9493. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiDrawIndirect, result)
  9494. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, drawIndirectFirstInstance, result)
  9495. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, depthClamp, result)
  9496. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, depthBiasClamp, result)
  9497. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, fillModeNonSolid, result)
  9498. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, depthBounds, result)
  9499. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, wideLines, result)
  9500. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, largePoints, result)
  9501. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, alphaToOne, result)
  9502. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiViewport, result)
  9503. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerAnisotropy, result)
  9504. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionETC2, result)
  9505. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionASTC_LDR, result)
  9506. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionBC, result)
  9507. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, occlusionQueryPrecise, result)
  9508. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, pipelineStatisticsQuery, result)
  9509. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vertexPipelineStoresAndAtomics, result)
  9510. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, fragmentStoresAndAtomics, result)
  9511. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderTessellationAndGeometryPointSize, result)
  9512. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderImageGatherExtended, result)
  9513. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageExtendedFormats, result)
  9514. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageMultisample, result)
  9515. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageReadWithoutFormat, result)
  9516. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageWriteWithoutFormat, result)
  9517. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformBufferArrayDynamicIndexing, result)
  9518. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSampledImageArrayDynamicIndexing, result)
  9519. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageBufferArrayDynamicIndexing, result)
  9520. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageArrayDynamicIndexing, result)
  9521. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderClipDistance, result)
  9522. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderCullDistance, result)
  9523. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderFloat64, result)
  9524. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInt64, result)
  9525. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInt16, result)
  9526. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderResourceResidency, result)
  9527. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderResourceMinLod, result)
  9528. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseBinding, result)
  9529. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyBuffer, result)
  9530. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyImage2D, result)
  9531. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyImage3D, result)
  9532. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency2Samples, result)
  9533. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency4Samples, result)
  9534. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency8Samples, result)
  9535. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidency16Samples, result)
  9536. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, sparseResidencyAliased, result)
  9537. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, variableMultisampleRate, result)
  9538. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, inheritedQueries, result)
  9539. return result;
  9540. } else {
  9541. return false;
  9542. }
  9543. }
  9544. static bool VULKAN_INTERNAL_ValidateOptInVulkan11Features(VkPhysicalDeviceVulkan11Features *requested, VkPhysicalDeviceVulkan11Features *supported)
  9545. {
  9546. if (requested && supported) {
  9547. bool result = true;
  9548. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storageBuffer16BitAccess, result)
  9549. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, uniformAndStorageBuffer16BitAccess, result)
  9550. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storagePushConstant16, result)
  9551. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storageInputOutput16, result)
  9552. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiview, result)
  9553. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiviewGeometryShader, result)
  9554. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, multiviewTessellationShader, result)
  9555. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, variablePointersStorageBuffer, result)
  9556. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, variablePointers, result)
  9557. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, protectedMemory, result)
  9558. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerYcbcrConversion, result)
  9559. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderDrawParameters, result)
  9560. return result;
  9561. } else {
  9562. return false;
  9563. }
  9564. }
  9565. static bool VULKAN_INTERNAL_ValidateOptInVulkan12Features(VkPhysicalDeviceVulkan12Features *requested, VkPhysicalDeviceVulkan12Features *supported)
  9566. {
  9567. if (requested && supported) {
  9568. bool result = true;
  9569. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerMirrorClampToEdge, result)
  9570. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, drawIndirectCount, result)
  9571. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storageBuffer8BitAccess, result)
  9572. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, uniformAndStorageBuffer8BitAccess, result)
  9573. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, storagePushConstant8, result)
  9574. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderBufferInt64Atomics, result)
  9575. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSharedInt64Atomics, result)
  9576. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderFloat16, result)
  9577. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInt8, result)
  9578. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorIndexing, result)
  9579. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInputAttachmentArrayDynamicIndexing, result)
  9580. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformTexelBufferArrayDynamicIndexing, result)
  9581. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageTexelBufferArrayDynamicIndexing, result)
  9582. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformBufferArrayNonUniformIndexing, result)
  9583. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSampledImageArrayNonUniformIndexing, result)
  9584. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageBufferArrayNonUniformIndexing, result)
  9585. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageImageArrayNonUniformIndexing, result)
  9586. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderInputAttachmentArrayNonUniformIndexing, result)
  9587. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderUniformTexelBufferArrayNonUniformIndexing, result)
  9588. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderStorageTexelBufferArrayNonUniformIndexing, result)
  9589. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingUniformBufferUpdateAfterBind, result)
  9590. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingSampledImageUpdateAfterBind, result)
  9591. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingStorageImageUpdateAfterBind, result)
  9592. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingStorageBufferUpdateAfterBind, result)
  9593. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingUniformTexelBufferUpdateAfterBind, result)
  9594. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingStorageTexelBufferUpdateAfterBind, result)
  9595. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingUpdateUnusedWhilePending, result)
  9596. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingPartiallyBound, result)
  9597. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingVariableDescriptorCount, result)
  9598. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, runtimeDescriptorArray, result)
  9599. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, samplerFilterMinmax, result)
  9600. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, scalarBlockLayout, result)
  9601. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, imagelessFramebuffer, result)
  9602. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, uniformBufferStandardLayout, result)
  9603. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderSubgroupExtendedTypes, result)
  9604. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, separateDepthStencilLayouts, result)
  9605. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, hostQueryReset, result)
  9606. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, timelineSemaphore, result)
  9607. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, bufferDeviceAddress, result)
  9608. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, bufferDeviceAddressCaptureReplay, result)
  9609. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, bufferDeviceAddressMultiDevice, result)
  9610. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vulkanMemoryModel, result)
  9611. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vulkanMemoryModelDeviceScope, result)
  9612. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, vulkanMemoryModelAvailabilityVisibilityChains, result)
  9613. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderOutputViewportIndex, result)
  9614. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderOutputLayer, result)
  9615. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, subgroupBroadcastDynamicId, result)
  9616. return result;
  9617. } else {
  9618. return false;
  9619. }
  9620. }
  9621. static bool VULKAN_INTERNAL_ValidateOptInVulkan13Features(VkPhysicalDeviceVulkan13Features *requested, VkPhysicalDeviceVulkan13Features *supported)
  9622. {
  9623. if (requested && supported) {
  9624. bool result = true;
  9625. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, robustImageAccess, result)
  9626. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, inlineUniformBlock, result)
  9627. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, descriptorBindingInlineUniformBlockUpdateAfterBind, result)
  9628. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, pipelineCreationCacheControl, result)
  9629. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, privateData, result)
  9630. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderDemoteToHelperInvocation, result)
  9631. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderTerminateInvocation, result)
  9632. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, subgroupSizeControl, result)
  9633. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, computeFullSubgroups, result)
  9634. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, synchronization2, result)
  9635. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, textureCompressionASTC_HDR, result)
  9636. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderZeroInitializeWorkgroupMemory, result)
  9637. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, dynamicRendering, result)
  9638. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, shaderIntegerDotProduct, result)
  9639. CHECK_OPTIONAL_DEVICE_FEATURE(requested, supported, maintenance4, result)
  9640. return result;
  9641. } else {
  9642. return false;
  9643. }
  9644. }
  9645. #undef CHECK_OPTIONAL_DEVICE_FEATURE
// Validates that the physical device supports every opt-in device feature the
// application requested via SDL_GPUVulkanOptions.
//
// The query path depends on the requested API minor version:
//   - < 1.1:  only core Vulkan 1.0 features exist, so validate those alone.
//   - 1.1.x:  the Vulkan 1.2 "roll-up" feature structs don't exist yet, so the
//             individual per-feature structs are queried and repacked into a
//             VkPhysicalDeviceVulkan11Features for a uniform check.
//   - >= 1.2: the 1.1/1.2/1.3 roll-up structs are chained directly.
//
// Returns true only if every requested feature is reported as supported.
static bool VULKAN_INTERNAL_ValidateOptInFeatures(VulkanRenderer *renderer, VulkanFeatures *features, VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *vk10Features)
{
    bool supportsAllFeatures = true;
    int minorVersion = VK_API_VERSION_MINOR(features->desiredApiVersion);
    if (minorVersion < 1) {
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan10Features(&features->desiredVulkan10DeviceFeatures, vk10Features);
    } else if (minorVersion < 2) {
        // Query device features using the pre-1.2 structures
        VkPhysicalDevice16BitStorageFeatures storage = { 0 };
        storage.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
        VkPhysicalDeviceMultiviewFeatures multiview = { 0 };
        multiview.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
        VkPhysicalDeviceProtectedMemoryFeatures protectedMem = { 0 };
        protectedMem.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
        VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr = { 0 };
        ycbcr.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
        VkPhysicalDeviceShaderDrawParametersFeatures drawParams = { 0 };
        drawParams.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
        VkPhysicalDeviceVariablePointersFeatures varPointers = { 0 };
        varPointers.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
        VkPhysicalDeviceFeatures2 supportedFeatureList = { 0 };
        supportedFeatureList.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        // Link every per-feature struct into a single pNext chain so one
        // vkGetPhysicalDeviceFeatures2 call fills them all.
        supportedFeatureList.pNext = &storage;
        storage.pNext = &multiview;
        multiview.pNext = &protectedMem;
        protectedMem.pNext = &ycbcr;
        ycbcr.pNext = &drawParams;
        drawParams.pNext = &varPointers;
        renderer->vkGetPhysicalDeviceFeatures2(physicalDevice, &supportedFeatureList);
        // Pack the results into the post-1.2 structure for easier checking
        VkPhysicalDeviceVulkan11Features vk11Features = { 0 };
        vk11Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
        vk11Features.storageBuffer16BitAccess = storage.storageBuffer16BitAccess;
        vk11Features.uniformAndStorageBuffer16BitAccess = storage.uniformAndStorageBuffer16BitAccess;
        vk11Features.storagePushConstant16 = storage.storagePushConstant16;
        vk11Features.storageInputOutput16 = storage.storageInputOutput16;
        vk11Features.multiview = multiview.multiview;
        vk11Features.multiviewGeometryShader = multiview.multiviewGeometryShader;
        vk11Features.multiviewTessellationShader = multiview.multiviewTessellationShader;
        vk11Features.protectedMemory = protectedMem.protectedMemory;
        vk11Features.samplerYcbcrConversion = ycbcr.samplerYcbcrConversion;
        vk11Features.shaderDrawParameters = drawParams.shaderDrawParameters;
        vk11Features.variablePointers = varPointers.variablePointers;
        vk11Features.variablePointersStorageBuffer = varPointers.variablePointersStorageBuffer;
        // Check support
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan10Features(&features->desiredVulkan10DeviceFeatures, vk10Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan11Features(&features->desiredVulkan11DeviceFeatures, &vk11Features);
    } else {
        // 1.2+: use the roll-up feature structs directly.
        // NOTE(review): the 1.3 struct is chained even when minorVersion == 2;
        // drivers are expected to skip unknown sTypes, but confirm 1.2-only
        // implementations tolerate this chain.
        VkPhysicalDeviceVulkan11Features vk11Features = { 0 };
        vk11Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
        VkPhysicalDeviceVulkan12Features vk12Features = { 0 };
        vk12Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
        VkPhysicalDeviceVulkan13Features vk13Features = { 0 };
        vk13Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES;
        VkPhysicalDeviceFeatures2 supportedFeatureList = { 0 };
        supportedFeatureList.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        supportedFeatureList.pNext = &vk11Features;
        vk11Features.pNext = &vk12Features;
        vk12Features.pNext = &vk13Features;
        renderer->vkGetPhysicalDeviceFeatures2(physicalDevice, &supportedFeatureList);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan10Features(&features->desiredVulkan10DeviceFeatures, vk10Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan11Features(&features->desiredVulkan11DeviceFeatures, &vk11Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan12Features(&features->desiredVulkan12DeviceFeatures, &vk12Features);
        supportsAllFeatures &= VULKAN_INTERNAL_ValidateOptInVulkan13Features(&features->desiredVulkan13DeviceFeatures, &vk13Features);
    }
    return supportsAllFeatures;
}
  9713. static void VULKAN_INTERNAL_AddDeviceFeatures(VkBool32 *firstFeature, VkBool32 *lastFeature, VkBool32 *firstFeatureToAdd)
  9714. {
  9715. while (firstFeature <= lastFeature) {
  9716. *firstFeature = (*firstFeature | *firstFeatureToAdd);
  9717. firstFeature++;
  9718. firstFeatureToAdd++;
  9719. }
  9720. }
// Attempts to merge one node of a caller-provided pNext feature chain into
// the accumulated Vulkan 1.0 (dst10) and 1.1 (dst11) feature sets.
//
// Handles only struct types that exist at the Vulkan 1.1 level: the
// per-extension feature structs plus VkPhysicalDeviceFeatures2. All flags are
// OR'd in, so repeated structs of the same type accumulate.
//
// Returns true if src->sType was recognized and merged, false otherwise
// (letting the 1.2+ merger try the remaining types).
static bool VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_11(VkPhysicalDeviceFeatures *dst10,
                                                           VkPhysicalDeviceVulkan11Features *dst11,
                                                           VkBaseOutStructure *src)
{
    bool hasAdded = false;
    switch (src->sType) {
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
    {
        // Merge the whole embedded core-1.0 struct; robustBufferAccess and
        // inheritedQueries are its first and last VkBool32 members.
        VkPhysicalDeviceFeatures2 *newFeatures = (VkPhysicalDeviceFeatures2 *)src;
        VULKAN_INTERNAL_AddDeviceFeatures(&dst10->robustBufferAccess,
                                          &dst10->inheritedQueries,
                                          &newFeatures->features.robustBufferAccess);
        hasAdded = true;
    } break;
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
    {
        VkPhysicalDevice16BitStorageFeatures *newFeatures = (VkPhysicalDevice16BitStorageFeatures *)src;
        dst11->storageBuffer16BitAccess |= newFeatures->storageBuffer16BitAccess;
        dst11->uniformAndStorageBuffer16BitAccess |= newFeatures->uniformAndStorageBuffer16BitAccess;
        dst11->storagePushConstant16 |= newFeatures->storagePushConstant16;
        dst11->storageInputOutput16 |= newFeatures->storageInputOutput16;
        hasAdded = true;
    } break;
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
    {
        VkPhysicalDeviceMultiviewFeatures *newFeatures = (VkPhysicalDeviceMultiviewFeatures *)src;
        dst11->multiview |= newFeatures->multiview;
        dst11->multiviewGeometryShader |= newFeatures->multiviewGeometryShader;
        dst11->multiviewTessellationShader |= newFeatures->multiviewTessellationShader;
        hasAdded = true;
    } break;
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
    {
        VkPhysicalDeviceProtectedMemoryFeatures *newFeatures = (VkPhysicalDeviceProtectedMemoryFeatures *)src;
        dst11->protectedMemory |= newFeatures->protectedMemory;
        hasAdded = true;
    } break;
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
    {
        VkPhysicalDeviceSamplerYcbcrConversionFeatures *newFeatures = (VkPhysicalDeviceSamplerYcbcrConversionFeatures *)src;
        dst11->samplerYcbcrConversion |= newFeatures->samplerYcbcrConversion;
        hasAdded = true;
    } break;
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
    {
        VkPhysicalDeviceShaderDrawParametersFeatures *newFeatures = (VkPhysicalDeviceShaderDrawParametersFeatures *)src;
        dst11->shaderDrawParameters |= newFeatures->shaderDrawParameters;
        hasAdded = true;
    } break;
    case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
    {
        VkPhysicalDeviceVariablePointersFeatures *newFeatures = (VkPhysicalDeviceVariablePointersFeatures *)src;
        dst11->variablePointers |= newFeatures->variablePointers;
        dst11->variablePointersStorageBuffer |= newFeatures->variablePointersStorageBuffer;
        hasAdded = true;
    } break;
    default:
        // Not a struct this (1.1-level) merger knows about.
        break;
    }
    return hasAdded;
}
  9782. static bool VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_12_Or_Later(VkPhysicalDeviceFeatures *dst10,
  9783. VkPhysicalDeviceVulkan11Features *dst11,
  9784. VkPhysicalDeviceVulkan12Features *dst12,
  9785. VkPhysicalDeviceVulkan13Features *dst13,
  9786. Uint32 apiVersion,
  9787. VkBaseOutStructure *src)
  9788. {
  9789. int minorVersion = VK_API_VERSION_MINOR(apiVersion);
  9790. SDL_assert(apiVersion >= 2);
  9791. bool hasAdded = VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_11(dst10, dst11, src);
  9792. if (!hasAdded) {
  9793. switch (src->sType) {
  9794. case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
  9795. {
  9796. VkPhysicalDeviceVulkan11Features *newFeatures = (VkPhysicalDeviceVulkan11Features *)src;
  9797. VULKAN_INTERNAL_AddDeviceFeatures(&dst11->storageBuffer16BitAccess,
  9798. &dst11->shaderDrawParameters,
  9799. &newFeatures->storageBuffer16BitAccess);
  9800. hasAdded = true;
  9801. } break;
  9802. case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
  9803. {
  9804. VkPhysicalDeviceVulkan12Features *newFeatures = (VkPhysicalDeviceVulkan12Features *)src;
  9805. VULKAN_INTERNAL_AddDeviceFeatures(&dst12->samplerMirrorClampToEdge,
  9806. &dst12->subgroupBroadcastDynamicId,
  9807. &newFeatures->samplerMirrorClampToEdge);
  9808. hasAdded = true;
  9809. } break;
  9810. case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
  9811. {
  9812. if (minorVersion >= 3) {
  9813. VkPhysicalDeviceVulkan13Features *newFeatures = (VkPhysicalDeviceVulkan13Features *)src;
  9814. VULKAN_INTERNAL_AddDeviceFeatures(&dst13->robustImageAccess,
  9815. &dst13->maintenance4,
  9816. &newFeatures->robustImageAccess);
  9817. hasAdded = true;
  9818. }
  9819. } break;
  9820. default:
  9821. break;
  9822. }
  9823. }
  9824. return hasAdded;
  9825. }
  9826. static void VULKAN_INTERNAL_AddOptInVulkanOptions(SDL_PropertiesID props, VulkanRenderer *renderer, VulkanFeatures *features)
  9827. {
  9828. if (SDL_HasProperty(props, SDL_PROP_GPU_DEVICE_CREATE_VULKAN_OPTIONS_POINTER)) {
  9829. SDL_GPUVulkanOptions *options = (SDL_GPUVulkanOptions *)SDL_GetPointerProperty(props, SDL_PROP_GPU_DEVICE_CREATE_VULKAN_OPTIONS_POINTER, NULL);
  9830. if (options) {
  9831. features->usesCustomVulkanOptions = true;
  9832. features->desiredApiVersion = options->vulkan_api_version;
  9833. SDL_zero(features->desiredVulkan11DeviceFeatures);
  9834. SDL_zero(features->desiredVulkan12DeviceFeatures);
  9835. SDL_zero(features->desiredVulkan13DeviceFeatures);
  9836. features->desiredVulkan11DeviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
  9837. features->desiredVulkan12DeviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
  9838. features->desiredVulkan13DeviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES;
  9839. // Handle requested device features
  9840. VkPhysicalDeviceFeatures *vk10Features = &features->desiredVulkan10DeviceFeatures;
  9841. VkPhysicalDeviceVulkan11Features *vk11Features = &features->desiredVulkan11DeviceFeatures;
  9842. VkPhysicalDeviceVulkan12Features *vk12Features = &features->desiredVulkan12DeviceFeatures;
  9843. VkPhysicalDeviceVulkan13Features *vk13Features = &features->desiredVulkan13DeviceFeatures;
  9844. if (options->vulkan_10_physical_device_features) {
  9845. VkPhysicalDeviceFeatures *deviceFeatures = (VkPhysicalDeviceFeatures *)options->vulkan_10_physical_device_features;
  9846. VULKAN_INTERNAL_AddDeviceFeatures(&vk10Features->robustBufferAccess,
  9847. &vk10Features->inheritedQueries,
  9848. &deviceFeatures->robustBufferAccess);
  9849. }
  9850. int minorVersion = VK_API_VERSION_MINOR(features->desiredApiVersion);
  9851. bool supportsHigherLevelFeatures = minorVersion > 0;
  9852. if (supportsHigherLevelFeatures && options->feature_list) {
  9853. if (minorVersion < 2) {
  9854. // Iterate through the entire list and combine all requested features
  9855. VkBaseOutStructure *nextStructure = (VkBaseOutStructure *)options->feature_list;
  9856. while (nextStructure) {
  9857. VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_11(vk10Features, vk11Features, nextStructure);
  9858. nextStructure = nextStructure->pNext;
  9859. }
  9860. } else {
  9861. // Iterate through the entire list and combine all requested features
  9862. VkBaseOutStructure *nextStructure = (VkBaseOutStructure *)options->feature_list;
  9863. while (nextStructure) {
  9864. VULKAN_INTERNAL_TryAddDeviceFeatures_Vulkan_12_Or_Later(vk10Features,
  9865. vk11Features,
  9866. vk12Features,
  9867. vk13Features,
  9868. features->desiredApiVersion,
  9869. nextStructure);
  9870. nextStructure = nextStructure->pNext;
  9871. }
  9872. }
  9873. }
  9874. features->additionalDeviceExtensionCount = options->device_extension_count;
  9875. features->additionalDeviceExtensionNames = options->device_extension_names;
  9876. features->additionalInstanceExtensionCount = options->instance_extension_count;
  9877. features->additionalInstanceExtensionNames = options->instance_extension_names;
  9878. } else if (renderer->debugMode) {
  9879. SDL_LogWarn(SDL_LOG_CATEGORY_GPU,
  9880. "VULKAN_INTERNAL_AddOptInVulkanOptions: Additional options property was set, but value was null. This may be a bug.");
  9881. }
  9882. }
  9883. }
// Creates the renderer's VkInstance.
//
// Gathers the window system's required instance extensions, appends any
// opt-in extensions plus the optional debug/colorspace/properties2/
// portability extensions, enables validation layers in debug mode, and
// routes creation through the OpenXR runtime when XR is active.
//
// Returns 1 on success, 0 on failure (with SDL error set where applicable).
static Uint8 VULKAN_INTERNAL_CreateInstance(VulkanRenderer *renderer, VulkanFeatures *features)
{
    VkResult vulkanResult;
    VkApplicationInfo appInfo;
    VkInstanceCreateFlags createFlags;
    const char *const *originalInstanceExtensionNames;
    const char **instanceExtensionNames;
    Uint32 instanceExtensionCount;
    VkInstanceCreateInfo createInfo;
    static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };
    appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    appInfo.pNext = NULL;
    appInfo.pApplicationName = NULL;
    appInfo.applicationVersion = 0;
    appInfo.pEngineName = "SDLGPU";
    appInfo.engineVersion = SDL_VERSION;
    // Default to Vulkan 1.0 unless the app opted into a newer API version.
    appInfo.apiVersion = features->usesCustomVulkanOptions
                             ? features->desiredApiVersion
                             : VK_MAKE_VERSION(1, 0, 0);
    createFlags = 0;
    // Extensions the window system requires for surface creation.
    originalInstanceExtensionNames = SDL_Vulkan_GetInstanceExtensions(&instanceExtensionCount);
    if (!originalInstanceExtensionNames) {
        SDL_LogError(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
            SDL_GetError());
        return 0;
    }
    Uint32 extraInstanceExtensionCount = features->additionalInstanceExtensionCount;
    const char **extraInstanceExtensionNames = features->additionalInstanceExtensionNames;
    /* Extra space for the following extensions:
     * VK_KHR_get_physical_device_properties2
     * VK_EXT_swapchain_colorspace
     * VK_EXT_debug_utils
     * VK_KHR_portability_enumeration
     *
     * Plus additional opt-in extensions.
     */
    instanceExtensionNames = SDL_stack_alloc(
        const char *,
        instanceExtensionCount + 4 + extraInstanceExtensionCount);
    // Build the final name array: window-system extensions first, then the
    // app's opt-in extensions, then conditionally-appended ones below.
    const char **nextInstanceExtensionNamePtr = instanceExtensionNames;
    SDL_memcpy((void *)nextInstanceExtensionNamePtr, originalInstanceExtensionNames, instanceExtensionCount * sizeof(const char *));
    nextInstanceExtensionNamePtr += instanceExtensionCount;
    if (extraInstanceExtensionCount > 0) {
        SDL_memcpy((void *)nextInstanceExtensionNamePtr, extraInstanceExtensionNames, extraInstanceExtensionCount * sizeof(const char *));
        nextInstanceExtensionNamePtr += extraInstanceExtensionCount;
    }
#ifdef SDL_PLATFORM_APPLE
    // MoltenVK is a non-conformant "portability" implementation; it must be
    // enumerated explicitly.
    *nextInstanceExtensionNamePtr++ = VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME;
    instanceExtensionCount++;
    createFlags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif
    // Verify everything appended so far is supported; the optional
    // debug/colorspace/properties2 extensions are reported via the out flags
    // and appended afterwards only if present.
    int firstUnsupportedExtensionIndex = 0;
    if (!VULKAN_INTERNAL_CheckInstanceExtensions(
            instanceExtensionNames,
            instanceExtensionCount + extraInstanceExtensionCount,
            &renderer->supportsDebugUtils,
            &renderer->supportsColorspace,
            &renderer->supportsPhysicalDeviceProperties2,
            &firstUnsupportedExtensionIndex)) {
        if (renderer->debugMode) {
            SDL_LogError(SDL_LOG_CATEGORY_GPU,
                         "Required Vulkan instance extension '%s' not supported",
                         instanceExtensionNames[firstUnsupportedExtensionIndex]);
        }
        SDL_SetError("Required Vulkan instance extension '%s' not supported",
                     instanceExtensionNames[firstUnsupportedExtensionIndex]);
        SDL_stack_free((char *)instanceExtensionNames);
        return false;
    }
    if (renderer->supportsDebugUtils) {
        // Append the debug extension
        *nextInstanceExtensionNamePtr++ = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
        instanceExtensionCount++;
    } else {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "%s is not supported!",
            VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    if (renderer->supportsColorspace) {
        // Append colorspace extension
        *nextInstanceExtensionNamePtr++ = VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME;
        instanceExtensionCount++;
    }
    if (renderer->supportsPhysicalDeviceProperties2) {
        // Append KHR_physical_device_properties2 extension
        *nextInstanceExtensionNamePtr++ = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;
        instanceExtensionCount++;
    }
    createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    createInfo.pNext = NULL;
    createInfo.flags = createFlags;
    createInfo.pApplicationInfo = &appInfo;
    createInfo.ppEnabledLayerNames = layerNames;
    // instanceExtensionCount was bumped for each appended extension above, so
    // this total matches the number of entries written into the array.
    createInfo.enabledExtensionCount = instanceExtensionCount + extraInstanceExtensionCount;
    createInfo.ppEnabledExtensionNames = instanceExtensionNames;
    if (renderer->debugMode) {
        createInfo.enabledLayerCount = SDL_arraysize(layerNames);
        // Validation layers are best-effort: fall back gracefully if absent.
        if (!VULKAN_INTERNAL_CheckValidationLayers(
                layerNames,
                createInfo.enabledLayerCount)) {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Validation layers not found, continuing without validation");
            createInfo.enabledLayerCount = 0;
        } else {
            SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Validation layers enabled, expect debug level performance!");
        }
    } else {
        createInfo.enabledLayerCount = 0;
    }
#ifdef HAVE_GPU_OPENXR
    if (renderer->xrInstance) {
        // When XR is active the OpenXR runtime must create the instance so it
        // can inject its own requirements; vulkanResult receives the nested
        // Vulkan result from the runtime.
        XrResult xrResult;
        PFN_xrCreateVulkanInstanceKHR xrCreateVulkanInstanceKHR;
        if ((xrResult = xrGetInstanceProcAddr(renderer->xrInstance, "xrCreateVulkanInstanceKHR", (PFN_xrVoidFunction *)&xrCreateVulkanInstanceKHR)) != XR_SUCCESS) {
            SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to get xrCreateVulkanInstanceKHR");
            SDL_stack_free((char *)instanceExtensionNames);
            return 0;
        }
        XrVulkanInstanceCreateInfoKHR xrCreateInfo = { XR_TYPE_VULKAN_INSTANCE_CREATE_INFO_KHR };
        xrCreateInfo.vulkanCreateInfo = &createInfo;
        xrCreateInfo.systemId = renderer->xrSystemId;
        xrCreateInfo.pfnGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
        SDL_assert(xrCreateInfo.pfnGetInstanceProcAddr);
        if ((xrResult = xrCreateVulkanInstanceKHR(renderer->xrInstance, &xrCreateInfo, &renderer->instance, &vulkanResult)) != XR_SUCCESS) {
            SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to create vulkan instance, reason %d, %d", xrResult, vulkanResult);
            SDL_stack_free((char *)instanceExtensionNames);
            return 0;
        }
    } else
#endif // HAVE_GPU_OPENXR
    {
        vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
    }
    SDL_stack_free((char *)instanceExtensionNames);
    if (vulkanResult != VK_SUCCESS) {
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateInstance, 0);
    }
    return 1;
}
// Computes a suitability rank for a physical device and merges it into the
// caller's running best rank.
//
// Ranking: device type dominates (via the priority tables, biased by 1e6),
// with device-local VRAM (in MiB) as a tiebreaker when high performance is
// preferred. Non-conformant drivers and (when hardware acceleration is
// required) CPU/"other" devices are rejected outright.
//
// deviceRank is in/out: on entry it holds the best rank seen so far; a device
// that is outranked zeroes it and returns false so the caller won't adopt it.
// Returns true if this device is a candidate (its rank was written).
static bool VULKAN_INTERNAL_GetDeviceRank(
    VulkanRenderer *renderer,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions,
    Uint64 *deviceRank)
{
    // Priority tables indexed by VkPhysicalDeviceType; higher is better.
    static const Uint8 DEVICE_PRIORITY_HIGHPERFORMANCE[] = {
        0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
        3, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
        4, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
        2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
        1  // VK_PHYSICAL_DEVICE_TYPE_CPU
    };
    static const Uint8 DEVICE_PRIORITY_LOWPOWER[] = {
        0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
        4, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
        3, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
        2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
        1  // VK_PHYSICAL_DEVICE_TYPE_CPU
    };
    const Uint8 *devicePriority = renderer->preferLowPower ? DEVICE_PRIORITY_LOWPOWER : DEVICE_PRIORITY_HIGHPERFORMANCE;
    bool isConformant;
    VkPhysicalDeviceType deviceType;
    if (physicalDeviceExtensions->KHR_driver_properties || physicalDeviceExtensions->MSFT_layered_driver) {
        // Use the properties2 path so driver/layered-driver info can be
        // chained onto the query.
        VkPhysicalDeviceProperties2KHR physicalDeviceProperties;
        VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties = { 0 };
        VkPhysicalDeviceLayeredDriverPropertiesMSFT physicalDeviceLayeredDriverProperties = { 0 };
        // ppNext tracks the tail of the pNext chain as structs are appended.
        void **ppNext = &physicalDeviceProperties.pNext;
        physicalDeviceProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        if (physicalDeviceExtensions->KHR_driver_properties) {
            *ppNext = &physicalDeviceDriverProperties;
            ppNext = &physicalDeviceDriverProperties.pNext;
            physicalDeviceDriverProperties.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
        }
        if (physicalDeviceExtensions->MSFT_layered_driver) {
            *ppNext = &physicalDeviceLayeredDriverProperties;
            ppNext = &physicalDeviceLayeredDriverProperties.pNext;
            physicalDeviceLayeredDriverProperties.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT;
        }
        // Terminate the chain.
        *ppNext = NULL;
        renderer->vkGetPhysicalDeviceProperties2KHR(
            physicalDevice,
            &physicalDeviceProperties);
        if (physicalDeviceExtensions->KHR_driver_properties) {
            // A conformance major version of 0 means the driver never passed
            // the Vulkan CTS.
            isConformant = (physicalDeviceDriverProperties.conformanceVersion.major >= 1);
        } else {
            isConformant = true; // We can't check this, so just assume it's conformant
        }
        if (physicalDeviceExtensions->MSFT_layered_driver && physicalDeviceLayeredDriverProperties.underlyingAPI != VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT) {
            /* Rank Dozen above CPU, but below INTEGRATED.
             * This is needed for WSL specifically.
             */
            deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
            /* Dozen hasn't been tested for conformance and it probably won't be,
             * but WSL may need this so let's be generous.
             * -flibit
             */
            isConformant = true;
        } else {
            deviceType = physicalDeviceProperties.properties.deviceType;
        }
    } else {
        // No properties2 extensions: fall back to the core 1.0 query.
        VkPhysicalDeviceProperties physicalDeviceProperties;
        renderer->vkGetPhysicalDeviceProperties(
            physicalDevice,
            &physicalDeviceProperties);
        deviceType = physicalDeviceProperties.deviceType;
        isConformant = true; // We can't check this, so just assume it's conformant
    }
    if (renderer->requireHardwareAcceleration) {
        if (deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
            deviceType != VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU &&
            deviceType != VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU) {
            // In addition to CPU, "Other" drivers (including layered drivers) don't count as hardware-accelerated
            return 0;
        }
    }
    /* As far as I know, the only drivers available to users that are also
     * non-conformant are incomplete Mesa drivers and Vulkan-on-12. hasvk is one
     * example of a non-conformant driver that's built by default.
     * -flibit
     */
    if (!isConformant) {
        return 0;
    }
    /* Apply a large bias on the devicePriority so that we always respect the order in the priority arrays.
     * We also rank by e.g. VRAM which should have less influence than the device type.
     */
    Uint64 devicePriorityValue = devicePriority[deviceType] * 1000000;
    if (*deviceRank < devicePriorityValue) {
        /* This device outranks the best device we've found so far!
         * This includes a dedicated GPU that has less features than an
         * integrated GPU, because this is a freak case that is almost
         * never intentionally desired by the end user
         */
        *deviceRank = devicePriorityValue;
    } else if (*deviceRank > devicePriorityValue) {
        /* Device is outranked by a previous device, don't even try to
         * run a query and reset the rank to avoid overwrites
         */
        *deviceRank = 0;
        return false;
    }
    /* If we prefer high performance, sum up all device local memory (rounded to megabytes)
     * to deviceRank. In the niche case of someone having multiple dedicated GPUs in the same
     * system, this theoretically picks the most powerful one (or at least the one with the
     * most memory!)
     *
     * We do this *after* discarding all non suitable devices, which means if this computer
     * has multiple dedicated GPUs that all meet our criteria, *and* the user asked for high
     * performance, then we always pick the GPU with more VRAM.
     */
    if (!renderer->preferLowPower) {
        Uint32 i;
        Uint64 videoMemory = 0;
        VkPhysicalDeviceMemoryProperties deviceMemory;
        renderer->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &deviceMemory);
        for (i = 0; i < deviceMemory.memoryHeapCount; i++) {
            VkMemoryHeap heap = deviceMemory.memoryHeaps[i];
            if (heap.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) {
                videoMemory += heap.size;
            }
        }
        // Round it to megabytes (as per the vulkan spec videoMemory is in bytes)
        Uint64 videoMemoryRounded = videoMemory / 1024 / 1024;
        *deviceRank += videoMemoryRounded;
    }
    return true;
}
// Checks whether a physical device can back the renderer:
//   1. it must expose the subset of core 1.0 features the app requested,
//   2. it must pass opt-in feature validation (when custom options are set),
//   3. it must have the required device extensions,
//   4. it must have at least one graphics-capable queue family that can
//      present; the best such family index is written to *queueFamilyIndex.
// Returns 1 if suitable, 0 otherwise.
static Uint8 VULKAN_INTERNAL_IsDeviceSuitable(
    VulkanRenderer *renderer,
    VulkanFeatures *features,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions,
    Uint32 *queueFamilyIndex)
{
    Uint32 queueFamilyCount, queueFamilyRank, queueFamilyBest;
    VkQueueFamilyProperties *queueProps;
    bool supportsPresent;
    VkPhysicalDeviceFeatures deviceFeatures;
    Uint32 i;
    renderer->vkGetPhysicalDeviceFeatures(
        physicalDevice,
        &deviceFeatures);
    // Reject devices missing any requested core-1.0 feature from this fixed
    // subset (the ones the backend itself may rely on).
    if ((!deviceFeatures.independentBlend && features->desiredVulkan10DeviceFeatures.independentBlend) ||
        (!deviceFeatures.imageCubeArray && features->desiredVulkan10DeviceFeatures.imageCubeArray) ||
        (!deviceFeatures.depthClamp && features->desiredVulkan10DeviceFeatures.depthClamp) ||
        (!deviceFeatures.shaderClipDistance && features->desiredVulkan10DeviceFeatures.shaderClipDistance) ||
        (!deviceFeatures.drawIndirectFirstInstance && features->desiredVulkan10DeviceFeatures.drawIndirectFirstInstance) ||
        (!deviceFeatures.sampleRateShading && features->desiredVulkan10DeviceFeatures.sampleRateShading) ||
        (!deviceFeatures.samplerAnisotropy && features->desiredVulkan10DeviceFeatures.samplerAnisotropy)) {
        return 0;
    }
    // Check opt-in device features
    if (features->usesCustomVulkanOptions) {
        bool supportsAllFeatures = VULKAN_INTERNAL_ValidateOptInFeatures(renderer, features, physicalDevice, &deviceFeatures);
        if (!supportsAllFeatures) {
            return 0;
        }
    }
    if (!VULKAN_INTERNAL_CheckDeviceExtensions(
            renderer,
            features,
            physicalDevice,
            physicalDeviceExtensions)) {
        return 0;
    }
    // Standard two-call pattern: count, allocate, then fetch.
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        NULL);
    queueProps = SDL_stack_alloc(
        VkQueueFamilyProperties,
        queueFamilyCount);
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        queueProps);
    queueFamilyBest = 0;
    *queueFamilyIndex = SDL_MAX_UINT32; // sentinel: no family found yet
    for (i = 0; i < queueFamilyCount; i += 1) {
        supportsPresent = SDL_Vulkan_GetPresentationSupport(
            renderer->instance,
            physicalDevice,
            i);
        if (!supportsPresent ||
            !(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)) {
            // Not a graphics family, ignore.
            continue;
        }
        /* The queue family bitflags are kind of annoying.
         *
         * We of course need a graphics family, but we ideally want the
         * _primary_ graphics family. The spec states that at least one
         * graphics family must also be a compute family, so generally
         * drivers make that the first one. But hey, maybe something
         * genuinely can't do compute or something, and FNA doesn't
         * need it, so we'll be open to a non-compute queue family.
         *
         * Additionally, it's common to see the primary queue family
         * have the transfer bit set, which is great! But this is
         * actually optional; it's impossible to NOT have transfers in
         * graphics/compute but it _is_ possible for a graphics/compute
         * family, even the primary one, to just decide not to set the
         * bitflag. Admittedly, a driver may want to isolate transfer
         * queues to a dedicated family so that queues made solely for
         * transfers can have an optimized DMA queue.
         *
         * That, or the driver author got lazy and decided not to set
         * the bit. Looking at you, Android.
         *
         * -flibit
         */
        if (queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) {
            if (queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT) {
                // Has all attribs!
                queueFamilyRank = 3;
            } else {
                // Probably has a DMA transfer queue family
                queueFamilyRank = 2;
            }
        } else {
            // Just a graphics family, probably has something better
            queueFamilyRank = 1;
        }
        // Strictly-greater comparison means the first family of a given rank
        // wins ties, favoring the driver's primary family ordering.
        if (queueFamilyRank > queueFamilyBest) {
            *queueFamilyIndex = i;
            queueFamilyBest = queueFamilyRank;
        }
    }
    SDL_stack_free(queueProps);
    if (*queueFamilyIndex == SDL_MAX_UINT32) {
        // Somehow no graphics queues existed. Compute-only device?
        return 0;
    }
    // FIXME: Need better structure for checking vs storing swapchain support details
    return 1;
}
/* Chooses the VkPhysicalDevice the renderer will use and stores it on the
 * renderer together with its supported device-extension set and the graphics
 * queue family index to create queues from.
 *
 * Two selection paths:
 *  - OpenXR builds with an active XR instance: the OpenXR runtime dictates
 *    the physical device (xrGetVulkanGraphicsDevice2KHR); we only verify that
 *    the chosen device meets our minimum requirements.
 *  - Otherwise: enumerate every physical device, filter out unsuitable ones,
 *    and keep the device with the highest rank.
 *
 * After selection, caches the physical device properties (chaining driver
 * properties when VK_KHR_driver_properties is supported) and the memory
 * properties for later allocator use.
 *
 * Returns 1 on success, 0 if no usable device was found.
 */
static Uint8 VULKAN_INTERNAL_DeterminePhysicalDevice(VulkanRenderer *renderer, VulkanFeatures *features)
{
    VkResult vulkanResult;
    VkPhysicalDevice *physicalDevices;
    VulkanExtensions *physicalDeviceExtensions;
    Uint32 i, physicalDeviceCount;
    Sint32 suitableIndex;
    Uint32 suitableQueueFamilyIndex;
    Uint64 highestRank;

#ifdef HAVE_GPU_OPENXR
    // When XR is enabled, let the OpenXR runtime choose the physical device
    if (renderer->xrInstance) {
        XrResult xrResult;
        VulkanExtensions xrPhysicalDeviceExtensions;
        Uint32 queueFamilyIndex;

        PFN_xrGetVulkanGraphicsDevice2KHR xrGetVulkanGraphicsDevice2KHR;
        xrResult = xrGetInstanceProcAddr(
            renderer->xrInstance,
            "xrGetVulkanGraphicsDevice2KHR",
            (PFN_xrVoidFunction *)&xrGetVulkanGraphicsDevice2KHR);
        if (xrResult != XR_SUCCESS) {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to get xrGetVulkanGraphicsDevice2KHR, result: %d", xrResult);
            return 0;
        }

        XrVulkanGraphicsDeviceGetInfoKHR graphicsDeviceGetInfo;
        SDL_zero(graphicsDeviceGetInfo);
        graphicsDeviceGetInfo.type = XR_TYPE_VULKAN_GRAPHICS_DEVICE_GET_INFO_KHR;
        graphicsDeviceGetInfo.systemId = renderer->xrSystemId;
        graphicsDeviceGetInfo.vulkanInstance = renderer->instance;

        xrResult = xrGetVulkanGraphicsDevice2KHR(
            renderer->xrInstance,
            &graphicsDeviceGetInfo,
            &renderer->physicalDevice);
        if (xrResult != XR_SUCCESS) {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "xrGetVulkanGraphicsDevice2KHR failed, result: %d", xrResult);
            return 0;
        }

        // Verify the XR-chosen device is suitable
        if (!VULKAN_INTERNAL_IsDeviceSuitable(
                renderer,
                features,
                renderer->physicalDevice,
                &xrPhysicalDeviceExtensions,
                &queueFamilyIndex)) {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "The physical device chosen by the OpenXR runtime is not suitable");
            return 0;
        }

        renderer->supports = xrPhysicalDeviceExtensions;
        renderer->queueFamilyIndex = queueFamilyIndex;
    } else
#endif // HAVE_GPU_OPENXR
    {
        // First call obtains only the device count so we can size the arrays.
        vulkanResult = renderer->vkEnumeratePhysicalDevices(
            renderer->instance,
            &physicalDeviceCount,
            NULL);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkEnumeratePhysicalDevices, 0);

        if (physicalDeviceCount == 0) {
            SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Failed to find any GPUs with Vulkan support");
            return 0;
        }

        physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
        physicalDeviceExtensions = SDL_stack_alloc(VulkanExtensions, physicalDeviceCount);

        vulkanResult = renderer->vkEnumeratePhysicalDevices(
            renderer->instance,
            &physicalDeviceCount,
            physicalDevices);

        /* This should be impossible to hit, but from what I can tell this can
         * be triggered not because the array is too small, but because there
         * were drivers that turned out to be bogus, so this is the loader's way
         * of telling us that the list is now smaller than expected :shrug:
         */
        if (vulkanResult == VK_INCOMPLETE) {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkEnumeratePhysicalDevices returned VK_INCOMPLETE, will keep trying anyway...");
            vulkanResult = VK_SUCCESS;
        }

        if (vulkanResult != VK_SUCCESS) {
            SDL_LogWarn(
                SDL_LOG_CATEGORY_GPU,
                "vkEnumeratePhysicalDevices failed: %s",
                VkErrorMessages(vulkanResult));
            SDL_stack_free(physicalDevices);
            SDL_stack_free(physicalDeviceExtensions);
            return 0;
        }

        // Any suitable device will do, but we'd like the best
        suitableIndex = -1;
        suitableQueueFamilyIndex = 0;
        highestRank = 0;
        for (i = 0; i < physicalDeviceCount; i += 1) {
            Uint32 queueFamilyIndex;
            Uint64 deviceRank;

            if (!VULKAN_INTERNAL_IsDeviceSuitable(
                    renderer,
                    features,
                    physicalDevices[i],
                    &physicalDeviceExtensions[i],
                    &queueFamilyIndex)) {
                // Device does not meet the minimum requirements, skip it entirely
                continue;
            }

            // Seed with the current best so GetDeviceRank only accepts ties or better.
            deviceRank = highestRank;
            if (VULKAN_INTERNAL_GetDeviceRank(
                    renderer,
                    physicalDevices[i],
                    &physicalDeviceExtensions[i],
                    &deviceRank)) {
                /* Use this for rendering.
                 * Note that this may override a previous device that
                 * supports rendering, but shares the same device rank.
                 */
                suitableIndex = i;
                suitableQueueFamilyIndex = queueFamilyIndex;
                highestRank = deviceRank;
            }
        }

        if (suitableIndex != -1) {
            renderer->supports = physicalDeviceExtensions[suitableIndex];
            renderer->physicalDevice = physicalDevices[suitableIndex];
            renderer->queueFamilyIndex = suitableQueueFamilyIndex;
        } else {
            SDL_stack_free(physicalDevices);
            SDL_stack_free(physicalDeviceExtensions);
            return 0;
        }

        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
    }

    // Cache device properties; use the KHR "2" entry point when we can chain
    // driver properties, otherwise fall back to the core 1.0 query.
    renderer->physicalDeviceProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    if (renderer->supports.KHR_driver_properties) {
        renderer->physicalDeviceDriverProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
        renderer->physicalDeviceDriverProperties.pNext = NULL;

        renderer->physicalDeviceProperties.pNext =
            &renderer->physicalDeviceDriverProperties;

        renderer->vkGetPhysicalDeviceProperties2KHR(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties);
    } else {
        renderer->physicalDeviceProperties.pNext = NULL;

        renderer->vkGetPhysicalDeviceProperties(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties.properties);
    }

    renderer->vkGetPhysicalDeviceMemoryProperties(
        renderer->physicalDevice,
        &renderer->memoryProperties);

    return 1;
}
/* Creates the VkDevice (logical device) and fetches the single "unified"
 * queue the backend renders, computes, and transfers on.
 *
 * Feature handling:
 *  - Queries the physical device's 1.0 features and opportunistically enables
 *    fillModeNonSolid / multiDrawIndirect, mirroring the result onto renderer
 *    capability flags.
 *  - On portability-subset (e.g. MoltenVK) devices, chains a
 *    VkPhysicalDevicePortabilitySubsetFeaturesKHR struct enabling only
 *    imageViewFormatSwizzle.
 *  - With custom Vulkan options on a 1.1+ API, features are passed through a
 *    VkPhysicalDeviceFeatures2 pNext chain; for exactly 1.1 the
 *    VkPhysicalDeviceVulkan11Features content is re-expressed via the
 *    individual pre-1.2 feature structs (16-bit storage, multiview, protected
 *    memory, YCbCr, draw parameters, variable pointers).
 *
 * In OpenXR builds with an active XR instance the device is created through
 * xrCreateVulkanDeviceKHR instead of vkCreateDevice.
 *
 * On success, loads all device-level entry points and caches the unified
 * queue. Returns 1 on success, 0 on failure.
 */
static Uint8 VULKAN_INTERNAL_CreateLogicalDevice(
    VulkanRenderer *renderer,
    VulkanFeatures *features)
{
    VkResult vulkanResult;
    VkDeviceCreateInfo deviceCreateInfo;
    VkPhysicalDeviceFeatures haveDeviceFeatures;
    VkPhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
    const char **deviceExtensions;

    VkDeviceQueueCreateInfo queueCreateInfo;
    float queuePriority = 1.0f;

    queueCreateInfo.sType =
        VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext = NULL;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &queuePriority;

    // check feature support
    renderer->vkGetPhysicalDeviceFeatures(
        renderer->physicalDevice,
        &haveDeviceFeatures);

    // specifying used device features
    if (haveDeviceFeatures.fillModeNonSolid) {
        features->desiredVulkan10DeviceFeatures.fillModeNonSolid = VK_TRUE;
        renderer->supportsFillModeNonSolid = true;
    }

    if (haveDeviceFeatures.multiDrawIndirect) {
        features->desiredVulkan10DeviceFeatures.multiDrawIndirect = VK_TRUE;
        renderer->supportsMultiDrawIndirect = true;
    }

    // creating the logical device
    deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    if (renderer->supports.KHR_portability_subset) {
        portabilityFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
        portabilityFeatures.pNext = NULL;
        portabilityFeatures.constantAlphaColorBlendFactors = VK_FALSE;
        portabilityFeatures.events = VK_FALSE;
        portabilityFeatures.imageViewFormatReinterpretation = VK_FALSE;
        portabilityFeatures.imageViewFormatSwizzle = VK_TRUE;
        portabilityFeatures.imageView2DOn3DImage = VK_FALSE;
        portabilityFeatures.multisampleArrayImage = VK_FALSE;
        portabilityFeatures.mutableComparisonSamplers = VK_FALSE;
        portabilityFeatures.pointPolygons = VK_FALSE;
        portabilityFeatures.samplerMipLodBias = VK_FALSE; // Technically should be true, but eh
        portabilityFeatures.separateStencilMaskRef = VK_FALSE;
        portabilityFeatures.shaderSampleRateInterpolationFunctions = VK_FALSE;
        portabilityFeatures.tessellationIsolines = VK_FALSE;
        portabilityFeatures.tessellationPointMode = VK_FALSE;
        portabilityFeatures.triangleFans = VK_FALSE;
        portabilityFeatures.vertexAttributeAccessBeyondStride = VK_FALSE;
        deviceCreateInfo.pNext = &portabilityFeatures;
    } else {
        deviceCreateInfo.pNext = NULL;
    }
    deviceCreateInfo.flags = 0;
    deviceCreateInfo.queueCreateInfoCount = 1;
    deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
    deviceCreateInfo.enabledLayerCount = 0;
    deviceCreateInfo.ppEnabledLayerNames = NULL;
    deviceCreateInfo.enabledExtensionCount = GetDeviceExtensionCount(
        &renderer->supports);
    deviceExtensions = SDL_stack_alloc(
        const char *,
        deviceCreateInfo.enabledExtensionCount);
    CreateDeviceExtensionArray(&renderer->supports, deviceExtensions);
    deviceCreateInfo.ppEnabledExtensionNames = deviceExtensions;

    VkPhysicalDeviceFeatures2 featureList;
    int minor = VK_VERSION_MINOR(features->desiredApiVersion);

    // Pre-1.2 feature structs used to express Vulkan11Features on a 1.1 device.
    // Must stay alive until vkCreateDevice returns (they sit in the pNext chain).
    struct
    {
        VkPhysicalDevice16BitStorageFeatures storage;
        VkPhysicalDeviceMultiviewFeatures multiview;
        VkPhysicalDeviceProtectedMemoryFeatures protectedMem;
        VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr;
        VkPhysicalDeviceShaderDrawParametersFeatures drawParams;
        VkPhysicalDeviceVariablePointersFeatures varPointers;
    } legacyFeatures;

    if (features->usesCustomVulkanOptions && minor > 0) {
        featureList.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        featureList.features = features->desiredVulkan10DeviceFeatures;
        if (minor > 1) {
            // 1.2+: chain the aggregate Vulkan11/12(/13) feature structs directly.
            featureList.pNext = &features->desiredVulkan11DeviceFeatures;
            features->desiredVulkan11DeviceFeatures.pNext = &features->desiredVulkan12DeviceFeatures;
            features->desiredVulkan12DeviceFeatures.pNext = minor > 2 ? &features->desiredVulkan13DeviceFeatures : NULL;
            features->desiredVulkan13DeviceFeatures.pNext = NULL;
        } else {
            // Break VkPhysicalDeviceVulkan11Features into pre 1.2 structures for Vulkan 1.1 Support
            SDL_zero(legacyFeatures);

            legacyFeatures.storage.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
            legacyFeatures.storage.storageBuffer16BitAccess = features->desiredVulkan11DeviceFeatures.storageBuffer16BitAccess;
            legacyFeatures.storage.storageInputOutput16 = features->desiredVulkan11DeviceFeatures.storageInputOutput16;
            legacyFeatures.storage.storagePushConstant16 = features->desiredVulkan11DeviceFeatures.storagePushConstant16;
            legacyFeatures.storage.uniformAndStorageBuffer16BitAccess = features->desiredVulkan11DeviceFeatures.uniformAndStorageBuffer16BitAccess;

            legacyFeatures.multiview.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
            legacyFeatures.multiview.multiview = features->desiredVulkan11DeviceFeatures.multiview;
            legacyFeatures.multiview.multiviewGeometryShader = features->desiredVulkan11DeviceFeatures.multiviewGeometryShader;
            legacyFeatures.multiview.multiviewTessellationShader = features->desiredVulkan11DeviceFeatures.multiviewTessellationShader;

            legacyFeatures.protectedMem.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
            legacyFeatures.protectedMem.protectedMemory = features->desiredVulkan11DeviceFeatures.protectedMemory;

            legacyFeatures.ycbcr.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
            legacyFeatures.ycbcr.samplerYcbcrConversion = features->desiredVulkan11DeviceFeatures.samplerYcbcrConversion;

            legacyFeatures.drawParams.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
            legacyFeatures.drawParams.shaderDrawParameters = features->desiredVulkan11DeviceFeatures.shaderDrawParameters;

            legacyFeatures.varPointers.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
            legacyFeatures.varPointers.variablePointers = features->desiredVulkan11DeviceFeatures.variablePointers;
            legacyFeatures.varPointers.variablePointersStorageBuffer = features->desiredVulkan11DeviceFeatures.variablePointersStorageBuffer;

            // Chain: featureList -> storage -> multiview -> protectedMem -> ycbcr
            //        -> drawParams -> varPointers (pNext NULL via SDL_zero above).
            featureList.pNext = &legacyFeatures.storage;
            legacyFeatures.storage.pNext = &legacyFeatures.multiview;
            legacyFeatures.multiview.pNext = &legacyFeatures.protectedMem;
            legacyFeatures.protectedMem.pNext = &legacyFeatures.ycbcr;
            legacyFeatures.ycbcr.pNext = &legacyFeatures.drawParams;
            legacyFeatures.drawParams.pNext = &legacyFeatures.varPointers;
        }
        // Features go through the pNext chain, so pEnabledFeatures must be NULL.
        deviceCreateInfo.pEnabledFeatures = NULL;
        deviceCreateInfo.pNext = &featureList;
    } else {
        deviceCreateInfo.pEnabledFeatures = &features->desiredVulkan10DeviceFeatures;
    }

#ifdef HAVE_GPU_OPENXR
    if (renderer->xrInstance) {
        XrResult xrResult;

        PFN_xrCreateVulkanDeviceKHR xrCreateVulkanDeviceKHR;
        if ((xrResult = xrGetInstanceProcAddr(renderer->xrInstance, "xrCreateVulkanDeviceKHR", (PFN_xrVoidFunction *)&xrCreateVulkanDeviceKHR)) != XR_SUCCESS) {
            SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to get xrCreateVulkanDeviceKHR");
            SDL_stack_free((void *)deviceExtensions);
            return 0;
        }

        XrVulkanDeviceCreateInfoKHR xrDeviceCreateInfo = {XR_TYPE_VULKAN_DEVICE_CREATE_INFO_KHR};
        xrDeviceCreateInfo.vulkanCreateInfo = &deviceCreateInfo;
        xrDeviceCreateInfo.systemId = renderer->xrSystemId;
        xrDeviceCreateInfo.vulkanPhysicalDevice = renderer->physicalDevice;
        xrDeviceCreateInfo.pfnGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
        SDL_assert(xrDeviceCreateInfo.pfnGetInstanceProcAddr);

        // The runtime creates the device; the inner VkResult lands in vulkanResult
        // and is checked by the shared CHECK below.
        if ((xrResult = xrCreateVulkanDeviceKHR(renderer->xrInstance, &xrDeviceCreateInfo, &renderer->logicalDevice, &vulkanResult)) != XR_SUCCESS) {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create OpenXR Vulkan logical device, result %d, %d", xrResult, vulkanResult);
            SDL_stack_free((void *)deviceExtensions);
            return 0;
        }
    } else
#endif // HAVE_GPU_OPENXR
    {
        vulkanResult = renderer->vkCreateDevice(
            renderer->physicalDevice,
            &deviceCreateInfo,
            NULL,
            &renderer->logicalDevice);
    }
    SDL_stack_free((void *)deviceExtensions);
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDevice, 0);

    // Load vkDevice entry points

#define VULKAN_DEVICE_FUNCTION(func)       \
    renderer->func = (PFN_##func)          \
        renderer->vkGetDeviceProcAddr(     \
            renderer->logicalDevice,       \
            #func);
#include "SDL_gpu_vulkan_vkfuncs.h"

    renderer->vkGetDeviceQueue(
        renderer->logicalDevice,
        renderer->queueFamilyIndex,
        0,
        &renderer->unifiedQueue);

    return 1;
}
/* Loads the Vulkan shared library and resolves the loader-level entry points:
 * vkGetInstanceProcAddr plus every global-scope function listed by the
 * VULKAN_GLOBAL_FUNCTION entries in SDL_gpu_vulkan_vkfuncs.h.
 *
 * Failures are logged as warnings and leave the corresponding function
 * pointers NULL; callers detect that later when instance creation fails.
 */
static void VULKAN_INTERNAL_LoadEntryPoints(void)
{
    // Required for MoltenVK support
    SDL_setenv_unsafe("MVK_CONFIG_FULL_IMAGE_VIEW_SWIZZLE", "1", 1);

    // Load Vulkan entry points
    if (!SDL_Vulkan_LoadLibrary(NULL)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: SDL_Vulkan_LoadLibrary failed!");
        return;
    }

    // Casting a data pointer to a function pointer is technically non-standard,
    // hence the -Wpedantic suppression around the assignment.
#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#endif
    vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA
#pragma GCC diagnostic pop
#endif
    if (vkGetInstanceProcAddr == NULL) {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetVkGetInstanceProcAddr(): %s",
            SDL_GetError());
        return;
    }

    // Resolve each global function; bail on the first failure so later
    // entries are not queried against a broken loader.
#define VULKAN_GLOBAL_FUNCTION(name)                                                                      \
    name = (PFN_##name)vkGetInstanceProcAddr(VK_NULL_HANDLE, #name);                                      \
    if (name == NULL) {                                                                                   \
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \
        return;                                                                                           \
    }
#include "SDL_gpu_vulkan_vkfuncs.h"
}
  10611. static bool VULKAN_INTERNAL_PrepareVulkan(
  10612. VulkanRenderer *renderer,
  10613. VulkanFeatures *features,
  10614. SDL_PropertiesID props)
  10615. {
  10616. VULKAN_INTERNAL_LoadEntryPoints();
  10617. SDL_zerop(features);
  10618. // Opt out device features (higher compatibility in exchange for reduced functionality)
  10619. features->desiredVulkan10DeviceFeatures.samplerAnisotropy = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_ANISOTROPY_BOOLEAN, true) ? VK_TRUE : VK_FALSE;
  10620. features->desiredVulkan10DeviceFeatures.depthClamp = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_DEPTH_CLAMPING_BOOLEAN, true) ? VK_TRUE : VK_FALSE;
  10621. features->desiredVulkan10DeviceFeatures.shaderClipDistance = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_CLIP_DISTANCE_BOOLEAN, true) ? VK_TRUE : VK_FALSE;
  10622. features->desiredVulkan10DeviceFeatures.drawIndirectFirstInstance = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_FEATURE_INDIRECT_DRAW_FIRST_INSTANCE_BOOLEAN, true) ? VK_TRUE : VK_FALSE;
  10623. // These features have near universal support so they are always enabled
  10624. features->desiredVulkan10DeviceFeatures.independentBlend = VK_TRUE;
  10625. features->desiredVulkan10DeviceFeatures.sampleRateShading = VK_TRUE;
  10626. features->desiredVulkan10DeviceFeatures.imageCubeArray = VK_TRUE;
  10627. // Handle opt-in device features
  10628. VULKAN_INTERNAL_AddOptInVulkanOptions(props, renderer, features);
  10629. renderer->requireHardwareAcceleration = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_VULKAN_REQUIRE_HARDWARE_ACCELERATION_BOOLEAN, false);
  10630. if (!VULKAN_INTERNAL_CreateInstance(renderer, features)) {
  10631. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Could not create Vulkan instance");
  10632. return false;
  10633. }
  10634. #define VULKAN_INSTANCE_FUNCTION(func) \
  10635. renderer->func = (PFN_##func)vkGetInstanceProcAddr(renderer->instance, #func);
  10636. #include "SDL_gpu_vulkan_vkfuncs.h"
  10637. if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer, features)) {
  10638. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Failed to determine a suitable physical device");
  10639. return false;
  10640. }
  10641. return true;
  10642. }
  10643. #ifdef HAVE_GPU_OPENXR
  10644. static bool VULKAN_INTERNAL_SearchForOpenXrGpuExtension(XrExtensionProperties *found_extension)
  10645. {
  10646. XrResult result;
  10647. Uint32 extension_count;
  10648. Uint32 i;
  10649. result = xrEnumerateInstanceExtensionProperties(NULL, 0, &extension_count, NULL);
  10650. if (result != XR_SUCCESS)
  10651. return false;
  10652. XrExtensionProperties *extension_properties = (XrExtensionProperties *)SDL_calloc(extension_count, sizeof(XrExtensionProperties));
  10653. for (i = 0; i < extension_count; i++)
  10654. extension_properties[i] = (XrExtensionProperties){XR_TYPE_EXTENSION_PROPERTIES};
  10655. result = xrEnumerateInstanceExtensionProperties(NULL, extension_count, &extension_count, extension_properties);
  10656. if (result != XR_SUCCESS) {
  10657. SDL_free(extension_properties);
  10658. return false;
  10659. }
  10660. for (i = 0; i < extension_count; i++) {
  10661. XrExtensionProperties extension = extension_properties[i];
  10662. // NOTE: as generally recommended, we support KHR_vulkan_enable2 *only*
  10663. // see https://fredemmott.com/blog/2024/11/25/best-practices-for-openxr-api-layers.html
  10664. if (SDL_strcmp(extension.extensionName, XR_KHR_VULKAN_ENABLE2_EXTENSION_NAME) == 0) {
  10665. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Found " XR_KHR_VULKAN_ENABLE2_EXTENSION_NAME " extension");
  10666. *found_extension = extension;
  10667. SDL_free(extension_properties);
  10668. return true;
  10669. }
  10670. }
  10671. SDL_free(extension_properties);
  10672. return false;
  10673. }
  10674. static XrResult VULKAN_INTERNAL_GetXrMinimumVulkanApiVersion(XrVersion *minimumVulkanApiVersion, XrInstance instance, XrSystemId systemId)
  10675. {
  10676. XrResult xrResult;
  10677. PFN_xrGetVulkanGraphicsRequirements2KHR xrGetVulkanGraphicsRequirements2KHR;
  10678. if ((xrResult = xrGetInstanceProcAddr(instance, "xrGetVulkanGraphicsRequirements2KHR", (PFN_xrVoidFunction *)&xrGetVulkanGraphicsRequirements2KHR)) != XR_SUCCESS) {
  10679. SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to get xrGetVulkanGraphicsRequirements2KHR");
  10680. return xrResult;
  10681. }
  10682. XrGraphicsRequirementsVulkanKHR graphicsRequirementsVulkan = {XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN2_KHR};
  10683. if ((xrResult = xrGetVulkanGraphicsRequirements2KHR(instance, systemId, &graphicsRequirementsVulkan)) != XR_SUCCESS) {
  10684. SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to get vulkan graphics requirements, got OpenXR error %d", xrResult);
  10685. return xrResult;
  10686. }
  10687. *minimumVulkanApiVersion = graphicsRequirementsVulkan.minApiVersionSupported;
  10688. return XR_SUCCESS;
  10689. }
  10690. #endif // HAVE_GPU_OPENXR
/* Probes whether the Vulkan backend can be used with the given creation
 * properties, without leaving anything allocated.
 *
 * Builds a throwaway VulkanRenderer, optionally spins up a temporary OpenXR
 * instance/system (when XR mode is requested) to learn the runtime's minimum
 * Vulkan version, runs the shared preparation path, then tears everything
 * back down. Returns true if a usable device was found.
 */
static bool VULKAN_PrepareDriver(SDL_VideoDevice *_this, SDL_PropertiesID props)
{
    // Set up dummy VulkanRenderer
    VulkanRenderer *renderer;
    VulkanFeatures features;
    bool result = false;

    // This backend only consumes SPIR-V shaders.
    if (!SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_SHADERS_SPIRV_BOOLEAN, false)) {
        return false;
    }

    if (_this->Vulkan_CreateSurface == NULL) {
        return false;
    }

    if (!SDL_Vulkan_LoadLibrary(NULL)) {
        return false;
    }

#ifdef HAVE_GPU_OPENXR
    XrResult xrResult;
    XrInstancePfns *instancePfns = NULL;
    XrInstance xrInstance = XR_NULL_HANDLE;
    XrSystemId xrSystemId = XR_NULL_HANDLE;
    bool xr = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_XR_ENABLE_BOOLEAN, false);

#ifdef SDL_PLATFORM_ANDROID
    /* On Android/Quest, don't test XR in PrepareDriver. The Quest OpenXR runtime
     * can't handle having its instance created and destroyed during preparation
     * and then recreated during device creation. Just return true for XR mode
     * and let CreateDevice do the real work. */
    if (xr) {
        SDL_Vulkan_UnloadLibrary();
        return true;
    }
#endif

    if (xr) {
        if (!SDL_OpenXR_LoadLibrary()) {
            SDL_SetError("Failed to load OpenXR loader or a required symbol");
            SDL_Vulkan_UnloadLibrary();
            return false;
        }

        XrExtensionProperties gpuExtension;
        if (!VULKAN_INTERNAL_SearchForOpenXrGpuExtension(&gpuExtension)) {
            SDL_SetError("Failed to find a suitable OpenXR GPU extension.");
            SDL_Vulkan_UnloadLibrary();
            SDL_OpenXR_UnloadLibrary();
            return false;
        }

        // Temporary XR instance, created with just the Vulkan-binding extension.
        const char *extensionName = gpuExtension.extensionName;
        if ((xrResult = xrCreateInstance(&(XrInstanceCreateInfo){
                 .type = XR_TYPE_INSTANCE_CREATE_INFO,
                 .applicationInfo = {
                     .apiVersion = SDL_GetNumberProperty(props, SDL_PROP_GPU_DEVICE_CREATE_XR_VERSION_NUMBER, XR_API_VERSION_1_0),
                     .applicationName = "SDL",
                 },
                 .enabledExtensionCount = 1,
                 .enabledExtensionNames = &extensionName,
             },
             &xrInstance)) != XR_SUCCESS) {
            SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to create OpenXR instance");
            SDL_Vulkan_UnloadLibrary();
            SDL_OpenXR_UnloadLibrary();
            return false;
        }

        instancePfns = SDL_OPENXR_LoadInstanceSymbols(xrInstance);
        if (!instancePfns) {
            // NOTE(review): xrInstance is not destroyed on this path (there is
            // no pfn table to call xrDestroyInstance through) — confirm whether
            // the loader exposes a direct symbol that could be used here.
            SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to load needed OpenXR instance symbols");
            SDL_Vulkan_UnloadLibrary();
            SDL_OpenXR_UnloadLibrary();
            return false;
        }

        if ((xrResult = instancePfns->xrGetSystem(xrInstance, &(XrSystemGetInfo){
                 .type = XR_TYPE_SYSTEM_GET_INFO,
                 .formFactor = (XrFormFactor)SDL_GetNumberProperty(props, SDL_PROP_GPU_DEVICE_CREATE_XR_FORM_FACTOR_NUMBER, XR_FORM_FACTOR_HEAD_MOUNTED_DISPLAY),
             },
             &xrSystemId)) != XR_SUCCESS) {
            SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to get OpenXR system");
            instancePfns->xrDestroyInstance(xrInstance);
            SDL_Vulkan_UnloadLibrary();
            SDL_OpenXR_UnloadLibrary();
            SDL_free(instancePfns);
            return false;
        }
    }
#endif // HAVE_GPU_OPENXR

    renderer = (VulkanRenderer *)SDL_calloc(1, sizeof(*renderer));
    if (renderer) {
        // This needs to be set early for log filtering
        renderer->debugMode = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_DEBUGMODE_BOOLEAN, false);
        renderer->preferLowPower = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_PREFERLOWPOWER_BOOLEAN, false);
        renderer->minimumVkVersion = VK_API_VERSION_1_0;
#ifdef HAVE_GPU_OPENXR
        renderer->xrInstance = xrInstance;
        renderer->xrSystemId = xrSystemId;

        if (xr) {
            // The XR runtime may demand a newer Vulkan than our 1.0 default.
            XrVersion minimumVkVersionXr;
            xrResult = VULKAN_INTERNAL_GetXrMinimumVulkanApiVersion(&minimumVkVersionXr, xrInstance, xrSystemId);
            if (xrResult != XR_SUCCESS) {
                SDL_SetError("Failed to get the minimum supported Vulkan API version.");
                instancePfns->xrDestroyInstance(xrInstance);
                SDL_Vulkan_UnloadLibrary();
                SDL_OpenXR_UnloadLibrary();
                SDL_free(instancePfns);
                SDL_free(renderer);
                return false;
            }

            // XrVersion and Vulkan's packed version use different layouts; repack.
            renderer->minimumVkVersion = VK_MAKE_API_VERSION(
                0,
                XR_VERSION_MAJOR(minimumVkVersionXr),
                XR_VERSION_MINOR(minimumVkVersionXr),
                XR_VERSION_PATCH(minimumVkVersionXr));
        }
#endif // HAVE_GPU_OPENXR

        result = VULKAN_INTERNAL_PrepareVulkan(renderer, &features, props);
        // NOTE(review): assumes PrepareVulkan cleans up its own VkInstance when
        // it fails — verify; only the success path is destroyed here.
        if (result) {
            renderer->vkDestroyInstance(renderer->instance, NULL);
        }

#ifdef HAVE_GPU_OPENXR
        if (instancePfns) {
            instancePfns->xrDestroyInstance(xrInstance);
            SDL_free(instancePfns);
            SDL_OpenXR_UnloadLibrary();
        }
#endif // HAVE_GPU_OPENXR

        SDL_free(renderer);
    }
    SDL_Vulkan_UnloadLibrary();
    return result;
}
/* Destroys an XR swapchain and the SDL-side texture containers wrapping its
 * images, then frees the swapchainImages array itself.
 *
 * Waits for the GPU to go idle first so no swapchain image is still in
 * flight. The VkImages are owned by the OpenXR runtime (externallyManaged),
 * so only the SDL wrappers are destroyed, not the images.
 *
 * Returns the XrResult of xrDestroySwapchain, or an earlier error.
 */
static XrResult VULKAN_DestroyXRSwapchain(
    SDL_GPURenderer *driverData,
    XrSwapchain swapchain,
    SDL_GPUTexture **swapchainImages)
{
#ifdef HAVE_GPU_OPENXR
    XrResult result;
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;

    VULKAN_Wait(driverData);

    // Re-query the image count to know how many containers to tear down.
    Uint32 swapchainCount;
    result = renderer->xr->xrEnumerateSwapchainImages(swapchain, 0, &swapchainCount, NULL);
    if (result != XR_SUCCESS) {
        return result;
    }

    // We always want to destroy the swapchain images, so don't early return if xrDestroySwapchain fails for some reason
    for (Uint32 i = 0; i < swapchainCount; i++) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)swapchainImages[i];
        if (!container->externallyManaged) {
            // NOTE(review): returning here leaks swapchainImages and leaves
            // already-freed containers dangling in the array; tolerated only
            // because this path indicates caller misuse — confirm.
            SDL_SetError("Invalid GPU Texture handle.");
            return XR_ERROR_HANDLE_INVALID;
        }

        VULKAN_INTERNAL_DestroyTexture(renderer, container->activeTexture);

        // Free the container now that it's unused
        SDL_free(container);
    }

    SDL_free(swapchainImages);

    return renderer->xr->xrDestroySwapchain(swapchain);
#else
    SDL_SetError("SDL not built with OpenXR support");
    return XR_ERROR_FUNCTION_UNSUPPORTED;
#endif
}
  10848. #ifdef HAVE_GPU_OPENXR
  10849. static bool VULKAN_INTERNAL_FindXRSrgbSwapchain(int64_t *supportedFormats, Uint32 numFormats, SDL_GPUTextureFormat *sdlFormat, int64_t *vkFormat)
  10850. {
  10851. for (Uint32 i = 0; i < SDL_arraysize(SDLToVK_TextureFormat_SrgbOnly); i++) {
  10852. for (Uint32 j = 0; j < numFormats; j++) {
  10853. if (SDLToVK_TextureFormat_SrgbOnly[i].vk == supportedFormats[j]) {
  10854. *sdlFormat = SDLToVK_TextureFormat_SrgbOnly[i].sdl;
  10855. *vkFormat = SDLToVK_TextureFormat_SrgbOnly[i].vk;
  10856. return true;
  10857. }
  10858. }
  10859. }
  10860. return false;
  10861. }
  10862. #endif // HAVE_GPU_OPENXR
  10863. static SDL_GPUTextureFormat* VULKAN_GetXRSwapchainFormats(
  10864. SDL_GPURenderer *driverData,
  10865. XrSession session,
  10866. int *num_formats)
  10867. {
  10868. #ifdef HAVE_GPU_OPENXR
  10869. XrResult result;
  10870. Uint32 i, j, num_supported_formats;
  10871. int64_t *supported_formats;
  10872. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  10873. result = renderer->xr->xrEnumerateSwapchainFormats(session, 0, &num_supported_formats, NULL);
  10874. if (result != XR_SUCCESS) return NULL;
  10875. supported_formats = SDL_stack_alloc(int64_t, num_supported_formats);
  10876. result = renderer->xr->xrEnumerateSwapchainFormats(session, num_supported_formats, &num_supported_formats, supported_formats);
  10877. if (result != XR_SUCCESS) {
  10878. SDL_stack_free(supported_formats);
  10879. return NULL;
  10880. }
  10881. // FIXME: For now we're just searching for the optimal format, not all supported formats.
  10882. // FIXME: Expand this search for all SDL_GPU formats!
  10883. SDL_GPUTextureFormat sdlFormat;
  10884. int64_t vkFormat = VK_FORMAT_UNDEFINED;
  10885. // The OpenXR spec recommends applications not submit linear data, so let's try to explicitly find an sRGB swapchain before we search the whole list
  10886. if (!VULKAN_INTERNAL_FindXRSrgbSwapchain(supported_formats, num_supported_formats, &sdlFormat, &vkFormat)) {
  10887. // Iterate over all formats the runtime supports
  10888. for (i = 0; i < num_supported_formats && vkFormat == VK_FORMAT_UNDEFINED; i++) {
  10889. // Iterate over all formats we support
  10890. for (j = 0; j < SDL_arraysize(SDLToVK_TextureFormat); j++) {
  10891. // Pick the first format the runtime wants that we also support, the runtime should return these in order of preference
  10892. if (SDLToVK_TextureFormat[j] == supported_formats[i]) {
  10893. vkFormat = supported_formats[i];
  10894. sdlFormat = j;
  10895. break;
  10896. }
  10897. }
  10898. }
  10899. }
  10900. SDL_stack_free(supported_formats);
  10901. if (vkFormat == VK_FORMAT_UNDEFINED) {
  10902. SDL_SetError("Failed to find a swapchain format supported by both OpenXR and SDL");
  10903. return NULL;
  10904. }
  10905. SDL_GPUTextureFormat *retval = (SDL_GPUTextureFormat*) SDL_malloc(sizeof(SDL_GPUTextureFormat) * 2);
  10906. retval[0] = sdlFormat;
  10907. retval[1] = SDL_GPU_TEXTUREFORMAT_INVALID;
  10908. *num_formats = 1;
  10909. return retval;
  10910. #else
  10911. SDL_SetError("SDL not built with OpenXR support");
  10912. return NULL;
  10913. #endif
  10914. }
  10915. static XrResult VULKAN_CreateXRSwapchain(
  10916. SDL_GPURenderer *driverData,
  10917. XrSession session,
  10918. const XrSwapchainCreateInfo *oldCreateInfo,
  10919. SDL_GPUTextureFormat format,
  10920. XrSwapchain *swapchain,
  10921. SDL_GPUTexture ***textures)
  10922. {
  10923. #ifdef HAVE_GPU_OPENXR
  10924. XrResult result;
  10925. Uint32 i, j;
  10926. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  10927. XrSwapchainCreateInfo createInfo = *oldCreateInfo;
  10928. createInfo.format = SDLToVK_TextureFormat[format];
  10929. result = renderer->xr->xrCreateSwapchain(session, &createInfo, swapchain);
  10930. if (result != XR_SUCCESS) return result;
  10931. Uint32 swapchainImageCount;
  10932. result = renderer->xr->xrEnumerateSwapchainImages(*swapchain, 0, &swapchainImageCount, NULL);
  10933. if (result != XR_SUCCESS) return result;
  10934. XrSwapchainImageVulkan2KHR *swapchainImages = (XrSwapchainImageVulkan2KHR *)SDL_calloc(swapchainImageCount, sizeof(XrSwapchainImageVulkan2KHR));
  10935. for (i = 0; i < swapchainImageCount; i++) swapchainImages[i].type = XR_TYPE_SWAPCHAIN_IMAGE_VULKAN2_KHR;
  10936. result = renderer->xr->xrEnumerateSwapchainImages(*swapchain, swapchainImageCount, &swapchainImageCount, (XrSwapchainImageBaseHeader *)swapchainImages);
  10937. if (result != XR_SUCCESS) {
  10938. SDL_free(swapchainImages);
  10939. return result;
  10940. }
  10941. VulkanTextureContainer **textureContainers = (VulkanTextureContainer **)SDL_calloc(swapchainImageCount, sizeof(VulkanTextureContainer *));
  10942. for (Uint32 idx = 0; idx < swapchainImageCount; idx++) {
  10943. VkImage vkImage = swapchainImages[idx].image;
  10944. VulkanTexture *texture = SDL_calloc(1, sizeof(VulkanTexture));
  10945. texture->swizzle = SwizzleForSDLFormat(format);
  10946. texture->depth = 1;
  10947. texture->usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET;
  10948. SDL_SetAtomicInt(&texture->referenceCount, 0);
  10949. texture->image = vkImage;
  10950. texture->externallyManaged = true;
  10951. texture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
  10952. texture->subresourceCount = createInfo.arraySize * createInfo.mipCount;
  10953. texture->subresources = SDL_calloc(
  10954. texture->subresourceCount,
  10955. sizeof(VulkanTextureSubresource));
  10956. for (i = 0; i < createInfo.arraySize; i += 1) {
  10957. for (j = 0; j < createInfo.mipCount; j += 1) {
  10958. Uint32 subresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  10959. j,
  10960. i,
  10961. createInfo.mipCount);
  10962. if (createInfo.usageFlags & XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT) {
  10963. texture->subresources[subresourceIndex].renderTargetViews = SDL_malloc(sizeof(VkImageView));
  10964. if (!VULKAN_INTERNAL_CreateRenderTargetView(
  10965. renderer,
  10966. texture,
  10967. i,
  10968. j,
  10969. SDLToVK_TextureFormat[format],
  10970. texture->swizzle,
  10971. &texture->subresources[subresourceIndex].renderTargetViews[0])) {
  10972. VULKAN_INTERNAL_DestroyTexture(renderer, texture);
  10973. SDL_SetError("Failed to create render target view");
  10974. return XR_ERROR_RUNTIME_FAILURE;
  10975. }
  10976. }
  10977. texture->subresources[subresourceIndex].parent = texture;
  10978. texture->subresources[subresourceIndex].layer = i;
  10979. texture->subresources[subresourceIndex].level = j;
  10980. }
  10981. }
  10982. // Transition to the default barrier state
  10983. VulkanCommandBuffer *barrierCommandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
  10984. VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  10985. renderer,
  10986. barrierCommandBuffer,
  10987. VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
  10988. texture);
  10989. VULKAN_INTERNAL_TrackTexture(barrierCommandBuffer, texture);
  10990. VULKAN_Submit((SDL_GPUCommandBuffer *)barrierCommandBuffer);
  10991. textureContainers[idx] = SDL_malloc(sizeof(VulkanTextureContainer));
  10992. VulkanTextureContainer *container = textureContainers[idx];
  10993. SDL_zero(container->header.info);
  10994. container->header.info.width = createInfo.width;
  10995. container->header.info.height = createInfo.height;
  10996. container->header.info.format = format;
  10997. container->header.info.layer_count_or_depth = createInfo.arraySize;
  10998. container->header.info.num_levels = createInfo.mipCount;
  10999. container->header.info.sample_count = SDL_GPU_SAMPLECOUNT_1;
  11000. container->header.info.usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET;
  11001. container->externallyManaged = true;
  11002. container->canBeCycled = false;
  11003. container->activeTexture = texture;
  11004. container->textureCapacity = 1;
  11005. container->textureCount = 1;
  11006. container->textures = SDL_malloc(
  11007. container->textureCapacity * sizeof(VulkanTexture *));
  11008. container->textures[0] = container->activeTexture;
  11009. container->debugName = NULL;
  11010. }
  11011. *textures = (SDL_GPUTexture **)textureContainers;
  11012. SDL_free(swapchainImages);
  11013. return XR_SUCCESS;
  11014. #else
  11015. SDL_SetError("SDL not built with OpenXR support");
  11016. return XR_ERROR_FUNCTION_UNSUPPORTED;
  11017. #endif
  11018. }
  11019. static XrResult VULKAN_CreateXRSession(
  11020. SDL_GPURenderer *driverData,
  11021. const XrSessionCreateInfo *createinfo,
  11022. XrSession *session)
  11023. {
  11024. #ifdef HAVE_GPU_OPENXR
  11025. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  11026. // Copy out the existing next ptr so that we can append it to the end of the chain we create
  11027. const void *XR_MAY_ALIAS currentNextPtr = createinfo->next;
  11028. // KHR_vulkan_enable and KHR_vulkan_enable2 share this structure, so we don't need to change any logic here to handle both
  11029. XrGraphicsBindingVulkanKHR graphicsBinding = {XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR};
  11030. graphicsBinding.instance = renderer->instance;
  11031. graphicsBinding.physicalDevice = renderer->physicalDevice;
  11032. graphicsBinding.device = renderer->logicalDevice;
  11033. graphicsBinding.queueFamilyIndex = renderer->queueFamilyIndex;
  11034. graphicsBinding.queueIndex = 0; // we only ever have one queue, so hardcode queue index 0
  11035. graphicsBinding.next = currentNextPtr;
  11036. XrSessionCreateInfo sessionCreateInfo = *createinfo;
  11037. sessionCreateInfo.systemId = renderer->xrSystemId;
  11038. sessionCreateInfo.next = &graphicsBinding;
  11039. return renderer->xr->xrCreateSession(renderer->xrInstance, &sessionCreateInfo, session);
  11040. #else
  11041. SDL_SetError("SDL not built with OpenXR support");
  11042. return XR_ERROR_FUNCTION_UNSUPPORTED;
  11043. #endif
  11044. }
  11045. static SDL_GPUDevice *VULKAN_CreateDevice(bool debugMode, bool preferLowPower, SDL_PropertiesID props)
  11046. {
  11047. VulkanRenderer *renderer;
  11048. VulkanFeatures features;
  11049. SDL_GPUDevice *result;
  11050. Uint32 i;
  11051. bool verboseLogs = SDL_GetBooleanProperty(
  11052. props,
  11053. SDL_PROP_GPU_DEVICE_CREATE_VERBOSE_BOOLEAN,
  11054. true);
  11055. if (!SDL_Vulkan_LoadLibrary(NULL)) {
  11056. SDL_assert(!"This should have failed in PrepareDevice first!");
  11057. return NULL;
  11058. }
  11059. renderer = (VulkanRenderer *)SDL_calloc(1, sizeof(*renderer));
  11060. if (!renderer) {
  11061. SDL_Vulkan_UnloadLibrary();
  11062. return NULL;
  11063. }
  11064. renderer->debugMode = debugMode;
  11065. renderer->preferLowPower = preferLowPower;
  11066. renderer->allowedFramesInFlight = 2;
  11067. renderer->minimumVkVersion = VK_API_VERSION_1_0;
  11068. #ifdef HAVE_GPU_OPENXR
  11069. bool xr = SDL_GetBooleanProperty(props, SDL_PROP_GPU_DEVICE_CREATE_XR_ENABLE_BOOLEAN, false);
  11070. XrInstance *xrInstance = SDL_GetPointerProperty(props, SDL_PROP_GPU_DEVICE_CREATE_XR_INSTANCE_POINTER, NULL);
  11071. XrSystemId *xrSystemId = SDL_GetPointerProperty(props, SDL_PROP_GPU_DEVICE_CREATE_XR_SYSTEM_ID_POINTER, NULL);
  11072. if (xr) {
  11073. XrExtensionProperties gpuExtension;
  11074. if (!xrInstance) {
  11075. SDL_SetError("You must specify an out pointer for the OpenXR instance");
  11076. SDL_free(renderer);
  11077. SDL_Vulkan_UnloadLibrary();
  11078. return NULL;
  11079. }
  11080. if (!xrSystemId) {
  11081. SDL_SetError("You must specify an out pointer for the OpenXR system ID");
  11082. SDL_free(renderer);
  11083. SDL_Vulkan_UnloadLibrary();
  11084. return NULL;
  11085. }
  11086. if (!SDL_OpenXR_LoadLibrary()) {
  11087. SDL_assert(!"This should have failed in PrepareDevice first!");
  11088. SDL_free(renderer);
  11089. SDL_Vulkan_UnloadLibrary();
  11090. return NULL;
  11091. }
  11092. if (!VULKAN_INTERNAL_SearchForOpenXrGpuExtension(&gpuExtension)) {
  11093. SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to find a compatible OpenXR vulkan extension");
  11094. SDL_OpenXR_UnloadLibrary();
  11095. SDL_free(renderer);
  11096. SDL_Vulkan_UnloadLibrary();
  11097. return NULL;
  11098. }
  11099. if (!SDL_OPENXR_INTERNAL_GPUInitOpenXR(debugMode, gpuExtension, props, xrInstance, xrSystemId, &renderer->xr)) {
  11100. SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to init OpenXR");
  11101. SDL_OpenXR_UnloadLibrary();
  11102. SDL_free(renderer);
  11103. SDL_Vulkan_UnloadLibrary();
  11104. return NULL;
  11105. }
  11106. renderer->xrInstance = *xrInstance;
  11107. renderer->xrSystemId = *xrSystemId;
  11108. XrVersion minimumVulkanApiVersion;
  11109. if (VULKAN_INTERNAL_GetXrMinimumVulkanApiVersion(&minimumVulkanApiVersion, *xrInstance, *xrSystemId) != XR_SUCCESS) {
  11110. SDL_LogDebug(SDL_LOG_CATEGORY_GPU, "Failed to get OpenXR graphics requirements");
  11111. renderer->xr->xrDestroyInstance(*xrInstance);
  11112. SDL_OpenXR_UnloadLibrary();
  11113. SDL_free(renderer->xr);
  11114. SDL_free(renderer);
  11115. SDL_Vulkan_UnloadLibrary();
  11116. return NULL;
  11117. }
  11118. renderer->minimumVkVersion = VK_MAKE_VERSION(
  11119. XR_VERSION_MAJOR(minimumVulkanApiVersion),
  11120. XR_VERSION_MINOR(minimumVulkanApiVersion),
  11121. XR_VERSION_PATCH(minimumVulkanApiVersion));
  11122. }
  11123. #endif // HAVE_GPU_OPENXR
  11124. if (!VULKAN_INTERNAL_PrepareVulkan(renderer, &features, props)) {
  11125. SET_STRING_ERROR("Failed to initialize Vulkan!");
  11126. #ifdef HAVE_GPU_OPENXR
  11127. if (xr) {
  11128. renderer->xr->xrDestroyInstance(*xrInstance);
  11129. SDL_OpenXR_UnloadLibrary();
  11130. SDL_free(renderer->xr);
  11131. }
  11132. #endif // HAVE_GPU_OPENXR
  11133. SDL_free(renderer);
  11134. SDL_Vulkan_UnloadLibrary();
  11135. return NULL;
  11136. }
  11137. renderer->props = SDL_CreateProperties();
  11138. if (verboseLogs) {
  11139. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "SDL_GPU Driver: Vulkan");
  11140. }
  11141. // Record device name
  11142. const char *deviceName = renderer->physicalDeviceProperties.properties.deviceName;
  11143. SDL_SetStringProperty(
  11144. renderer->props,
  11145. SDL_PROP_GPU_DEVICE_NAME_STRING,
  11146. deviceName);
  11147. if (verboseLogs) {
  11148. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Device: %s", deviceName);
  11149. }
  11150. // Record driver version. This is provided as a backup if
  11151. // VK_KHR_driver_properties is not available but as most drivers support it
  11152. // this property should be rarely used.
  11153. //
  11154. // This uses a vendor-specific encoding and it isn't well documented. The
  11155. // vendor ID is the registered PCI ID of the vendor and can be found in
  11156. // online databases.
  11157. char driverVer[64];
  11158. Uint32 rawDriverVer = renderer->physicalDeviceProperties.properties.driverVersion;
  11159. Uint32 vendorId = renderer->physicalDeviceProperties.properties.vendorID;
  11160. if (vendorId == 0x10de) {
  11161. // Nvidia uses 10|8|8|6 encoding.
  11162. (void)SDL_snprintf(
  11163. driverVer,
  11164. SDL_arraysize(driverVer),
  11165. "%d.%d.%d.%d",
  11166. (rawDriverVer >> 22) & 0x3ff,
  11167. (rawDriverVer >> 14) & 0xff,
  11168. (rawDriverVer >> 6) & 0xff,
  11169. rawDriverVer & 0x3f);
  11170. }
  11171. #ifdef SDL_PLATFORM_WINDOWS
  11172. else if (vendorId == 0x8086) {
  11173. // Intel uses 18|14 encoding on Windows only.
  11174. (void)SDL_snprintf(
  11175. driverVer,
  11176. SDL_arraysize(driverVer),
  11177. "%d.%d",
  11178. (rawDriverVer >> 14) & 0x3ffff,
  11179. rawDriverVer & 0x3fff);
  11180. }
  11181. #endif
  11182. else {
  11183. // Assume standard Vulkan 10|10|12 encoding for everything else. AMD and
  11184. // Mesa are known to use this encoding.
  11185. (void)SDL_snprintf(
  11186. driverVer,
  11187. SDL_arraysize(driverVer),
  11188. "%d.%d.%d",
  11189. (rawDriverVer >> 22) & 0x3ff,
  11190. (rawDriverVer >> 12) & 0x3ff,
  11191. rawDriverVer & 0xfff);
  11192. }
  11193. SDL_SetStringProperty(
  11194. renderer->props,
  11195. SDL_PROP_GPU_DEVICE_DRIVER_VERSION_STRING,
  11196. driverVer);
  11197. // Log this only if VK_KHR_driver_properties is not available.
  11198. if (renderer->supports.KHR_driver_properties) {
  11199. // Record driver name and version
  11200. const char *driverName = renderer->physicalDeviceDriverProperties.driverName;
  11201. const char *driverInfo = renderer->physicalDeviceDriverProperties.driverInfo;
  11202. SDL_SetStringProperty(
  11203. renderer->props,
  11204. SDL_PROP_GPU_DEVICE_DRIVER_NAME_STRING,
  11205. driverName);
  11206. SDL_SetStringProperty(
  11207. renderer->props,
  11208. SDL_PROP_GPU_DEVICE_DRIVER_INFO_STRING,
  11209. driverInfo);
  11210. if (verboseLogs) {
  11211. // FIXME: driverInfo can be a multiline string.
  11212. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Driver: %s %s", driverName, driverInfo);
  11213. }
  11214. // Record conformance level
  11215. if (verboseLogs) {
  11216. char conformance[64];
  11217. (void)SDL_snprintf(
  11218. conformance,
  11219. SDL_arraysize(conformance),
  11220. "%u.%u.%u.%u",
  11221. renderer->physicalDeviceDriverProperties.conformanceVersion.major,
  11222. renderer->physicalDeviceDriverProperties.conformanceVersion.minor,
  11223. renderer->physicalDeviceDriverProperties.conformanceVersion.subminor,
  11224. renderer->physicalDeviceDriverProperties.conformanceVersion.patch);
  11225. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Conformance: %s", conformance);
  11226. }
  11227. } else {
  11228. if (verboseLogs) {
  11229. SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Vulkan Driver: %s", driverVer);
  11230. }
  11231. }
  11232. if (!VULKAN_INTERNAL_CreateLogicalDevice(renderer, &features)) {
  11233. SET_STRING_ERROR("Failed to create logical device!");
  11234. SDL_free(renderer);
  11235. SDL_Vulkan_UnloadLibrary();
  11236. return NULL;
  11237. }
  11238. // FIXME: just move this into this function
  11239. result = (SDL_GPUDevice *)SDL_calloc(1, sizeof(SDL_GPUDevice));
  11240. ASSIGN_DRIVER(VULKAN)
  11241. result->driverData = (SDL_GPURenderer *)renderer;
  11242. result->shader_formats = SDL_GPU_SHADERFORMAT_SPIRV;
  11243. /*
  11244. * Create initial swapchain array
  11245. */
  11246. renderer->claimedWindowCapacity = 1;
  11247. renderer->claimedWindowCount = 0;
  11248. renderer->claimedWindows = SDL_malloc(
  11249. renderer->claimedWindowCapacity * sizeof(WindowData *));
  11250. // Threading
  11251. renderer->allocatorLock = SDL_CreateMutex();
  11252. renderer->disposeLock = SDL_CreateMutex();
  11253. renderer->submitLock = SDL_CreateMutex();
  11254. renderer->acquireCommandBufferLock = SDL_CreateMutex();
  11255. renderer->acquireUniformBufferLock = SDL_CreateMutex();
  11256. renderer->renderPassFetchLock = SDL_CreateMutex();
  11257. renderer->framebufferFetchLock = SDL_CreateMutex();
  11258. renderer->graphicsPipelineLayoutFetchLock = SDL_CreateMutex();
  11259. renderer->computePipelineLayoutFetchLock = SDL_CreateMutex();
  11260. renderer->descriptorSetLayoutFetchLock = SDL_CreateMutex();
  11261. renderer->windowLock = SDL_CreateMutex();
  11262. /*
  11263. * Create submitted command buffer list
  11264. */
  11265. renderer->submittedCommandBufferCapacity = 16;
  11266. renderer->submittedCommandBufferCount = 0;
  11267. renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity);
  11268. // Memory Allocator
  11269. renderer->memoryAllocator = (VulkanMemoryAllocator *)SDL_malloc(
  11270. sizeof(VulkanMemoryAllocator));
  11271. for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
  11272. renderer->memoryAllocator->subAllocators[i].memoryTypeIndex = i;
  11273. renderer->memoryAllocator->subAllocators[i].allocations = NULL;
  11274. renderer->memoryAllocator->subAllocators[i].allocationCount = 0;
  11275. renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc(
  11276. sizeof(VulkanMemoryFreeRegion *) * 4);
  11277. renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0;
  11278. renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4;
  11279. }
  11280. // Create uniform buffer pool
  11281. renderer->uniformBufferPoolCount = 32;
  11282. renderer->uniformBufferPoolCapacity = 32;
  11283. renderer->uniformBufferPool = SDL_malloc(
  11284. renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));
  11285. for (i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
  11286. renderer->uniformBufferPool[i] = VULKAN_INTERNAL_CreateUniformBuffer(
  11287. renderer,
  11288. UNIFORM_BUFFER_SIZE);
  11289. }
  11290. renderer->descriptorSetCachePoolCapacity = 8;
  11291. renderer->descriptorSetCachePoolCount = 0;
  11292. renderer->descriptorSetCachePool = SDL_calloc(renderer->descriptorSetCachePoolCapacity, sizeof(DescriptorSetCache *));
  11293. SDL_SetAtomicInt(&renderer->layoutResourceID, 0);
  11294. // Device limits
  11295. renderer->minUBOAlignment = (Uint32)renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment;
  11296. // Initialize caches
  11297. renderer->commandPoolHashTable = SDL_CreateHashTable(
  11298. 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful.
  11299. false, // manually synchronized due to submission timing
  11300. VULKAN_INTERNAL_CommandPoolHashFunction,
  11301. VULKAN_INTERNAL_CommandPoolHashKeyMatch,
  11302. VULKAN_INTERNAL_CommandPoolHashDestroy,
  11303. (void *)renderer);
  11304. renderer->renderPassHashTable = SDL_CreateHashTable(
  11305. 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful.
  11306. false, // manually synchronized due to lookup timing
  11307. VULKAN_INTERNAL_RenderPassHashFunction,
  11308. VULKAN_INTERNAL_RenderPassHashKeyMatch,
  11309. VULKAN_INTERNAL_RenderPassHashDestroy,
  11310. (void *)renderer);
  11311. renderer->framebufferHashTable = SDL_CreateHashTable(
  11312. 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful.
  11313. false, // manually synchronized due to iteration
  11314. VULKAN_INTERNAL_FramebufferHashFunction,
  11315. VULKAN_INTERNAL_FramebufferHashKeyMatch,
  11316. VULKAN_INTERNAL_FramebufferHashDestroy,
  11317. (void *)renderer);
  11318. renderer->graphicsPipelineResourceLayoutHashTable = SDL_CreateHashTable(
  11319. 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful.
  11320. false, // manually synchronized due to lookup timing
  11321. VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashFunction,
  11322. VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashKeyMatch,
  11323. VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashDestroy,
  11324. (void *)renderer);
  11325. renderer->computePipelineResourceLayoutHashTable = SDL_CreateHashTable(
  11326. 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful.
  11327. false, // manually synchronized due to lookup timing
  11328. VULKAN_INTERNAL_ComputePipelineResourceLayoutHashFunction,
  11329. VULKAN_INTERNAL_ComputePipelineResourceLayoutHashKeyMatch,
  11330. VULKAN_INTERNAL_ComputePipelineResourceLayoutHashDestroy,
  11331. (void *)renderer);
  11332. renderer->descriptorSetLayoutHashTable = SDL_CreateHashTable(
  11333. 0, // !!! FIXME: a real guess here, for a _minimum_ if not a maximum, could be useful.
  11334. false, // manually synchronized due to lookup timing
  11335. VULKAN_INTERNAL_DescriptorSetLayoutHashFunction,
  11336. VULKAN_INTERNAL_DescriptorSetLayoutHashKeyMatch,
  11337. VULKAN_INTERNAL_DescriptorSetLayoutHashDestroy,
  11338. (void *)renderer);
  11339. // Initialize fence pool
  11340. renderer->fencePool.lock = SDL_CreateMutex();
  11341. renderer->fencePool.availableFenceCapacity = 4;
  11342. renderer->fencePool.availableFenceCount = 0;
  11343. renderer->fencePool.availableFences = SDL_malloc(
  11344. renderer->fencePool.availableFenceCapacity * sizeof(VulkanFenceHandle *));
  11345. // Deferred destroy storage
  11346. renderer->texturesToDestroyCapacity = 16;
  11347. renderer->texturesToDestroyCount = 0;
  11348. renderer->texturesToDestroy = (VulkanTexture **)SDL_malloc(
  11349. sizeof(VulkanTexture *) *
  11350. renderer->texturesToDestroyCapacity);
  11351. renderer->buffersToDestroyCapacity = 16;
  11352. renderer->buffersToDestroyCount = 0;
  11353. renderer->buffersToDestroy = SDL_malloc(
  11354. sizeof(VulkanBuffer *) *
  11355. renderer->buffersToDestroyCapacity);
  11356. renderer->samplersToDestroyCapacity = 16;
  11357. renderer->samplersToDestroyCount = 0;
  11358. renderer->samplersToDestroy = SDL_malloc(
  11359. sizeof(VulkanSampler *) *
  11360. renderer->samplersToDestroyCapacity);
  11361. renderer->graphicsPipelinesToDestroyCapacity = 16;
  11362. renderer->graphicsPipelinesToDestroyCount = 0;
  11363. renderer->graphicsPipelinesToDestroy = SDL_malloc(
  11364. sizeof(VulkanGraphicsPipeline *) *
  11365. renderer->graphicsPipelinesToDestroyCapacity);
  11366. renderer->computePipelinesToDestroyCapacity = 16;
  11367. renderer->computePipelinesToDestroyCount = 0;
  11368. renderer->computePipelinesToDestroy = SDL_malloc(
  11369. sizeof(VulkanComputePipeline *) *
  11370. renderer->computePipelinesToDestroyCapacity);
  11371. renderer->shadersToDestroyCapacity = 16;
  11372. renderer->shadersToDestroyCount = 0;
  11373. renderer->shadersToDestroy = SDL_malloc(
  11374. sizeof(VulkanShader *) *
  11375. renderer->shadersToDestroyCapacity);
  11376. renderer->framebuffersToDestroyCapacity = 16;
  11377. renderer->framebuffersToDestroyCount = 0;
  11378. renderer->framebuffersToDestroy = SDL_malloc(
  11379. sizeof(VulkanFramebuffer *) *
  11380. renderer->framebuffersToDestroyCapacity);
  11381. // Defrag state
  11382. renderer->defragInProgress = 0;
  11383. renderer->allocationsToDefragCount = 0;
  11384. renderer->allocationsToDefragCapacity = 4;
  11385. renderer->allocationsToDefrag = SDL_malloc(
  11386. renderer->allocationsToDefragCapacity * sizeof(VulkanMemoryAllocation *));
  11387. return result;
  11388. }
// Bootstrap descriptor for the Vulkan backend: the driver's name string plus
// the VULKAN_PrepareDriver / VULKAN_CreateDevice entry points that the
// SDL_GPU frontend uses to probe for support and create a device.
SDL_GPUBootstrap VulkanDriver = {
    "vulkan",
    VULKAN_PrepareDriver,
    VULKAN_CreateDevice
};
  11394. #endif // SDL_GPU_VULKAN